##// END OF EJS Templates
fixed cache problem,...
marcink -
r777:aac24db5 beta
parent child Browse files
Show More
@@ -1,108 +1,108 b''
1 .. _changelog:
1 .. _changelog:
2
2
3 Changelog
3 Changelog
4 =========
4 =========
5
5
6 1.1.0 (**2010-XX-XX**)
6 1.1.0 (**2010-XX-XX**)
7 ----------------------
7 ----------------------
8
8
9 :status: in-progress
9 :status: in-progress
10 :branch: beta
10 :branch: beta
11
11
12 news
12 news
13 ++++
13 ++++
14
14
15 - rewrite of internals for vcs >=0.1.10
15 - rewrite of internals for vcs >=0.1.10
16 - anonymous access, authentication via ldap
16 - anonymous access, authentication via ldap
17 - performance upgrade for cached repos list - each repository has its own
17 - performance upgrade for cached repos list - each repository has its own
18 cache that's invalidated when needed.
18 cache that's invalidated when needed.
19 - main page quick filter for filtering repositories
19 - main page quick filter for filtering repositories
20 - user dashboards with ability to follow chosen repositories actions
20 - user dashboards with ability to follow chosen repositories actions
21 - sends email to admin on new user registration
21 - sends email to admin on new user registration
22 - added cache/statistics reset options into repository settings
22 - added cache/statistics reset options into repository settings
23 - more detailed action logger (based on hooks) with pushed changesets lists
23 - more detailed action logger (based on hooks) with pushed changesets lists
24 and options to disable those hooks from admin panel
24 and options to disable those hooks from admin panel
25 - introduced new enhanced changelog for merges that shows more accurate results
25 - introduced new enhanced changelog for merges that shows more accurate results
26 - gui optimizations, fixed application width to 1024px
26 - gui optimizations, fixed application width to 1024px
27 - whoosh,celeryd,upgrade moved to paster command
27 - whoosh, celeryd, upgrade moved to paster command
28
28
29 fixes
29 fixes
30 +++++
30 +++++
31
31
32 - fixes #61 forked repo was showing only after cache expired
32 - fixes #61 forked repo was showing only after cache expired
33 - fixes #76 no confirmation on user deletes
33 - fixes #76 no confirmation on user deletes
34 - fixes #66 Name field misspelled
34 - fixes #66 Name field misspelled
35 - fixes #72 block user removal when he owns repositories
35 - fixes #72 block user removal when he owns repositories
36 - fixes #69 added password confirmation fields
36 - fixes #69 added password confirmation fields
37 - numerous small bugfixes
37 - numerous small bugfixes
38 - a lot of fixes and tweaks for file browser
38 - a lot of fixes and tweaks for file browser
39 - fixed detached session issues
39 - fixed detached session issues
40
40
41 (special thanks for TkSoh for detailed feedback)
41 (special thanks for TkSoh for detailed feedback)
42
42
43
43
44 1.0.2 (**2010-11-12**)
44 1.0.2 (**2010-11-12**)
45 ----------------------
45 ----------------------
46
46
47 news
47 news
48 ++++
48 ++++
49
49
50 - tested under python2.7
50 - tested under python2.7
51 - bumped sqlalchemy and celery versions
51 - bumped sqlalchemy and celery versions
52
52
53 fixes
53 fixes
54 +++++
54 +++++
55
55
56 - fixed #59 missing graph.js
56 - fixed #59 missing graph.js
57 - fixed repo_size crash when repository had broken symlinks
57 - fixed repo_size crash when repository had broken symlinks
58 - fixed python2.5 crashes.
58 - fixed python2.5 crashes.
59
59
60
60
61 1.0.1 (**2010-11-10**)
61 1.0.1 (**2010-11-10**)
62 ----------------------
62 ----------------------
63
63
64 news
64 news
65 ++++
65 ++++
66
66
67 - small css updated
67 - small css updated
68
68
69 fixes
69 fixes
70 +++++
70 +++++
71
71
72 - fixed #53 python2.5 incompatible enumerate calls
72 - fixed #53 python2.5 incompatible enumerate calls
73 - fixed #52 disable mercurial extension for web
73 - fixed #52 disable mercurial extension for web
74 - fixed #51 deleting repositories don't delete it's dependent objects
74 - fixed #51 deleting repositories don't delete it's dependent objects
75
75
76
76
77 1.0.0 (**2010-11-02**)
77 1.0.0 (**2010-11-02**)
78 ----------------------
78 ----------------------
79
79
80 - security bugfix simplehg wasn't checking for permissions on commands
80 - security bugfix simplehg wasn't checking for permissions on commands
81 other than pull or push.
81 other than pull or push.
82 - fixed doubled messages after push or pull in admin journal
82 - fixed doubled messages after push or pull in admin journal
83 - templating and css corrections, fixed repo switcher on chrome, updated titles
83 - templating and css corrections, fixed repo switcher on chrome, updated titles
84 - admin menu accessible from options menu on repository view
84 - admin menu accessible from options menu on repository view
85 - permissions cached queries
85 - permissions cached queries
86
86
87 1.0.0rc4 (**2010-10-12**)
87 1.0.0rc4 (**2010-10-12**)
88 --------------------------
88 --------------------------
89
89
90 - fixed python2.5 missing simplejson imports (thanks to Jens BΓ€ckman)
90 - fixed python2.5 missing simplejson imports (thanks to Jens BΓ€ckman)
91 - removed cache_manager settings from sqlalchemy meta
91 - removed cache_manager settings from sqlalchemy meta
92 - added sqlalchemy cache settings to ini files
92 - added sqlalchemy cache settings to ini files
93 - validated password length and added second try of failure on paster setup-app
93 - validated password length and added second try of failure on paster setup-app
94 - fixed setup database destroy prompt even when there was no db
94 - fixed setup database destroy prompt even when there was no db
95
95
96
96
97 1.0.0rc3 (**2010-10-11**)
97 1.0.0rc3 (**2010-10-11**)
98 -------------------------
98 -------------------------
99
99
100 - fixed i18n during installation.
100 - fixed i18n during installation.
101
101
102 1.0.0rc2 (**2010-10-11**)
102 1.0.0rc2 (**2010-10-11**)
103 -------------------------
103 -------------------------
104
104
105 - Disabled dirsize in file browser, it's causing nasty bug when dir renames
105 - Disabled dirsize in file browser, it's causing nasty bug when dir renames
106 occur. After vcs is fixed it'll be put back again.
106 occur. After vcs is fixed it'll be put back again.
107 - templating/css rewrites, optimized css.
107 - templating/css rewrites, optimized css.
108
108
@@ -1,237 +1,253 b''
1 .. _setup:
1 .. _setup:
2
2
3 Setup
3 Setup
4 =====
4 =====
5
5
6
6
7 Setting up the application
7 Setting up the application
8 --------------------------
8 --------------------------
9
9
10 ::
10 ::
11
11
12 paster make-config RhodeCode production.ini
12 paster make-config RhodeCode production.ini
13
13
14 - This will create `production.ini` config inside the directory
14 - This will create `production.ini` config inside the directory
15 this config contains various settings for RhodeCode, e.g proxy port,
15 this config contains various settings for RhodeCode, e.g proxy port,
16 email settings,static files, cache and logging.
16 email settings,static files, cache and logging.
17
17
18 ::
18 ::
19
19
20 paster setup-app production.ini
20 paster setup-app production.ini
21
21
22 - This command will create all needed tables and an admin account.
22 - This command will create all needed tables and an admin account.
23 When asked for a path You can either use a new location or one with already
23 When asked for a path You can either use a new location or one with already
24 existing ones. RhodeCode will simply add all new found repositories to
24 existing ones. RhodeCode will simply add all new found repositories to
25 it's database. Also make sure You specify correct path to repositories.
25 it's database. Also make sure You specify correct path to repositories.
26 - Remember that the given path for mercurial_ repositories must be write
26 - Remember that the given path for mercurial_ repositories must be write
27 accessible for the application. It's very important since RhodeCode web interface
27 accessible for the application. It's very important since RhodeCode web interface
28 will work even without such an access but, when trying to do a push it'll
28 will work even without such an access but, when trying to do a push it'll
29 eventually fail with permission denied errors.
29 eventually fail with permission denied errors.
30 - Run
30 - Run
31
31
32 ::
32 ::
33
33
34 paster serve production.ini
34 paster serve production.ini
35
35
36 - This command runs the RhodeCode server the app should be available at the
36 - This command runs the RhodeCode server the app should be available at the
37 127.0.0.1:5000. This ip and port is configurable via the production.ini
37 127.0.0.1:5000. This ip and port is configurable via the production.ini
38 file created in previous step
38 file created in previous step
39 - Use admin account you created to login.
39 - Use admin account you created to login.
40 - Default permissions on each repository is read, and owner is admin. So
40 - Default permissions on each repository is read, and owner is admin. So
41 remember to update these if needed.
41 remember to update these if needed.
42
42
43
43
44 Setting up Whoosh full text search
44 Setting up Whoosh full text search
45 ----------------------------------
45 ----------------------------------
46
46
47 Index for whoosh can be build starting from version 1.1 using paster command
47 Index for whoosh can be build starting from version 1.1 using paster command
48 passing repo locations to index, as well as Your config file that stores
48 passing repo locations to index, as well as Your config file that stores
49 whoosh index files locations. It is possible to pass `-f` to the options
49 whoosh index files locations. It is possible to pass `-f` to the options
50 to enable full index rebuild. Without that, indexing will always run in
50 to enable full index rebuild. Without that, indexing will always run in
51 incremental mode.
51 incremental mode.
52
52
53 ::
53 ::
54
54
55 paster make-index --repo-location=<location for repos> production.ini
55 paster make-index --repo-location=<location for repos> production.ini
56
56
57 for full index rebuild You can use
57 for full index rebuild You can use
58
58
59 ::
59 ::
60
60
61 paster make-index -f --repo-location=<location for repos> production.ini
61 paster make-index -f --repo-location=<location for repos> production.ini
62
62
63 - For full text search You can either put crontab entry for
63 - For full text search You can either put crontab entry for
64
64
65 This command can be run even from crontab in order to do periodical
65 This command can be run even from crontab in order to do periodical
66 index builds and keep Your index always up to date. An example entry might
66 index builds and keep Your index always up to date. An example entry might
67 look like this
67 look like this
68
68
69 ::
69 ::
70
70
71 /path/to/python/bin/paster --repo-location=<location for repos> /path/to/rhodecode/production.ini
71 /path/to/python/bin/paster --repo-location=<location for repos> /path/to/rhodecode/production.ini
72
72
73 When using incremental(default) mode whoosh will check last modification date
73 When using incremental(default) mode whoosh will check last modification date
74 of each file and add it to reindex if newer file is available. Also indexing
74 of each file and add it to reindex if newer file is available. Also indexing
75 daemon checks for removed files and removes them from index.
75 daemon checks for removed files and removes them from index.
76
76
77 Sometimes You might want to rebuild the index from scratch. You can do that using
77 Sometimes You might want to rebuild the index from scratch. You can do that using
78 the `-f` flag passed to paster command or, in admin panel You can check
78 the `-f` flag passed to paster command or, in admin panel You can check
79 `build from scratch` flag.
79 `build from scratch` flag.
80
80
81
81
82 Setting up LDAP support
82 Setting up LDAP support
83 -----------------------
83 -----------------------
84
84
85 RhodeCode starting from version 1.1 supports ldap authentication. In order
85 RhodeCode starting from version 1.1 supports ldap authentication. In order
86 to use ldap, You have to install python-ldap package. This package is available
86 to use ldap, You have to install python-ldap package. This package is available
87 via pypi, so You can install it by running
87 via pypi, so You can install it by running
88
88
89 ::
89 ::
90
90
91 easy_install python-ldap
91 easy_install python-ldap
92
92
93 ::
93 ::
94
94
95 pip install python-ldap
95 pip install python-ldap
96
96
97 .. note::
97 .. note::
98 python-ldap requires some certain libs on Your system, so before installing
98 python-ldap requires some certain libs on Your system, so before installing
99 it check that You have at least `openldap`, and `sasl` libraries.
99 it check that You have at least `openldap`, and `sasl` libraries.
100
100
101 ldap settings are located in admin->ldap section,
101 ldap settings are located in admin->ldap section,
102
102
103 Here's a typical ldap setup::
103 Here's a typical ldap setup::
104
104
105 Enable ldap = checked #controls if ldap access is enabled
105 Enable ldap = checked #controls if ldap access is enabled
106 Host = host.domain.org #actual ldap server to connect
106 Host = host.domain.org #actual ldap server to connect
107 Port = 389 or 689 for ldaps #ldap server ports
107 Port = 389 or 689 for ldaps #ldap server ports
108 Enable LDAPS = unchecked #enable disable ldaps
108 Enable LDAPS = unchecked #enable disable ldaps
109 Account = <account> #access for ldap server(if required)
109 Account = <account> #access for ldap server(if required)
110 Password = <password> #password for ldap server(if required)
110 Password = <password> #password for ldap server(if required)
111 Base DN = uid=%(user)s,CN=users,DC=host,DC=domain,DC=org
111 Base DN = uid=%(user)s,CN=users,DC=host,DC=domain,DC=org
112
112
113
113
114 `Account` and `Password` are optional, and used for two-phase ldap
114 `Account` and `Password` are optional, and used for two-phase ldap
115 authentication so those are credentials to access Your ldap, if it doesn't
115 authentication so those are credentials to access Your ldap, if it doesn't
116 support anonymous search/user lookups.
116 support anonymous search/user lookups.
117
117
118 Base DN must have %(user)s template inside, it's a placer where Your uid used
118 Base DN must have %(user)s template inside, it's a placer where Your uid used
119 to login would go, it allows admins to specify not standard schema for uid
119 to login would go, it allows admins to specify not standard schema for uid
120 variable
120 variable
121
121
122 If all data are entered correctly, and `python-ldap` is properly installed
122 If all data are entered correctly, and `python-ldap` is properly installed
123 Users should be granted access to RhodeCode with ldap accounts. When
123 Users should be granted access to RhodeCode with ldap accounts. When
124 logging in for the first time a special ldap account is created inside RhodeCode,
124 logging in for the first time a special ldap account is created inside RhodeCode,
125 so You can control over permissions even on ldap users. If such user exists
125 so You can control over permissions even on ldap users. If such user exists
126 already in RhodeCode database ldap user with the same username would be not
126 already in RhodeCode database ldap user with the same username would be not
127 able to access RhodeCode.
127 able to access RhodeCode.
128
128
129 If You have problems with ldap access and believe You entered correct
129 If You have problems with ldap access and believe You entered correct
130 information check out the RhodeCode logs, any error messages sent from
130 information check out the RhodeCode logs, any error messages sent from
131 ldap will be saved there.
131 ldap will be saved there.
132
132
133
133
134
135 Setting Up Celery
136 -----------------
137
138 Since version 1.1 celery is configured by the rhodecode ini configuration files
139 simply set use_celery=true in the ini file then add / change the configuration
140 variables inside the ini file.
141
142 Remember that the ini files uses format with '.' not with '_' like celery
143 so for example setting `BROKER_HOST` in celery means setting `broker.host` in
144 the config file.
145
146 In order to make start using celery run::
147 paster celeryd <configfile.ini>
148
149
134 Nginx virtual host example
150 Nginx virtual host example
135 --------------------------
151 --------------------------
136
152
137 Sample config for nginx using proxy::
153 Sample config for nginx using proxy::
138
154
139 server {
155 server {
140 listen 80;
156 listen 80;
141 server_name hg.myserver.com;
157 server_name hg.myserver.com;
142 access_log /var/log/nginx/rhodecode.access.log;
158 access_log /var/log/nginx/rhodecode.access.log;
143 error_log /var/log/nginx/rhodecode.error.log;
159 error_log /var/log/nginx/rhodecode.error.log;
144 location / {
160 location / {
145 root /var/www/rhodecode/rhodecode/public/;
161 root /var/www/rhodecode/rhodecode/public/;
146 if (!-f $request_filename){
162 if (!-f $request_filename){
147 proxy_pass http://127.0.0.1:5000;
163 proxy_pass http://127.0.0.1:5000;
148 }
164 }
149 #this is important for https !!!
165 #this is important for https !!!
150 proxy_set_header X-Url-Scheme $scheme;
166 proxy_set_header X-Url-Scheme $scheme;
151 include /etc/nginx/proxy.conf;
167 include /etc/nginx/proxy.conf;
152 }
168 }
153 }
169 }
154
170
155 Here's the proxy.conf. It's tuned so it'll not timeout on long
171 Here's the proxy.conf. It's tuned so it'll not timeout on long
156 pushes and also on large pushes::
172 pushes and also on large pushes::
157
173
158 proxy_redirect off;
174 proxy_redirect off;
159 proxy_set_header Host $host;
175 proxy_set_header Host $host;
160 proxy_set_header X-Host $http_host;
176 proxy_set_header X-Host $http_host;
161 proxy_set_header X-Real-IP $remote_addr;
177 proxy_set_header X-Real-IP $remote_addr;
162 proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
178 proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
163 proxy_set_header Proxy-host $proxy_host;
179 proxy_set_header Proxy-host $proxy_host;
164 client_max_body_size 400m;
180 client_max_body_size 400m;
165 client_body_buffer_size 128k;
181 client_body_buffer_size 128k;
166 proxy_buffering off;
182 proxy_buffering off;
167 proxy_connect_timeout 3600;
183 proxy_connect_timeout 3600;
168 proxy_send_timeout 3600;
184 proxy_send_timeout 3600;
169 proxy_read_timeout 3600;
185 proxy_read_timeout 3600;
170 proxy_buffer_size 8k;
186 proxy_buffer_size 8k;
171 proxy_buffers 8 32k;
187 proxy_buffers 8 32k;
172 proxy_busy_buffers_size 64k;
188 proxy_busy_buffers_size 64k;
173 proxy_temp_file_write_size 64k;
189 proxy_temp_file_write_size 64k;
174
190
175 Also when using root path with nginx You might set the static files to false
191 Also when using root path with nginx You might set the static files to false
176 in production.ini file::
192 in production.ini file::
177
193
178 [app:main]
194 [app:main]
179 use = egg:rhodecode
195 use = egg:rhodecode
180 full_stack = true
196 full_stack = true
181 static_files = false
197 static_files = false
182 lang=en
198 lang=en
183 cache_dir = %(here)s/data
199 cache_dir = %(here)s/data
184
200
185 To not have the statics served by the application. And improve speed.
201 To not have the statics served by the application. And improve speed.
186
202
187 Apache reverse proxy
203 Apache reverse proxy
188 --------------------
204 --------------------
189 Tutorial can be found here
205 Tutorial can be found here
190 http://wiki.pylonshq.com/display/pylonscookbook/Apache+as+a+reverse+proxy+for+Pylons
206 http://wiki.pylonshq.com/display/pylonscookbook/Apache+as+a+reverse+proxy+for+Pylons
191
207
192
208
193 Apache's example FCGI config
209 Apache's example FCGI config
194 ----------------------------
210 ----------------------------
195
211
196 TODO !
212 TODO !
197
213
198 Other configuration files
214 Other configuration files
199 -------------------------
215 -------------------------
200
216
201 Some extra configuration files and examples can be found here:
217 Some extra configuration files and examples can be found here:
202 http://hg.python-works.com/rhodecode/files/tip/init.d
218 http://hg.python-works.com/rhodecode/files/tip/init.d
203
219
204 and also an celeryconfig file can be use from here:
220 and also an celeryconfig file can be use from here:
205 http://hg.python-works.com/rhodecode/files/tip/celeryconfig.py
221 http://hg.python-works.com/rhodecode/files/tip/celeryconfig.py
206
222
207 Troubleshooting
223 Troubleshooting
208 ---------------
224 ---------------
209
225
210 - missing static files ?
226 - missing static files ?
211
227
212 - make sure either to set the `static_files = true` in the .ini file or
228 - make sure either to set the `static_files = true` in the .ini file or
213 double check the root path for Your http setup. It should point to
229 double check the root path for Your http setup. It should point to
214 for example:
230 for example:
215 /home/my-virtual-python/lib/python2.6/site-packages/rhodecode/public
231 /home/my-virtual-python/lib/python2.6/site-packages/rhodecode/public
216
232
217 - can't install celery/rabbitmq
233 - can't install celery/rabbitmq
218
234
219 - don't worry RhodeCode works without them too. No extra setup required
235 - don't worry RhodeCode works without them too. No extra setup required
220
236
221
237
222 - long lasting push timeouts ?
238 - long lasting push timeouts ?
223
239
224 - make sure You set a longer timeouts in Your proxy/fcgi settings, timeouts
240 - make sure You set a longer timeouts in Your proxy/fcgi settings, timeouts
225 are caused by https server and not RhodeCode
241 are caused by https server and not RhodeCode
226
242
227 - large pushes timeouts ?
243 - large pushes timeouts ?
228
244
229 - make sure You set a proper max_body_size for the http server
245 - make sure You set a proper max_body_size for the http server
230
246
231
247
232
248
233 .. _virtualenv: http://pypi.python.org/pypi/virtualenv
249 .. _virtualenv: http://pypi.python.org/pypi/virtualenv
234 .. _python: http://www.python.org/
250 .. _python: http://www.python.org/
235 .. _mercurial: http://mercurial.selenic.com/
251 .. _mercurial: http://mercurial.selenic.com/
236 .. _celery: http://celeryproject.org/
252 .. _celery: http://celeryproject.org/
237 .. _rabbitmq: http://www.rabbitmq.com/ No newline at end of file
253 .. _rabbitmq: http://www.rabbitmq.com/
@@ -1,363 +1,389 b''
1 from celery.decorators import task
1 from celery.decorators import task
2
2
3 import os
3 import os
4 import traceback
4 import traceback
5 import beaker
5 import beaker
6 from time import mktime
6 from time import mktime
7 from operator import itemgetter
7 from operator import itemgetter
8
8
9 from pylons import config
9 from pylons import config
10 from pylons.i18n.translation import _
10 from pylons.i18n.translation import _
11
11
12 from rhodecode.lib.celerylib import run_task, locked_task, str2bool
12 from rhodecode.lib.celerylib import run_task, locked_task, str2bool
13 from rhodecode.lib.helpers import person
13 from rhodecode.lib.helpers import person
14 from rhodecode.lib.smtp_mailer import SmtpMailer
14 from rhodecode.lib.smtp_mailer import SmtpMailer
15 from rhodecode.lib.utils import OrderedDict
15 from rhodecode.lib.utils import OrderedDict
16 from rhodecode.model import init_model
16 from rhodecode.model import init_model
17 from rhodecode.model import meta
17 from rhodecode.model import meta
18 from rhodecode.model.db import RhodeCodeUi
18 from rhodecode.model.db import RhodeCodeUi
19
19
20 from vcs.backends import get_repo
20 from vcs.backends import get_repo
21
21
22 from sqlalchemy import engine_from_config
22 from sqlalchemy import engine_from_config
23
23
24 #set cache regions for beaker so celery can utilise it
25 def add_cache(settings):
26 cache_settings = {'regions':None}
27 for key in settings.keys():
28 for prefix in ['beaker.cache.', 'cache.']:
29 if key.startswith(prefix):
30 name = key.split(prefix)[1].strip()
31 cache_settings[name] = settings[key].strip()
32 if cache_settings['regions']:
33 for region in cache_settings['regions'].split(','):
34 region = region.strip()
35 region_settings = {}
36 for key, value in cache_settings.items():
37 if key.startswith(region):
38 region_settings[key.split('.')[1]] = value
39 region_settings['expire'] = int(region_settings.get('expire',
40 60))
41 region_settings.setdefault('lock_dir',
42 cache_settings.get('lock_dir'))
43 if 'type' not in region_settings:
44 region_settings['type'] = cache_settings.get('type',
45 'memory')
46 beaker.cache.cache_regions[region] = region_settings
47 add_cache(config)
48
24 try:
49 try:
25 import json
50 import json
26 except ImportError:
51 except ImportError:
27 #python 2.5 compatibility
52 #python 2.5 compatibility
28 import simplejson as json
53 import simplejson as json
29
54
30 __all__ = ['whoosh_index', 'get_commits_stats',
55 __all__ = ['whoosh_index', 'get_commits_stats',
31 'reset_user_password', 'send_email']
56 'reset_user_password', 'send_email']
32
57
33 CELERY_ON = str2bool(config['app_conf'].get('use_celery'))
58 CELERY_ON = str2bool(config['app_conf'].get('use_celery'))
34
59
35 def get_session():
60 def get_session():
36 if CELERY_ON:
61 if CELERY_ON:
37 engine = engine_from_config(config, 'sqlalchemy.db1.')
62 engine = engine_from_config(config, 'sqlalchemy.db1.')
38 init_model(engine)
63 init_model(engine)
39 sa = meta.Session()
64 sa = meta.Session()
40 return sa
65 return sa
41
66
42 def get_repos_path():
67 def get_repos_path():
43 sa = get_session()
68 sa = get_session()
44 q = sa.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == '/').one()
69 q = sa.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == '/').one()
45 return q.ui_value
70 return q.ui_value
46
71
47 @task
72 @task
48 @locked_task
73 @locked_task
49 def whoosh_index(repo_location, full_index):
74 def whoosh_index(repo_location, full_index):
50 log = whoosh_index.get_logger()
75 log = whoosh_index.get_logger()
51 from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
76 from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
52 index_location = config['index_dir']
77 index_location = config['index_dir']
53 WhooshIndexingDaemon(index_location=index_location,
78 WhooshIndexingDaemon(index_location=index_location,
54 repo_location=repo_location).run(full_index=full_index)
79 repo_location=repo_location, sa=get_session())\
80 .run(full_index=full_index)
55
81
56 @task
82 @task
57 @locked_task
83 @locked_task
58 def get_commits_stats(repo_name, ts_min_y, ts_max_y):
84 def get_commits_stats(repo_name, ts_min_y, ts_max_y):
59 from rhodecode.model.db import Statistics, Repository
85 from rhodecode.model.db import Statistics, Repository
60 log = get_commits_stats.get_logger()
86 log = get_commits_stats.get_logger()
61
87
62 #for js data compatibilty
88 #for js data compatibilty
63 author_key_cleaner = lambda k: person(k).replace('"', "")
89 author_key_cleaner = lambda k: person(k).replace('"', "")
64
90
65 commits_by_day_author_aggregate = {}
91 commits_by_day_author_aggregate = {}
66 commits_by_day_aggregate = {}
92 commits_by_day_aggregate = {}
67 repos_path = get_repos_path()
93 repos_path = get_repos_path()
68 p = os.path.join(repos_path, repo_name)
94 p = os.path.join(repos_path, repo_name)
69 repo = get_repo(p)
95 repo = get_repo(p)
70
96
71 skip_date_limit = True
97 skip_date_limit = True
72 parse_limit = 250 #limit for single task changeset parsing optimal for
98 parse_limit = 250 #limit for single task changeset parsing optimal for
73 last_rev = 0
99 last_rev = 0
74 last_cs = None
100 last_cs = None
75 timegetter = itemgetter('time')
101 timegetter = itemgetter('time')
76
102
77 sa = get_session()
103 sa = get_session()
78
104
79 dbrepo = sa.query(Repository)\
105 dbrepo = sa.query(Repository)\
80 .filter(Repository.repo_name == repo_name).scalar()
106 .filter(Repository.repo_name == repo_name).scalar()
81 cur_stats = sa.query(Statistics)\
107 cur_stats = sa.query(Statistics)\
82 .filter(Statistics.repository == dbrepo).scalar()
108 .filter(Statistics.repository == dbrepo).scalar()
83 if cur_stats:
109 if cur_stats:
84 last_rev = cur_stats.stat_on_revision
110 last_rev = cur_stats.stat_on_revision
85 if not repo.revisions:
111 if not repo.revisions:
86 return True
112 return True
87
113
88 if last_rev == repo.revisions[-1] and len(repo.revisions) > 1:
114 if last_rev == repo.revisions[-1] and len(repo.revisions) > 1:
89 #pass silently without any work if we're not on first revision or
115 #pass silently without any work if we're not on first revision or
90 #current state of parsing revision(from db marker) is the last revision
116 #current state of parsing revision(from db marker) is the last revision
91 return True
117 return True
92
118
93 if cur_stats:
119 if cur_stats:
94 commits_by_day_aggregate = OrderedDict(
120 commits_by_day_aggregate = OrderedDict(
95 json.loads(
121 json.loads(
96 cur_stats.commit_activity_combined))
122 cur_stats.commit_activity_combined))
97 commits_by_day_author_aggregate = json.loads(cur_stats.commit_activity)
123 commits_by_day_author_aggregate = json.loads(cur_stats.commit_activity)
98
124
99 log.debug('starting parsing %s', parse_limit)
125 log.debug('starting parsing %s', parse_limit)
100 lmktime = mktime
126 lmktime = mktime
101
127
102 for cnt, rev in enumerate(repo.revisions[last_rev:]):
128 for cnt, rev in enumerate(repo.revisions[last_rev:]):
103 last_cs = cs = repo.get_changeset(rev)
129 last_cs = cs = repo.get_changeset(rev)
104 k = '%s-%s-%s' % (cs.date.timetuple()[0], cs.date.timetuple()[1],
130 k = '%s-%s-%s' % (cs.date.timetuple()[0], cs.date.timetuple()[1],
105 cs.date.timetuple()[2])
131 cs.date.timetuple()[2])
106 timetupple = [int(x) for x in k.split('-')]
132 timetupple = [int(x) for x in k.split('-')]
107 timetupple.extend([0 for _ in xrange(6)])
133 timetupple.extend([0 for _ in xrange(6)])
108 k = lmktime(timetupple)
134 k = lmktime(timetupple)
109 if commits_by_day_author_aggregate.has_key(author_key_cleaner(cs.author)):
135 if commits_by_day_author_aggregate.has_key(author_key_cleaner(cs.author)):
110 try:
136 try:
111 l = [timegetter(x) for x in commits_by_day_author_aggregate\
137 l = [timegetter(x) for x in commits_by_day_author_aggregate\
112 [author_key_cleaner(cs.author)]['data']]
138 [author_key_cleaner(cs.author)]['data']]
113 time_pos = l.index(k)
139 time_pos = l.index(k)
114 except ValueError:
140 except ValueError:
115 time_pos = False
141 time_pos = False
116
142
117 if time_pos >= 0 and time_pos is not False:
143 if time_pos >= 0 and time_pos is not False:
118
144
119 datadict = commits_by_day_author_aggregate\
145 datadict = commits_by_day_author_aggregate\
120 [author_key_cleaner(cs.author)]['data'][time_pos]
146 [author_key_cleaner(cs.author)]['data'][time_pos]
121
147
122 datadict["commits"] += 1
148 datadict["commits"] += 1
123 datadict["added"] += len(cs.added)
149 datadict["added"] += len(cs.added)
124 datadict["changed"] += len(cs.changed)
150 datadict["changed"] += len(cs.changed)
125 datadict["removed"] += len(cs.removed)
151 datadict["removed"] += len(cs.removed)
126
152
127 else:
153 else:
128 if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
154 if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
129
155
130 datadict = {"time":k,
156 datadict = {"time":k,
131 "commits":1,
157 "commits":1,
132 "added":len(cs.added),
158 "added":len(cs.added),
133 "changed":len(cs.changed),
159 "changed":len(cs.changed),
134 "removed":len(cs.removed),
160 "removed":len(cs.removed),
135 }
161 }
136 commits_by_day_author_aggregate\
162 commits_by_day_author_aggregate\
137 [author_key_cleaner(cs.author)]['data'].append(datadict)
163 [author_key_cleaner(cs.author)]['data'].append(datadict)
138
164
139 else:
165 else:
140 if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
166 if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
141 commits_by_day_author_aggregate[author_key_cleaner(cs.author)] = {
167 commits_by_day_author_aggregate[author_key_cleaner(cs.author)] = {
142 "label":author_key_cleaner(cs.author),
168 "label":author_key_cleaner(cs.author),
143 "data":[{"time":k,
169 "data":[{"time":k,
144 "commits":1,
170 "commits":1,
145 "added":len(cs.added),
171 "added":len(cs.added),
146 "changed":len(cs.changed),
172 "changed":len(cs.changed),
147 "removed":len(cs.removed),
173 "removed":len(cs.removed),
148 }],
174 }],
149 "schema":["commits"],
175 "schema":["commits"],
150 }
176 }
151
177
152 #gather all data by day
178 #gather all data by day
153 if commits_by_day_aggregate.has_key(k):
179 if commits_by_day_aggregate.has_key(k):
154 commits_by_day_aggregate[k] += 1
180 commits_by_day_aggregate[k] += 1
155 else:
181 else:
156 commits_by_day_aggregate[k] = 1
182 commits_by_day_aggregate[k] = 1
157
183
158 if cnt >= parse_limit:
184 if cnt >= parse_limit:
159 #don't fetch to much data since we can freeze application
185 #don't fetch to much data since we can freeze application
160 break
186 break
161 overview_data = []
187 overview_data = []
162 for k, v in commits_by_day_aggregate.items():
188 for k, v in commits_by_day_aggregate.items():
163 overview_data.append([k, v])
189 overview_data.append([k, v])
164 overview_data = sorted(overview_data, key=itemgetter(0))
190 overview_data = sorted(overview_data, key=itemgetter(0))
165 if not commits_by_day_author_aggregate:
191 if not commits_by_day_author_aggregate:
166 commits_by_day_author_aggregate[author_key_cleaner(repo.contact)] = {
192 commits_by_day_author_aggregate[author_key_cleaner(repo.contact)] = {
167 "label":author_key_cleaner(repo.contact),
193 "label":author_key_cleaner(repo.contact),
168 "data":[0, 1],
194 "data":[0, 1],
169 "schema":["commits"],
195 "schema":["commits"],
170 }
196 }
171
197
172 stats = cur_stats if cur_stats else Statistics()
198 stats = cur_stats if cur_stats else Statistics()
173 stats.commit_activity = json.dumps(commits_by_day_author_aggregate)
199 stats.commit_activity = json.dumps(commits_by_day_author_aggregate)
174 stats.commit_activity_combined = json.dumps(overview_data)
200 stats.commit_activity_combined = json.dumps(overview_data)
175
201
176 log.debug('last revison %s', last_rev)
202 log.debug('last revison %s', last_rev)
177 leftovers = len(repo.revisions[last_rev:])
203 leftovers = len(repo.revisions[last_rev:])
178 log.debug('revisions to parse %s', leftovers)
204 log.debug('revisions to parse %s', leftovers)
179
205
180 if last_rev == 0 or leftovers < parse_limit:
206 if last_rev == 0 or leftovers < parse_limit:
181 stats.languages = json.dumps(__get_codes_stats(repo_name))
207 stats.languages = json.dumps(__get_codes_stats(repo_name))
182
208
183 stats.repository = dbrepo
209 stats.repository = dbrepo
184 stats.stat_on_revision = last_cs.revision
210 stats.stat_on_revision = last_cs.revision
185
211
186 try:
212 try:
187 sa.add(stats)
213 sa.add(stats)
188 sa.commit()
214 sa.commit()
189 except:
215 except:
190 log.error(traceback.format_exc())
216 log.error(traceback.format_exc())
191 sa.rollback()
217 sa.rollback()
192 return False
218 return False
193 if len(repo.revisions) > 1:
219 if len(repo.revisions) > 1:
194 run_task(get_commits_stats, repo_name, ts_min_y, ts_max_y)
220 run_task(get_commits_stats, repo_name, ts_min_y, ts_max_y)
195
221
196 return True
222 return True
197
223
198 @task
224 @task
199 def reset_user_password(user_email):
225 def reset_user_password(user_email):
200 log = reset_user_password.get_logger()
226 log = reset_user_password.get_logger()
201 from rhodecode.lib import auth
227 from rhodecode.lib import auth
202 from rhodecode.model.db import User
228 from rhodecode.model.db import User
203
229
204 try:
230 try:
205 try:
231 try:
206 sa = get_session()
232 sa = get_session()
207 user = sa.query(User).filter(User.email == user_email).scalar()
233 user = sa.query(User).filter(User.email == user_email).scalar()
208 new_passwd = auth.PasswordGenerator().gen_password(8,
234 new_passwd = auth.PasswordGenerator().gen_password(8,
209 auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
235 auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
210 if user:
236 if user:
211 user.password = auth.get_crypt_password(new_passwd)
237 user.password = auth.get_crypt_password(new_passwd)
212 sa.add(user)
238 sa.add(user)
213 sa.commit()
239 sa.commit()
214 log.info('change password for %s', user_email)
240 log.info('change password for %s', user_email)
215 if new_passwd is None:
241 if new_passwd is None:
216 raise Exception('unable to generate new password')
242 raise Exception('unable to generate new password')
217
243
218 except:
244 except:
219 log.error(traceback.format_exc())
245 log.error(traceback.format_exc())
220 sa.rollback()
246 sa.rollback()
221
247
222 run_task(send_email, user_email,
248 run_task(send_email, user_email,
223 "Your new rhodecode password",
249 "Your new rhodecode password",
224 'Your new rhodecode password:%s' % (new_passwd))
250 'Your new rhodecode password:%s' % (new_passwd))
225 log.info('send new password mail to %s', user_email)
251 log.info('send new password mail to %s', user_email)
226
252
227
253
228 except:
254 except:
229 log.error('Failed to update user password')
255 log.error('Failed to update user password')
230 log.error(traceback.format_exc())
256 log.error(traceback.format_exc())
231
257
232 return True
258 return True
233
259
234 @task
260 @task
235 def send_email(recipients, subject, body):
261 def send_email(recipients, subject, body):
236 """
262 """
237 Sends an email with defined parameters from the .ini files.
263 Sends an email with defined parameters from the .ini files.
238
264
239
265
240 :param recipients: list of recipients, it this is empty the defined email
266 :param recipients: list of recipients, it this is empty the defined email
241 address from field 'email_to' is used instead
267 address from field 'email_to' is used instead
242 :param subject: subject of the mail
268 :param subject: subject of the mail
243 :param body: body of the mail
269 :param body: body of the mail
244 """
270 """
245 log = send_email.get_logger()
271 log = send_email.get_logger()
246 email_config = config
272 email_config = config
247
273
248 if not recipients:
274 if not recipients:
249 recipients = [email_config.get('email_to')]
275 recipients = [email_config.get('email_to')]
250
276
251 mail_from = email_config.get('app_email_from')
277 mail_from = email_config.get('app_email_from')
252 user = email_config.get('smtp_username')
278 user = email_config.get('smtp_username')
253 passwd = email_config.get('smtp_password')
279 passwd = email_config.get('smtp_password')
254 mail_server = email_config.get('smtp_server')
280 mail_server = email_config.get('smtp_server')
255 mail_port = email_config.get('smtp_port')
281 mail_port = email_config.get('smtp_port')
256 tls = str2bool(email_config.get('smtp_use_tls'))
282 tls = str2bool(email_config.get('smtp_use_tls'))
257 ssl = str2bool(email_config.get('smtp_use_ssl'))
283 ssl = str2bool(email_config.get('smtp_use_ssl'))
258
284
259 try:
285 try:
260 m = SmtpMailer(mail_from, user, passwd, mail_server,
286 m = SmtpMailer(mail_from, user, passwd, mail_server,
261 mail_port, ssl, tls)
287 mail_port, ssl, tls)
262 m.send(recipients, subject, body)
288 m.send(recipients, subject, body)
263 except:
289 except:
264 log.error('Mail sending failed')
290 log.error('Mail sending failed')
265 log.error(traceback.format_exc())
291 log.error(traceback.format_exc())
266 return False
292 return False
267 return True
293 return True
268
294
269 @task
295 @task
270 def create_repo_fork(form_data, cur_user):
296 def create_repo_fork(form_data, cur_user):
271 from rhodecode.model.repo import RepoModel
297 from rhodecode.model.repo import RepoModel
272 from vcs import get_backend
298 from vcs import get_backend
273 log = create_repo_fork.get_logger()
299 log = create_repo_fork.get_logger()
274 repo_model = RepoModel(get_session())
300 repo_model = RepoModel(get_session())
275 repo_model.create(form_data, cur_user, just_db=True, fork=True)
301 repo_model.create(form_data, cur_user, just_db=True, fork=True)
276 repo_name = form_data['repo_name']
302 repo_name = form_data['repo_name']
277 repos_path = get_repos_path()
303 repos_path = get_repos_path()
278 repo_path = os.path.join(repos_path, repo_name)
304 repo_path = os.path.join(repos_path, repo_name)
279 repo_fork_path = os.path.join(repos_path, form_data['fork_name'])
305 repo_fork_path = os.path.join(repos_path, form_data['fork_name'])
280 alias = form_data['repo_type']
306 alias = form_data['repo_type']
281
307
282 log.info('creating repo fork %s as %s', repo_name, repo_path)
308 log.info('creating repo fork %s as %s', repo_name, repo_path)
283 backend = get_backend(alias)
309 backend = get_backend(alias)
284 backend(str(repo_fork_path), create=True, src_url=str(repo_path))
310 backend(str(repo_fork_path), create=True, src_url=str(repo_path))
285
311
286 def __get_codes_stats(repo_name):
312 def __get_codes_stats(repo_name):
287 LANGUAGES_EXTENSIONS_MAP = {'scm': 'Scheme', 'asmx': 'VbNetAspx', 'Rout':
313 LANGUAGES_EXTENSIONS_MAP = {'scm': 'Scheme', 'asmx': 'VbNetAspx', 'Rout':
288 'RConsole', 'rest': 'Rst', 'abap': 'ABAP', 'go': 'Go', 'phtml': 'HtmlPhp',
314 'RConsole', 'rest': 'Rst', 'abap': 'ABAP', 'go': 'Go', 'phtml': 'HtmlPhp',
289 'ns2': 'Newspeak', 'xml': 'EvoqueXml', 'sh-session': 'BashSession', 'ads':
315 'ns2': 'Newspeak', 'xml': 'EvoqueXml', 'sh-session': 'BashSession', 'ads':
290 'Ada', 'clj': 'Clojure', 'll': 'Llvm', 'ebuild': 'Bash', 'adb': 'Ada',
316 'Ada', 'clj': 'Clojure', 'll': 'Llvm', 'ebuild': 'Bash', 'adb': 'Ada',
291 'ada': 'Ada', 'c++-objdump': 'CppObjdump', 'aspx':
317 'ada': 'Ada', 'c++-objdump': 'CppObjdump', 'aspx':
292 'VbNetAspx', 'ksh': 'Bash', 'coffee': 'CoffeeScript', 'vert': 'GLShader',
318 'VbNetAspx', 'ksh': 'Bash', 'coffee': 'CoffeeScript', 'vert': 'GLShader',
293 'Makefile.*': 'Makefile', 'di': 'D', 'dpatch': 'DarcsPatch', 'rake':
319 'Makefile.*': 'Makefile', 'di': 'D', 'dpatch': 'DarcsPatch', 'rake':
294 'Ruby', 'moo': 'MOOCode', 'erl-sh': 'ErlangShell', 'geo': 'GLShader',
320 'Ruby', 'moo': 'MOOCode', 'erl-sh': 'ErlangShell', 'geo': 'GLShader',
295 'pov': 'Povray', 'bas': 'VbNet', 'bat': 'Batch', 'd': 'D', 'lisp':
321 'pov': 'Povray', 'bas': 'VbNet', 'bat': 'Batch', 'd': 'D', 'lisp':
296 'CommonLisp', 'h': 'C', 'rbx': 'Ruby', 'tcl': 'Tcl', 'c++': 'Cpp', 'md':
322 'CommonLisp', 'h': 'C', 'rbx': 'Ruby', 'tcl': 'Tcl', 'c++': 'Cpp', 'md':
297 'MiniD', '.vimrc': 'Vim', 'xsd': 'Xml', 'ml': 'Ocaml', 'el': 'CommonLisp',
323 'MiniD', '.vimrc': 'Vim', 'xsd': 'Xml', 'ml': 'Ocaml', 'el': 'CommonLisp',
298 'befunge': 'Befunge', 'xsl': 'Xslt', 'pyx': 'Cython', 'cfm':
324 'befunge': 'Befunge', 'xsl': 'Xslt', 'pyx': 'Cython', 'cfm':
299 'ColdfusionHtml', 'evoque': 'Evoque', 'cfg': 'Ini', 'htm': 'Html',
325 'ColdfusionHtml', 'evoque': 'Evoque', 'cfg': 'Ini', 'htm': 'Html',
300 'Makefile': 'Makefile', 'cfc': 'ColdfusionHtml', 'tex': 'Tex', 'cs':
326 'Makefile': 'Makefile', 'cfc': 'ColdfusionHtml', 'tex': 'Tex', 'cs':
301 'CSharp', 'mxml': 'Mxml', 'patch': 'Diff', 'apache.conf': 'ApacheConf',
327 'CSharp', 'mxml': 'Mxml', 'patch': 'Diff', 'apache.conf': 'ApacheConf',
302 'scala': 'Scala', 'applescript': 'AppleScript', 'GNUmakefile': 'Makefile',
328 'scala': 'Scala', 'applescript': 'AppleScript', 'GNUmakefile': 'Makefile',
303 'c-objdump': 'CObjdump', 'lua': 'Lua', 'apache2.conf': 'ApacheConf', 'rb':
329 'c-objdump': 'CObjdump', 'lua': 'Lua', 'apache2.conf': 'ApacheConf', 'rb':
304 'Ruby', 'gemspec': 'Ruby', 'rl': 'RagelObjectiveC', 'vala': 'Vala', 'tmpl':
330 'Ruby', 'gemspec': 'Ruby', 'rl': 'RagelObjectiveC', 'vala': 'Vala', 'tmpl':
305 'Cheetah', 'bf': 'Brainfuck', 'plt': 'Gnuplot', 'G': 'AntlrRuby', 'xslt':
331 'Cheetah', 'bf': 'Brainfuck', 'plt': 'Gnuplot', 'G': 'AntlrRuby', 'xslt':
306 'Xslt', 'flxh': 'Felix', 'asax': 'VbNetAspx', 'Rakefile': 'Ruby', 'S': 'S',
332 'Xslt', 'flxh': 'Felix', 'asax': 'VbNetAspx', 'Rakefile': 'Ruby', 'S': 'S',
307 'wsdl': 'Xml', 'js': 'Javascript', 'autodelegate': 'Myghty', 'properties':
333 'wsdl': 'Xml', 'js': 'Javascript', 'autodelegate': 'Myghty', 'properties':
308 'Ini', 'bash': 'Bash', 'c': 'C', 'g': 'AntlrRuby', 'r3': 'Rebol', 's':
334 'Ini', 'bash': 'Bash', 'c': 'C', 'g': 'AntlrRuby', 'r3': 'Rebol', 's':
309 'Gas', 'ashx': 'VbNetAspx', 'cxx': 'Cpp', 'boo': 'Boo', 'prolog': 'Prolog',
335 'Gas', 'ashx': 'VbNetAspx', 'cxx': 'Cpp', 'boo': 'Boo', 'prolog': 'Prolog',
310 'sqlite3-console': 'SqliteConsole', 'cl': 'CommonLisp', 'cc': 'Cpp', 'pot':
336 'sqlite3-console': 'SqliteConsole', 'cl': 'CommonLisp', 'cc': 'Cpp', 'pot':
311 'Gettext', 'vim': 'Vim', 'pxi': 'Cython', 'yaml': 'Yaml', 'SConstruct':
337 'Gettext', 'vim': 'Vim', 'pxi': 'Cython', 'yaml': 'Yaml', 'SConstruct':
312 'Python', 'diff': 'Diff', 'txt': 'Text', 'cw': 'Redcode', 'pxd': 'Cython',
338 'Python', 'diff': 'Diff', 'txt': 'Text', 'cw': 'Redcode', 'pxd': 'Cython',
313 'plot': 'Gnuplot', 'java': 'Java', 'hrl': 'Erlang', 'py': 'Python',
339 'plot': 'Gnuplot', 'java': 'Java', 'hrl': 'Erlang', 'py': 'Python',
314 'makefile': 'Makefile', 'squid.conf': 'SquidConf', 'asm': 'Nasm', 'toc':
340 'makefile': 'Makefile', 'squid.conf': 'SquidConf', 'asm': 'Nasm', 'toc':
315 'Tex', 'kid': 'Genshi', 'rhtml': 'Rhtml', 'po': 'Gettext', 'pl': 'Prolog',
341 'Tex', 'kid': 'Genshi', 'rhtml': 'Rhtml', 'po': 'Gettext', 'pl': 'Prolog',
316 'pm': 'Perl', 'hx': 'Haxe', 'ascx': 'VbNetAspx', 'ooc': 'Ooc', 'asy':
342 'pm': 'Perl', 'hx': 'Haxe', 'ascx': 'VbNetAspx', 'ooc': 'Ooc', 'asy':
317 'Asymptote', 'hs': 'Haskell', 'SConscript': 'Python', 'pytb':
343 'Asymptote', 'hs': 'Haskell', 'SConscript': 'Python', 'pytb':
318 'PythonTraceback', 'myt': 'Myghty', 'hh': 'Cpp', 'R': 'S', 'aux': 'Tex',
344 'PythonTraceback', 'myt': 'Myghty', 'hh': 'Cpp', 'R': 'S', 'aux': 'Tex',
319 'rst': 'Rst', 'cpp-objdump': 'CppObjdump', 'lgt': 'Logtalk', 'rss': 'Xml',
345 'rst': 'Rst', 'cpp-objdump': 'CppObjdump', 'lgt': 'Logtalk', 'rss': 'Xml',
320 'flx': 'Felix', 'b': 'Brainfuck', 'f': 'Fortran', 'rbw': 'Ruby',
346 'flx': 'Felix', 'b': 'Brainfuck', 'f': 'Fortran', 'rbw': 'Ruby',
321 '.htaccess': 'ApacheConf', 'cxx-objdump': 'CppObjdump', 'j': 'ObjectiveJ',
347 '.htaccess': 'ApacheConf', 'cxx-objdump': 'CppObjdump', 'j': 'ObjectiveJ',
322 'mll': 'Ocaml', 'yml': 'Yaml', 'mu': 'MuPAD', 'r': 'Rebol', 'ASM': 'Nasm',
348 'mll': 'Ocaml', 'yml': 'Yaml', 'mu': 'MuPAD', 'r': 'Rebol', 'ASM': 'Nasm',
323 'erl': 'Erlang', 'mly': 'Ocaml', 'mo': 'Modelica', 'def': 'Modula2', 'ini':
349 'erl': 'Erlang', 'mly': 'Ocaml', 'mo': 'Modelica', 'def': 'Modula2', 'ini':
324 'Ini', 'control': 'DebianControl', 'vb': 'VbNet', 'vapi': 'Vala', 'pro':
350 'Ini', 'control': 'DebianControl', 'vb': 'VbNet', 'vapi': 'Vala', 'pro':
325 'Prolog', 'spt': 'Cheetah', 'mli': 'Ocaml', 'as': 'ActionScript3', 'cmd':
351 'Prolog', 'spt': 'Cheetah', 'mli': 'Ocaml', 'as': 'ActionScript3', 'cmd':
326 'Batch', 'cpp': 'Cpp', 'io': 'Io', 'tac': 'Python', 'haml': 'Haml', 'rkt':
352 'Batch', 'cpp': 'Cpp', 'io': 'Io', 'tac': 'Python', 'haml': 'Haml', 'rkt':
327 'Racket', 'st':'Smalltalk', 'inc': 'Povray', 'pas': 'Delphi', 'cmake':
353 'Racket', 'st':'Smalltalk', 'inc': 'Povray', 'pas': 'Delphi', 'cmake':
328 'CMake', 'csh':'Tcsh', 'hpp': 'Cpp', 'feature': 'Gherkin', 'html': 'Html',
354 'CMake', 'csh':'Tcsh', 'hpp': 'Cpp', 'feature': 'Gherkin', 'html': 'Html',
329 'php':'Php', 'php3':'Php', 'php4':'Php', 'php5':'Php', 'xhtml': 'Html',
355 'php':'Php', 'php3':'Php', 'php4':'Php', 'php5':'Php', 'xhtml': 'Html',
330 'hxx': 'Cpp', 'eclass': 'Bash', 'css': 'Css',
356 'hxx': 'Cpp', 'eclass': 'Bash', 'css': 'Css',
331 'frag': 'GLShader', 'd-objdump': 'DObjdump', 'weechatlog': 'IrcLogs',
357 'frag': 'GLShader', 'd-objdump': 'DObjdump', 'weechatlog': 'IrcLogs',
332 'tcsh': 'Tcsh', 'objdump': 'Objdump', 'pyw': 'Python', 'h++': 'Cpp',
358 'tcsh': 'Tcsh', 'objdump': 'Objdump', 'pyw': 'Python', 'h++': 'Cpp',
333 'py3tb': 'Python3Traceback', 'jsp': 'Jsp', 'sql': 'Sql', 'mak': 'Makefile',
359 'py3tb': 'Python3Traceback', 'jsp': 'Jsp', 'sql': 'Sql', 'mak': 'Makefile',
334 'php': 'Php', 'mao': 'Mako', 'man': 'Groff', 'dylan': 'Dylan', 'sass':
360 'php': 'Php', 'mao': 'Mako', 'man': 'Groff', 'dylan': 'Dylan', 'sass':
335 'Sass', 'cfml': 'ColdfusionHtml', 'darcspatch': 'DarcsPatch', 'tpl':
361 'Sass', 'cfml': 'ColdfusionHtml', 'darcspatch': 'DarcsPatch', 'tpl':
336 'Smarty', 'm': 'ObjectiveC', 'f90': 'Fortran', 'mod': 'Modula2', 'sh':
362 'Smarty', 'm': 'ObjectiveC', 'f90': 'Fortran', 'mod': 'Modula2', 'sh':
337 'Bash', 'lhs': 'LiterateHaskell', 'sources.list': 'SourcesList', 'axd':
363 'Bash', 'lhs': 'LiterateHaskell', 'sources.list': 'SourcesList', 'axd':
338 'VbNetAspx', 'sc': 'Python'}
364 'VbNetAspx', 'sc': 'Python'}
339
365
340 repos_path = get_repos_path()
366 repos_path = get_repos_path()
341 p = os.path.join(repos_path, repo_name)
367 p = os.path.join(repos_path, repo_name)
342 repo = get_repo(p)
368 repo = get_repo(p)
343 tip = repo.get_changeset()
369 tip = repo.get_changeset()
344 code_stats = {}
370 code_stats = {}
345
371
346 def aggregate(cs):
372 def aggregate(cs):
347 for f in cs[2]:
373 for f in cs[2]:
348 ext = f.extension
374 ext = f.extension
349 key = LANGUAGES_EXTENSIONS_MAP.get(ext, ext)
375 key = LANGUAGES_EXTENSIONS_MAP.get(ext, ext)
350 key = key or ext
376 key = key or ext
351 if ext in LANGUAGES_EXTENSIONS_MAP.keys():
377 if ext in LANGUAGES_EXTENSIONS_MAP.keys():
352 if code_stats.has_key(key):
378 if code_stats.has_key(key):
353 code_stats[key] += 1
379 code_stats[key] += 1
354 else:
380 else:
355 code_stats[key] = 1
381 code_stats[key] = 1
356
382
357 map(aggregate, tip.walk('/'))
383 map(aggregate, tip.walk('/'))
358
384
359 return code_stats or {}
385 return code_stats or {}
360
386
361
387
362
388
363
389
@@ -1,215 +1,215 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 # encoding: utf-8
2 # encoding: utf-8
3 # whoosh indexer daemon for rhodecode
3 # whoosh indexer daemon for rhodecode
4 # Copyright (C) 2009-2010 Marcin Kuzminski <marcin@python-works.com>
4 # Copyright (C) 2009-2010 Marcin Kuzminski <marcin@python-works.com>
5 #
5 #
6 # This program is free software; you can redistribute it and/or
6 # This program is free software; you can redistribute it and/or
7 # modify it under the terms of the GNU General Public License
7 # modify it under the terms of the GNU General Public License
8 # as published by the Free Software Foundation; version 2
8 # as published by the Free Software Foundation; version 2
9 # of the License or (at your opinion) any later version of the license.
9 # of the License or (at your opinion) any later version of the license.
10 #
10 #
11 # This program is distributed in the hope that it will be useful,
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
14 # GNU General Public License for more details.
15 #
15 #
16 # You should have received a copy of the GNU General Public License
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
18 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
19 # MA 02110-1301, USA.
19 # MA 02110-1301, USA.
20 """
20 """
21 Created on Jan 26, 2010
21 Created on Jan 26, 2010
22
22
23 @author: marcink
23 @author: marcink
24 A deamon will read from task table and run tasks
24 A deamon will read from task table and run tasks
25 """
25 """
26 import sys
26 import sys
27 import os
27 import os
28 from os.path import dirname as dn
28 from os.path import dirname as dn
29 from os.path import join as jn
29 from os.path import join as jn
30
30
31 #to get the rhodecode import
31 #to get the rhodecode import
32 project_path = dn(dn(dn(dn(os.path.realpath(__file__)))))
32 project_path = dn(dn(dn(dn(os.path.realpath(__file__)))))
33 sys.path.append(project_path)
33 sys.path.append(project_path)
34
34
35
35
36 from rhodecode.model.scm import ScmModel
36 from rhodecode.model.scm import ScmModel
37 from rhodecode.lib.helpers import safe_unicode
37 from rhodecode.lib.helpers import safe_unicode
38 from whoosh.index import create_in, open_dir
38 from whoosh.index import create_in, open_dir
39 from shutil import rmtree
39 from shutil import rmtree
40 from rhodecode.lib.indexers import INDEX_EXTENSIONS, SCHEMA, IDX_NAME
40 from rhodecode.lib.indexers import INDEX_EXTENSIONS, SCHEMA, IDX_NAME
41
41
42 from time import mktime
42 from time import mktime
43 from vcs.exceptions import ChangesetError, RepositoryError
43 from vcs.exceptions import ChangesetError, RepositoryError
44
44
45 import logging
45 import logging
46
46
47 log = logging.getLogger('whooshIndexer')
47 log = logging.getLogger('whooshIndexer')
48 # create logger
48 # create logger
49 log.setLevel(logging.DEBUG)
49 log.setLevel(logging.DEBUG)
50 log.propagate = False
50 log.propagate = False
51 # create console handler and set level to debug
51 # create console handler and set level to debug
52 ch = logging.StreamHandler()
52 ch = logging.StreamHandler()
53 ch.setLevel(logging.DEBUG)
53 ch.setLevel(logging.DEBUG)
54
54
55 # create formatter
55 # create formatter
56 formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
56 formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
57
57
58 # add formatter to ch
58 # add formatter to ch
59 ch.setFormatter(formatter)
59 ch.setFormatter(formatter)
60
60
61 # add ch to logger
61 # add ch to logger
62 log.addHandler(ch)
62 log.addHandler(ch)
63
63
64 class WhooshIndexingDaemon(object):
64 class WhooshIndexingDaemon(object):
65 """
65 """
66 Deamon for atomic jobs
66 Deamon for atomic jobs
67 """
67 """
68
68
69 def __init__(self, indexname='HG_INDEX', index_location=None,
69 def __init__(self, indexname='HG_INDEX', index_location=None,
70 repo_location=None):
70 repo_location=None, sa=None):
71 self.indexname = indexname
71 self.indexname = indexname
72
72
73 self.index_location = index_location
73 self.index_location = index_location
74 if not index_location:
74 if not index_location:
75 raise Exception('You have to provide index location')
75 raise Exception('You have to provide index location')
76
76
77 self.repo_location = repo_location
77 self.repo_location = repo_location
78 if not repo_location:
78 if not repo_location:
79 raise Exception('You have to provide repositories location')
79 raise Exception('You have to provide repositories location')
80
80
81 self.repo_paths = ScmModel().repo_scan(self.repo_location, None)
81 self.repo_paths = ScmModel(sa).repo_scan(self.repo_location, None)
82 self.initial = False
82 self.initial = False
83 if not os.path.isdir(self.index_location):
83 if not os.path.isdir(self.index_location):
84 os.makedirs(self.index_location)
84 os.makedirs(self.index_location)
85 log.info('Cannot run incremental index since it does not'
85 log.info('Cannot run incremental index since it does not'
86 ' yet exist running full build')
86 ' yet exist running full build')
87 self.initial = True
87 self.initial = True
88
88
89 def get_paths(self, repo):
89 def get_paths(self, repo):
90 """recursive walk in root dir and return a set of all path in that dir
90 """recursive walk in root dir and return a set of all path in that dir
91 based on repository walk function
91 based on repository walk function
92 """
92 """
93 index_paths_ = set()
93 index_paths_ = set()
94 try:
94 try:
95 for topnode, dirs, files in repo.walk('/', 'tip'):
95 for topnode, dirs, files in repo.walk('/', 'tip'):
96 for f in files:
96 for f in files:
97 index_paths_.add(jn(repo.path, f.path))
97 index_paths_.add(jn(repo.path, f.path))
98 for dir in dirs:
98 for dir in dirs:
99 for f in files:
99 for f in files:
100 index_paths_.add(jn(repo.path, f.path))
100 index_paths_.add(jn(repo.path, f.path))
101
101
102 except RepositoryError:
102 except RepositoryError:
103 pass
103 pass
104 return index_paths_
104 return index_paths_
105
105
106 def get_node(self, repo, path):
106 def get_node(self, repo, path):
107 n_path = path[len(repo.path) + 1:]
107 n_path = path[len(repo.path) + 1:]
108 node = repo.get_changeset().get_node(n_path)
108 node = repo.get_changeset().get_node(n_path)
109 return node
109 return node
110
110
111 def get_node_mtime(self, node):
111 def get_node_mtime(self, node):
112 return mktime(node.last_changeset.date.timetuple())
112 return mktime(node.last_changeset.date.timetuple())
113
113
114 def add_doc(self, writer, path, repo):
114 def add_doc(self, writer, path, repo):
115 """Adding doc to writer this function itself fetches data from
115 """Adding doc to writer this function itself fetches data from
116 the instance of vcs backend"""
116 the instance of vcs backend"""
117 node = self.get_node(repo, path)
117 node = self.get_node(repo, path)
118
118
119 #we just index the content of chosen files
119 #we just index the content of chosen files
120 if node.extension in INDEX_EXTENSIONS:
120 if node.extension in INDEX_EXTENSIONS:
121 log.debug(' >> %s [WITH CONTENT]' % path)
121 log.debug(' >> %s [WITH CONTENT]' % path)
122 u_content = node.content
122 u_content = node.content
123 else:
123 else:
124 log.debug(' >> %s' % path)
124 log.debug(' >> %s' % path)
125 #just index file name without it's content
125 #just index file name without it's content
126 u_content = u''
126 u_content = u''
127
127
128 writer.add_document(owner=unicode(repo.contact),
128 writer.add_document(owner=unicode(repo.contact),
129 repository=safe_unicode(repo.name),
129 repository=safe_unicode(repo.name),
130 path=safe_unicode(path),
130 path=safe_unicode(path),
131 content=u_content,
131 content=u_content,
132 modtime=self.get_node_mtime(node),
132 modtime=self.get_node_mtime(node),
133 extension=node.extension)
133 extension=node.extension)
134
134
135
135
136 def build_index(self):
136 def build_index(self):
137 if os.path.exists(self.index_location):
137 if os.path.exists(self.index_location):
138 log.debug('removing previous index')
138 log.debug('removing previous index')
139 rmtree(self.index_location)
139 rmtree(self.index_location)
140
140
141 if not os.path.exists(self.index_location):
141 if not os.path.exists(self.index_location):
142 os.mkdir(self.index_location)
142 os.mkdir(self.index_location)
143
143
144 idx = create_in(self.index_location, SCHEMA, indexname=IDX_NAME)
144 idx = create_in(self.index_location, SCHEMA, indexname=IDX_NAME)
145 writer = idx.writer()
145 writer = idx.writer()
146
146
147 for cnt, repo in enumerate(self.repo_paths.values()):
147 for cnt, repo in enumerate(self.repo_paths.values()):
148 log.debug('building index @ %s' % repo.path)
148 log.debug('building index @ %s' % repo.path)
149
149
150 for idx_path in self.get_paths(repo):
150 for idx_path in self.get_paths(repo):
151 self.add_doc(writer, idx_path, repo)
151 self.add_doc(writer, idx_path, repo)
152
152
153 log.debug('>> COMMITING CHANGES <<')
153 log.debug('>> COMMITING CHANGES <<')
154 writer.commit(merge=True)
154 writer.commit(merge=True)
155 log.debug('>>> FINISHED BUILDING INDEX <<<')
155 log.debug('>>> FINISHED BUILDING INDEX <<<')
156
156
157
157
158 def update_index(self):
158 def update_index(self):
159 log.debug('STARTING INCREMENTAL INDEXING UPDATE')
159 log.debug('STARTING INCREMENTAL INDEXING UPDATE')
160
160
161 idx = open_dir(self.index_location, indexname=self.indexname)
161 idx = open_dir(self.index_location, indexname=self.indexname)
162 # The set of all paths in the index
162 # The set of all paths in the index
163 indexed_paths = set()
163 indexed_paths = set()
164 # The set of all paths we need to re-index
164 # The set of all paths we need to re-index
165 to_index = set()
165 to_index = set()
166
166
167 reader = idx.reader()
167 reader = idx.reader()
168 writer = idx.writer()
168 writer = idx.writer()
169
169
170 # Loop over the stored fields in the index
170 # Loop over the stored fields in the index
171 for fields in reader.all_stored_fields():
171 for fields in reader.all_stored_fields():
172 indexed_path = fields['path']
172 indexed_path = fields['path']
173 indexed_paths.add(indexed_path)
173 indexed_paths.add(indexed_path)
174
174
175 repo = self.repo_paths[fields['repository']]
175 repo = self.repo_paths[fields['repository']]
176
176
177 try:
177 try:
178 node = self.get_node(repo, indexed_path)
178 node = self.get_node(repo, indexed_path)
179 except ChangesetError:
179 except ChangesetError:
180 # This file was deleted since it was indexed
180 # This file was deleted since it was indexed
181 log.debug('removing from index %s' % indexed_path)
181 log.debug('removing from index %s' % indexed_path)
182 writer.delete_by_term('path', indexed_path)
182 writer.delete_by_term('path', indexed_path)
183
183
184 else:
184 else:
185 # Check if this file was changed since it was indexed
185 # Check if this file was changed since it was indexed
186 indexed_time = fields['modtime']
186 indexed_time = fields['modtime']
187 mtime = self.get_node_mtime(node)
187 mtime = self.get_node_mtime(node)
188 if mtime > indexed_time:
188 if mtime > indexed_time:
189 # The file has changed, delete it and add it to the list of
189 # The file has changed, delete it and add it to the list of
190 # files to reindex
190 # files to reindex
191 log.debug('adding to reindex list %s' % indexed_path)
191 log.debug('adding to reindex list %s' % indexed_path)
192 writer.delete_by_term('path', indexed_path)
192 writer.delete_by_term('path', indexed_path)
193 to_index.add(indexed_path)
193 to_index.add(indexed_path)
194
194
195 # Loop over the files in the filesystem
195 # Loop over the files in the filesystem
196 # Assume we have a function that gathers the filenames of the
196 # Assume we have a function that gathers the filenames of the
197 # documents to be indexed
197 # documents to be indexed
198 for repo in self.repo_paths.values():
198 for repo in self.repo_paths.values():
199 for path in self.get_paths(repo):
199 for path in self.get_paths(repo):
200 if path in to_index or path not in indexed_paths:
200 if path in to_index or path not in indexed_paths:
201 # This is either a file that's changed, or a new file
201 # This is either a file that's changed, or a new file
202 # that wasn't indexed before. So index it!
202 # that wasn't indexed before. So index it!
203 self.add_doc(writer, path, repo)
203 self.add_doc(writer, path, repo)
204 log.debug('re indexing %s' % path)
204 log.debug('re indexing %s' % path)
205
205
206 log.debug('>> COMMITING CHANGES <<')
206 log.debug('>> COMMITING CHANGES <<')
207 writer.commit(merge=True)
207 writer.commit(merge=True)
208 log.debug('>>> FINISHED REBUILDING INDEX <<<')
208 log.debug('>>> FINISHED REBUILDING INDEX <<<')
209
209
210 def run(self, full_index=False):
210 def run(self, full_index=False):
211 """Run daemon"""
211 """Run daemon"""
212 if full_index or self.initial:
212 if full_index or self.initial:
213 self.build_index()
213 self.build_index()
214 else:
214 else:
215 self.update_index()
215 self.update_index()
General Comments 0
You need to be logged in to leave comments. Login now