rhodecode/lib/celerylib/__init__.py
@@ -1,106 +1,109 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """ |
|
3 | 3 | rhodecode.lib.celerylib.__init__ |
|
4 | 4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
5 | 5 | |
|
6 | 6 | celery libs for RhodeCode |
|
7 | 7 | |
|
8 | 8 | :created_on: Nov 27, 2010 |
|
9 | 9 | :author: marcink |
|
10 | 10 | :copyright: (C) 2009-2011 Marcin Kuzminski <marcin@python-works.com> |
|
11 | 11 | :license: GPLv3, see COPYING for more details. |
|
12 | 12 | """ |
|
13 | 13 | # This program is free software: you can redistribute it and/or modify |
|
14 | 14 | # it under the terms of the GNU General Public License as published by |
|
15 | 15 | # the Free Software Foundation, either version 3 of the License, or |
|
16 | 16 | # (at your option) any later version. |
|
17 | 17 | # |
|
18 | 18 | # This program is distributed in the hope that it will be useful, |
|
19 | 19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
20 | 20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
21 | 21 | # GNU General Public License for more details. |
|
22 | 22 | # |
|
23 | 23 | # You should have received a copy of the GNU General Public License |
|
24 | 24 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
25 | 25 | |
|
26 | 26 | import os |
|
27 | 27 | import sys |
|
28 | 28 | import socket |
|
29 | 29 | import traceback |
|
30 | 30 | import logging |
|
31 | from os.path import dirname as dn, join as jn | |
|
31 | 32 | |
|
32 | 33 | from hashlib import md5 |
|
33 | 34 | from decorator import decorator |
|
34 | 35 | from pylons import config |
|
35 | 36 | |
|
36 | 37 | from vcs.utils.lazy import LazyProperty |
|
37 | 38 | |
|
38 | 39 | from rhodecode.lib import str2bool |
|
39 | 40 | from rhodecode.lib.pidlock import DaemonLock, LockHeld |
|
40 | 41 | |
|
41 | 42 | from celery.messaging import establish_connection |
|
42 | 43 | |
|
43 | 44 | |
|
44 | 45 | log = logging.getLogger(__name__) |
|
45 | 46 | |
|
46 | 47 | try: |
|
47 | 48 | CELERY_ON = str2bool(config['app_conf'].get('use_celery')) |
|
48 | 49 | except KeyError: |
|
49 | 50 | CELERY_ON = False |
|
50 | 51 | |
|
51 | 52 | |
|
52 | 53 | class ResultWrapper(object): |
|
53 | 54 | def __init__(self, task): |
|
54 | 55 | self.task = task |
|
55 | 56 | |
|
56 | 57 | @LazyProperty |
|
57 | 58 | def result(self): |
|
58 | 59 | return self.task |
|
59 | 60 | |
|
60 | 61 | |
|
61 | 62 | def run_task(task, *args, **kwargs): |
|
62 | 63 | if CELERY_ON: |
|
63 | 64 | try: |
|
64 | 65 | t = task.apply_async(args=args, kwargs=kwargs) |
|
65 | 66 | log.info('running task %s:%s', t.task_id, task) |
|
66 | 67 | return t |
|
67 | 68 | |
|
68 | 69 | except socket.error, e: |
|
69 | 70 | if e.errno == 111: |
|
70 | 71 | log.debug('Unable to connect to celeryd. Sync execution') |
|
71 | 72 | else: |
|
72 | 73 | log.error(traceback.format_exc()) |
|
73 | 74 | except KeyError, e: |
|
74 | 75 | log.debug('Unable to connect to celeryd. Sync execution') |
|
75 | 76 | except Exception, e: |
|
76 | 77 | log.error(traceback.format_exc()) |
|
77 | 78 | |
|
78 | 79 | log.debug('executing task %s in sync mode', task) |
|
79 | 80 | return ResultWrapper(task(*args, **kwargs)) |
|
80 | 81 | |
|
81 | 82 | |
|
82 | 83 | def __get_lockkey(func, *fargs, **fkwargs): |
|
83 | 84 | params = list(fargs) |
|
84 | 85 | params.extend(['%s-%s' % ar for ar in fkwargs.items()]) |
|
85 | 86 | |
|
86 | 87 | func_name = str(func.__name__) if hasattr(func, '__name__') else str(func) |
|
87 | 88 | |
|
88 | lockkey = 'task_%s' % \ | |
|
89 | lockkey = 'task_%s.lock' % \ | |
|
89 | 90 | md5(func_name + '-' + '-'.join(map(str, params))).hexdigest() |
|
90 | 91 | return lockkey |
|
91 | 92 | |
|
92 | 93 | |
|
93 | 94 | def locked_task(func): |
|
94 | 95 | def __wrapper(func, *fargs, **fkwargs): |
|
95 | 96 | lockkey = __get_lockkey(func, *fargs, **fkwargs) |
|
97 | lockkey_path = dn(dn(dn(os.path.abspath(__file__)))) | |
|
98 | ||
|
96 | 99 | log.info('running task with lockkey %s', lockkey) |
|
97 | 100 | try: |
|
98 | l = DaemonLock(lockkey) | |
|
101 | l = DaemonLock(jn(lockkey_path, lockkey)) | |
|
99 | 102 | ret = func(*fargs, **fkwargs) |
|
100 | 103 | l.release() |
|
101 | 104 | return ret |
|
102 | 105 | except LockHeld: |
|
103 | 106 | log.info('LockHeld') |
|
104 | 107 | return 'Task with key %s already running' % lockkey |
|
105 | 108 | |
|
106 | 109 | return decorator(__wrapper, func) |
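The net effect of this hunk: locked_task changes where it keeps its lock files. __get_lockkey now appends a .lock suffix to the md5-derived key, and DaemonLock receives an explicit path, three dirname() calls up from this module (the rhodecode package directory), instead of defaulting to the daemon's current working directory. A minimal sketch of the resulting derivation; lockkey_for is a hypothetical stand-in for __get_lockkey, which additionally folds keyword arguments into params:

    import os
    from hashlib import md5
    from os.path import dirname as dn, join as jn

    def lockkey_for(func_name, *params):
        # same scheme as __get_lockkey: 'task_<md5 of name and params>.lock'
        digest = md5(func_name + '-' + '-'.join(map(str, params))).hexdigest()
        return 'task_%s.lock' % digest

    # from rhodecode/lib/celerylib/__init__.py, three dirname() calls
    # resolve to the rhodecode package directory itself
    lockkey_path = dn(dn(dn(os.path.abspath(__file__))))
    print(jn(lockkey_path, lockkey_for('whoosh_index', '/srv/repos', True)))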
rhodecode/lib/celerylib/tasks.py
@@ -1,375 +1,378 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """ |
|
3 | 3 | rhodecode.lib.celerylib.tasks |
|
4 | 4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
5 | 5 | |
|
6 | 6 | RhodeCode task modules, containing all tasks that are supposed to be run |
|
7 | 7 | by the celery daemon |
|
8 | 8 | |
|
9 | 9 | :created_on: Oct 6, 2010 |
|
10 | 10 | :author: marcink |
|
11 | 11 | :copyright: (C) 2009-2011 Marcin Kuzminski <marcin@python-works.com> |
|
12 | 12 | :license: GPLv3, see COPYING for more details. |
|
13 | 13 | """ |
|
14 | 14 | # This program is free software: you can redistribute it and/or modify |
|
15 | 15 | # it under the terms of the GNU General Public License as published by |
|
16 | 16 | # the Free Software Foundation, either version 3 of the License, or |
|
17 | 17 | # (at your option) any later version. |
|
18 | 18 | # |
|
19 | 19 | # This program is distributed in the hope that it will be useful, |
|
20 | 20 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
21 | 21 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
22 | 22 | # GNU General Public License for more details. |
|
23 | 23 | # |
|
24 | 24 | # You should have received a copy of the GNU General Public License |
|
25 | 25 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
26 | 26 | from celery.decorators import task |
|
27 | 27 | |
|
28 | 28 | import os |
|
29 | 29 | import traceback |
|
30 | 30 | import logging |
|
31 | from os.path import dirname as dn, join as jn | |
|
31 | 32 | |
|
32 | 33 | from time import mktime |
|
33 | 34 | from operator import itemgetter |
|
34 | 35 | from string import lower |
|
35 | 36 | |
|
36 | 37 | from pylons import config |
|
37 | 38 | from pylons.i18n.translation import _ |
|
38 | 39 | |
|
39 | 40 | from rhodecode.lib import LANGUAGES_EXTENSIONS_MAP |
|
40 | 41 | from rhodecode.lib.celerylib import run_task, locked_task, str2bool, \ |
|
41 | 42 | __get_lockkey, LockHeld, DaemonLock |
|
42 | 43 | from rhodecode.lib.helpers import person |
|
43 | 44 | from rhodecode.lib.smtp_mailer import SmtpMailer |
|
44 | 45 | from rhodecode.lib.utils import add_cache |
|
45 | 46 | from rhodecode.lib.odict import OrderedDict |
|
46 | 47 | from rhodecode.model import init_model |
|
47 | 48 | from rhodecode.model import meta |
|
48 | 49 | from rhodecode.model.db import RhodeCodeUi, Statistics, Repository |
|
49 | 50 | |
|
50 | 51 | from vcs.backends import get_repo |
|
51 | 52 | |
|
52 | 53 | from sqlalchemy import engine_from_config |
|
53 | 54 | |
|
54 | 55 | add_cache(config) |
|
55 | 56 | |
|
56 | 57 | try: |
|
57 | 58 | import json |
|
58 | 59 | except ImportError: |
|
59 | 60 | #python 2.5 compatibility |
|
60 | 61 | import simplejson as json |
|
61 | 62 | |
|
62 | 63 | __all__ = ['whoosh_index', 'get_commits_stats', |
|
63 | 64 | 'reset_user_password', 'send_email'] |
|
64 | 65 | |
|
65 | 66 | CELERY_ON = str2bool(config['app_conf'].get('use_celery')) |
|
66 | 67 | |
|
67 | 68 | |
|
68 | 69 | |
|
69 | 70 | def get_session(): |
|
70 | 71 | if CELERY_ON: |
|
71 | 72 | engine = engine_from_config(config, 'sqlalchemy.db1.') |
|
72 | 73 | init_model(engine) |
|
73 | 74 | sa = meta.Session() |
|
74 | 75 | return sa |
|
75 | 76 | |
|
76 | 77 | |
|
77 | 78 | def get_repos_path(): |
|
78 | 79 | sa = get_session() |
|
79 | 80 | q = sa.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == '/').one() |
|
80 | 81 | return q.ui_value |
|
81 | 82 | |
|
82 | 83 | |
|
83 | 84 | @task(ignore_result=True) |
|
84 | 85 | @locked_task |
|
85 | 86 | def whoosh_index(repo_location, full_index): |
|
86 | 87 | #log = whoosh_index.get_logger() |
|
87 | 88 | from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon |
|
88 | 89 | index_location = config['index_dir'] |
|
89 | 90 | WhooshIndexingDaemon(index_location=index_location, |
|
90 | 91 | repo_location=repo_location, sa=get_session())\ |
|
91 | 92 | .run(full_index=full_index) |
|
92 | 93 | |
|
93 | 94 | |
|
94 | 95 | @task(ignore_result=True) |
|
95 | 96 | def get_commits_stats(repo_name, ts_min_y, ts_max_y): |
|
96 | 97 | try: |
|
97 | 98 | log = get_commits_stats.get_logger() |
|
98 | 99 | except: |
|
99 | 100 | log = logging.getLogger(__name__) |
|
100 | 101 | |
|
101 | 102 | lockkey = __get_lockkey('get_commits_stats', repo_name, ts_min_y, |
|
102 | 103 | ts_max_y) |
|
104 | lockkey_path = dn(dn(dn(dn(os.path.abspath(__file__))))) | |
|
105 | print jn(lockkey_path, lockkey) | |
|
103 | 106 | log.info('running task with lockkey %s', lockkey) |
|
104 | 107 | try: |
|
105 | lock = DaemonLock(lockkey) | |
|
108 | lock = l = DaemonLock(jn(lockkey_path, lockkey)) | |
|
106 | 109 | |
|
107 | 110 | #for js data compatibility, clean the ' from the person key |
|
108 | 111 | akc = lambda k: person(k).replace('"', "") |
|
109 | 112 | |
|
110 | 113 | co_day_auth_aggr = {} |
|
111 | 114 | commits_by_day_aggregate = {} |
|
112 | 115 | repos_path = get_repos_path() |
|
113 | 116 | p = os.path.join(repos_path, repo_name) |
|
114 | 117 | repo = get_repo(p) |
|
115 | 118 | repo_size = len(repo.revisions) |
|
116 | 119 | #return if the repo has no revisions |
|
117 | 120 | if repo_size < 1: |
|
118 | 121 | lock.release() |
|
119 | 122 | return True |
|
120 | 123 | |
|
121 | 124 | skip_date_limit = True |
|
122 | 125 | parse_limit = int(config['app_conf'].get('commit_parse_limit')) |
|
123 | 126 | last_rev = 0 |
|
124 | 127 | last_cs = None |
|
125 | 128 | timegetter = itemgetter('time') |
|
126 | 129 | |
|
127 | 130 | sa = get_session() |
|
128 | 131 | |
|
129 | 132 | dbrepo = sa.query(Repository)\ |
|
130 | 133 | .filter(Repository.repo_name == repo_name).scalar() |
|
131 | 134 | cur_stats = sa.query(Statistics)\ |
|
132 | 135 | .filter(Statistics.repository == dbrepo).scalar() |
|
133 | 136 | |
|
134 | 137 | if cur_stats is not None: |
|
135 | 138 | last_rev = cur_stats.stat_on_revision |
|
136 | 139 | |
|
137 | 140 | if last_rev == repo.get_changeset().revision and repo_size > 1: |
|
138 | 141 | #pass silently without any work if we're not on the first revision and |
|
139 | 142 | #the current parsing state (the db marker) is already at the |
|
140 | 143 | #last revision |
|
141 | 144 | lock.release() |
|
142 | 145 | return True |
|
143 | 146 | |
|
144 | 147 | if cur_stats: |
|
145 | 148 | commits_by_day_aggregate = OrderedDict(json.loads( |
|
146 | 149 | cur_stats.commit_activity_combined)) |
|
147 | 150 | co_day_auth_aggr = json.loads(cur_stats.commit_activity) |
|
148 | 151 | |
|
149 | 152 | log.debug('starting parsing %s', parse_limit) |
|
150 | 153 | lmktime = mktime |
|
151 | 154 | |
|
152 | 155 | last_rev = last_rev + 1 if last_rev > 0 else last_rev |
|
153 | 156 | |
|
154 | 157 | for cs in repo[last_rev:last_rev + parse_limit]: |
|
155 | 158 | last_cs = cs # remember last parsed changeset |
|
156 | 159 | k = lmktime([cs.date.timetuple()[0], cs.date.timetuple()[1], |
|
157 | 160 | cs.date.timetuple()[2], 0, 0, 0, 0, 0, 0]) |
|
158 | 161 | |
|
159 | 162 | if akc(cs.author) in co_day_auth_aggr: |
|
160 | 163 | try: |
|
161 | 164 | l = [timegetter(x) for x in |
|
162 | 165 | co_day_auth_aggr[akc(cs.author)]['data']] |
|
163 | 166 | time_pos = l.index(k) |
|
164 | 167 | except ValueError: |
|
165 | 168 | time_pos = False |
|
166 | 169 | |
|
167 | 170 | if time_pos >= 0 and time_pos is not False: |
|
168 | 171 | |
|
169 | 172 | datadict = \ |
|
170 | 173 | co_day_auth_aggr[akc(cs.author)]['data'][time_pos] |
|
171 | 174 | |
|
172 | 175 | datadict["commits"] += 1 |
|
173 | 176 | datadict["added"] += len(cs.added) |
|
174 | 177 | datadict["changed"] += len(cs.changed) |
|
175 | 178 | datadict["removed"] += len(cs.removed) |
|
176 | 179 | |
|
177 | 180 | else: |
|
178 | 181 | if k >= ts_min_y and k <= ts_max_y or skip_date_limit: |
|
179 | 182 | |
|
180 | 183 | datadict = {"time": k, |
|
181 | 184 | "commits": 1, |
|
182 | 185 | "added": len(cs.added), |
|
183 | 186 | "changed": len(cs.changed), |
|
184 | 187 | "removed": len(cs.removed), |
|
185 | 188 | } |
|
186 | 189 | co_day_auth_aggr[akc(cs.author)]['data']\ |
|
187 | 190 | .append(datadict) |
|
188 | 191 | |
|
189 | 192 | else: |
|
190 | 193 | if k >= ts_min_y and k <= ts_max_y or skip_date_limit: |
|
191 | 194 | co_day_auth_aggr[akc(cs.author)] = { |
|
192 | 195 | "label": akc(cs.author), |
|
193 | 196 | "data": [{"time":k, |
|
194 | 197 | "commits":1, |
|
195 | 198 | "added":len(cs.added), |
|
196 | 199 | "changed":len(cs.changed), |
|
197 | 200 | "removed":len(cs.removed), |
|
198 | 201 | }], |
|
199 | 202 | "schema": ["commits"], |
|
200 | 203 | } |
|
201 | 204 | |
|
202 | 205 | #gather all data by day |
|
203 | 206 | if k in commits_by_day_aggregate: |
|
204 | 207 | commits_by_day_aggregate[k] += 1 |
|
205 | 208 | else: |
|
206 | 209 | commits_by_day_aggregate[k] = 1 |
|
207 | 210 | |
|
208 | 211 | overview_data = sorted(commits_by_day_aggregate.items(), |
|
209 | 212 | key=itemgetter(0)) |
|
210 | 213 | |
|
211 | 214 | if not co_day_auth_aggr: |
|
212 | 215 | co_day_auth_aggr[akc(repo.contact)] = { |
|
213 | 216 | "label": akc(repo.contact), |
|
214 | 217 | "data": [0, 1], |
|
215 | 218 | "schema": ["commits"], |
|
216 | 219 | } |
|
217 | 220 | |
|
218 | 221 | stats = cur_stats if cur_stats else Statistics() |
|
219 | 222 | stats.commit_activity = json.dumps(co_day_auth_aggr) |
|
220 | 223 | stats.commit_activity_combined = json.dumps(overview_data) |
|
221 | 224 | |
|
222 | 225 | log.debug('last revision %s', last_rev) |
|
223 | 226 | leftovers = len(repo.revisions[last_rev:]) |
|
224 | 227 | log.debug('revisions to parse %s', leftovers) |
|
225 | 228 | |
|
226 | 229 | if last_rev == 0 or leftovers < parse_limit: |
|
227 | 230 | log.debug('getting code trending stats') |
|
228 | 231 | stats.languages = json.dumps(__get_codes_stats(repo_name)) |
|
229 | 232 | |
|
230 | 233 | try: |
|
231 | 234 | stats.repository = dbrepo |
|
232 | 235 | stats.stat_on_revision = last_cs.revision if last_cs else 0 |
|
233 | 236 | sa.add(stats) |
|
234 | 237 | sa.commit() |
|
235 | 238 | except: |
|
236 | 239 | log.error(traceback.format_exc()) |
|
237 | 240 | sa.rollback() |
|
238 | 241 | lock.release() |
|
239 | 242 | return False |
|
240 | 243 | |
|
241 | 244 | #final release |
|
242 | 245 | lock.release() |
|
243 | 246 | |
|
244 | 247 | #execute another task if celery is enabled |
|
245 | 248 | if len(repo.revisions) > 1 and CELERY_ON: |
|
246 | 249 | run_task(get_commits_stats, repo_name, ts_min_y, ts_max_y) |
|
247 | 250 | return True |
|
248 | 251 | except LockHeld: |
|
249 | 252 | log.info('LockHeld') |
|
250 | 253 | return 'Task with key %s already running' % lockkey |
|
251 | 254 | |
|
252 | 255 | |
|
253 | 256 | @task(ignore_result=True) |
|
254 | 257 | def reset_user_password(user_email): |
|
255 | 258 | try: |
|
256 | 259 | log = reset_user_password.get_logger() |
|
257 | 260 | except: |
|
258 | 261 | log = logging.getLogger(__name__) |
|
259 | 262 | |
|
260 | 263 | from rhodecode.lib import auth |
|
261 | 264 | from rhodecode.model.db import User |
|
262 | 265 | |
|
263 | 266 | try: |
|
264 | 267 | try: |
|
265 | 268 | sa = get_session() |
|
266 | 269 | user = sa.query(User).filter(User.email == user_email).scalar() |
|
267 | 270 | new_passwd = auth.PasswordGenerator().gen_password(8, |
|
268 | 271 | auth.PasswordGenerator.ALPHABETS_BIG_SMALL) |
|
269 | 272 | if user: |
|
270 | 273 | user.password = auth.get_crypt_password(new_passwd) |
|
271 | 274 | user.api_key = auth.generate_api_key(user.username) |
|
272 | 275 | sa.add(user) |
|
273 | 276 | sa.commit() |
|
274 | 277 | log.info('change password for %s', user_email) |
|
275 | 278 | if new_passwd is None: |
|
276 | 279 | raise Exception('unable to generate new password') |
|
277 | 280 | |
|
278 | 281 | except: |
|
279 | 282 | log.error(traceback.format_exc()) |
|
280 | 283 | sa.rollback() |
|
281 | 284 | |
|
282 | 285 | run_task(send_email, user_email, |
|
283 | 286 | "Your new rhodecode password", |
|
284 | 287 | 'Your new rhodecode password:%s' % (new_passwd)) |
|
285 | 288 | log.info('send new password mail to %s', user_email) |
|
286 | 289 | |
|
287 | 290 | except: |
|
288 | 291 | log.error('Failed to update user password') |
|
289 | 292 | log.error(traceback.format_exc()) |
|
290 | 293 | |
|
291 | 294 | return True |
|
292 | 295 | |
|
293 | 296 | |
|
294 | 297 | @task(ignore_result=True) |
|
295 | 298 | def send_email(recipients, subject, body): |
|
296 | 299 | """ |
|
297 | 300 | Sends an email with defined parameters from the .ini files. |
|
298 | 301 | |
|
299 | 302 | :param recipients: list of recipients, if this is empty the defined email |
|
300 | 303 | address from field 'email_to' is used instead |
|
301 | 304 | :param subject: subject of the mail |
|
302 | 305 | :param body: body of the mail |
|
303 | 306 | """ |
|
304 | 307 | try: |
|
305 | 308 | log = send_email.get_logger() |
|
306 | 309 | except: |
|
307 | 310 | log = logging.getLogger(__name__) |
|
308 | 311 | |
|
309 | 312 | email_config = config |
|
310 | 313 | |
|
311 | 314 | if not recipients: |
|
312 | 315 | recipients = [email_config.get('email_to')] |
|
313 | 316 | |
|
314 | 317 | mail_from = email_config.get('app_email_from') |
|
315 | 318 | user = email_config.get('smtp_username') |
|
316 | 319 | passwd = email_config.get('smtp_password') |
|
317 | 320 | mail_server = email_config.get('smtp_server') |
|
318 | 321 | mail_port = email_config.get('smtp_port') |
|
319 | 322 | tls = str2bool(email_config.get('smtp_use_tls')) |
|
320 | 323 | ssl = str2bool(email_config.get('smtp_use_ssl')) |
|
321 | 324 | debug = str2bool(config.get('debug')) |
|
322 | 325 | |
|
323 | 326 | try: |
|
324 | 327 | m = SmtpMailer(mail_from, user, passwd, mail_server, |
|
325 | 328 | mail_port, ssl, tls, debug=debug) |
|
326 | 329 | m.send(recipients, subject, body) |
|
327 | 330 | except: |
|
328 | 331 | log.error('Mail sending failed') |
|
329 | 332 | log.error(traceback.format_exc()) |
|
330 | 333 | return False |
|
331 | 334 | return True |
|
332 | 335 | |
|
333 | 336 | |
|
334 | 337 | @task(ignore_result=True) |
|
335 | 338 | def create_repo_fork(form_data, cur_user): |
|
336 | 339 | from rhodecode.model.repo import RepoModel |
|
337 | 340 | from vcs import get_backend |
|
338 | 341 | |
|
339 | 342 | try: |
|
340 | 343 | log = create_repo_fork.get_logger() |
|
341 | 344 | except: |
|
342 | 345 | log = logging.getLogger(__name__) |
|
343 | 346 | |
|
344 | 347 | repo_model = RepoModel(get_session()) |
|
345 | 348 | repo_model.create(form_data, cur_user, just_db=True, fork=True) |
|
346 | 349 | repo_name = form_data['repo_name'] |
|
347 | 350 | repos_path = get_repos_path() |
|
348 | 351 | repo_path = os.path.join(repos_path, repo_name) |
|
349 | 352 | repo_fork_path = os.path.join(repos_path, form_data['fork_name']) |
|
350 | 353 | alias = form_data['repo_type'] |
|
351 | 354 | |
|
352 | 355 | log.info('creating repo fork %s as %s', repo_name, repo_path) |
|
353 | 356 | backend = get_backend(alias) |
|
354 | 357 | backend(str(repo_fork_path), create=True, src_url=str(repo_path)) |
|
355 | 358 | |
|
356 | 359 | |
|
357 | 360 | def __get_codes_stats(repo_name): |
|
358 | 361 | repos_path = get_repos_path() |
|
359 | 362 | p = os.path.join(repos_path, repo_name) |
|
360 | 363 | repo = get_repo(p) |
|
361 | 364 | tip = repo.get_changeset() |
|
362 | 365 | code_stats = {} |
|
363 | 366 | |
|
364 | 367 | def aggregate(cs): |
|
365 | 368 | for f in cs[2]: |
|
366 | 369 | ext = lower(f.extension) |
|
367 | 370 | if ext in LANGUAGES_EXTENSIONS_MAP.keys() and not f.is_binary: |
|
368 | 371 | if ext in code_stats: |
|
369 | 372 | code_stats[ext] += 1 |
|
370 | 373 | else: |
|
371 | 374 | code_stats[ext] = 1 |
|
372 | 375 | |
|
373 | 376 | map(aggregate, tip.walk('/')) |
|
374 | 377 | |
|
375 | 378 | return code_stats or {} |
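get_commits_stats gets the same locking treatment, with two details worth flagging: the four dirname() calls here resolve one level higher than the three used in celerylib/__init__.py (the directory above the rhodecode package rather than the package itself), so the two modules do not share a lock directory, and a leftover debug statement, print jn(lockkey_path, lockkey), remains at new line 105. Beyond locking, the task builds two JSON-serialised structures, keyed by cleaned author name and by the day's midnight timestamp; a sketch of their shape, with made-up author and counts:

    from time import mktime
    from datetime import date

    # the day key, as built from cs.date.timetuple() with h/m/s zeroed
    day = mktime(date(2011, 1, 15).timetuple())

    co_day_auth_aggr = {
        'Marcin Kuzminski': {                    # akc(cs.author), quotes stripped
            'label': 'Marcin Kuzminski',
            'data': [{'time': day, 'commits': 2,
                      'added': 5, 'changed': 1, 'removed': 0}],
            'schema': ['commits'],
        },
    }
    commits_by_day_aggregate = {day: 2}          # total commits per day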
rhodecode/lib/indexers/__init__.py
@@ -1,224 +1,224 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """ |
|
3 | 3 | rhodecode.lib.indexers.__init__ |
|
4 | 4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
5 | 5 | |
|
6 | 6 | Whoosh indexing module for RhodeCode |
|
7 | 7 | |
|
8 | 8 | :created_on: Aug 17, 2010 |
|
9 | 9 | :author: marcink |
|
10 | 10 | :copyright: (C) 2009-2010 Marcin Kuzminski <marcin@python-works.com> |
|
11 | 11 | :license: GPLv3, see COPYING for more details. |
|
12 | 12 | """ |
|
13 | 13 | # This program is free software: you can redistribute it and/or modify |
|
14 | 14 | # it under the terms of the GNU General Public License as published by |
|
15 | 15 | # the Free Software Foundation, either version 3 of the License, or |
|
16 | 16 | # (at your option) any later version. |
|
17 | 17 | # |
|
18 | 18 | # This program is distributed in the hope that it will be useful, |
|
19 | 19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
20 | 20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
21 | 21 | # GNU General Public License for more details. |
|
22 | 22 | # |
|
23 | 23 | # You should have received a copy of the GNU General Public License |
|
24 | 24 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
25 | 25 | import os |
|
26 | 26 | import sys |
|
27 | 27 | import traceback |
|
28 | 28 | from os.path import dirname as dn, join as jn |
|
29 | 29 | |
|
30 | 30 | #to get the rhodecode import |
|
31 | 31 | sys.path.append(dn(dn(dn(os.path.realpath(__file__))))) |
|
32 | 32 | |
|
33 | 33 | from string import strip |
|
34 | 34 | from shutil import rmtree |
|
35 | 35 | |
|
36 | 36 | from whoosh.analysis import RegexTokenizer, LowercaseFilter, StopFilter |
|
37 | 37 | from whoosh.fields import TEXT, ID, STORED, Schema, FieldType |
|
38 | 38 | from whoosh.index import create_in, open_dir |
|
39 | 39 | from whoosh.formats import Characters |
|
40 | 40 | from whoosh.highlight import highlight, SimpleFragmenter, HtmlFormatter |
|
41 | 41 | |
|
42 | 42 | from webhelpers.html.builder import escape |
|
43 | 43 | from sqlalchemy import engine_from_config |
|
44 | 44 | from vcs.utils.lazy import LazyProperty |
|
45 | 45 | |
|
46 | 46 | from rhodecode.model import init_model |
|
47 | 47 | from rhodecode.model.scm import ScmModel |
|
48 | 48 | from rhodecode.config.environment import load_environment |
|
49 | 49 | from rhodecode.lib import LANGUAGES_EXTENSIONS_MAP |
|
50 | 50 | from rhodecode.lib.utils import BasePasterCommand, Command, add_cache |
|
51 | 51 | |
|
52 | 52 | #EXTENSIONS WE WANT TO INDEX CONTENT OF |
|
53 | 53 | INDEX_EXTENSIONS = LANGUAGES_EXTENSIONS_MAP.keys() |
|
54 | 54 | |
|
55 | 55 | #CUSTOM ANALYZER wordsplit + lowercase filter |
|
56 | 56 | ANALYZER = RegexTokenizer(expression=r"\w+") | LowercaseFilter() |
|
57 | 57 | |
|
58 | 58 | |
|
59 | 59 | #INDEX SCHEMA DEFINITION |
|
60 | 60 | SCHEMA = Schema(owner=TEXT(), |
|
61 | 61 | repository=TEXT(stored=True), |
|
62 | 62 | path=TEXT(stored=True), |
|
63 | 63 | content=FieldType(format=Characters(ANALYZER), |
|
64 | 64 | scorable=True, stored=True), |
|
65 | 65 | modtime=STORED(), extension=TEXT(stored=True)) |
|
66 | 66 | |
|
67 | 67 | |
|
68 | 68 | IDX_NAME = 'HG_INDEX' |
|
69 | 69 | FORMATTER = HtmlFormatter('span', between='\n<span class="break">...</span>\n') |
|
70 | 70 | FRAGMENTER = SimpleFragmenter(200) |
|
71 | 71 | |
|
72 | 72 | |
|
73 | 73 | class MakeIndex(BasePasterCommand): |
|
74 | 74 | |
|
75 | 75 | max_args = 1 |
|
76 | 76 | min_args = 1 |
|
77 | 77 | |
|
78 | 78 | usage = "CONFIG_FILE" |
|
79 | 79 | summary = "Creates index for full text search given configuration file" |
|
80 | 80 | group_name = "RhodeCode" |
|
81 | 81 | takes_config_file = -1 |
|
82 | 82 | parser = Command.standard_parser(verbose=True) |
|
83 | 83 | |
|
84 | 84 | def command(self): |
|
85 | 85 | |
|
86 | 86 | from pylons import config |
|
87 | 87 | add_cache(config) |
|
88 | 88 | engine = engine_from_config(config, 'sqlalchemy.db1.') |
|
89 | 89 | init_model(engine) |
|
90 | 90 | |
|
91 | 91 | index_location = config['index_dir'] |
|
92 | 92 | repo_location = self.options.repo_location |
|
93 | 93 | repo_list = map(strip, self.options.repo_list.split(',')) \ |
|
94 | 94 | if self.options.repo_list else None |
|
95 | 95 | |
|
96 | 96 | #====================================================================== |
|
97 | 97 | # WHOOSH DAEMON |
|
98 | 98 | #====================================================================== |
|
99 | 99 | from rhodecode.lib.pidlock import LockHeld, DaemonLock |
|
100 | 100 | from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon |
|
101 | 101 | try: |
|
102 | l = DaemonLock() | |
|
102 | l = DaemonLock(file=jn(dn(dn(index_location)), 'make_index.lock')) | |
|
103 | 103 | WhooshIndexingDaemon(index_location=index_location, |
|
104 | 104 | repo_location=repo_location, |
|
105 | 105 | repo_list=repo_list)\ |
|
106 | 106 | .run(full_index=self.options.full_index) |
|
107 | 107 | l.release() |
|
108 | 108 | except LockHeld: |
|
109 | 109 | sys.exit(1) |
|
110 | 110 | |
|
111 | 111 | def update_parser(self): |
|
112 | 112 | self.parser.add_option('--repo-location', |
|
113 | 113 | action='store', |
|
114 | 114 | dest='repo_location', |
|
115 | 115 | help="Specifies repositories location to index REQUIRED", |
|
116 | 116 | ) |
|
117 | 117 | self.parser.add_option('--index-only', |
|
118 | 118 | action='store', |
|
119 | 119 | dest='repo_list', |
|
120 | 120 | help="Specifies a comma separated list of repositories " |
|
121 | 121 | "to build index on OPTIONAL", |
|
122 | 122 | ) |
|
123 | 123 | self.parser.add_option('-f', |
|
124 | 124 | action='store_true', |
|
125 | 125 | dest='full_index', |
|
126 | 126 | help="Specifies that the index should be made full, i.e." |
|
127 | 127 | " destroy old and build from scratch", |
|
128 | 128 | default=False) |
|
129 | 129 | |
|
130 | 130 | class ResultWrapper(object): |
|
131 | 131 | def __init__(self, search_type, searcher, matcher, highlight_items): |
|
132 | 132 | self.search_type = search_type |
|
133 | 133 | self.searcher = searcher |
|
134 | 134 | self.matcher = matcher |
|
135 | 135 | self.highlight_items = highlight_items |
|
136 | 136 | self.fragment_size = 200 / 2 |
|
137 | 137 | |
|
138 | 138 | @LazyProperty |
|
139 | 139 | def doc_ids(self): |
|
140 | 140 | docs_id = [] |
|
141 | 141 | while self.matcher.is_active(): |
|
142 | 142 | docnum = self.matcher.id() |
|
143 | 143 | chunks = [offsets for offsets in self.get_chunks()] |
|
144 | 144 | docs_id.append([docnum, chunks]) |
|
145 | 145 | self.matcher.next() |
|
146 | 146 | return docs_id |
|
147 | 147 | |
|
148 | 148 | def __str__(self): |
|
149 | 149 | return '<%s at %s>' % (self.__class__.__name__, len(self.doc_ids)) |
|
150 | 150 | |
|
151 | 151 | def __repr__(self): |
|
152 | 152 | return self.__str__() |
|
153 | 153 | |
|
154 | 154 | def __len__(self): |
|
155 | 155 | return len(self.doc_ids) |
|
156 | 156 | |
|
157 | 157 | def __iter__(self): |
|
158 | 158 | """ |
|
159 | 159 | Allows iteration over results, and lazily generates content |
|
160 | 160 | |
|
161 | 161 | *Requires* implementation of ``__getitem__`` method. |
|
162 | 162 | """ |
|
163 | 163 | for docid in self.doc_ids: |
|
164 | 164 | yield self.get_full_content(docid) |
|
165 | 165 | |
|
166 | 166 | def __getitem__(self, key): |
|
167 | 167 | """ |
|
168 | 168 | Slicing of ResultWrapper |
|
169 | 169 | """ |
|
170 | 170 | i, j = key.start, key.stop |
|
171 | 171 | |
|
172 | 172 | slice = [] |
|
173 | 173 | for docid in self.doc_ids[i:j]: |
|
174 | 174 | slice.append(self.get_full_content(docid)) |
|
175 | 175 | return slice |
|
176 | 176 | |
|
177 | 177 | |
|
178 | 178 | def get_full_content(self, docid): |
|
179 | 179 | res = self.searcher.stored_fields(docid[0]) |
|
180 | 180 | f_path = res['path'][res['path'].find(res['repository']) \ |
|
181 | 181 | + len(res['repository']):].lstrip('/') |
|
182 | 182 | |
|
183 | 183 | content_short = self.get_short_content(res, docid[1]) |
|
184 | 184 | res.update({'content_short':content_short, |
|
185 | 185 | 'content_short_hl':self.highlight(content_short), |
|
186 | 186 | 'f_path':f_path}) |
|
187 | 187 | |
|
188 | 188 | return res |
|
189 | 189 | |
|
190 | 190 | def get_short_content(self, res, chunks): |
|
191 | 191 | |
|
192 | 192 | return ''.join([res['content'][chunk[0]:chunk[1]] for chunk in chunks]) |
|
193 | 193 | |
|
194 | 194 | def get_chunks(self): |
|
195 | 195 | """ |
|
196 | 196 | Smart function that chunks the content |
|
197 | 197 | without overlapping chunks, so it doesn't highlight the same |
|
198 | 198 | close occurrences twice. |
|
199 | 199 | |
|
200 | 200 | :param matcher: |
|
201 | 201 | :param size: |
|
202 | 202 | """ |
|
203 | 203 | memory = [(0, 0)] |
|
204 | 204 | for span in self.matcher.spans(): |
|
205 | 205 | start = span.startchar or 0 |
|
206 | 206 | end = span.endchar or 0 |
|
207 | 207 | start_offseted = max(0, start - self.fragment_size) |
|
208 | 208 | end_offseted = end + self.fragment_size |
|
209 | 209 | |
|
210 | 210 | if start_offseted < memory[-1][1]: |
|
211 | 211 | start_offseted = memory[-1][1] |
|
212 | 212 | memory.append((start_offseted, end_offseted,)) |
|
213 | 213 | yield (start_offseted, end_offseted,) |
|
214 | 214 | |
|
215 | 215 | def highlight(self, content, top=5): |
|
216 | 216 | if self.search_type != 'content': |
|
217 | 217 | return '' |
|
218 | 218 | hl = highlight(escape(content), |
|
219 | 219 | self.highlight_items, |
|
220 | 220 | analyzer=ANALYZER, |
|
221 | 221 | fragmenter=FRAGMENTER, |
|
222 | 222 | formatter=FORMATTER, |
|
223 | 223 | top=top) |
|
224 | 224 | return hl |
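In this file only the MakeIndex command changes: the DaemonLock again gets an explicit file, make_index.lock, placed two directories above index_location so the lock does not live inside the index tree. While here, get_chunks above deserves a gloss: it widens each match span by fragment_size on both sides and clips a chunk's start to the previous chunk's end, so adjacent matches never produce overlapping excerpts. A standalone sketch of that clipping logic on made-up spans:

    def chunks(spans, fragment_size=100):
        # spans: [(startchar, endchar), ...] as matcher.spans() would yield
        memory = [(0, 0)]
        for start, end in spans:
            start_off = max(0, start - fragment_size)
            end_off = end + fragment_size
            if start_off < memory[-1][1]:
                start_off = memory[-1][1]   # clip to the previous chunk's end
            memory.append((start_off, end_off))
            yield (start_off, end_off)

    print(list(chunks([(120, 130), (150, 160)])))
    # -> [(20, 230), (230, 260)]: the second chunk starts where the first ends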
rhodecode/lib/utils.py
@@ -1,605 +1,606 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """ |
|
3 | 3 | rhodecode.lib.utils |
|
4 | 4 | ~~~~~~~~~~~~~~~~~~~ |
|
5 | 5 | |
|
6 | 6 | Utilities library for RhodeCode |
|
7 | 7 | |
|
8 | 8 | :created_on: Apr 18, 2010 |
|
9 | 9 | :author: marcink |
|
10 | 10 | :copyright: (C) 2009-2011 Marcin Kuzminski <marcin@python-works.com> |
|
11 | 11 | :license: GPLv3, see COPYING for more details. |
|
12 | 12 | """ |
|
13 | 13 | # This program is free software: you can redistribute it and/or modify |
|
14 | 14 | # it under the terms of the GNU General Public License as published by |
|
15 | 15 | # the Free Software Foundation, either version 3 of the License, or |
|
16 | 16 | # (at your option) any later version. |
|
17 | 17 | # |
|
18 | 18 | # This program is distributed in the hope that it will be useful, |
|
19 | 19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
20 | 20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
21 | 21 | # GNU General Public License for more details. |
|
22 | 22 | # |
|
23 | 23 | # You should have received a copy of the GNU General Public License |
|
24 | 24 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
25 | 25 | |
|
26 | 26 | import os |
|
27 | 27 | import logging |
|
28 | 28 | import datetime |
|
29 | 29 | import traceback |
|
30 | 30 | import paste |
|
31 | 31 | import beaker |
|
32 | from os.path import dirname as dn, join as jn | |
|
32 | 33 | |
|
33 | 34 | from paste.script.command import Command, BadCommand |
|
34 | 35 | |
|
35 | 36 | from UserDict import DictMixin |
|
36 | 37 | |
|
37 | 38 | from mercurial import ui, config, hg |
|
38 | 39 | from mercurial.error import RepoError |
|
39 | 40 | |
|
40 | 41 | from webhelpers.text import collapse, remove_formatting, strip_tags |
|
41 | 42 | |
|
42 | 43 | from vcs.backends.base import BaseChangeset |
|
43 | 44 | from vcs.utils.lazy import LazyProperty |
|
44 | 45 | |
|
45 | 46 | from rhodecode.model import meta |
|
46 | 47 | from rhodecode.model.caching_query import FromCache |
|
47 | 48 | from rhodecode.model.db import Repository, User, RhodeCodeUi, UserLog, Group, \ |
|
48 | 49 | RhodeCodeSettings |
|
49 | 50 | from rhodecode.model.repo import RepoModel |
|
50 | 51 | from rhodecode.model.user import UserModel |
|
51 | 52 | |
|
52 | 53 | log = logging.getLogger(__name__) |
|
53 | 54 | |
|
54 | 55 | |
|
55 | 56 | def recursive_replace(str, replace=' '): |
|
56 | 57 | Recursively replace repeated occurrences of a given character with one |
|
57 | 58 | |
|
58 | 59 | :param str: given string |
|
59 | 60 | :param replace: char to find and replace multiple instances |
|
60 | 61 | |
|
61 | 62 | Examples:: |
|
62 | 63 | >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-') |
|
63 | 64 | 'Mighty-Mighty-Bo-sstones' |
|
64 | 65 | """ |
|
65 | 66 | |
|
66 | 67 | if str.find(replace * 2) == -1: |
|
67 | 68 | return str |
|
68 | 69 | else: |
|
69 | 70 | str = str.replace(replace * 2, replace) |
|
70 | 71 | return recursive_replace(str, replace) |
|
71 | 72 | |
|
72 | 73 | |
|
73 | 74 | def repo_name_slug(value): |
|
74 | 75 | Return a slug of the repository name |
|
75 | 76 | This function is called on each creation/modification |
|
76 | 77 | of a repository to prevent bad names in the repo |
|
77 | 78 | """ |
|
78 | 79 | |
|
79 | 80 | slug = remove_formatting(value) |
|
80 | 81 | slug = strip_tags(slug) |
|
81 | 82 | |
|
82 | 83 | for c in """=[]\;'"<>,/~!@#$%^&*()+{}|: """: |
|
83 | 84 | slug = slug.replace(c, '-') |
|
84 | 85 | slug = recursive_replace(slug, '-') |
|
85 | 86 | slug = collapse(slug, '-') |
|
86 | 87 | return slug |
|
87 | 88 | |
|
88 | 89 | |
|
89 | 90 | def get_repo_slug(request): |
|
90 | 91 | return request.environ['pylons.routes_dict'].get('repo_name') |
|
91 | 92 | |
|
92 | 93 | |
|
93 | 94 | def action_logger(user, action, repo, ipaddr='', sa=None): |
|
94 | 95 | """ |
|
95 | 96 | Action logger for various actions made by users |
|
96 | 97 | |
|
97 | 98 | :param user: user that made this action, can be a unique username string or |
|
98 | 99 | object containing user_id attribute |
|
99 | 100 | :param action: action to log, should be one of the predefined unique actions for |
|
100 | 101 | easy translations |
|
101 | 102 | :param repo: string name of repository or object containing repo_id, |
|
102 | 103 | that action was made on |
|
103 | 104 | :param ipaddr: optional IP address from which the action was made |
|
104 | 105 | :param sa: optional sqlalchemy session |
|
105 | 106 | |
|
106 | 107 | """ |
|
107 | 108 | |
|
108 | 109 | if not sa: |
|
109 | 110 | sa = meta.Session() |
|
110 | 111 | |
|
111 | 112 | try: |
|
112 | 113 | um = UserModel() |
|
113 | 114 | if hasattr(user, 'user_id'): |
|
114 | 115 | user_obj = user |
|
115 | 116 | elif isinstance(user, basestring): |
|
116 | 117 | user_obj = um.get_by_username(user, cache=False) |
|
117 | 118 | else: |
|
118 | 119 | raise Exception('You have to provide user object or username') |
|
119 | 120 | |
|
120 | 121 | rm = RepoModel() |
|
121 | 122 | if hasattr(repo, 'repo_id'): |
|
122 | 123 | repo_obj = rm.get(repo.repo_id, cache=False) |
|
123 | 124 | repo_name = repo_obj.repo_name |
|
124 | 125 | elif isinstance(repo, basestring): |
|
125 | 126 | repo_name = repo.lstrip('/') |
|
126 | 127 | repo_obj = rm.get_by_repo_name(repo_name, cache=False) |
|
127 | 128 | else: |
|
128 | 129 | raise Exception('You have to provide repository to action logger') |
|
129 | 130 | |
|
130 | 131 | user_log = UserLog() |
|
131 | 132 | user_log.user_id = user_obj.user_id |
|
132 | 133 | user_log.action = action |
|
133 | 134 | |
|
134 | 135 | user_log.repository_id = repo_obj.repo_id |
|
135 | 136 | user_log.repository_name = repo_name |
|
136 | 137 | |
|
137 | 138 | user_log.action_date = datetime.datetime.now() |
|
138 | 139 | user_log.user_ip = ipaddr |
|
139 | 140 | sa.add(user_log) |
|
140 | 141 | sa.commit() |
|
141 | 142 | |
|
142 | 143 | log.info('Adding user %s, action %s on %s', user_obj, action, repo) |
|
143 | 144 | except: |
|
144 | 145 | log.error(traceback.format_exc()) |
|
145 | 146 | sa.rollback() |
|
146 | 147 | |
|
147 | 148 | |
|
148 | 149 | def get_repos(path, recursive=False): |
|
149 | 150 | """ |
|
150 | 151 | Scans the given path for repos and returns a (name, (type, path)) tuple |
|
151 | 152 | |
|
152 | 153 | :param path: path to scan for repositories |
|
153 | 154 | :param recursive: recursive search and return names with subdirs in front |
|
154 | 155 | """ |
|
155 | 156 | from vcs.utils.helpers import get_scm |
|
156 | 157 | from vcs.exceptions import VCSError |
|
157 | 158 | |
|
158 | 159 | if path.endswith(os.sep): |
|
159 | 160 | #remove ending slash for better results |
|
160 | 161 | path = path[:-1] |
|
161 | 162 | |
|
162 | 163 | def _get_repos(p): |
|
163 | 164 | if not os.access(p, os.W_OK): |
|
164 | 165 | return |
|
165 | 166 | for dirpath in os.listdir(p): |
|
166 | 167 | if os.path.isfile(os.path.join(p, dirpath)): |
|
167 | 168 | continue |
|
168 | 169 | cur_path = os.path.join(p, dirpath) |
|
169 | 170 | try: |
|
170 | 171 | scm_info = get_scm(cur_path) |
|
171 | 172 | yield scm_info[1].split(path)[-1].lstrip(os.sep), scm_info |
|
172 | 173 | except VCSError: |
|
173 | 174 | if not recursive: |
|
174 | 175 | continue |
|
175 | 176 | #check if this dir contains other repos for recursive scan |
|
176 | 177 | rec_path = os.path.join(p, dirpath) |
|
177 | 178 | if os.path.isdir(rec_path): |
|
178 | 179 | for inner_scm in _get_repos(rec_path): |
|
179 | 180 | yield inner_scm |
|
180 | 181 | |
|
181 | 182 | return _get_repos(path) |
|
182 | 183 | |
|
183 | 184 | |
|
184 | 185 | def check_repo_fast(repo_name, base_path): |
|
185 | 186 | Check the given path for the existence of a directory |
|
186 | 187 | Check given path for existence of directory |
|
187 | 188 | :param repo_name: |
|
188 | 189 | :param base_path: |
|
189 | 190 | |
|
190 | 191 | :return False: if this directory is present |
|
191 | 192 | """ |
|
192 | 193 | if os.path.isdir(os.path.join(base_path, repo_name)): |
|
193 | 194 | return False |
|
194 | 195 | return True |
|
195 | 196 | |
|
196 | 197 | |
|
197 | 198 | def check_repo(repo_name, base_path, verify=True): |
|
198 | 199 | |
|
199 | 200 | repo_path = os.path.join(base_path, repo_name) |
|
200 | 201 | |
|
201 | 202 | try: |
|
202 | 203 | if not check_repo_fast(repo_name, base_path): |
|
203 | 204 | return False |
|
204 | 205 | r = hg.repository(ui.ui(), repo_path) |
|
205 | 206 | if verify: |
|
206 | 207 | hg.verify(r) |
|
207 | 208 | #here we know that the repo exists and was verified |
|
208 | 209 | log.info('%s repo is already created', repo_name) |
|
209 | 210 | return False |
|
210 | 211 | except RepoError: |
|
211 | 212 | #it means that there is no valid repo there... |
|
212 | 213 | log.info('%s repo is free for creation', repo_name) |
|
213 | 214 | return True |
|
214 | 215 | |
|
215 | 216 | |
|
216 | 217 | def ask_ok(prompt, retries=4, complaint='Yes or no, please!'): |
|
217 | 218 | while True: |
|
218 | 219 | ok = raw_input(prompt) |
|
219 | 220 | if ok in ('y', 'ye', 'yes'): |
|
220 | 221 | return True |
|
221 | 222 | if ok in ('n', 'no', 'nop', 'nope'): |
|
222 | 223 | return False |
|
223 | 224 | retries = retries - 1 |
|
224 | 225 | if retries < 0: |
|
225 | 226 | raise IOError |
|
226 | 227 | print complaint |
|
227 | 228 | |
|
228 | 229 | #propagated from mercurial documentation |
|
229 | 230 | ui_sections = ['alias', 'auth', |
|
230 | 231 | 'decode/encode', 'defaults', |
|
231 | 232 | 'diff', 'email', |
|
232 | 233 | 'extensions', 'format', |
|
233 | 234 | 'merge-patterns', 'merge-tools', |
|
234 | 235 | 'hooks', 'http_proxy', |
|
235 | 236 | 'smtp', 'patch', |
|
236 | 237 | 'paths', 'profiling', |
|
237 | 238 | 'server', 'trusted', |
|
238 | 239 | 'ui', 'web', ] |
|
239 | 240 | |
|
240 | 241 | |
|
241 | 242 | def make_ui(read_from='file', path=None, checkpaths=True): |
|
242 | 243 | """A function that will read python rc files or database |
|
243 | 244 | and make a mercurial ui object from the read options |
|
244 | 245 | |
|
245 | 246 | :param path: path to mercurial config file |
|
246 | 247 | :param checkpaths: check the path |
|
247 | 248 | :param read_from: read from 'file' or 'db' |
|
248 | 249 | """ |
|
249 | 250 | |
|
250 | 251 | baseui = ui.ui() |
|
251 | 252 | |
|
252 | 253 | #clean the baseui object |
|
253 | 254 | baseui._ocfg = config.config() |
|
254 | 255 | baseui._ucfg = config.config() |
|
255 | 256 | baseui._tcfg = config.config() |
|
256 | 257 | |
|
257 | 258 | if read_from == 'file': |
|
258 | 259 | if not os.path.isfile(path): |
|
259 | 260 | log.warning('Unable to read config file %s' % path) |
|
260 | 261 | return False |
|
261 | 262 | log.debug('reading hgrc from %s', path) |
|
262 | 263 | cfg = config.config() |
|
263 | 264 | cfg.read(path) |
|
264 | 265 | for section in ui_sections: |
|
265 | 266 | for k, v in cfg.items(section): |
|
266 | 267 | log.debug('settings ui from file[%s]%s:%s', section, k, v) |
|
267 | 268 | baseui.setconfig(section, k, v) |
|
268 | 269 | |
|
269 | 270 | elif read_from == 'db': |
|
270 | 271 | sa = meta.Session() |
|
271 | 272 | ret = sa.query(RhodeCodeUi)\ |
|
272 | 273 | .options(FromCache("sql_cache_short", |
|
273 | 274 | "get_hg_ui_settings")).all() |
|
274 | 275 | |
|
275 | 276 | hg_ui = ret |
|
276 | 277 | for ui_ in hg_ui: |
|
277 | 278 | if ui_.ui_active: |
|
278 | 279 | log.debug('settings ui from db[%s]%s:%s', ui_.ui_section, |
|
279 | 280 | ui_.ui_key, ui_.ui_value) |
|
280 | 281 | baseui.setconfig(ui_.ui_section, ui_.ui_key, ui_.ui_value) |
|
281 | 282 | |
|
282 | 283 | meta.Session.remove() |
|
283 | 284 | return baseui |
|
284 | 285 | |
|
285 | 286 | |
|
286 | 287 | def set_rhodecode_config(config): |
|
287 | 288 | """Updates pylons config with new settings from database |
|
288 | 289 | |
|
289 | 290 | :param config: |
|
290 | 291 | """ |
|
291 | 292 | hgsettings = RhodeCodeSettings.get_app_settings() |
|
292 | 293 | |
|
293 | 294 | for k, v in hgsettings.items(): |
|
294 | 295 | config[k] = v |
|
295 | 296 | |
|
296 | 297 | |
|
297 | 298 | def invalidate_cache(cache_key, *args): |
|
298 | 299 | """Puts cache invalidation task into db for |
|
299 | 300 | further global cache invalidation |
|
300 | 301 | """ |
|
301 | 302 | |
|
302 | 303 | from rhodecode.model.scm import ScmModel |
|
303 | 304 | |
|
304 | 305 | if cache_key.startswith('get_repo_cached_'): |
|
305 | 306 | name = cache_key.split('get_repo_cached_')[-1] |
|
306 | 307 | ScmModel().mark_for_invalidation(name) |
|
307 | 308 | |
|
308 | 309 | |
|
309 | 310 | class EmptyChangeset(BaseChangeset): |
|
310 | 311 | """ |
|
311 | 312 | A dummy empty changeset. It's possible to pass a hash when creating |
|
312 | 313 | an EmptyChangeset |
|
313 | 314 | """ |
|
314 | 315 | |
|
315 | 316 | def __init__(self, cs='0' * 40, repo=None): |
|
316 | 317 | self._empty_cs = cs |
|
317 | 318 | self.revision = -1 |
|
318 | 319 | self.message = '' |
|
319 | 320 | self.author = '' |
|
320 | 321 | self.date = '' |
|
321 | 322 | self.repository = repo |
|
322 | 323 | |
|
323 | 324 | @LazyProperty |
|
324 | 325 | def raw_id(self): |
|
325 | 326 | """Returns raw string identifying this changeset, useful for web |
|
326 | 327 | representation. |
|
327 | 328 | """ |
|
328 | 329 | |
|
329 | 330 | return self._empty_cs |
|
330 | 331 | |
|
331 | 332 | @LazyProperty |
|
332 | 333 | def short_id(self): |
|
333 | 334 | return self.raw_id[:12] |
|
334 | 335 | |
|
335 | 336 | def get_file_changeset(self, path): |
|
336 | 337 | return self |
|
337 | 338 | |
|
338 | 339 | def get_file_content(self, path): |
|
339 | 340 | return u'' |
|
340 | 341 | |
|
341 | 342 | def get_file_size(self, path): |
|
342 | 343 | return 0 |
|
343 | 344 | |
|
344 | 345 | |
|
345 | 346 | def map_groups(groups): |
|
346 | 347 | Checks for group existence, and creates group structures. |
|
347 | 348 | It returns the last group in the structure |
|
348 | 349 | |
|
349 | 350 | :param groups: list of groups structure |
|
350 | 351 | """ |
|
351 | 352 | sa = meta.Session() |
|
352 | 353 | |
|
353 | 354 | parent = None |
|
354 | 355 | group = None |
|
355 | 356 | for lvl, group_name in enumerate(groups[:-1]): |
|
356 | 357 | group = sa.query(Group).filter(Group.group_name == group_name).scalar() |
|
357 | 358 | |
|
358 | 359 | if group is None: |
|
359 | 360 | group = Group(group_name, parent) |
|
360 | 361 | sa.add(group) |
|
361 | 362 | sa.commit() |
|
362 | 363 | |
|
363 | 364 | parent = group |
|
364 | 365 | |
|
365 | 366 | return group |
|
366 | 367 | |
|
367 | 368 | |
|
368 | 369 | def repo2db_mapper(initial_repo_list, remove_obsolete=False): |
|
369 | 370 | maps all repos given in initial_repo_list; non-existing repositories |
|
370 | 371 | are created. If remove_obsolete is True it also checks for db entries |
|
371 | 372 | that are not in initial_repo_list and removes them. |
|
372 | 373 | |
|
373 | 374 | :param initial_repo_list: list of repositories found by scanning methods |
|
374 | 375 | :param remove_obsolete: check for obsolete entries in database |
|
375 | 376 | """ |
|
376 | 377 | |
|
377 | 378 | sa = meta.Session() |
|
378 | 379 | rm = RepoModel() |
|
379 | 380 | user = sa.query(User).filter(User.admin == True).first() |
|
380 | 381 | added = [] |
|
381 | 382 | for name, repo in initial_repo_list.items(): |
|
382 | 383 | group = map_groups(name.split('/')) |
|
383 | 384 | if not rm.get_by_repo_name(name, cache=False): |
|
384 | 385 | log.info('repository %s not found creating default', name) |
|
385 | 386 | added.append(name) |
|
386 | 387 | form_data = { |
|
387 | 388 | 'repo_name': name, |
|
388 | 389 | 'repo_type': repo.alias, |
|
389 | 390 | 'description': repo.description \ |
|
390 | 391 | if repo.description != 'unknown' else \ |
|
391 | 392 | '%s repository' % name, |
|
392 | 393 | 'private': False, |
|
393 | 394 | 'group_id': getattr(group, 'group_id', None) |
|
394 | 395 | } |
|
395 | 396 | rm.create(form_data, user, just_db=True) |
|
396 | 397 | |
|
397 | 398 | removed = [] |
|
398 | 399 | if remove_obsolete: |
|
399 | 400 | #remove from database those repositories that are not in the filesystem |
|
400 | 401 | for repo in sa.query(Repository).all(): |
|
401 | 402 | if repo.repo_name not in initial_repo_list.keys(): |
|
402 | 403 | removed.append(repo.repo_name) |
|
403 | 404 | sa.delete(repo) |
|
404 | 405 | sa.commit() |
|
405 | 406 | |
|
406 | 407 | return added, removed |
|
407 | 408 | |
|
408 | 409 | #set cache regions for beaker so celery can utilise it |
|
409 | 410 | def add_cache(settings): |
|
410 | 411 | cache_settings = {'regions': None} |
|
411 | 412 | for key in settings.keys(): |
|
412 | 413 | for prefix in ['beaker.cache.', 'cache.']: |
|
413 | 414 | if key.startswith(prefix): |
|
414 | 415 | name = key.split(prefix)[1].strip() |
|
415 | 416 | cache_settings[name] = settings[key].strip() |
|
416 | 417 | if cache_settings['regions']: |
|
417 | 418 | for region in cache_settings['regions'].split(','): |
|
418 | 419 | region = region.strip() |
|
419 | 420 | region_settings = {} |
|
420 | 421 | for key, value in cache_settings.items(): |
|
421 | 422 | if key.startswith(region): |
|
422 | 423 | region_settings[key.split('.')[1]] = value |
|
423 | 424 | region_settings['expire'] = int(region_settings.get('expire', |
|
424 | 425 | 60)) |
|
425 | 426 | region_settings.setdefault('lock_dir', |
|
426 | 427 | cache_settings.get('lock_dir')) |
|
427 | 428 | region_settings.setdefault('data_dir', |
|
428 | 429 | cache_settings.get('data_dir')) |
|
429 | 430 | |
|
430 | 431 | if 'type' not in region_settings: |
|
431 | 432 | region_settings['type'] = cache_settings.get('type', |
|
432 | 433 | 'memory') |
|
433 | 434 | beaker.cache.cache_regions[region] = region_settings |
|
434 | 435 | |
|
435 | 436 | |
|
436 | 437 | def get_current_revision(): |
|
437 | 438 | """Returns tuple of (number, id) from repository containing this package |
|
438 | 439 | or None if repository could not be found. |
|
439 | 440 | """ |
|
440 | 441 | |
|
441 | 442 | try: |
|
442 | 443 | from vcs import get_repo |
|
443 | 444 | from vcs.utils.helpers import get_scm |
|
444 | 445 | from vcs.exceptions import RepositoryError, VCSError |
|
445 | 446 | repopath = os.path.join(os.path.dirname(__file__), '..', '..') |
|
446 | 447 | scm = get_scm(repopath)[0] |
|
447 | 448 | repo = get_repo(path=repopath, alias=scm) |
|
448 | 449 | tip = repo.get_changeset() |
|
449 | 450 | return (tip.revision, tip.short_id) |
|
450 | 451 | except (ImportError, RepositoryError, VCSError), err: |
|
451 | 452 | logging.debug("Cannot retrieve rhodecode's revision. Original error " |
|
452 | 453 | "was: %s" % err) |
|
453 | 454 | return None |
|
454 | 455 | |
|
455 | 456 | |
|
456 | 457 | #============================================================================== |
|
457 | 458 | # TEST FUNCTIONS AND CREATORS |
|
458 | 459 | #============================================================================== |
|
459 | 460 | def create_test_index(repo_location, full_index): |
|
460 | 461 | """Makes default test index |
|
461 | 462 | :param repo_location: |
|
462 | 463 | :param full_index: |
|
463 | 464 | """ |
|
464 | 465 | from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon |
|
465 | 466 | from rhodecode.lib.pidlock import DaemonLock, LockHeld |
|
466 | 467 | import shutil |
|
467 | 468 | |
|
468 | 469 | index_location = os.path.join(repo_location, 'index') |
|
469 | 470 | if os.path.exists(index_location): |
|
470 | 471 | shutil.rmtree(index_location) |
|
471 | 472 | |
|
472 | 473 | try: |
|
473 | l = DaemonLock() | |
|
474 | l = DaemonLock(file=jn(dn(dn(index_location)), 'make_index.lock')) | |
|
474 | 475 | WhooshIndexingDaemon(index_location=index_location, |
|
475 | 476 | repo_location=repo_location)\ |
|
476 | 477 | .run(full_index=full_index) |
|
477 | 478 | l.release() |
|
478 | 479 | except LockHeld: |
|
479 | 480 | pass |
|
480 | 481 | |
|
481 | 482 | |
|
482 | 483 | def create_test_env(repos_test_path, config): |
|
483 | 484 | """Makes a fresh database and |
|
484 | 485 | installs test repositories into a tmp dir |
|
485 | 486 | """ |
|
486 | 487 | from rhodecode.lib.db_manage import DbManage |
|
487 | 488 | from rhodecode.tests import HG_REPO, GIT_REPO, NEW_HG_REPO, NEW_GIT_REPO, \ |
|
488 | 489 | HG_FORK, GIT_FORK, TESTS_TMP_PATH |
|
489 | 490 | import tarfile |
|
490 | 491 | import shutil |
|
491 | 492 | from os.path import dirname as dn, join as jn, abspath |
|
492 | 493 | |
|
493 | 494 | log = logging.getLogger('TestEnvCreator') |
|
494 | 495 | # create logger |
|
495 | 496 | log.setLevel(logging.DEBUG) |
|
496 | 497 | log.propagate = True |
|
497 | 498 | # create console handler and set level to debug |
|
498 | 499 | ch = logging.StreamHandler() |
|
499 | 500 | ch.setLevel(logging.DEBUG) |
|
500 | 501 | |
|
501 | 502 | # create formatter |
|
502 | 503 | formatter = logging.Formatter("%(asctime)s - %(name)s -" |
|
503 | 504 | " %(levelname)s - %(message)s") |
|
504 | 505 | |
|
505 | 506 | # add formatter to ch |
|
506 | 507 | ch.setFormatter(formatter) |
|
507 | 508 | |
|
508 | 509 | # add ch to logger |
|
509 | 510 | log.addHandler(ch) |
|
510 | 511 | |
|
511 | 512 | #PART ONE create db |
|
512 | 513 | dbconf = config['sqlalchemy.db1.url'] |
|
513 | 514 | log.debug('making test db %s', dbconf) |
|
514 | 515 | |
|
515 | 516 | dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'], |
|
516 | 517 | tests=True) |
|
517 | 518 | dbmanage.create_tables(override=True) |
|
518 | 519 | dbmanage.create_settings(dbmanage.config_prompt(repos_test_path)) |
|
519 | 520 | dbmanage.create_default_user() |
|
520 | 521 | dbmanage.admin_prompt() |
|
521 | 522 | dbmanage.create_permissions() |
|
522 | 523 | dbmanage.populate_default_permissions() |
|
523 | 524 | |
|
524 | 525 | #PART TWO make test repo |
|
525 | 526 | log.debug('making test vcs repositories') |
|
526 | 527 | |
|
527 | 528 | #remove old ones from previous tests |
|
528 | 529 | for r in [HG_REPO, GIT_REPO, NEW_HG_REPO, NEW_GIT_REPO, HG_FORK, GIT_FORK]: |
|
529 | 530 | |
|
530 | 531 | if os.path.isdir(jn(TESTS_TMP_PATH, r)): |
|
531 | 532 | log.debug('removing %s', r) |
|
532 | 533 | shutil.rmtree(jn(TESTS_TMP_PATH, r)) |
|
533 | 534 | |
|
534 | 535 | #CREATE DEFAULT HG REPOSITORY |
|
535 | 536 | cur_dir = dn(dn(abspath(__file__))) |
|
536 | 537 | tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_hg.tar.gz")) |
|
537 | 538 | tar.extractall(jn(TESTS_TMP_PATH, HG_REPO)) |
|
538 | 539 | tar.close() |
|
539 | 540 | |
|
540 | 541 | |
|
541 | 542 | #============================================================================== |
|
542 | 543 | # PASTER COMMANDS |
|
543 | 544 | #============================================================================== |
|
544 | 545 | class BasePasterCommand(Command): |
|
545 | 546 | """ |
|
546 | 547 | Abstract Base Class for paster commands. |
|
547 | 548 | |
|
548 | 549 | The celery commands are somewhat aggressive about loading |
|
549 | 550 | celery.conf, and since our module sets the `CELERY_LOADER` |
|
550 | 551 | environment variable to our loader, we have to bootstrap a bit and |
|
551 | 552 | make sure we've had a chance to load the pylons config off of the |
|
552 | 553 | command line, otherwise everything fails. |
|
553 | 554 | """ |
|
554 | 555 | min_args = 1 |
|
555 | 556 | min_args_error = "Please provide a paster config file as an argument." |
|
556 | 557 | takes_config_file = 1 |
|
557 | 558 | requires_config_file = True |
|
558 | 559 | |
|
559 | 560 | Make a notification to the user; additionally, if a logger is passed, |
|
560 | 561 | it logs this action using the given logger |
|
561 | 562 | it logs this action using given logger |
|
562 | 563 | |
|
563 | 564 | :param msg: message that will be printed to user |
|
564 | 565 | :param log: logging instance, to use to additionally log this message |
|
565 | 566 | |
|
566 | 567 | """ |
|
567 | 568 | if log and isinstance(log, logging.Logger): |
|
568 | 569 | log.info(msg) |
|
569 | 570 | |
|
570 | 571 | def run(self, args): |
|
571 | 572 | """ |
|
572 | 573 | Overrides Command.run |
|
573 | 574 | |
|
574 | 575 | Checks for a config file argument and loads it. |
|
575 | 576 | """ |
|
576 | 577 | if len(args) < self.min_args: |
|
577 | 578 | raise BadCommand( |
|
578 | 579 | self.min_args_error % {'min_args': self.min_args, |
|
579 | 580 | 'actual_args': len(args)}) |
|
580 | 581 | |
|
581 | 582 | # Decrement because we're going to lob off the first argument. |
|
582 | 583 | # @@ This is hacky |
|
583 | 584 | self.min_args -= 1 |
|
584 | 585 | self.bootstrap_config(args[0]) |
|
585 | 586 | self.update_parser() |
|
586 | 587 | return super(BasePasterCommand, self).run(args[1:]) |
|
587 | 588 | |
|
588 | 589 | def update_parser(self): |
|
589 | 590 | """ |
|
590 | 591 | Abstract method. Allows for the class's parser to be updated |
|
591 | 592 | before the superclass's `run` method is called. Necessary to |
|
592 | 593 | allow options/arguments to be passed through to the underlying |
|
593 | 594 | celery command. |
|
594 | 595 | """ |
|
595 | 596 | raise NotImplementedError("Abstract Method.") |
|
596 | 597 | |
|
597 | 598 | def bootstrap_config(self, conf): |
|
598 | 599 | """ |
|
599 | 600 | Loads the pylons configuration. |
|
600 | 601 | """ |
|
601 | 602 | from pylons import config as pylonsconfig |
|
602 | 603 | |
|
603 | 604 | path_to_ini_file = os.path.realpath(conf) |
|
604 | 605 | conf = paste.deploy.appconfig('config:' + path_to_ini_file) |
|
605 | 606 | pylonsconfig.init_app(conf.global_conf, conf.local_conf) |
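create_test_index completes the pattern: every DaemonLock in this changeset now receives an explicit file path instead of defaulting to the process's working directory, and the test index reuses the same make_index.lock name as the MakeIndex paster command. Since a full rebuild removes index_location with rmtree, placing the lock two levels above it keeps the lock file outside the tree being rebuilt. A sketch of how the paths relate, with illustrative directory names:

    from os.path import dirname as dn, join as jn

    repo_location = '/srv/repos'
    index_location = jn(repo_location, 'index')   # deleted on a full rebuild
    lock_file = jn(dn(dn(index_location)), 'make_index.lock')
    print(lock_file)                              # '/srv/make_index.lock'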