##// END OF EJS Templates
fixes issue #747, load changeset cache after forking to refresh lightweight dashboard caches
marcink -
r3279:4c401e37 beta
parent child Browse files
Show More
@@ -1,456 +1,457 b''
1 1 # -*- coding: utf-8 -*-
2 2 """
3 3 rhodecode.lib.celerylib.tasks
4 4 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
5 5
6 6 RhodeCode task modules, containing all task that suppose to be run
7 7 by celery daemon
8 8
9 9 :created_on: Oct 6, 2010
10 10 :author: marcink
11 11 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
12 12 :license: GPLv3, see COPYING for more details.
13 13 """
14 14 # This program is free software: you can redistribute it and/or modify
15 15 # it under the terms of the GNU General Public License as published by
16 16 # the Free Software Foundation, either version 3 of the License, or
17 17 # (at your option) any later version.
18 18 #
19 19 # This program is distributed in the hope that it will be useful,
20 20 # but WITHOUT ANY WARRANTY; without even the implied warranty of
21 21 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
22 22 # GNU General Public License for more details.
23 23 #
24 24 # You should have received a copy of the GNU General Public License
25 25 # along with this program. If not, see <http://www.gnu.org/licenses/>.
26 26 from celery.decorators import task
27 27
28 28 import os
29 29 import traceback
30 30 import logging
31 31 from os.path import join as jn
32 32
33 33 from time import mktime
34 34 from operator import itemgetter
35 35 from string import lower
36 36
37 37 from pylons import config, url
38 38 from pylons.i18n.translation import _
39 39
40 40 from rhodecode.lib.vcs import get_backend
41 41
42 42 from rhodecode import CELERY_ON, CELERY_EAGER
43 43 from rhodecode.lib.utils2 import safe_str
44 44 from rhodecode.lib.celerylib import run_task, locked_task, dbsession, \
45 45 str2bool, __get_lockkey, LockHeld, DaemonLock, get_session
46 46 from rhodecode.lib.helpers import person
47 47 from rhodecode.lib.rcmail.smtp_mailer import SmtpMailer
48 48 from rhodecode.lib.utils import add_cache, action_logger
49 49 from rhodecode.lib.compat import json, OrderedDict
50 50 from rhodecode.lib.hooks import log_create_repository
51 51
52 52 from rhodecode.model.db import Statistics, Repository, User
53 53 from rhodecode.model.scm import ScmModel
54 54
55 55
# NOTE(review): presumably registers the cache regions defined in the .ini
# for this (possibly standalone celery) process — confirm against
# rhodecode.lib.utils.add_cache
add_cache(config)

# tasks intended to be invoked by name via run_task()
__all__ = ['whoosh_index', 'get_commits_stats',
           'reset_user_password', 'send_email']
60 60
61 61
def get_logger(cls):
    """
    Return a logger for a task.

    When celery is enabled, tries the task's own ``get_logger()``; falls
    back to this module's standard logger when celery is off or the task
    logger cannot be obtained.

    :param cls: task object (or anything exposing ``get_logger()``)
    :return: a logger instance
    """
    if CELERY_ON:
        try:
            log = cls.get_logger()
        # narrow from a bare except: we only want to fall back on ordinary
        # failures, not swallow SystemExit/KeyboardInterrupt
        except Exception:
            log = logging.getLogger(__name__)
    else:
        log = logging.getLogger(__name__)

    return log
72 72
73 73
@task(ignore_result=True)
@locked_task
@dbsession
def whoosh_index(repo_location, full_index):
    """
    Run the Whoosh full-text indexing daemon over the repositories.

    :param repo_location: filesystem root holding the repositories
    :param full_index: when True rebuild the index from scratch, otherwise
        do an incremental update
    """
    from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
    log = get_logger(whoosh_index)
    sa_session = get_session()

    daemon = WhooshIndexingDaemon(index_location=config['index_dir'],
                                  repo_location=repo_location,
                                  sa=sa_session)
    daemon.run(full_index=full_index)
86 86
87 87
@task(ignore_result=True)
@dbsession
def get_commits_stats(repo_name, ts_min_y, ts_max_y, recurse_limit=100):
    """
    Parse a chunk of a repository's revisions and aggregate per-author and
    per-day commit activity into the Statistics table. When celery is on
    and revisions remain, re-schedules itself until done (bounded by
    `recurse_limit`).

    :param repo_name: name of the repository to parse
    :param ts_min_y: lower timestamp bound of the aggregation window
    :param ts_max_y: upper timestamp bound of the aggregation window
    :param recurse_limit: max number of chained re-runs when under celery
    """
    log = get_logger(get_commits_stats)
    DBS = get_session()
    lockkey = __get_lockkey('get_commits_stats', repo_name, ts_min_y,
                            ts_max_y)
    lockkey_path = config['app_conf']['cache_dir']

    log.info('running task with lockkey %s' % lockkey)

    try:
        # only one stats task per (repo, window) may run at a time
        lock = DaemonLock(file_=jn(lockkey_path, lockkey))

        # for js data compatibility cleans the key for person from '
        akc = lambda k: person(k).replace('"', "")

        co_day_auth_aggr = {}
        commits_by_day_aggregate = {}
        repo = Repository.get_by_repo_name(repo_name)
        if repo is None:
            # repository vanished: release the lock so future runs are
            # not blocked by a stale lockfile
            lock.release()
            return True

        repo = repo.scm_instance
        repo_size = repo.count()
        # return if repo have no revisions
        if repo_size < 1:
            lock.release()
            return True

        skip_date_limit = True
        parse_limit = int(config['app_conf'].get('commit_parse_limit'))
        last_rev = None
        last_cs = None
        timegetter = itemgetter('time')

        dbrepo = DBS.query(Repository)\
            .filter(Repository.repo_name == repo_name).scalar()
        cur_stats = DBS.query(Statistics)\
            .filter(Statistics.repository == dbrepo).scalar()

        if cur_stats is not None:
            last_rev = cur_stats.stat_on_revision

        if last_rev == repo.get_changeset().revision and repo_size > 1:
            # pass silently without any work if we're not on first revision or
            # current state of parsing revision(from db marker) is the
            # last revision
            lock.release()
            return True

        if cur_stats:
            commits_by_day_aggregate = OrderedDict(json.loads(
                                        cur_stats.commit_activity_combined))
            co_day_auth_aggr = json.loads(cur_stats.commit_activity)

        log.debug('starting parsing %s' % parse_limit)
        lmktime = mktime

        # resume one past the stored marker (None compares < 0 in py2,
        # so a fresh repo starts at revision 0)
        last_rev = last_rev + 1 if last_rev >= 0 else 0
        log.debug('Getting revisions from %s to %s' % (
             last_rev, last_rev + parse_limit)
        )
        for cs in repo[last_rev:last_rev + parse_limit]:
            log.debug('parsing %s' % cs)
            last_cs = cs  # remember last parsed changeset
            # collapse the commit date to midnight so all commits of one
            # day share a single aggregation key
            k = lmktime([cs.date.timetuple()[0], cs.date.timetuple()[1],
                          cs.date.timetuple()[2], 0, 0, 0, 0, 0, 0])

            if akc(cs.author) in co_day_auth_aggr:
                try:
                    times = [timegetter(x) for x in
                             co_day_auth_aggr[akc(cs.author)]['data']]
                    time_pos = times.index(k)
                except ValueError:
                    time_pos = False

                # in py2 False >= 0 is True, hence the explicit identity check
                if time_pos >= 0 and time_pos is not False:

                    datadict = \
                        co_day_auth_aggr[akc(cs.author)]['data'][time_pos]

                    datadict["commits"] += 1
                    datadict["added"] += len(cs.added)
                    datadict["changed"] += len(cs.changed)
                    datadict["removed"] += len(cs.removed)

                else:
                    if k >= ts_min_y and k <= ts_max_y or skip_date_limit:

                        datadict = {"time": k,
                                    "commits": 1,
                                    "added": len(cs.added),
                                    "changed": len(cs.changed),
                                    "removed": len(cs.removed),
                                    }
                        co_day_auth_aggr[akc(cs.author)]['data']\
                            .append(datadict)

            else:
                if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
                    co_day_auth_aggr[akc(cs.author)] = {
                        "label": akc(cs.author),
                        "data": [{"time":k,
                                 "commits":1,
                                 "added":len(cs.added),
                                 "changed":len(cs.changed),
                                 "removed":len(cs.removed),
                                 }],
                        "schema": ["commits"],
                    }

            # gather all data by day
            if k in commits_by_day_aggregate:
                commits_by_day_aggregate[k] += 1
            else:
                commits_by_day_aggregate[k] = 1

        overview_data = sorted(commits_by_day_aggregate.items(),
                               key=itemgetter(0))

        if not co_day_auth_aggr:
            co_day_auth_aggr[akc(repo.contact)] = {
                "label": akc(repo.contact),
                "data": [0, 1],
                "schema": ["commits"],
            }

        stats = cur_stats if cur_stats else Statistics()
        stats.commit_activity = json.dumps(co_day_auth_aggr)
        stats.commit_activity_combined = json.dumps(overview_data)

        log.debug('last revison %s' % last_rev)
        leftovers = len(repo.revisions[last_rev:])
        log.debug('revisions to parse %s' % leftovers)

        # language stats are expensive; only compute once the whole
        # history has been (or is about to be) parsed
        if last_rev == 0 or leftovers < parse_limit:
            log.debug('getting code trending stats')
            stats.languages = json.dumps(__get_codes_stats(repo_name))

        try:
            stats.repository = dbrepo
            stats.stat_on_revision = last_cs.revision if last_cs else 0
            DBS.add(stats)
            DBS.commit()
        except Exception:
            log.error(traceback.format_exc())
            DBS.rollback()
            lock.release()
            return False

        # final release
        lock.release()

        # execute another task if celery is enabled
        if len(repo.revisions) > 1 and CELERY_ON and recurse_limit > 0:
            recurse_limit -= 1
            run_task(get_commits_stats, repo_name, ts_min_y, ts_max_y,
                     recurse_limit)
        if recurse_limit <= 0:
            log.debug('Breaking recursive mode due to reach of recurse limit')
        return True
    except LockHeld:
        log.info('LockHeld')
        return 'Task with key %s already running' % lockkey
253 253
@task(ignore_result=True)
@dbsession
def send_password_link(user_email):
    """
    Email a password-reset confirmation link to `user_email`, if a user
    with that email exists.

    :param user_email: email address to look the user up by and send to
    :return: True on success (including the no-such-user case),
        False when an error occurred
    """
    from rhodecode.model.notification import EmailNotificationModel

    log = get_logger(send_password_link)
    DBS = get_session()

    try:
        user = User.get_by_email(user_email)
        if user:
            log.debug('password reset user found %s' % user)
            # the user's api_key doubles as the one-time confirmation token
            link = url('reset_password_confirmation', key=user.api_key,
                       qualified=True)
            reg_type = EmailNotificationModel.TYPE_PASSWORD_RESET
            body = EmailNotificationModel().get_email_tmpl(reg_type,
                                                **{'user':user.short_contact,
                                                   'reset_url':link})
            log.debug('sending email')
            run_task(send_email, user_email,
                     _("password reset link"), body)
            log.info('send new password mail to %s' % user_email)
        else:
            log.debug("password reset email %s not found" % user_email)
    # narrowed from a bare except: don't swallow SystemExit/KeyboardInterrupt
    except Exception:
        log.error(traceback.format_exc())
        return False

    return True
283 283
@task(ignore_result=True)
@dbsession
def reset_user_password(user_email):
    """
    Generate a new password for the user matching `user_email`, store it
    and mail it to that address.

    Fixes over the previous version: the "new password" mail is only sent
    when a matching user exists AND the database update actually committed
    (before, a mail with an unusable password was sent even when no user
    was found or the update was rolled back).

    :param user_email: email address identifying the user
    :return: True (errors are logged, not propagated)
    """
    from rhodecode.lib import auth

    log = get_logger(reset_user_password)
    DBS = get_session()

    try:
        user = User.get_by_email(user_email)
        if user is None:
            log.debug('password reset: no user found for %s' % user_email)
            return True

        new_passwd = auth.PasswordGenerator().gen_password(8,
                         auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
        if new_passwd is None:
            raise Exception('unable to generate new password')

        try:
            user.password = auth.get_crypt_password(new_passwd)
            user.api_key = auth.generate_api_key(user.username)
            DBS.add(user)
            DBS.commit()
            log.info('change password for %s' % user_email)
        except Exception:
            log.error(traceback.format_exc())
            DBS.rollback()
            # the stored password did not change - do not mail the new one
            raise

        run_task(send_email, user_email,
                 'Your new password',
                 'Your new RhodeCode password:%s' % (new_passwd))
        log.info('send new password mail to %s' % user_email)

    except Exception:
        log.error('Failed to update user password')
        log.error(traceback.format_exc())

    return True
319 319
320 320
@task(ignore_result=True)
@dbsession
def send_email(recipients, subject, body, html_body=''):
    """
    Sends an email with defined parameters from the .ini files.

    :param recipients: list of recipients, it this is empty the defined email
        address from field 'email_to' is used instead
    :param subject: subject of the mail
    :param body: body of the mail
    :param html_body: html version of body
    :return: True when the mail was handed to the SMTP server,
        False when sending failed or no server is configured
    """
    log = get_logger(send_email)
    DBS = get_session()

    email_config = config
    subject = "%s %s" % (email_config.get('email_prefix', ''), subject)
    if not recipients:
        # if recipients are not defined we send to email_config + all admins
        admins = [u.email for u in User.query()
                  .filter(User.admin == True).all()]
        recipients = [email_config.get('email_to')] + admins

    mail_from = email_config.get('app_email_from', 'RhodeCode')
    user = email_config.get('smtp_username')
    passwd = email_config.get('smtp_password')
    mail_server = email_config.get('smtp_server')
    mail_port = email_config.get('smtp_port')
    tls = str2bool(email_config.get('smtp_use_tls'))
    ssl = str2bool(email_config.get('smtp_use_ssl'))
    debug = str2bool(config.get('debug'))
    smtp_auth = email_config.get('smtp_auth')

    if not mail_server:
        log.error("SMTP mail server not configured - cannot send mail")
        return False

    try:
        m = SmtpMailer(mail_from, user, passwd, mail_server, smtp_auth,
                       mail_port, ssl, tls, debug=debug)
        m.send(recipients, subject, body, html_body)
    # narrowed from a bare except: don't swallow SystemExit/KeyboardInterrupt
    except Exception:
        log.error('Mail sending failed')
        log.error(traceback.format_exc())
        return False
    return True
367 367
368 368
@task(ignore_result=True)
@dbsession
def create_repo_fork(form_data, cur_user):
    """
    Creates a fork of repository using internal VCS methods

    :param form_data: dict of fork-form fields; reads 'repo_name_full',
        'repo_type', 'description', 'private', 'clone_uri', 'repo_group',
        'landing_rev', 'copy_permissions', 'fork_parent_id' and
        'update_after_clone'
    :param cur_user: user (or an identifier resolvable by
        UserModel._get_user) that becomes the fork's owner
    """
    from rhodecode.model.repo import RepoModel
    from rhodecode.model.user import UserModel

    log = get_logger(create_repo_fork)
    DBS = get_session()

    base_path = Repository.base_path()
    cur_user = UserModel(DBS)._get_user(cur_user)

    fork_name = form_data['repo_name_full']
    repo_type = form_data['repo_type']
    description = form_data['description']
    owner = cur_user
    private = form_data['private']
    clone_uri = form_data.get('clone_uri')
    repos_group = form_data['repo_group']
    landing_rev = form_data['landing_rev']
    copy_fork_permissions = form_data.get('copy_permissions')
    fork_of = RepoModel(DBS)._get_repo(form_data.get('fork_parent_id'))

    # create the DB record first (just_db=True); the on-disk clone is
    # performed below via the VCS backend
    fork_repo = RepoModel(DBS).create_repo(
        fork_name, repo_type, description, owner, private, clone_uri,
        repos_group, landing_rev, just_db=True, fork_of=fork_of,
        copy_fork_permissions=copy_fork_permissions
    )

    update_after_clone = form_data['update_after_clone']

    source_repo_path = os.path.join(base_path, fork_of.repo_name)
    destination_fork_path = os.path.join(base_path, fork_name)

    log.info('creating fork of %s as %s', source_repo_path,
             destination_fork_path)
    backend = get_backend(repo_type)

    if repo_type == 'git':
        r = backend(safe_str(destination_fork_path), create=True,
                    src_url=safe_str(source_repo_path),
                    update_after_clone=update_after_clone,
                    bare=True)
        # add rhodecode hook into this repo
        ScmModel().install_git_hook(repo=r)
    elif repo_type == 'hg':
        r = backend(safe_str(destination_fork_path), create=True,
                    src_url=safe_str(source_repo_path),
                    update_after_clone=update_after_clone)
    else:
        raise Exception('Unknown backend type %s' % repo_type)

    log_create_repository(fork_repo.get_dict(), created_by=cur_user.username)

    action_logger(cur_user, 'user_forked_repo:%s' % fork_name,
                  fork_of.repo_name, '', DBS)

    action_logger(cur_user, 'user_created_fork:%s' % fork_name,
                  fork_name, '', DBS)
    # finally commit at latest possible stage
    DBS.commit()
    # refresh the fork's changeset cache after the clone exists so the
    # lightweight dashboard shows fresh data (issue #747)
    fork_repo.update_changeset_cache()
436 437
437 438
def __get_codes_stats(repo_name):
    """
    Count files per known source-language extension in the tip changeset
    of the given repository.

    :param repo_name: repository name to inspect
    :return: dict mapping lowercased extension -> file count (may be empty)
    """
    from rhodecode.config.conf import LANGUAGES_EXTENSIONS_MAP
    repo = Repository.get_by_repo_name(repo_name).scm_instance

    tip = repo.get_changeset()
    code_stats = {}

    # walk() yields (dir?, ?, files) style triples; cs[2] holds the file
    # nodes - iterate with an explicit loop instead of an effectful map()
    for cs in tip.walk('/'):
        for f in cs[2]:
            # str.lower() instead of the deprecated string.lower helper
            ext = f.extension.lower()
            # only count known, non-binary source files
            if ext in LANGUAGES_EXTENSIONS_MAP and not f.is_binary:
                code_stats[ext] = code_stats.get(ext, 0) + 1

    return code_stats
General Comments 0
You need to be logged in to leave comments. Login now