celery tasks: use None as special value...
Mads Kiilerich
r3626:0a1417ef beta
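The change in this commit swaps False for None as the "position not found" marker around list.index(). False is a poor sentinel for a list index because it compares equal to 0, which is itself a valid position, so the old code had to lean on the extra "time_pos is not False" identity check; None is the conventional "no result" value and cannot be confused with an index. A minimal standalone sketch of the idea (find_time_pos is a hypothetical helper, not RhodeCode code; the actual diff below also keeps the >= 0 guard):

    def find_time_pos(times, k):
        # return the position of timestamp k, or None when it is absent
        try:
            return times.index(k)
        except ValueError:
            return None  # was: False, which is falsy and compares equal to 0

    pos = find_time_pos([1349481600, 1349568000], 1349481600)
    if pos is not None:  # an index of 0 passes this test unambiguously
        print("update the existing bucket at position %d" % pos)
    else:
        print("append a new bucket")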
@@ -1,391 +1,391 @@
1 1 # -*- coding: utf-8 -*-
2 2 """
3 3 rhodecode.lib.celerylib.tasks
4 4 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
5 5
6 6 RhodeCode task modules, containing all tasks that are supposed to be run
7 7 by celery daemon
8 8
9 9 :created_on: Oct 6, 2010
10 10 :author: marcink
11 11 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
12 12 :license: GPLv3, see COPYING for more details.
13 13 """
14 14 # This program is free software: you can redistribute it and/or modify
15 15 # it under the terms of the GNU General Public License as published by
16 16 # the Free Software Foundation, either version 3 of the License, or
17 17 # (at your option) any later version.
18 18 #
19 19 # This program is distributed in the hope that it will be useful,
20 20 # but WITHOUT ANY WARRANTY; without even the implied warranty of
21 21 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
22 22 # GNU General Public License for more details.
23 23 #
24 24 # You should have received a copy of the GNU General Public License
25 25 # along with this program. If not, see <http://www.gnu.org/licenses/>.
26 26 from celery.decorators import task
27 27
28 28 import os
29 29 import traceback
30 30 import logging
31 31 from os.path import join as jn
32 32
33 33 from time import mktime
34 34 from operator import itemgetter
35 35 from string import lower
36 36
37 37 from pylons import config, url
38 38 from pylons.i18n.translation import _
39 39
40 40 from rhodecode.lib.vcs import get_backend
41 41
42 42 from rhodecode import CELERY_ON, CELERY_EAGER
43 43 from rhodecode.lib.utils2 import safe_str
44 44 from rhodecode.lib.celerylib import run_task, locked_task, dbsession, \
45 45 str2bool, __get_lockkey, LockHeld, DaemonLock, get_session
46 46 from rhodecode.lib.helpers import person
47 47 from rhodecode.lib.rcmail.smtp_mailer import SmtpMailer
48 48 from rhodecode.lib.utils import add_cache, action_logger
49 49 from rhodecode.lib.compat import json, OrderedDict
50 50 from rhodecode.lib.hooks import log_create_repository
51 51
52 52 from rhodecode.model.db import Statistics, Repository, User
53 53 from rhodecode.model.scm import ScmModel
54 54
55 55
56 56 add_cache(config)
57 57
58 58 __all__ = ['whoosh_index', 'get_commits_stats',
59 59 'reset_user_password', 'send_email']
60 60
61 61
62 62 def get_logger(cls):
63 63 if CELERY_ON:
64 64 try:
65 65 log = cls.get_logger()
66 66 except:
67 67 log = logging.getLogger(__name__)
68 68 else:
69 69 log = logging.getLogger(__name__)
70 70
71 71 return log
72 72
73 73
74 74 @task(ignore_result=True)
75 75 @locked_task
76 76 @dbsession
77 77 def whoosh_index(repo_location, full_index):
78 78 from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
79 79 log = get_logger(whoosh_index)
80 80 DBS = get_session()
81 81
82 82 index_location = config['index_dir']
83 83 WhooshIndexingDaemon(index_location=index_location,
84 84 repo_location=repo_location, sa=DBS)\
85 85 .run(full_index=full_index)
86 86
87 87
88 88 @task(ignore_result=True)
89 89 @dbsession
90 90 def get_commits_stats(repo_name, ts_min_y, ts_max_y, recurse_limit=100):
91 91 log = get_logger(get_commits_stats)
92 92 DBS = get_session()
93 93 lockkey = __get_lockkey('get_commits_stats', repo_name, ts_min_y,
94 94 ts_max_y)
95 95 lockkey_path = config['app_conf']['cache_dir']
96 96
97 97 log.info('running task with lockkey %s' % lockkey)
98 98
99 99 try:
100 100 lock = l = DaemonLock(file_=jn(lockkey_path, lockkey))
101 101
102 102 # for js data compatibility, strip double quotes from the person key
103 103 akc = lambda k: person(k).replace('"', "")
104 104
105 105 co_day_auth_aggr = {}
106 106 commits_by_day_aggregate = {}
107 107 repo = Repository.get_by_repo_name(repo_name)
108 108 if repo is None:
109 109 return True
110 110
111 111 repo = repo.scm_instance
112 112 repo_size = repo.count()
113 113 # return if repo has no revisions
114 114 if repo_size < 1:
115 115 lock.release()
116 116 return True
117 117
118 118 skip_date_limit = True
119 119 parse_limit = int(config['app_conf'].get('commit_parse_limit'))
120 120 last_rev = None
121 121 last_cs = None
122 122 timegetter = itemgetter('time')
123 123
124 124 dbrepo = DBS.query(Repository)\
125 125 .filter(Repository.repo_name == repo_name).scalar()
126 126 cur_stats = DBS.query(Statistics)\
127 127 .filter(Statistics.repository == dbrepo).scalar()
128 128
129 129 if cur_stats is not None:
130 130 last_rev = cur_stats.stat_on_revision
131 131
132 132 if last_rev == repo.get_changeset().revision and repo_size > 1:
133 133 # pass silently without any work if we're not on the first revision
134 134 # or the currently parsed revision (the db marker) is already the
135 135 # last revision
136 136 lock.release()
137 137 return True
138 138
139 139 if cur_stats:
140 140 commits_by_day_aggregate = OrderedDict(json.loads(
141 141 cur_stats.commit_activity_combined))
142 142 co_day_auth_aggr = json.loads(cur_stats.commit_activity)
143 143
144 144 log.debug('starting parsing %s' % parse_limit)
145 145 lmktime = mktime
146 146
147 147 last_rev = last_rev + 1 if last_rev >= 0 else 0
148 148 log.debug('Getting revisions from %s to %s' % (
149 149 last_rev, last_rev + parse_limit)
150 150 )
151 151 for cs in repo[last_rev:last_rev + parse_limit]:
152 152 log.debug('parsing %s' % cs)
153 153 last_cs = cs # remember last parsed changeset
154 154 k = lmktime([cs.date.timetuple()[0], cs.date.timetuple()[1],
155 155 cs.date.timetuple()[2], 0, 0, 0, 0, 0, 0])
156 156
157 157 if akc(cs.author) in co_day_auth_aggr:
158 158 try:
159 159 l = [timegetter(x) for x in
160 160 co_day_auth_aggr[akc(cs.author)]['data']]
161 161 time_pos = l.index(k)
162 162 except ValueError:
163     -                     time_pos = False
    163 +                     time_pos = None
164 164
165     -                 if time_pos >= 0 and time_pos is not False:
    165 +                 if time_pos >= 0 and time_pos is not None:
166 166
167 167 datadict = \
168 168 co_day_auth_aggr[akc(cs.author)]['data'][time_pos]
169 169
170 170 datadict["commits"] += 1
171 171 datadict["added"] += len(cs.added)
172 172 datadict["changed"] += len(cs.changed)
173 173 datadict["removed"] += len(cs.removed)
174 174
175 175 else:
176 176 if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
177 177
178 178 datadict = {"time": k,
179 179 "commits": 1,
180 180 "added": len(cs.added),
181 181 "changed": len(cs.changed),
182 182 "removed": len(cs.removed),
183 183 }
184 184 co_day_auth_aggr[akc(cs.author)]['data']\
185 185 .append(datadict)
186 186
187 187 else:
188 188 if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
189 189 co_day_auth_aggr[akc(cs.author)] = {
190 190 "label": akc(cs.author),
191 191 "data": [{"time":k,
192 192 "commits":1,
193 193 "added":len(cs.added),
194 194 "changed":len(cs.changed),
195 195 "removed":len(cs.removed),
196 196 }],
197 197 "schema": ["commits"],
198 198 }
199 199
200 200 #gather all data by day
201 201 if k in commits_by_day_aggregate:
202 202 commits_by_day_aggregate[k] += 1
203 203 else:
204 204 commits_by_day_aggregate[k] = 1
205 205
206 206 overview_data = sorted(commits_by_day_aggregate.items(),
207 207 key=itemgetter(0))
208 208
209 209 if not co_day_auth_aggr:
210 210 co_day_auth_aggr[akc(repo.contact)] = {
211 211 "label": akc(repo.contact),
212 212 "data": [0, 1],
213 213 "schema": ["commits"],
214 214 }
215 215
216 216 stats = cur_stats if cur_stats else Statistics()
217 217 stats.commit_activity = json.dumps(co_day_auth_aggr)
218 218 stats.commit_activity_combined = json.dumps(overview_data)
219 219
220 220 log.debug('last revision %s' % last_rev)
221 221 leftovers = len(repo.revisions[last_rev:])
222 222 log.debug('revisions to parse %s' % leftovers)
223 223
224 224 if last_rev == 0 or leftovers < parse_limit:
225 225 log.debug('getting code trending stats')
226 226 stats.languages = json.dumps(__get_codes_stats(repo_name))
227 227
228 228 try:
229 229 stats.repository = dbrepo
230 230 stats.stat_on_revision = last_cs.revision if last_cs else 0
231 231 DBS.add(stats)
232 232 DBS.commit()
233 233 except:
234 234 log.error(traceback.format_exc())
235 235 DBS.rollback()
236 236 lock.release()
237 237 return False
238 238
239 239 # final release
240 240 lock.release()
241 241
242 242 # execute another task if celery is enabled
243 243 if len(repo.revisions) > 1 and CELERY_ON and recurse_limit > 0:
244 244 recurse_limit -= 1
245 245 run_task(get_commits_stats, repo_name, ts_min_y, ts_max_y,
246 246 recurse_limit)
247 247 if recurse_limit <= 0:
248 248 log.debug('Breaking recursive mode due to reaching the recurse limit')
249 249 return True
250 250 except LockHeld:
251 251 log.info('LockHeld')
252 252 return 'Task with key %s already running' % lockkey
253 253
254 254
255 255 @task(ignore_result=True)
256 256 @dbsession
257 257 def send_email(recipients, subject, body='', html_body=''):
258 258 """
259 259 Sends an email with defined parameters from the .ini files.
260 260
261 261 :param recipients: list of recipients; if this is empty, the defined
262 262 email address from the 'email_to' field is used instead
263 263 :param subject: subject of the mail
264 264 :param body: body of the mail
265 265 :param html_body: html version of body
266 266 """
267 267 log = get_logger(send_email)
268 268 DBS = get_session()
269 269
270 270 email_config = config
271 271 subject = "%s %s" % (email_config.get('email_prefix', ''), subject)
272 272 if not recipients:
273 273 # if recipients are not defined we send to email_config + all admins
274 274 admins = [u.email for u in User.query()
275 275 .filter(User.admin == True).all()]
276 276 recipients = [email_config.get('email_to')] + admins
277 277
278 278 mail_from = email_config.get('app_email_from', 'RhodeCode')
279 279 user = email_config.get('smtp_username')
280 280 passwd = email_config.get('smtp_password')
281 281 mail_server = email_config.get('smtp_server')
282 282 mail_port = email_config.get('smtp_port')
283 283 tls = str2bool(email_config.get('smtp_use_tls'))
284 284 ssl = str2bool(email_config.get('smtp_use_ssl'))
285 285 debug = str2bool(email_config.get('debug'))
286 286 smtp_auth = email_config.get('smtp_auth')
287 287
288 288 if not mail_server:
289 289 log.error("SMTP mail server not configured - cannot send mail")
290 290 return False
291 291
292 292 try:
293 293 m = SmtpMailer(mail_from, user, passwd, mail_server, smtp_auth,
294 294 mail_port, ssl, tls, debug=debug)
295 295 m.send(recipients, subject, body, html_body)
296 296 except:
297 297 log.error('Mail sending failed')
298 298 log.error(traceback.format_exc())
299 299 return False
300 300 return True
301 301
302 302
303 303 @task(ignore_result=True)
304 304 @dbsession
305 305 def create_repo_fork(form_data, cur_user):
306 306 """
307 307 Creates a fork of a repository using internal VCS methods
308 308
309 309 :param form_data:
310 310 :param cur_user:
311 311 """
312 312 from rhodecode.model.repo import RepoModel
313 313 from rhodecode.model.user import UserModel
314 314
315 315 log = get_logger(create_repo_fork)
316 316 DBS = get_session()
317 317
318 318 base_path = Repository.base_path()
319 319 cur_user = UserModel(DBS)._get_user(cur_user)
320 320
321 321 fork_name = form_data['repo_name_full']
322 322 repo_type = form_data['repo_type']
323 323 description = form_data['description']
324 324 owner = cur_user
325 325 private = form_data['private']
326 326 clone_uri = form_data.get('clone_uri')
327 327 repos_group = form_data['repo_group']
328 328 landing_rev = form_data['landing_rev']
329 329 copy_fork_permissions = form_data.get('copy_permissions')
330 330 fork_of = RepoModel(DBS)._get_repo(form_data.get('fork_parent_id'))
331 331
332 332 fork_repo = RepoModel(DBS).create_repo(
333 333 fork_name, repo_type, description, owner, private, clone_uri,
334 334 repos_group, landing_rev, just_db=True, fork_of=fork_of,
335 335 copy_fork_permissions=copy_fork_permissions
336 336 )
337 337
338 338 update_after_clone = form_data['update_after_clone']
339 339
340 340 source_repo_path = os.path.join(base_path, fork_of.repo_name)
341 341 destination_fork_path = os.path.join(base_path, fork_name)
342 342
343 343 log.info('creating fork of %s as %s', source_repo_path,
344 344 destination_fork_path)
345 345 backend = get_backend(repo_type)
346 346
347 347 if repo_type == 'git':
348 348 r = backend(safe_str(destination_fork_path), create=True,
349 349 src_url=safe_str(source_repo_path),
350 350 update_after_clone=update_after_clone,
351 351 bare=True)
352 352 # add rhodecode hook into this repo
353 353 ScmModel().install_git_hook(repo=r)
354 354 elif repo_type == 'hg':
355 355 r = backend(safe_str(destination_fork_path), create=True,
356 356 src_url=safe_str(source_repo_path),
357 357 update_after_clone=update_after_clone)
358 358 else:
359 359 raise Exception('Unknown backend type %s' % repo_type)
360 360
361 361 log_create_repository(fork_repo.get_dict(), created_by=cur_user.username)
362 362
363 363 action_logger(cur_user, 'user_forked_repo:%s' % fork_name,
364 364 fork_of.repo_name, '', DBS)
365 365
366 366 action_logger(cur_user, 'user_created_fork:%s' % fork_name,
367 367 fork_name, '', DBS)
368 368 # finally commit at latest possible stage
369 369 DBS.commit()
370 370 fork_repo.update_changeset_cache()
371 371
372 372
373 373 def __get_codes_stats(repo_name):
374 374 from rhodecode.config.conf import LANGUAGES_EXTENSIONS_MAP
375 375 repo = Repository.get_by_repo_name(repo_name).scm_instance
376 376
377 377 tip = repo.get_changeset()
378 378 code_stats = {}
379 379
380 380 def aggregate(cs):
381 381 for f in cs[2]:
382 382 ext = lower(f.extension)
383 383 if ext in LANGUAGES_EXTENSIONS_MAP.keys() and not f.is_binary:
384 384 if ext in code_stats:
385 385 code_stats[ext] += 1
386 386 else:
387 387 code_stats[ext] = 1
388 388
389 389 map(aggregate, tip.walk('/'))
390 390
391 391 return code_stats or {}
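For context on the task being touched here: get_commits_stats parses at most commit_parse_limit changesets per run and, when Celery is enabled, re-queues itself via run_task with recurse_limit - 1 until the remaining revisions are consumed or the limit is hit. A minimal, self-contained sketch of that chunked self-scheduling pattern (illustrative only; process_in_chunks is a made-up name, and the real task dispatches through Celery and records its progress in the Statistics table):

    def process_in_chunks(items, start=0, chunk=100, recurse_limit=10):
        # handle at most `chunk` items per invocation
        for item in items[start:start + chunk]:
            pass  # parse one changeset, update the aggregates, etc.

        leftovers = len(items) - (start + chunk)
        if leftovers > 0 and recurse_limit > 0:
            # the task does run_task(get_commits_stats, ..., recurse_limit - 1);
            # a plain recursive call stands in for the Celery round trip here
            process_in_chunks(items, start + chunk, chunk, recurse_limit - 1)
        elif leftovers > 0:
            print("recurse limit reached with %d items left" % leftovers)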