TurboGears: drop workaround for < 2.4...
Mads Kiilerich -
r8599:1b683a4e default
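The change itself is small: with TurboGears 2.4 the settings from the application's ini file are exposed directly on the top-level tg.config object, so the fallback lookup through config['app_conf'] is no longer needed. A minimal sketch of the two access patterns, for illustration only (not part of the commit):

from tg import config

# workaround needed for TurboGears < 2.4 (removed in this commit):
cache_dir = config.get('cache_dir') or config['app_conf']['cache_dir']

# with TurboGears >= 2.4 the setting is available directly:
cache_dir = config['cache_dir']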
@@ -1,133 +1,131 @@ kallithea.lib.celerylib
1 1 # -*- coding: utf-8 -*-
2 2 # This program is free software: you can redistribute it and/or modify
3 3 # it under the terms of the GNU General Public License as published by
4 4 # the Free Software Foundation, either version 3 of the License, or
5 5 # (at your option) any later version.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 """
15 15 kallithea.lib.celerylib
16 16 ~~~~~~~~~~~~~~~~~~~~~~~
17 17
18 18 celery libs for Kallithea
19 19
20 20 This file was forked by the Kallithea project in July 2014.
21 21 Original author and date, and relevant copyright and licensing information is below:
22 22 :created_on: Nov 27, 2010
23 23 :author: marcink
24 24 :copyright: (c) 2013 RhodeCode GmbH, and others.
25 25 :license: GPLv3, see LICENSE.md for more details.
26 26 """
27 27
28 28
29 29 import logging
30 30 import os
31 31 from hashlib import sha1
32 32
33 33 from decorator import decorator
34 34 from tg import config
35 35
36 36 import kallithea
37 37 from kallithea.lib.pidlock import DaemonLock, LockHeld
38 38 from kallithea.lib.utils2 import safe_bytes
39 39 from kallithea.model import meta
40 40
41 41
42 42 log = logging.getLogger(__name__)
43 43
44 44
45 45 class FakeTask(object):
46 46 """Fake a sync result to make it look like a finished task"""
47 47
48 48 def __init__(self, result):
49 49 self.result = result
50 50
51 51 def failed(self):
52 52 return False
53 53
54 54 traceback = None # if failed
55 55
56 56 task_id = None
57 57
58 58
59 59 def task(f_org):
60 60 """Wrapper of celery.task.task, running async if CELERY_APP
61 61 """
62 62
63 63 if kallithea.CELERY_APP:
64 64 def f_async(*args, **kwargs):
65 65 log.info('executing %s task', f_org.__name__)
66 66 try:
67 67 f_org(*args, **kwargs)
68 68 finally:
69 69 log.info('executed %s task', f_org.__name__)
70 70 f_async.__name__ = f_org.__name__
71 71 runner = kallithea.CELERY_APP.task(ignore_result=True)(f_async)
72 72
73 73 def f_wrapped(*args, **kwargs):
74 74 t = runner.apply_async(args=args, kwargs=kwargs)
75 75 log.info('executing task %s in async mode - id %s', f_org, t.task_id)
76 76 return t
77 77 else:
78 78 def f_wrapped(*args, **kwargs):
79 79 log.info('executing task %s in sync', f_org.__name__)
80 80 try:
81 81 result = f_org(*args, **kwargs)
82 82 except Exception as e:
83 83 log.error('exception executing sync task %s in sync: %r', f_org.__name__, e)
84 84 raise # TODO: return this in FakeTask as with async tasks?
85 85 return FakeTask(result)
86 86
87 87 return f_wrapped
88 88
89 89
90 90 def __get_lockkey(func, *fargs, **fkwargs):
91 91 params = list(fargs)
92 92 params.extend(['%s-%s' % ar for ar in fkwargs.items()])
93 93
94 94 func_name = str(func.__name__) if hasattr(func, '__name__') else str(func)
95 95
96 96 lockkey = 'task_%s.lock' % \
97 97 sha1(safe_bytes(func_name + '-' + '-'.join(str(x) for x in params))).hexdigest()
98 98 return lockkey
99 99
100 100
101 101 def locked_task(func):
102 102 def __wrapper(func, *fargs, **fkwargs):
103 103 lockkey = __get_lockkey(func, *fargs, **fkwargs)
104 lockkey_path = config.get('cache_dir') or config['app_conf']['cache_dir'] # Backward compatibility for TurboGears < 2.4
105
106 104 log.info('running task with lockkey %s', lockkey)
107 105 try:
108 l = DaemonLock(os.path.join(lockkey_path, lockkey))
106 l = DaemonLock(os.path.join(config['cache_dir'], lockkey))
109 107 ret = func(*fargs, **fkwargs)
110 108 l.release()
111 109 return ret
112 110 except LockHeld:
113 111 log.info('LockHeld')
114 112 return 'Task with key %s already running' % lockkey
115 113
116 114 return decorator(__wrapper, func)
117 115
118 116
119 117 def get_session():
120 118 sa = meta.Session()
121 119 return sa
122 120
123 121
124 122 def dbsession(func):
125 123 def __wrapper(func, *fargs, **fkwargs):
126 124 try:
127 125 ret = func(*fargs, **fkwargs)
128 126 return ret
129 127 finally:
130 128 if kallithea.CELERY_APP and not kallithea.CELERY_APP.conf.task_always_eager:
131 129 meta.Session.remove()
132 130
133 131 return decorator(__wrapper, func)
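For reference, a minimal sketch of how the decorators defined above are stacked on a task function, mirroring their use in kallithea.model.async_tasks below; the task name and arguments here are hypothetical:

from kallithea.lib import celerylib

@celerylib.task          # run through Celery when kallithea.CELERY_APP is set, else synchronously
@celerylib.locked_task   # serialize concurrent runs via a DaemonLock file under config['cache_dir']
@celerylib.dbsession     # remove the SQLAlchemy session afterwards when running asynchronously
def example_task(repo_name, full_index):
    celerylib.get_session()  # initialize database connection
    ...                      # actual work goes here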
@@ -1,527 +1,524 @@ kallithea.model.async_tasks
1 1 # -*- coding: utf-8 -*-
2 2 # This program is free software: you can redistribute it and/or modify
3 3 # it under the terms of the GNU General Public License as published by
4 4 # the Free Software Foundation, either version 3 of the License, or
5 5 # (at your option) any later version.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 """
15 15 kallithea.model.async_tasks
16 16 ~~~~~~~~~~~~~~~~~~~~~~~~~~~
17 17
18 18 Kallithea task modules, containing all tasks that are supposed to be run
19 19 by the celery daemon
20 20
21 21 This file was forked by the Kallithea project in July 2014.
22 22 Original author and date, and relevant copyright and licensing information is below:
23 23 :created_on: Oct 6, 2010
24 24 :author: marcink
25 25 :copyright: (c) 2013 RhodeCode GmbH, and others.
26 26 :license: GPLv3, see LICENSE.md for more details.
27 27 """
28 28
29 29 import email.message
30 30 import email.utils
31 31 import os
32 32 import smtplib
33 33 import time
34 34 import traceback
35 35 from collections import OrderedDict
36 36 from operator import itemgetter
37 37 from time import mktime
38 38
39 39 import celery.utils.log
40 40 from tg import config
41 41
42 42 import kallithea
43 43 from kallithea.lib import celerylib, conf, ext_json, hooks
44 44 from kallithea.lib.indexers.daemon import WhooshIndexingDaemon
45 45 from kallithea.lib.utils2 import asbool, ascii_bytes
46 46 from kallithea.lib.vcs.utils import author_email, author_name
47 47 from kallithea.model import db, repo, userlog
48 48
49 49
50 50 __all__ = ['whoosh_index', 'get_commits_stats', 'send_email']
51 51
52 52
53 53 log = celery.utils.log.get_task_logger(__name__)
54 54
55 55
56 56 @celerylib.task
57 57 @celerylib.locked_task
58 58 @celerylib.dbsession
59 59 def whoosh_index(repo_location, full_index):
60 60 celerylib.get_session() # initialize database connection
61 61
62 62 index_location = config['index_dir']
63 63 WhooshIndexingDaemon(index_location=index_location,
64 64 repo_location=repo_location) \
65 65 .run(full_index=full_index)
66 66
67 67
68 68 def _author_username(author):
69 69 """Return the username of the user identified by the email part of the 'author' string,
70 70 defaulting to the name or email.
71 71 Kind of similar to h.person()."""
72 72 email = author_email(author)
73 73 if email:
74 74 user = db.User.get_by_email(email)
75 75 if user is not None:
76 76 return user.username
77 77 # Still nothing? Just pass back the author name if any, else the email
78 78 return author_name(author) or email
79 79
80 80
81 81 @celerylib.task
82 82 @celerylib.dbsession
83 83 def get_commits_stats(repo_name, ts_min_y, ts_max_y, recurse_limit=100):
84 84 DBS = celerylib.get_session()
85 85 lockkey = celerylib.__get_lockkey('get_commits_stats', repo_name, ts_min_y,
86 86 ts_max_y)
87 lockkey_path = config.get('cache_dir') or config['app_conf']['cache_dir'] # Backward compatibility for TurboGears < 2.4
88
89 87 log.info('running task with lockkey %s', lockkey)
90
91 88 try:
92 lock = celerylib.DaemonLock(os.path.join(lockkey_path, lockkey))
89 lock = celerylib.DaemonLock(os.path.join(config['cache_dir'], lockkey))
93 90
94 91 co_day_auth_aggr = {}
95 92 commits_by_day_aggregate = {}
96 93 db_repo = db.Repository.get_by_repo_name(repo_name)
97 94 if db_repo is None:
98 95 return True
99 96
100 97 scm_repo = db_repo.scm_instance
101 98 repo_size = scm_repo.count()
102 99 # return if repo has no revisions
103 100 if repo_size < 1:
104 101 lock.release()
105 102 return True
106 103
107 104 skip_date_limit = True
108 105 parse_limit = int(config.get('commit_parse_limit'))
109 106 last_rev = None
110 107 last_cs = None
111 108 timegetter = itemgetter('time')
112 109
113 110 dbrepo = DBS.query(db.Repository) \
114 111 .filter(db.Repository.repo_name == repo_name).scalar()
115 112 cur_stats = DBS.query(db.Statistics) \
116 113 .filter(db.Statistics.repository == dbrepo).scalar()
117 114
118 115 if cur_stats is not None:
119 116 last_rev = cur_stats.stat_on_revision
120 117
121 118 if last_rev == scm_repo.get_changeset().revision and repo_size > 1:
122 119 # pass silently without any work if we're not on first revision or
123 120 # current state of parsing revision(from db marker) is the
124 121 # last revision
125 122 lock.release()
126 123 return True
127 124
128 125 if cur_stats:
129 126 commits_by_day_aggregate = OrderedDict(ext_json.loads(
130 127 cur_stats.commit_activity_combined))
131 128 co_day_auth_aggr = ext_json.loads(cur_stats.commit_activity)
132 129
133 130 log.debug('starting parsing %s', parse_limit)
134 131
135 132 last_rev = last_rev + 1 if last_rev and last_rev >= 0 else 0
136 133 log.debug('Getting revisions from %s to %s',
137 134 last_rev, last_rev + parse_limit
138 135 )
139 136 usernames_cache = {}
140 137 for cs in scm_repo[last_rev:last_rev + parse_limit]:
141 138 log.debug('parsing %s', cs)
142 139 last_cs = cs # remember last parsed changeset
143 140 tt = cs.date.timetuple()
144 141 k = mktime(tt[:3] + (0, 0, 0, 0, 0, 0))
145 142
146 143 # get username from author - similar to what h.person does
147 144 username = usernames_cache.get(cs.author)
148 145 if username is None:
149 146 username = _author_username(cs.author)
150 147 usernames_cache[cs.author] = username
151 148
152 149 if username in co_day_auth_aggr:
153 150 try:
154 151 l = [timegetter(x) for x in
155 152 co_day_auth_aggr[username]['data']]
156 153 time_pos = l.index(k)
157 154 except ValueError:
158 155 time_pos = None
159 156
160 157 if time_pos is not None and time_pos >= 0:
161 158 datadict = \
162 159 co_day_auth_aggr[username]['data'][time_pos]
163 160
164 161 datadict["commits"] += 1
165 162 datadict["added"] += len(cs.added)
166 163 datadict["changed"] += len(cs.changed)
167 164 datadict["removed"] += len(cs.removed)
168 165
169 166 else:
170 167 if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
171 168
172 169 datadict = {"time": k,
173 170 "commits": 1,
174 171 "added": len(cs.added),
175 172 "changed": len(cs.changed),
176 173 "removed": len(cs.removed),
177 174 }
178 175 co_day_auth_aggr[username]['data'] \
179 176 .append(datadict)
180 177
181 178 else:
182 179 if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
183 180 co_day_auth_aggr[username] = {
184 181 "label": username,
185 182 "data": [{"time": k,
186 183 "commits": 1,
187 184 "added": len(cs.added),
188 185 "changed": len(cs.changed),
189 186 "removed": len(cs.removed),
190 187 }],
191 188 "schema": ["commits"],
192 189 }
193 190
194 191 # gather all data by day
195 192 if k in commits_by_day_aggregate:
196 193 commits_by_day_aggregate[k] += 1
197 194 else:
198 195 commits_by_day_aggregate[k] = 1
199 196
200 197 overview_data = sorted(commits_by_day_aggregate.items(),
201 198 key=itemgetter(0))
202 199
203 200 stats = cur_stats if cur_stats else db.Statistics()
204 201 stats.commit_activity = ascii_bytes(ext_json.dumps(co_day_auth_aggr))
205 202 stats.commit_activity_combined = ascii_bytes(ext_json.dumps(overview_data))
206 203
207 204 log.debug('last revision %s', last_rev)
208 205 leftovers = len(scm_repo.revisions[last_rev:])
209 206 log.debug('revisions to parse %s', leftovers)
210 207
211 208 if last_rev == 0 or leftovers < parse_limit:
212 209 log.debug('getting code trending stats')
213 210 stats.languages = ascii_bytes(ext_json.dumps(__get_codes_stats(repo_name)))
214 211
215 212 try:
216 213 stats.repository = dbrepo
217 214 stats.stat_on_revision = last_cs.revision if last_cs else 0
218 215 DBS.add(stats)
219 216 DBS.commit()
220 217 except:
221 218 log.error(traceback.format_exc())
222 219 DBS.rollback()
223 220 lock.release()
224 221 return False
225 222
226 223 # final release
227 224 lock.release()
228 225
229 226 # execute another task if celery is enabled
230 227 if len(scm_repo.revisions) > 1 and kallithea.CELERY_APP and recurse_limit > 0:
231 228 get_commits_stats(repo_name, ts_min_y, ts_max_y, recurse_limit - 1)
232 229 elif recurse_limit <= 0:
233 230 log.debug('Not recursing - limit has been reached')
234 231 else:
235 232 log.debug('Not recursing')
236 233 except celerylib.LockHeld:
237 234 log.info('Task with key %s already running', lockkey)
238 235 return 'Task with key %s already running' % lockkey
239 236
240 237
241 238 @celerylib.task
242 239 @celerylib.dbsession
243 240 def send_email(recipients, subject, body='', html_body='', headers=None, from_name=None):
244 241 """
245 242 Sends an email with defined parameters from the .ini files.
246 243
247 244 :param recipients: list of recipients; if this is None, the email
248 245 address defined in the 'email_to' field and all admins are used instead
249 246 :param subject: subject of the mail
250 247 :param body: plain text body of the mail
251 248 :param html_body: html version of body
252 249 :param headers: dictionary of prepopulated e-mail headers
253 250 :param from_name: full name to be used as sender of this mail - often a
254 251 .full_name_or_username value
255 252 """
256 253 assert isinstance(recipients, list), recipients
257 254 if headers is None:
258 255 headers = {}
259 256 else:
260 257 # do not modify the original headers object passed by the caller
261 258 headers = headers.copy()
262 259
263 260 email_config = config
264 261 email_prefix = email_config.get('email_prefix', '')
265 262 if email_prefix:
266 263 subject = "%s %s" % (email_prefix, subject)
267 264
268 265 if not recipients:
269 266 # if recipients are not defined we send to email_config + all admins
270 267 recipients = [u.email for u in db.User.query()
271 268 .filter(db.User.admin == True).all()]
272 269 if email_config.get('email_to') is not None:
273 270 recipients += email_config.get('email_to').split(',')
274 271
275 272 # If there are still no recipients, there are no admins and no address
276 273 # configured in email_to, so return.
277 274 if not recipients:
278 275 log.error("No recipients specified and no fallback available.")
279 276 return False
280 277
281 278 log.warning("No recipients specified for '%s' - sending to admins %s", subject, ' '.join(recipients))
282 279
283 280 # SMTP sender
284 281 app_email_from = email_config.get('app_email_from', 'Kallithea')
285 282 # 'From' header
286 283 if from_name is not None:
287 284 # set From header based on from_name but with a generic e-mail address
288 285 # In case app_email_from is in "Some Name <e-mail>" format, we first
289 286 # extract the e-mail address.
290 287 envelope_addr = author_email(app_email_from)
291 288 headers['From'] = '"%s" <%s>' % (
292 289 email.utils.quote('%s (no-reply)' % from_name),
293 290 envelope_addr)
294 291
295 292 smtp_server = email_config.get('smtp_server')
296 293 smtp_port = email_config.get('smtp_port')
297 294 smtp_use_tls = asbool(email_config.get('smtp_use_tls'))
298 295 smtp_use_ssl = asbool(email_config.get('smtp_use_ssl'))
299 296 smtp_auth = email_config.get('smtp_auth') # undocumented - overrule automatic choice of auth mechanism
300 297 smtp_username = email_config.get('smtp_username')
301 298 smtp_password = email_config.get('smtp_password')
302 299
303 300 logmsg = ("Mail details:\n"
304 301 "recipients: %s\n"
305 302 "headers: %s\n"
306 303 "subject: %s\n"
307 304 "body:\n%s\n"
308 305 "html:\n%s\n"
309 306 % (' '.join(recipients), headers, subject, body, html_body))
310 307
311 308 if smtp_server:
312 309 log.debug("Sending e-mail. " + logmsg)
313 310 else:
314 311 log.error("SMTP mail server not configured - cannot send e-mail.")
315 312 log.warning(logmsg)
316 313 return False
317 314
318 315 msg = email.message.EmailMessage()
319 316 msg['Subject'] = subject
320 317 msg['From'] = app_email_from # fallback - might be overridden by a header
321 318 msg['To'] = ', '.join(recipients)
322 319 msg['Date'] = email.utils.formatdate(time.time())
323 320
324 321 for key, value in headers.items():
325 322 del msg[key] # Delete key first to make sure add_header will replace header (if any), no matter the casing
326 323 msg.add_header(key, value)
327 324
328 325 msg.set_content(body)
329 326 msg.add_alternative(html_body, subtype='html')
330 327
331 328 try:
332 329 if smtp_use_ssl:
333 330 smtp_serv = smtplib.SMTP_SSL(smtp_server, smtp_port)
334 331 else:
335 332 smtp_serv = smtplib.SMTP(smtp_server, smtp_port)
336 333
337 334 if smtp_use_tls:
338 335 smtp_serv.starttls()
339 336
340 337 if smtp_auth:
341 338 smtp_serv.ehlo() # populate esmtp_features
342 339 smtp_serv.esmtp_features["auth"] = smtp_auth
343 340
344 341 if smtp_username and smtp_password is not None:
345 342 smtp_serv.login(smtp_username, smtp_password)
346 343
347 344 smtp_serv.sendmail(app_email_from, recipients, msg.as_string())
348 345 smtp_serv.quit()
349 346
350 347 log.info('Mail was sent to: %s' % recipients)
351 348 except:
352 349 log.error('Mail sending failed')
353 350 log.error(traceback.format_exc())
354 351 return False
355 352 return True
356 353
357 354
358 355 @celerylib.task
359 356 @celerylib.dbsession
360 357 def create_repo(form_data, cur_user):
361 358 DBS = celerylib.get_session()
362 359
363 360 cur_user = db.User.guess_instance(cur_user)
364 361
365 362 owner = cur_user
366 363 repo_name = form_data['repo_name']
367 364 repo_name_full = form_data['repo_name_full']
368 365 repo_type = form_data['repo_type']
369 366 description = form_data['repo_description']
370 367 private = form_data['repo_private']
371 368 clone_uri = form_data.get('clone_uri')
372 369 repo_group = form_data['repo_group']
373 370 landing_rev = form_data['repo_landing_rev']
374 371 copy_fork_permissions = form_data.get('copy_permissions')
375 372 copy_group_permissions = form_data.get('repo_copy_permissions')
376 373 fork_of = form_data.get('fork_parent_id')
377 374 state = form_data.get('repo_state', db.Repository.STATE_PENDING)
378 375
379 376 # repo creation defaults, private and repo_type are filled in form
380 377 defs = db.Setting.get_default_repo_settings(strip_prefix=True)
381 378 enable_statistics = defs.get('repo_enable_statistics')
382 379 enable_downloads = defs.get('repo_enable_downloads')
383 380
384 381 try:
385 382 db_repo = repo.RepoModel()._create_repo(
386 383 repo_name=repo_name_full,
387 384 repo_type=repo_type,
388 385 description=description,
389 386 owner=owner,
390 387 private=private,
391 388 clone_uri=clone_uri,
392 389 repo_group=repo_group,
393 390 landing_rev=landing_rev,
394 391 fork_of=fork_of,
395 392 copy_fork_permissions=copy_fork_permissions,
396 393 copy_group_permissions=copy_group_permissions,
397 394 enable_statistics=enable_statistics,
398 395 enable_downloads=enable_downloads,
399 396 state=state
400 397 )
401 398
402 399 userlog.action_logger(cur_user, 'user_created_repo',
403 400 form_data['repo_name_full'], '')
404 401
405 402 DBS.commit()
406 403 # now create this repo on Filesystem
407 404 repo.RepoModel()._create_filesystem_repo(
408 405 repo_name=repo_name,
409 406 repo_type=repo_type,
410 407 repo_group=db.RepoGroup.guess_instance(repo_group),
411 408 clone_uri=clone_uri,
412 409 )
413 410 db_repo = db.Repository.get_by_repo_name(repo_name_full)
414 411 hooks.log_create_repository(db_repo.get_dict(), created_by=owner.username)
415 412
416 413 # update repo changeset caches initially
417 414 db_repo.update_changeset_cache()
418 415
419 416 # set new created state
420 417 db_repo.set_state(db.Repository.STATE_CREATED)
421 418 DBS.commit()
422 419 except Exception as e:
423 420 log.warning('Exception %s occurred when creating repository, '
424 421 'doing cleanup...' % e)
425 422 # rollback things manually !
426 423 db_repo = db.Repository.get_by_repo_name(repo_name_full)
427 424 if db_repo:
428 425 db.Repository.delete(db_repo.repo_id)
429 426 DBS.commit()
430 427 repo.RepoModel()._delete_filesystem_repo(db_repo)
431 428 raise
432 429
433 430 return True
434 431
435 432
436 433 @celerylib.task
437 434 @celerylib.dbsession
438 435 def create_repo_fork(form_data, cur_user):
439 436 """
440 437 Creates a fork of a repository using internal VCS methods
441 438
442 439 :param form_data:
443 440 :param cur_user:
444 441 """
445 442 DBS = celerylib.get_session()
446 443
447 444 base_path = kallithea.CONFIG['base_path']
448 445 cur_user = db.User.guess_instance(cur_user)
449 446
450 447 repo_name = form_data['repo_name'] # fork in this case
451 448 repo_name_full = form_data['repo_name_full']
452 449
453 450 repo_type = form_data['repo_type']
454 451 owner = cur_user
455 452 private = form_data['private']
456 453 clone_uri = form_data.get('clone_uri')
457 454 repo_group = form_data['repo_group']
458 455 landing_rev = form_data['landing_rev']
459 456 copy_fork_permissions = form_data.get('copy_permissions')
460 457
461 458 try:
462 459 fork_of = db.Repository.guess_instance(form_data.get('fork_parent_id'))
463 460
464 461 repo.RepoModel()._create_repo(
465 462 repo_name=repo_name_full,
466 463 repo_type=repo_type,
467 464 description=form_data['description'],
468 465 owner=owner,
469 466 private=private,
470 467 clone_uri=clone_uri,
471 468 repo_group=repo_group,
472 469 landing_rev=landing_rev,
473 470 fork_of=fork_of,
474 471 copy_fork_permissions=copy_fork_permissions
475 472 )
476 473 userlog.action_logger(cur_user, 'user_forked_repo:%s' % repo_name_full,
477 474 fork_of.repo_name, '')
478 475 DBS.commit()
479 476
480 477 source_repo_path = os.path.join(base_path, fork_of.repo_name)
481 478
482 479 # now create this repo on Filesystem
483 480 repo.RepoModel()._create_filesystem_repo(
484 481 repo_name=repo_name,
485 482 repo_type=repo_type,
486 483 repo_group=db.RepoGroup.guess_instance(repo_group),
487 484 clone_uri=source_repo_path,
488 485 )
489 486 db_repo = db.Repository.get_by_repo_name(repo_name_full)
490 487 hooks.log_create_repository(db_repo.get_dict(), created_by=owner.username)
491 488
492 489 # update repo changeset caches initially
493 490 db_repo.update_changeset_cache()
494 491
495 492 # set new created state
496 493 db_repo.set_state(db.Repository.STATE_CREATED)
497 494 DBS.commit()
498 495 except Exception as e:
499 496 log.warning('Exception %s occurred when forking repository, '
500 497 'doing cleanup...' % e)
501 498 # rollback things manually !
502 499 db_repo = db.Repository.get_by_repo_name(repo_name_full)
503 500 if db_repo:
504 501 db.Repository.delete(db_repo.repo_id)
505 502 DBS.commit()
506 503 repo.RepoModel()._delete_filesystem_repo(db_repo)
507 504 raise
508 505
509 506 return True
510 507
511 508
512 509 def __get_codes_stats(repo_name):
513 510 scm_repo = db.Repository.get_by_repo_name(repo_name).scm_instance
514 511
515 512 tip = scm_repo.get_changeset()
516 513 code_stats = {}
517 514
518 515 for _topnode, _dirnodes, filenodes in tip.walk('/'):
519 516 for filenode in filenodes:
520 517 ext = filenode.extension.lower()
521 518 if ext in conf.LANGUAGES_EXTENSIONS_MAP and not filenode.is_binary:
522 519 if ext in code_stats:
523 520 code_stats[ext] += 1
524 521 else:
525 522 code_stats[ext] = 1
526 523
527 524 return code_stats or {}
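Finally, an illustrative sketch of how the lock file path simplified by this commit is derived: the key is a sha1 of the task name and its arguments, and the lock file lives directly under config['cache_dir']. The helper name, repository name, and cache directory below are made up:

import os
from hashlib import sha1

def example_lockkey(func_name, *args):  # hypothetical mirror of celerylib.__get_lockkey
    return 'task_%s.lock' % sha1(
        ('%s-%s' % (func_name, '-'.join(str(a) for a in args))).encode('utf-8')
    ).hexdigest()

lockkey = example_lockkey('get_commits_stats', 'my-repo', 1262300400, 1293836400)
lockfile = os.path.join('/srv/kallithea/data/cache', lockkey)  # stands in for config['cache_dir']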