fix leftover bad code on fork function
marcink
r1941:58c684ba beta
@@ -1,414 +1,414 @@
# -*- coding: utf-8 -*-
"""
    rhodecode.lib.celerylib.tasks
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    RhodeCode task modules, containing all tasks that are supposed to be run
    by the celery daemon

    :created_on: Oct 6, 2010
    :author: marcink
    :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
    :license: GPLv3, see COPYING for more details.
"""
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
from celery.decorators import task

import os
import traceback
import logging
from os.path import join as jn

from time import mktime
from operator import itemgetter
from string import lower

from pylons import config, url
from pylons.i18n.translation import _

from vcs import get_backend

from rhodecode import CELERY_ON
from rhodecode.lib import LANGUAGES_EXTENSIONS_MAP, safe_str
from rhodecode.lib.celerylib import run_task, locked_task, dbsession, \
    str2bool, __get_lockkey, LockHeld, DaemonLock, get_session
from rhodecode.lib.helpers import person
from rhodecode.lib.rcmail.smtp_mailer import SmtpMailer
from rhodecode.lib.utils import add_cache, action_logger
from rhodecode.lib.compat import json, OrderedDict

from rhodecode.model.db import Statistics, Repository, User


add_cache(config)

__all__ = ['whoosh_index', 'get_commits_stats',
           'reset_user_password', 'send_email']

def get_logger(cls):
    if CELERY_ON:
        try:
            log = cls.get_logger()
        except:
            log = logging.getLogger(__name__)
    else:
        log = logging.getLogger(__name__)

    return log


@task(ignore_result=True)
@locked_task
@dbsession
def whoosh_index(repo_location, full_index):
    from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
    log = whoosh_index.get_logger(whoosh_index)
    DBS = get_session()

    index_location = config['index_dir']
    WhooshIndexingDaemon(index_location=index_location,
                         repo_location=repo_location, sa=DBS)\
        .run(full_index=full_index)


@task(ignore_result=True)
@dbsession
def get_commits_stats(repo_name, ts_min_y, ts_max_y):
    log = get_logger(get_commits_stats)
    DBS = get_session()
    lockkey = __get_lockkey('get_commits_stats', repo_name, ts_min_y,
                            ts_max_y)
    lockkey_path = config['here']

    log.info('running task with lockkey %s', lockkey)

    try:
        lock = l = DaemonLock(file_=jn(lockkey_path, lockkey))

        # for js data compatibility, strip quote characters from the person key
        akc = lambda k: person(k).replace('"', "")

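        # accumulators: per-author daily activity and total commits per day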
        co_day_auth_aggr = {}
        commits_by_day_aggregate = {}
        repo = Repository.get_by_repo_name(repo_name)
        if repo is None:
            return True

        repo = repo.scm_instance
        repo_size = repo.count()
        # return if the repo has no revisions
        if repo_size < 1:
            lock.release()
            return True

        skip_date_limit = True
        parse_limit = int(config['app_conf'].get('commit_parse_limit'))
        last_rev = None
        last_cs = None
        timegetter = itemgetter('time')

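        # load the repository row and any previously computed statistics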
        dbrepo = DBS.query(Repository)\
            .filter(Repository.repo_name == repo_name).scalar()
        cur_stats = DBS.query(Statistics)\
            .filter(Statistics.repository == dbrepo).scalar()

        if cur_stats is not None:
            last_rev = cur_stats.stat_on_revision

        if last_rev == repo.get_changeset().revision and repo_size > 1:
            # pass silently without doing any work if the last parsed
            # revision (the db marker) is already the repository tip
            lock.release()
            return True

        if cur_stats:
            commits_by_day_aggregate = OrderedDict(json.loads(
                cur_stats.commit_activity_combined))
            co_day_auth_aggr = json.loads(cur_stats.commit_activity)

        log.debug('starting parsing %s', parse_limit)
        lmktime = mktime

        last_rev = last_rev + 1 if last_rev >= 0 else 0
        log.debug('Getting revisions from %s to %s' % (
            last_rev, last_rev + parse_limit)
        )
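        # walk the next batch of changesets and aggregate per-author stats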
        for cs in repo[last_rev:last_rev + parse_limit]:
            last_cs = cs  # remember last parsed changeset
            k = lmktime([cs.date.timetuple()[0], cs.date.timetuple()[1],
                         cs.date.timetuple()[2], 0, 0, 0, 0, 0, 0])

            if akc(cs.author) in co_day_auth_aggr:
                try:
                    l = [timegetter(x) for x in
                         co_day_auth_aggr[akc(cs.author)]['data']]
                    time_pos = l.index(k)
                except ValueError:
                    time_pos = False

                if time_pos >= 0 and time_pos is not False:

                    datadict = \
                        co_day_auth_aggr[akc(cs.author)]['data'][time_pos]

                    datadict["commits"] += 1
                    datadict["added"] += len(cs.added)
                    datadict["changed"] += len(cs.changed)
                    datadict["removed"] += len(cs.removed)

                else:
                    if k >= ts_min_y and k <= ts_max_y or skip_date_limit:

                        datadict = {"time": k,
                                    "commits": 1,
                                    "added": len(cs.added),
                                    "changed": len(cs.changed),
                                    "removed": len(cs.removed),
                                    }
                        co_day_auth_aggr[akc(cs.author)]['data']\
                            .append(datadict)

            else:
                if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
                    co_day_auth_aggr[akc(cs.author)] = {
                        "label": akc(cs.author),
                        "data": [{"time": k,
                                  "commits": 1,
                                  "added": len(cs.added),
                                  "changed": len(cs.changed),
                                  "removed": len(cs.removed),
                                  }],
                        "schema": ["commits"],
                    }

            # gather all data by day
            if k in commits_by_day_aggregate:
                commits_by_day_aggregate[k] += 1
            else:
                commits_by_day_aggregate[k] = 1

        overview_data = sorted(commits_by_day_aggregate.items(),
                               key=itemgetter(0))

        if not co_day_auth_aggr:
            co_day_auth_aggr[akc(repo.contact)] = {
                "label": akc(repo.contact),
                "data": [0, 1],
                "schema": ["commits"],
            }

        stats = cur_stats if cur_stats else Statistics()
        stats.commit_activity = json.dumps(co_day_auth_aggr)
        stats.commit_activity_combined = json.dumps(overview_data)

        log.debug('last revision %s', last_rev)
        leftovers = len(repo.revisions[last_rev:])
        log.debug('revisions to parse %s', leftovers)

        if last_rev == 0 or leftovers < parse_limit:
            log.debug('getting code trending stats')
            stats.languages = json.dumps(__get_codes_stats(repo_name))

        try:
            stats.repository = dbrepo
            stats.stat_on_revision = last_cs.revision if last_cs else 0
            DBS.add(stats)
            DBS.commit()
        except:
            log.error(traceback.format_exc())
            DBS.rollback()
            lock.release()
            return False

        # final release
        lock.release()

        # execute another task if celery is enabled
        if len(repo.revisions) > 1 and CELERY_ON:
            run_task(get_commits_stats, repo_name, ts_min_y, ts_max_y)
        return True
    except LockHeld:
        log.info('LockHeld')
        return 'Task with key %s already running' % lockkey

@task(ignore_result=True)
@dbsession
def send_password_link(user_email):
    from rhodecode.model.notification import EmailNotificationModel

    log = get_logger(send_password_link)
    DBS = get_session()

    try:
        user = User.get_by_email(user_email)
        if user:
            log.debug('password reset user found %s' % user)
            link = url('reset_password_confirmation', key=user.api_key,
                       qualified=True)
            reg_type = EmailNotificationModel.TYPE_PASSWORD_RESET
            body = EmailNotificationModel().get_email_tmpl(reg_type,
                                                **{'user': user.short_contact,
                                                   'reset_url': link})
            log.debug('sending email')
            run_task(send_email, user_email,
                     _("password reset link"), body)
            log.info('send new password mail to %s', user_email)
        else:
            log.debug("password reset email %s not found" % user_email)
    except:
        log.error(traceback.format_exc())
        return False

    return True

@task(ignore_result=True)
@dbsession
def reset_user_password(user_email):
    from rhodecode.lib import auth

    log = get_logger(reset_user_password)
    DBS = get_session()

    try:
        try:
            user = User.get_by_email(user_email)
            new_passwd = auth.PasswordGenerator().gen_password(8,
                             auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
            if user:
                user.password = auth.get_crypt_password(new_passwd)
                user.api_key = auth.generate_api_key(user.username)
                DBS.add(user)
                DBS.commit()
                log.info('change password for %s', user_email)
            if new_passwd is None:
                raise Exception('unable to generate new password')
        except:
            log.error(traceback.format_exc())
            DBS.rollback()

        run_task(send_email, user_email,
                 'Your new password',
                 'Your new RhodeCode password:%s' % (new_passwd))
        log.info('send new password mail to %s', user_email)

    except:
        log.error('Failed to update user password')
        log.error(traceback.format_exc())

    return True


@task(ignore_result=True)
@dbsession
def send_email(recipients, subject, body, html_body=''):
    """
    Sends an email with defined parameters from the .ini files.

    :param recipients: list of recipients; if this is empty, the email
        address defined in the 'email_to' field is used instead
    :param subject: subject of the mail
    :param body: body of the mail
    :param html_body: html version of body
    """
    log = get_logger(send_email)
    DBS = get_session()

    email_config = config
    subject = "%s %s" % (email_config.get('email_prefix'), subject)
    if not recipients:
        # if recipients are not defined we send to email_config + all admins
        admins = [u.email for u in User.query()
                  .filter(User.admin == True).all()]
        recipients = [email_config.get('email_to')] + admins

    mail_from = email_config.get('app_email_from', 'RhodeCode')
    user = email_config.get('smtp_username')
    passwd = email_config.get('smtp_password')
    mail_server = email_config.get('smtp_server')
    mail_port = email_config.get('smtp_port')
    tls = str2bool(email_config.get('smtp_use_tls'))
    ssl = str2bool(email_config.get('smtp_use_ssl'))
    debug = str2bool(config.get('debug'))
    smtp_auth = email_config.get('smtp_auth')

    try:
        m = SmtpMailer(mail_from, user, passwd, mail_server, smtp_auth,
                       mail_port, ssl, tls, debug=debug)
        m.send(recipients, subject, body, html_body)
    except:
        log.error('Mail sending failed')
        log.error(traceback.format_exc())
        return False
    return True


@task(ignore_result=True)
@dbsession
def create_repo_fork(form_data, cur_user):
    """
    Creates a fork of a repository using internal VCS methods

    :param form_data:
    :param cur_user:
    """
    from rhodecode.model.repo import RepoModel

    log = get_logger(create_repo_fork)
-    DBS = create_repo_fork.DBS
+    DBS = get_session()

    base_path = Repository.base_path()

    RepoModel(DBS).create(form_data, cur_user, just_db=True, fork=True)

    alias = form_data['repo_type']
    org_repo_name = form_data['org_path']
    fork_name = form_data['repo_name_full']
    update_after_clone = form_data['update_after_clone']
    source_repo_path = os.path.join(base_path, org_repo_name)
    destination_fork_path = os.path.join(base_path, fork_name)

    log.info('creating fork of %s as %s', source_repo_path,
             destination_fork_path)
    backend = get_backend(alias)
    backend(safe_str(destination_fork_path), create=True,
            src_url=safe_str(source_repo_path),
            update_after_clone=update_after_clone)
    action_logger(cur_user, 'user_forked_repo:%s' % fork_name,
                  org_repo_name, '', DBS)

    action_logger(cur_user, 'user_created_fork:%s' % fork_name,
                  fork_name, '', DBS)
    # finally commit at latest possible stage
    DBS.commit()

def __get_codes_stats(repo_name):
    repo = Repository.get_by_repo_name(repo_name).scm_instance

    tip = repo.get_changeset()
    code_stats = {}

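    # cs[2] holds the file nodes yielded by tip.walk('/')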
    def aggregate(cs):
        for f in cs[2]:
            ext = lower(f.extension)
            if ext in LANGUAGES_EXTENSIONS_MAP.keys() and not f.is_binary:
                if ext in code_stats:
                    code_stats[ext] += 1
                else:
                    code_stats[ext] = 1

    map(aggregate, tip.walk('/'))

    return code_stats or {}