##// END OF EJS Templates
fixed fork journal entry
marcink -
r1730:ce0b4753 beta
parent child Browse files
Show More
@@ -1,404 +1,405 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 """
2 """
3 rhodecode.lib.celerylib.tasks
3 rhodecode.lib.celerylib.tasks
4 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
4 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
5
5
6 RhodeCode task modules, containing all task that suppose to be run
6 RhodeCode task modules, containing all task that suppose to be run
7 by celery daemon
7 by celery daemon
8
8
9 :created_on: Oct 6, 2010
9 :created_on: Oct 6, 2010
10 :author: marcink
10 :author: marcink
11 :copyright: (C) 2009-2011 Marcin Kuzminski <marcin@python-works.com>
11 :copyright: (C) 2009-2011 Marcin Kuzminski <marcin@python-works.com>
12 :license: GPLv3, see COPYING for more details.
12 :license: GPLv3, see COPYING for more details.
13 """
13 """
14 # This program is free software: you can redistribute it and/or modify
14 # This program is free software: you can redistribute it and/or modify
15 # it under the terms of the GNU General Public License as published by
15 # it under the terms of the GNU General Public License as published by
16 # the Free Software Foundation, either version 3 of the License, or
16 # the Free Software Foundation, either version 3 of the License, or
17 # (at your option) any later version.
17 # (at your option) any later version.
18 #
18 #
19 # This program is distributed in the hope that it will be useful,
19 # This program is distributed in the hope that it will be useful,
20 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # but WITHOUT ANY WARRANTY; without even the implied warranty of
21 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
22 # GNU General Public License for more details.
22 # GNU General Public License for more details.
23 #
23 #
24 # You should have received a copy of the GNU General Public License
24 # You should have received a copy of the GNU General Public License
25 # along with this program. If not, see <http://www.gnu.org/licenses/>.
25 # along with this program. If not, see <http://www.gnu.org/licenses/>.
26 from celery.decorators import task
26 from celery.decorators import task
27
27
28 import os
28 import os
29 import traceback
29 import traceback
30 import logging
30 import logging
31 from os.path import join as jn
31 from os.path import join as jn
32
32
33 from time import mktime
33 from time import mktime
34 from operator import itemgetter
34 from operator import itemgetter
35 from string import lower
35 from string import lower
36
36
37 from pylons import config, url
37 from pylons import config, url
38 from pylons.i18n.translation import _
38 from pylons.i18n.translation import _
39
39
40 from vcs import get_backend
40 from vcs import get_backend
41
41
42 from rhodecode import CELERY_ON
42 from rhodecode import CELERY_ON
43 from rhodecode.lib import LANGUAGES_EXTENSIONS_MAP, safe_str
43 from rhodecode.lib import LANGUAGES_EXTENSIONS_MAP, safe_str
44 from rhodecode.lib.celerylib import run_task, locked_task, str2bool, \
44 from rhodecode.lib.celerylib import run_task, locked_task, str2bool, \
45 __get_lockkey, LockHeld, DaemonLock
45 __get_lockkey, LockHeld, DaemonLock
46 from rhodecode.lib.helpers import person
46 from rhodecode.lib.helpers import person
47 from rhodecode.lib.rcmail.smtp_mailer import SmtpMailer
47 from rhodecode.lib.rcmail.smtp_mailer import SmtpMailer
48 from rhodecode.lib.utils import add_cache, action_logger
48 from rhodecode.lib.utils import add_cache, action_logger
49 from rhodecode.lib.compat import json, OrderedDict
49 from rhodecode.lib.compat import json, OrderedDict
50
50
51 from rhodecode.model import init_model
51 from rhodecode.model import init_model
52 from rhodecode.model import meta
52 from rhodecode.model import meta
53 from rhodecode.model.db import Statistics, Repository, User
53 from rhodecode.model.db import Statistics, Repository, User
54
54
55 from sqlalchemy import engine_from_config
55 from sqlalchemy import engine_from_config
56
56
# Wire up Beaker cache regions from the Pylons config at import time so
# cached helpers work inside celery workers as well as the web app.
add_cache(config)

# Public task API of this module.
__all__ = ['whoosh_index', 'get_commits_stats',
           'reset_user_password', 'send_email']
61
61
62
62
def get_session():
    """Return a SQLAlchemy session bound to the application database.

    Celery workers run in a separate process from the web app, so when
    celery is enabled the model is (re)bound to a fresh engine built
    from the ``sqlalchemy.db1.`` settings of the .ini config first.
    """
    if CELERY_ON:
        engine = engine_from_config(config, 'sqlalchemy.db1.')
        init_model(engine)
    return meta.Session()
69
69
def get_logger(cls):
    """Return the best logger available for a task.

    Under celery, prefer the task's own logger; fall back to this
    module's standard logger when that is unavailable or fails.

    :param cls: a celery task (anything exposing ``get_logger()``)
    :return: a logger instance
    """
    if CELERY_ON:
        try:
            return cls.get_logger()
        except Exception:
            # narrowed from a bare ``except:`` -- a bare clause would
            # also swallow SystemExit/KeyboardInterrupt
            return logging.getLogger(__name__)
    return logging.getLogger(__name__)
80
80
@task(ignore_result=True)
@locked_task
def whoosh_index(repo_location, full_index):
    """Run the Whoosh full-text indexing daemon over the repositories.

    :param repo_location: filesystem path holding the repositories
    :param full_index: when True, rebuild the index from scratch
    """
    from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon

    daemon = WhooshIndexingDaemon(index_location=config['index_dir'],
                                  repo_location=repo_location,
                                  sa=get_session())
    daemon.run(full_index=full_index)
92
92
93
93
@task(ignore_result=True)
def get_commits_stats(repo_name, ts_min_y, ts_max_y):
    """Incrementally aggregate commit statistics for a repository.

    Parses at most ``commit_parse_limit`` changesets past the last
    revision recorded in the ``Statistics`` row, merging per-author
    activity (commits/added/changed/removed per day) and a per-day
    commit count, then persists both as JSON. Re-queues itself via
    ``run_task`` until the whole history is parsed.

    :param repo_name: name of the repository to parse
    :param ts_min_y: lower timestamp bound for the aggregation window
    :param ts_max_y: upper timestamp bound for the aggregation window
    :return: True on success / nothing to do, False on DB failure,
        or a message string when another run holds the lock
    """
    log = get_logger(get_commits_stats)

    # a filesystem lock keyed on task name + args prevents two workers
    # from parsing the same repository window concurrently
    lockkey = __get_lockkey('get_commits_stats', repo_name, ts_min_y,
                            ts_max_y)
    lockkey_path = config['here']

    log.info('running task with lockkey %s', lockkey)
    try:
        sa = get_session()
        lock = l = DaemonLock(file_=jn(lockkey_path, lockkey))

        # for js data compatibilty cleans the key for person from '
        akc = lambda k: person(k).replace('"', "")

        co_day_auth_aggr = {}
        commits_by_day_aggregate = {}
        repo = Repository.get_by_repo_name(repo_name).scm_instance
        repo_size = len(repo.revisions)
        #return if repo have no revisions
        if repo_size < 1:
            lock.release()
            return True

        skip_date_limit = True
        parse_limit = int(config['app_conf'].get('commit_parse_limit'))
        last_rev = 0
        last_cs = None
        timegetter = itemgetter('time')

        dbrepo = sa.query(Repository)\
            .filter(Repository.repo_name == repo_name).scalar()
        cur_stats = sa.query(Statistics)\
            .filter(Statistics.repository == dbrepo).scalar()

        if cur_stats is not None:
            last_rev = cur_stats.stat_on_revision

        if last_rev == repo.get_changeset().revision and repo_size > 1:
            # pass silently without any work if we're not on first revision or
            # current state of parsing revision(from db marker) is the
            # last revision
            lock.release()
            return True

        # resume from previously stored aggregates, if any
        if cur_stats:
            commits_by_day_aggregate = OrderedDict(json.loads(
                                    cur_stats.commit_activity_combined))
            co_day_auth_aggr = json.loads(cur_stats.commit_activity)

        log.debug('starting parsing %s', parse_limit)
        # bind mktime to a local name for faster lookup in the loop
        lmktime = mktime

        # skip the already-parsed revision, except on the very first run
        last_rev = last_rev + 1 if last_rev > 0 else last_rev

        for cs in repo[last_rev:last_rev + parse_limit]:
            last_cs = cs  # remember last parsed changeset
            # k = midnight timestamp of the changeset's date (day bucket)
            k = lmktime([cs.date.timetuple()[0], cs.date.timetuple()[1],
                         cs.date.timetuple()[2], 0, 0, 0, 0, 0, 0])

            if akc(cs.author) in co_day_auth_aggr:
                try:
                    # locate this day in the author's existing series
                    l = [timegetter(x) for x in
                         co_day_auth_aggr[akc(cs.author)]['data']]
                    time_pos = l.index(k)
                except ValueError:
                    # False marks "day not present"; note index 0 is a
                    # valid position, hence the explicit check below
                    time_pos = False

                if time_pos >= 0 and time_pos is not False:

                    datadict = \
                        co_day_auth_aggr[akc(cs.author)]['data'][time_pos]

                    datadict["commits"] += 1
                    datadict["added"] += len(cs.added)
                    datadict["changed"] += len(cs.changed)
                    datadict["removed"] += len(cs.removed)

                else:
                    # new day for a known author
                    if k >= ts_min_y and k <= ts_max_y or skip_date_limit:

                        datadict = {"time": k,
                                    "commits": 1,
                                    "added": len(cs.added),
                                    "changed": len(cs.changed),
                                    "removed": len(cs.removed),
                                    }
                        co_day_auth_aggr[akc(cs.author)]['data']\
                            .append(datadict)

            else:
                # first changeset seen for this author
                if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
                    co_day_auth_aggr[akc(cs.author)] = {
                        "label": akc(cs.author),
                        "data": [{"time":k,
                                  "commits":1,
                                  "added":len(cs.added),
                                  "changed":len(cs.changed),
                                  "removed":len(cs.removed),
                                  }],
                        "schema": ["commits"],
                    }

            #gather all data by day
            if k in commits_by_day_aggregate:
                commits_by_day_aggregate[k] += 1
            else:
                commits_by_day_aggregate[k] = 1

        # sorted by day timestamp so the chart data is chronological
        overview_data = sorted(commits_by_day_aggregate.items(),
                               key=itemgetter(0))

        # placeholder series so the JS chart has something to render
        if not co_day_auth_aggr:
            co_day_auth_aggr[akc(repo.contact)] = {
                "label": akc(repo.contact),
                "data": [0, 1],
                "schema": ["commits"],
            }

        stats = cur_stats if cur_stats else Statistics()
        stats.commit_activity = json.dumps(co_day_auth_aggr)
        stats.commit_activity_combined = json.dumps(overview_data)

        log.debug('last revison %s', last_rev)
        leftovers = len(repo.revisions[last_rev:])
        log.debug('revisions to parse %s', leftovers)

        # only compute the (expensive) language stats on the final pass
        if last_rev == 0 or leftovers < parse_limit:
            log.debug('getting code trending stats')
            stats.languages = json.dumps(__get_codes_stats(repo_name))

        try:
            stats.repository = dbrepo
            stats.stat_on_revision = last_cs.revision if last_cs else 0
            sa.add(stats)
            sa.commit()
        # NOTE(review): bare except also swallows SystemExit /
        # KeyboardInterrupt -- should be ``except Exception:``
        except:
            log.error(traceback.format_exc())
            sa.rollback()
            lock.release()
            return False

        #final release
        lock.release()

        #execute another task if celery is enabled
        if len(repo.revisions) > 1 and CELERY_ON:
            run_task(get_commits_stats, repo_name, ts_min_y, ts_max_y)
        return True
    except LockHeld:
        log.info('LockHeld')
        return 'Task with key %s already running' % lockkey
247
247
@task(ignore_result=True)
def send_password_link(user_email):
    """Email a password-reset confirmation link to ``user_email``.

    Looks the user up by email address; when found, renders the
    password-reset notification template and dispatches it through the
    ``send_email`` task. When no user matches, only a debug message is
    logged (no error) to avoid leaking which addresses are registered.

    :param user_email: address to look up and send the reset link to
    :return: True on success (including "no such user"), False on error
    """
    from rhodecode.model.notification import EmailNotificationModel

    log = get_logger(send_password_link)

    try:
        # session is needed for its side effect of binding the model
        sa = get_session()
        user = User.get_by_email(user_email)
        if user:
            log.debug('password reset user found %s' % user)
            link = url('reset_password_confirmation', key=user.api_key,
                       qualified=True)
            reg_type = EmailNotificationModel.TYPE_PASSWORD_RESET
            body = EmailNotificationModel().get_email_tmpl(reg_type,
                                               **{'user': user.short_contact,
                                                  'reset_url': link})
            log.debug('sending email')
            run_task(send_email, user_email,
                     _("password reset link"), body)
            log.info('send new password mail to %s', user_email)
        else:
            log.debug("password reset email %s not found" % user_email)
    except Exception:
        # narrowed from a bare ``except:``; still best-effort -- log
        # the traceback and report failure instead of raising
        log.error(traceback.format_exc())
        return False

    return True
276
276
@task(ignore_result=True)
def reset_user_password(user_email):
    """Generate a new password for the user behind ``user_email``,
    persist it (hashed, with a fresh API key) and mail it to the user.

    :param user_email: address identifying the user to reset
    :return: always True -- failures are logged, never raised
    """
    from rhodecode.lib import auth

    log = get_logger(reset_user_password)

    try:
        try:
            sa = get_session()
            new_passwd = auth.PasswordGenerator().gen_password(8,
                             auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
            # validate *before* touching the database: the previous
            # version only checked after the commit, so a None password
            # could already have been hashed and persisted
            if new_passwd is None:
                raise Exception('unable to generate new password')

            user = User.get_by_email(user_email)
            if user:
                user.password = auth.get_crypt_password(new_passwd)
                user.api_key = auth.generate_api_key(user.username)
                sa.add(user)
                sa.commit()
                log.info('change password for %s', user_email)
        except Exception:
            # narrowed from a bare ``except:``
            log.error(traceback.format_exc())
            sa.rollback()

        run_task(send_email, user_email,
                 'Your new password',
                 'Your new RhodeCode password:%s' % (new_passwd))
        log.info('send new password mail to %s', user_email)

    except Exception:
        log.error('Failed to update user password')
        log.error(traceback.format_exc())

    return True
311
311
312
312
@task(ignore_result=True)
def send_email(recipients, subject, body, html_body=''):
    """
    Sends an email with defined parameters from the .ini files.

    :param recipients: list of recipients, if this is empty the defined email
        address from field 'email_to' plus all admin accounts is used instead
    :param subject: subject of the mail
    :param body: body of the mail
    :param html_body: html version of body
    :return: True when the mail was handed to the SMTP mailer, False on error
    """
    log = get_logger(send_email)
    # session is needed for its side effect of binding the model
    sa = get_session()
    email_config = config
    subject = "%s %s" % (email_config.get('email_prefix'), subject)
    if not recipients:
        # if recipients are not defined we send to email_config + all admins
        admins = [u.email for u in User.query()
                  .filter(User.admin == True).all()]
        recipients = [email_config.get('email_to')] + admins

    mail_from = email_config.get('app_email_from', 'RhodeCode')
    user = email_config.get('smtp_username')
    passwd = email_config.get('smtp_password')
    mail_server = email_config.get('smtp_server')
    mail_port = email_config.get('smtp_port')
    tls = str2bool(email_config.get('smtp_use_tls'))
    ssl = str2bool(email_config.get('smtp_use_ssl'))
    debug = str2bool(config.get('debug'))
    smtp_auth = email_config.get('smtp_auth')

    try:
        m = SmtpMailer(mail_from, user, passwd, mail_server, smtp_auth,
                       mail_port, ssl, tls, debug=debug)
        m.send(recipients, subject, body, html_body)
    except Exception:
        # narrowed from a bare ``except:`` so SystemExit /
        # KeyboardInterrupt are no longer swallowed
        log.error('Mail sending failed')
        log.error(traceback.format_exc())
        return False
    return True
353
353
354
354
@task(ignore_result=True)
def create_repo_fork(form_data, cur_user):
    """
    Creates a fork of a repository using internal VCS methods.

    :param form_data: validated form data describing the fork
    :param cur_user: user performing the fork
    """
    from rhodecode.model.repo import RepoModel

    log = get_logger(create_repo_fork)

    Session = get_session()
    base_path = Repository.base_path()

    # create the database record first; the filesystem clone follows
    RepoModel(Session).create(form_data, cur_user, just_db=True, fork=True)

    alias = form_data['repo_type']
    org_repo_name = form_data['org_path']
    fork_name = form_data['repo_name_full']
    source_repo_path = os.path.join(base_path, org_repo_name)
    destination_fork_path = os.path.join(base_path, fork_name)

    log.info('creating fork of %s as %s', source_repo_path,
             destination_fork_path)
    get_backend(alias)(safe_str(destination_fork_path), create=True,
                       src_url=safe_str(source_repo_path))
    action_logger(cur_user, 'user_forked_repo:%s' % fork_name,
                  org_repo_name, '', Session)
    # finally commit at latest possible stage
    Session.commit()
386
387
def __get_codes_stats(repo_name):
    """Count source files per known language extension at the repo tip.

    :param repo_name: name of the repository to inspect
    :return: dict mapping lowercased file extension -> number of files
    """
    repo = Repository.get_by_repo_name(repo_name).scm_instance

    tip = repo.get_changeset()
    code_stats = {}

    # was ``map(aggregate, tip.walk('/'))`` -- map() for side effects is
    # fragile; a plain loop is explicit. cs[2] holds the file nodes of
    # each walked directory -- assumed (path, dirs, files) tuples, TODO
    # confirm against the vcs walk API
    for cs in tip.walk('/'):
        for f in cs[2]:
            # str.lower() replaces the deprecated string.lower() helper
            ext = f.extension.lower()
            # only count known language extensions, skip binary files
            if ext in LANGUAGES_EXTENSIONS_MAP and not f.is_binary:
                code_stats[ext] = code_stats.get(ext, 0) + 1

    return code_stats
General Comments 0
You need to be logged in to leave comments. Login now