Fixed: Mercurial backend does not take a bare parameter when forking
marcink
r2813:25b10f9d beta
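The fix below passes bare=True only to the Git backend: a Git fork is created as a bare repository, while the Mercurial backend's constructor does not accept a bare argument, so passing it unconditionally broke hg forks. A minimal, hypothetical sketch of the idea (the clone_fork helper is illustrative and not part of this commit; the backend call signatures are the ones visible in the diff below):

# Hypothetical helper illustrating the fix: only the Git backend
# understands the ``bare`` keyword, so the fork-clone call has to
# branch on the repository type.
from rhodecode.lib.vcs import get_backend
from rhodecode.lib.utils2 import safe_str

def clone_fork(repo_type, source_repo_path, destination_fork_path,
               update_after_clone=False):
    backend = get_backend(repo_type)
    kwargs = dict(create=True, src_url=safe_str(source_repo_path),
                  update_after_clone=update_after_clone)
    if repo_type == 'git':
        # git forks are created as bare repositories
        kwargs['bare'] = True
    elif repo_type != 'hg':
        raise Exception('Unknown backend type %s' % repo_type)
    # the hg backend takes no ``bare`` keyword; passing it would raise a
    # TypeError, which is what this commit fixes in create_repo_fork
    return backend(safe_str(destination_fork_path), **kwargs)

The committed change inlines the same branching directly in create_repo_fork, as shown in the diff.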
rhodecode/lib/celerylib/tasks.py
@@ -1,435 +1,445 @@
# -*- coding: utf-8 -*-
"""
    rhodecode.lib.celerylib.tasks
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    RhodeCode task modules, containing all tasks that are supposed to be run
    by the celery daemon

    :created_on: Oct 6, 2010
    :author: marcink
    :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
    :license: GPLv3, see COPYING for more details.
"""
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from celery.decorators import task

import os
import traceback
import logging
from os.path import join as jn

from time import mktime
from operator import itemgetter
from string import lower

from pylons import config, url
from pylons.i18n.translation import _

from rhodecode.lib.vcs import get_backend

from rhodecode import CELERY_ON, CELERY_EAGER
from rhodecode.lib.utils2 import safe_str
from rhodecode.lib.celerylib import run_task, locked_task, dbsession, \
    str2bool, __get_lockkey, LockHeld, DaemonLock, get_session
from rhodecode.lib.helpers import person
from rhodecode.lib.rcmail.smtp_mailer import SmtpMailer
from rhodecode.lib.utils import add_cache, action_logger
from rhodecode.lib.compat import json, OrderedDict
from rhodecode.lib.hooks import log_create_repository

from rhodecode.model.db import Statistics, Repository, User


add_cache(config)

__all__ = ['whoosh_index', 'get_commits_stats',
           'reset_user_password', 'send_email']

def get_logger(cls):
    if CELERY_ON:
        try:
            log = cls.get_logger()
        except:
            log = logging.getLogger(__name__)
    else:
        log = logging.getLogger(__name__)

    return log


@task(ignore_result=True)
@locked_task
@dbsession
def whoosh_index(repo_location, full_index):
    from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
    log = get_logger(whoosh_index)
    DBS = get_session()

    index_location = config['index_dir']
    WhooshIndexingDaemon(index_location=index_location,
                         repo_location=repo_location, sa=DBS)\
        .run(full_index=full_index)

@task(ignore_result=True)
@dbsession
def get_commits_stats(repo_name, ts_min_y, ts_max_y):
    log = get_logger(get_commits_stats)
    DBS = get_session()
    lockkey = __get_lockkey('get_commits_stats', repo_name, ts_min_y,
                            ts_max_y)
    lockkey_path = config['here']

    log.info('running task with lockkey %s' % lockkey)

    try:
        lock = l = DaemonLock(file_=jn(lockkey_path, lockkey))

        # for js data compatibility cleans the key for person from '
        akc = lambda k: person(k).replace('"', "")

        co_day_auth_aggr = {}
        commits_by_day_aggregate = {}
        repo = Repository.get_by_repo_name(repo_name)
        if repo is None:
            return True

        repo = repo.scm_instance
        repo_size = repo.count()
        # return if repo has no revisions
        if repo_size < 1:
            lock.release()
            return True

        skip_date_limit = True
        parse_limit = int(config['app_conf'].get('commit_parse_limit'))
        last_rev = None
        last_cs = None
        timegetter = itemgetter('time')

        dbrepo = DBS.query(Repository)\
            .filter(Repository.repo_name == repo_name).scalar()
        cur_stats = DBS.query(Statistics)\
            .filter(Statistics.repository == dbrepo).scalar()

        if cur_stats is not None:
            last_rev = cur_stats.stat_on_revision

        if last_rev == repo.get_changeset().revision and repo_size > 1:
            # pass silently without any work if we're not on first revision or
            # current state of parsing revision(from db marker) is the
            # last revision
            lock.release()
            return True

        if cur_stats:
            commits_by_day_aggregate = OrderedDict(json.loads(
                cur_stats.commit_activity_combined))
            co_day_auth_aggr = json.loads(cur_stats.commit_activity)

        log.debug('starting parsing %s' % parse_limit)
        lmktime = mktime

        last_rev = last_rev + 1 if last_rev >= 0 else 0
        log.debug('Getting revisions from %s to %s' % (
            last_rev, last_rev + parse_limit)
        )
        for cs in repo[last_rev:last_rev + parse_limit]:
            log.debug('parsing %s' % cs)
            last_cs = cs  # remember last parsed changeset
            k = lmktime([cs.date.timetuple()[0], cs.date.timetuple()[1],
                         cs.date.timetuple()[2], 0, 0, 0, 0, 0, 0])

            if akc(cs.author) in co_day_auth_aggr:
                try:
                    l = [timegetter(x) for x in
                         co_day_auth_aggr[akc(cs.author)]['data']]
                    time_pos = l.index(k)
                except ValueError:
                    time_pos = False

                if time_pos >= 0 and time_pos is not False:

                    datadict = \
                        co_day_auth_aggr[akc(cs.author)]['data'][time_pos]

                    datadict["commits"] += 1
                    datadict["added"] += len(cs.added)
                    datadict["changed"] += len(cs.changed)
                    datadict["removed"] += len(cs.removed)

                else:
                    if k >= ts_min_y and k <= ts_max_y or skip_date_limit:

                        datadict = {"time": k,
                                    "commits": 1,
                                    "added": len(cs.added),
                                    "changed": len(cs.changed),
                                    "removed": len(cs.removed),
                                    }
                        co_day_auth_aggr[akc(cs.author)]['data']\
                            .append(datadict)

            else:
                if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
                    co_day_auth_aggr[akc(cs.author)] = {
                        "label": akc(cs.author),
                        "data": [{"time":k,
                                  "commits":1,
                                  "added":len(cs.added),
                                  "changed":len(cs.changed),
                                  "removed":len(cs.removed),
                                  }],
                        "schema": ["commits"],
                    }

            #gather all data by day
            if k in commits_by_day_aggregate:
                commits_by_day_aggregate[k] += 1
            else:
                commits_by_day_aggregate[k] = 1

        overview_data = sorted(commits_by_day_aggregate.items(),
                               key=itemgetter(0))

        if not co_day_auth_aggr:
            co_day_auth_aggr[akc(repo.contact)] = {
                "label": akc(repo.contact),
                "data": [0, 1],
                "schema": ["commits"],
            }

        stats = cur_stats if cur_stats else Statistics()
        stats.commit_activity = json.dumps(co_day_auth_aggr)
        stats.commit_activity_combined = json.dumps(overview_data)

        log.debug('last revison %s' % last_rev)
        leftovers = len(repo.revisions[last_rev:])
        log.debug('revisions to parse %s' % leftovers)

        if last_rev == 0 or leftovers < parse_limit:
            log.debug('getting code trending stats')
            stats.languages = json.dumps(__get_codes_stats(repo_name))

        try:
            stats.repository = dbrepo
            stats.stat_on_revision = last_cs.revision if last_cs else 0
            DBS.add(stats)
            DBS.commit()
        except:
            log.error(traceback.format_exc())
            DBS.rollback()
            lock.release()
            return False

        # final release
        lock.release()

        # execute another task if celery is enabled
        if len(repo.revisions) > 1 and CELERY_ON:
            run_task(get_commits_stats, repo_name, ts_min_y, ts_max_y)
        return True
    except LockHeld:
        log.info('LockHeld')
        return 'Task with key %s already running' % lockkey

@task(ignore_result=True)
@dbsession
def send_password_link(user_email):
    from rhodecode.model.notification import EmailNotificationModel

    log = get_logger(send_password_link)
    DBS = get_session()

    try:
        user = User.get_by_email(user_email)
        if user:
            log.debug('password reset user found %s' % user)
            link = url('reset_password_confirmation', key=user.api_key,
                       qualified=True)
            reg_type = EmailNotificationModel.TYPE_PASSWORD_RESET
            body = EmailNotificationModel().get_email_tmpl(reg_type,
                                                **{'user':user.short_contact,
                                                   'reset_url':link})
            log.debug('sending email')
            run_task(send_email, user_email,
                     _("password reset link"), body)
            log.info('send new password mail to %s' % user_email)
        else:
            log.debug("password reset email %s not found" % user_email)
    except:
        log.error(traceback.format_exc())
        return False

    return True

@task(ignore_result=True)
@dbsession
def reset_user_password(user_email):
    from rhodecode.lib import auth

    log = get_logger(reset_user_password)
    DBS = get_session()

    try:
        try:
            user = User.get_by_email(user_email)
            new_passwd = auth.PasswordGenerator().gen_password(8,
                            auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
            if user:
                user.password = auth.get_crypt_password(new_passwd)
                user.api_key = auth.generate_api_key(user.username)
                DBS.add(user)
                DBS.commit()
                log.info('change password for %s' % user_email)
            if new_passwd is None:
                raise Exception('unable to generate new password')
        except:
            log.error(traceback.format_exc())
            DBS.rollback()

        run_task(send_email, user_email,
                 'Your new password',
                 'Your new RhodeCode password:%s' % (new_passwd))
        log.info('send new password mail to %s' % user_email)

    except:
        log.error('Failed to update user password')
        log.error(traceback.format_exc())

    return True

@task(ignore_result=True)
@dbsession
def send_email(recipients, subject, body, html_body=''):
    """
    Sends an email with defined parameters from the .ini files.

    :param recipients: list of recipients, if this is empty the defined email
        address from field 'email_to' is used instead
    :param subject: subject of the mail
    :param body: body of the mail
    :param html_body: html version of body
    """
    log = get_logger(send_email)
    DBS = get_session()

    email_config = config
    subject = "%s %s" % (email_config.get('email_prefix', ''), subject)
    if not recipients:
        # if recipients are not defined we send to email_config + all admins
        admins = [u.email for u in User.query()
                  .filter(User.admin == True).all()]
        recipients = [email_config.get('email_to')] + admins

    mail_from = email_config.get('app_email_from', 'RhodeCode')
    user = email_config.get('smtp_username')
    passwd = email_config.get('smtp_password')
    mail_server = email_config.get('smtp_server')
    mail_port = email_config.get('smtp_port')
    tls = str2bool(email_config.get('smtp_use_tls'))
    ssl = str2bool(email_config.get('smtp_use_ssl'))
    debug = str2bool(config.get('debug'))
    smtp_auth = email_config.get('smtp_auth')

    try:
        m = SmtpMailer(mail_from, user, passwd, mail_server, smtp_auth,
                       mail_port, ssl, tls, debug=debug)
        m.send(recipients, subject, body, html_body)
    except:
        log.error('Mail sending failed')
        log.error(traceback.format_exc())
        return False
    return True

@task(ignore_result=True)
@dbsession
def create_repo_fork(form_data, cur_user):
    """
    Creates a fork of a repository using internal VCS methods

    :param form_data:
    :param cur_user:
    """
    from rhodecode.model.repo import RepoModel
    from rhodecode.model.user import UserModel

    log = get_logger(create_repo_fork)
    DBS = get_session()

    base_path = Repository.base_path()
    cur_user = UserModel(DBS)._get_user(cur_user)

    fork_name = form_data['repo_name_full']
    repo_type = form_data['repo_type']
    description = form_data['description']
    owner = cur_user
    private = form_data['private']
    clone_uri = form_data.get('clone_uri')
    repos_group = form_data['repo_group']
    landing_rev = form_data['landing_rev']
    copy_fork_permissions = form_data.get('copy_permissions')
    fork_of = RepoModel(DBS)._get_repo(form_data.get('fork_parent_id'))

    fork_repo = RepoModel(DBS).create_repo(
        fork_name, repo_type, description, owner, private, clone_uri,
        repos_group, landing_rev, just_db=True, fork_of=fork_of,
        copy_fork_permissions=copy_fork_permissions
    )

    update_after_clone = form_data['update_after_clone']

    source_repo_path = os.path.join(base_path, fork_of.repo_name)
    destination_fork_path = os.path.join(base_path, fork_name)

    log.info('creating fork of %s as %s', source_repo_path,
             destination_fork_path)
    backend = get_backend(repo_type)
-    backend(safe_str(destination_fork_path), create=True,
-            src_url=safe_str(source_repo_path),
-            update_after_clone=update_after_clone, bare=True)
+
+    if repo_type == 'git':
+        backend(safe_str(destination_fork_path), create=True,
+                src_url=safe_str(source_repo_path),
+                update_after_clone=update_after_clone,
+                bare=True)
+    elif repo_type == 'hg':
+        backend(safe_str(destination_fork_path), create=True,
+                src_url=safe_str(source_repo_path),
+                update_after_clone=update_after_clone)
+    else:
+        raise Exception('Unknown backend type %s' % repo_type)
+
    log_create_repository(fork_repo.get_dict(), created_by=cur_user.username)

    action_logger(cur_user, 'user_forked_repo:%s' % fork_name,
                  fork_of.repo_name, '', DBS)

    action_logger(cur_user, 'user_created_fork:%s' % fork_name,
                  fork_name, '', DBS)
    # finally commit at latest possible stage
    DBS.commit()

def __get_codes_stats(repo_name):
    from rhodecode.config.conf import LANGUAGES_EXTENSIONS_MAP
    repo = Repository.get_by_repo_name(repo_name).scm_instance

    tip = repo.get_changeset()
    code_stats = {}

    def aggregate(cs):
        for f in cs[2]:
            ext = lower(f.extension)
            if ext in LANGUAGES_EXTENSIONS_MAP.keys() and not f.is_binary:
                if ext in code_stats:
                    code_stats[ext] += 1
                else:
                    code_stats[ext] = 1

    map(aggregate, tip.walk('/'))

    return code_stats or {}
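For reference, a hedged sketch of how the fixed task would typically be dispatched; the field values below are made up for illustration and cur_user is shown as a plain username (UserModel._get_user resolves it), but the dictionary keys match exactly what create_repo_fork reads above:

# Hypothetical invocation; with CELERY_ON the task runs on the celery
# daemon, otherwise run_task falls back to executing it inline.
from rhodecode.lib.celerylib import run_task
from rhodecode.lib.celerylib.tasks import create_repo_fork

form_data = {
    'repo_name_full': 'my-fork',      # example values only
    'repo_type': 'hg',                # hg forks no longer receive bare=True
    'description': 'fork of my-repo',
    'private': False,
    'clone_uri': None,
    'repo_group': None,
    'landing_rev': 'tip',
    'copy_permissions': False,
    'fork_parent_id': 1,
    'update_after_clone': False,
}
run_task(create_repo_fork, form_data, 'admin')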