added recursion limit for stats gathering; sometimes it ran more than 1000 loops, which led to Python raising a "maximum recursion depth exceeded" error
marcink
r3276:eaa887c6 beta
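The pattern behind the fix: a task that re-queues itself carries an explicit counter, decrements it on every hop, and stops once it hits zero, so a pathological repository can no longer drive unbounded recursion. A minimal standalone sketch of that pattern follows; process_chunk and the synchronous run_task stand-in are illustrative names, not part of the RhodeCode API.

# Minimal sketch of a self-requeueing worker with an explicit requeue limit.
# process_chunk and run_task are illustrative stand-ins, not RhodeCode code.
def run_task(task, *args):
    # stand-in dispatcher; a real setup would hand this off to a task queue
    return task(*args)

def process_chunk(items, start, chunk_size, recurse_limit=100):
    end = min(start + chunk_size, len(items))
    for item in items[start:end]:
        pass  # real per-item work would happen here

    # re-queue only while work remains and the limit is not exhausted
    if end < len(items) and recurse_limit > 0:
        recurse_limit -= 1
        run_task(process_chunk, items, end, chunk_size, recurse_limit)
    if recurse_limit <= 0:
        print('stopping: requeue limit reached')
    return True

run_task(process_chunk, range(25), 0, 10, 3)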
rhodecode/controllers/summary.py
@@ -1,259 +1,261 @@
# -*- coding: utf-8 -*-
"""
    rhodecode.controllers.summary
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    Summary controller for Rhodecode

    :created_on: Apr 18, 2010
    :author: marcink
    :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
    :license: GPLv3, see COPYING for more details.
"""
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import traceback
import calendar
import logging
import urllib
from time import mktime
from datetime import timedelta, date
from urlparse import urlparse

from pylons import tmpl_context as c, request, url, config
from pylons.i18n.translation import _
from webob.exc import HTTPBadRequest

from beaker.cache import cache_region, region_invalidate

from rhodecode.lib.compat import product
from rhodecode.lib.vcs.exceptions import ChangesetError, EmptyRepositoryError, \
    NodeDoesNotExistError
from rhodecode.config.conf import ALL_READMES, ALL_EXTS, LANGUAGES_EXTENSIONS_MAP
from rhodecode.model.db import Statistics, CacheInvalidation
from rhodecode.lib.utils import jsonify
from rhodecode.lib.utils2 import safe_unicode
from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator,\
    NotAnonymous
from rhodecode.lib.base import BaseRepoController, render
from rhodecode.lib.vcs.backends.base import EmptyChangeset
from rhodecode.lib.markup_renderer import MarkupRenderer
from rhodecode.lib.celerylib import run_task
from rhodecode.lib.celerylib.tasks import get_commits_stats
from rhodecode.lib.helpers import RepoPage
from rhodecode.lib.compat import json, OrderedDict
from rhodecode.lib.vcs.nodes import FileNode

log = logging.getLogger(__name__)

README_FILES = [''.join([x[0][0], x[1][0]]) for x in
                sorted(list(product(ALL_READMES, ALL_EXTS)),
                       key=lambda y:y[0][1] + y[1][1])]


class SummaryController(BaseRepoController):

    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
                                   'repository.admin')
    def __before__(self):
        super(SummaryController, self).__before__()

    def index(self, repo_name):
        c.dbrepo = dbrepo = c.rhodecode_db_repo
        c.following = self.scm_model.is_following_repo(repo_name,
                                                self.rhodecode_user.user_id)

        def url_generator(**kw):
            return url('shortlog_home', repo_name=repo_name, size=10, **kw)

        c.repo_changesets = RepoPage(c.rhodecode_repo, page=1,
                                     items_per_page=10, url=url_generator)
        page_revisions = [x.raw_id for x in list(c.repo_changesets)]
        c.statuses = c.rhodecode_db_repo.statuses(page_revisions)

        if self.rhodecode_user.username == 'default':
            # for default(anonymous) user we don't need to pass credentials
            username = ''
            password = ''
        else:
            username = str(self.rhodecode_user.username)
            password = '@'

        parsed_url = urlparse(url.current(qualified=True))

        default_clone_uri = '{scheme}://{user}{pass}{netloc}{path}'

        uri_tmpl = config.get('clone_uri', default_clone_uri)
        uri_tmpl = uri_tmpl.replace('{', '%(').replace('}', ')s')
        decoded_path = safe_unicode(urllib.unquote(parsed_url.path))
        uri_dict = {
            'user': urllib.quote(username),
            'pass': password,
            'scheme': parsed_url.scheme,
            'netloc': parsed_url.netloc,
            'path': decoded_path
        }

        uri = uri_tmpl % uri_dict
        # generate another clone url by id
        uri_dict.update(
            {'path': decoded_path.replace(repo_name, '_%s' % c.dbrepo.repo_id)}
        )
        uri_id = uri_tmpl % uri_dict

        c.clone_repo_url = uri
        c.clone_repo_url_id = uri_id
        c.repo_tags = OrderedDict()
        for name, hash_ in c.rhodecode_repo.tags.items()[:10]:
            try:
                c.repo_tags[name] = c.rhodecode_repo.get_changeset(hash_)
            except ChangesetError:
                c.repo_tags[name] = EmptyChangeset(hash_)

        c.repo_branches = OrderedDict()
        for name, hash_ in c.rhodecode_repo.branches.items()[:10]:
            try:
                c.repo_branches[name] = c.rhodecode_repo.get_changeset(hash_)
            except ChangesetError:
                c.repo_branches[name] = EmptyChangeset(hash_)

        td = date.today() + timedelta(days=1)
        td_1m = td - timedelta(days=calendar.mdays[td.month])
        td_1y = td - timedelta(days=365)

        ts_min_m = mktime(td_1m.timetuple())
        ts_min_y = mktime(td_1y.timetuple())
        ts_max_y = mktime(td.timetuple())

        if dbrepo.enable_statistics:
            c.show_stats = True
            c.no_data_msg = _('No data loaded yet')
-           run_task(get_commits_stats, c.dbrepo.repo_name, ts_min_y, ts_max_y)
+           recurse_limit = 500  # don't recurse more than 500 times when parsing
+           run_task(get_commits_stats, c.dbrepo.repo_name, ts_min_y,
+                    ts_max_y, recurse_limit)
        else:
            c.show_stats = False
            c.no_data_msg = _('Statistics are disabled for this repository')
        c.ts_min = ts_min_m
        c.ts_max = ts_max_y

        stats = self.sa.query(Statistics)\
            .filter(Statistics.repository == dbrepo)\
            .scalar()

        c.stats_percentage = 0

        if stats and stats.languages:
            c.no_data = False is dbrepo.enable_statistics
            lang_stats_d = json.loads(stats.languages)
            c.commit_data = stats.commit_activity
            c.overview_data = stats.commit_activity_combined

            lang_stats = ((x, {"count": y,
                               "desc": LANGUAGES_EXTENSIONS_MAP.get(x)})
                          for x, y in lang_stats_d.items())

            c.trending_languages = json.dumps(
                sorted(lang_stats, reverse=True, key=lambda k: k[1])[:10]
            )
            last_rev = stats.stat_on_revision + 1
            c.repo_last_rev = c.rhodecode_repo.count()\
                if c.rhodecode_repo.revisions else 0
            if last_rev == 0 or c.repo_last_rev == 0:
                pass
            else:
                c.stats_percentage = '%.2f' % ((float((last_rev)) /
                                                c.repo_last_rev) * 100)
        else:
            c.commit_data = json.dumps({})
            c.overview_data = json.dumps([[ts_min_y, 0], [ts_max_y, 10]])
            c.trending_languages = json.dumps({})
            c.no_data = True

        c.enable_downloads = dbrepo.enable_downloads
        if c.enable_downloads:
            c.download_options = self._get_download_links(c.rhodecode_repo)

        c.readme_data, c.readme_file = \
            self.__get_readme_data(c.rhodecode_db_repo)
        return render('summary/summary.html')

    @NotAnonymous()
    @jsonify
    def repo_size(self, repo_name):
        if request.is_xhr:
            return _('repository size: %s') % c.rhodecode_db_repo._repo_size()
        else:
            raise HTTPBadRequest()

    def __get_readme_data(self, db_repo):
        repo_name = db_repo.repo_name

        @cache_region('long_term')
        def _get_readme_from_cache(key):
            readme_data = None
            readme_file = None
            log.debug('Looking for README file')
            try:
                # get's the landing revision! or tip if fails
                cs = db_repo.get_landing_changeset()
                if isinstance(cs, EmptyChangeset):
                    raise EmptyRepositoryError()
                renderer = MarkupRenderer()
                for f in README_FILES:
                    try:
                        readme = cs.get_node(f)
                        if not isinstance(readme, FileNode):
                            continue
                        readme_file = f
                        log.debug('Found README file `%s` rendering...' %
                                  readme_file)
                        readme_data = renderer.render(readme.content, f)
                        break
                    except NodeDoesNotExistError:
                        continue
            except ChangesetError:
                log.error(traceback.format_exc())
                pass
            except EmptyRepositoryError:
                pass
            except Exception:
                log.error(traceback.format_exc())

            return readme_data, readme_file

        key = repo_name + '_README'
        inv = CacheInvalidation.invalidate(key)
        if inv is not None:
            region_invalidate(_get_readme_from_cache, None, key)
            CacheInvalidation.set_valid(inv.cache_key)
        return _get_readme_from_cache(key)

    def _get_download_links(self, repo):

        download_l = []

        branches_group = ([], _("Branches"))
        tags_group = ([], _("Tags"))

        for name, chs in c.rhodecode_repo.branches.items():
            #chs = chs.split(':')[-1]
            branches_group[0].append((chs, name),)
        download_l.append(branches_group)

        for name, chs in c.rhodecode_repo.tags.items():
            #chs = chs.split(':')[-1]
            tags_group[0].append((chs, name),)
        download_l.append(tags_group)

        return download_l
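For reference, the clone-URL handling in SummaryController.index() above works by rewriting the configured {placeholder}-style clone_uri template into %(placeholder)s form and then applying plain % substitution. A standalone illustration of that conversion, with made-up values:

# Illustration of the brace-to-percent template conversion used above;
# the dictionary values are made up for the example.
uri_tmpl = '{scheme}://{user}{pass}{netloc}{path}'
uri_tmpl = uri_tmpl.replace('{', '%(').replace('}', ')s')
# uri_tmpl is now '%(scheme)s://%(user)s%(pass)s%(netloc)s%(path)s'

uri_dict = {
    'scheme': 'https',
    'user': 'marcink',
    'pass': '@',
    'netloc': 'example.com',
    'path': '/myrepo',
}
print(uri_tmpl % uri_dict)  # https://marcink@example.com/myrepo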
rhodecode/lib/celerylib/tasks.py
@@ -1,452 +1,456 @@
# -*- coding: utf-8 -*-
"""
    rhodecode.lib.celerylib.tasks
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    RhodeCode task modules, containing all task that suppose to be run
    by celery daemon

    :created_on: Oct 6, 2010
    :author: marcink
    :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
    :license: GPLv3, see COPYING for more details.
"""
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from celery.decorators import task

import os
import traceback
import logging
from os.path import join as jn

from time import mktime
from operator import itemgetter
from string import lower

from pylons import config, url
from pylons.i18n.translation import _

from rhodecode.lib.vcs import get_backend

from rhodecode import CELERY_ON, CELERY_EAGER
from rhodecode.lib.utils2 import safe_str
from rhodecode.lib.celerylib import run_task, locked_task, dbsession, \
    str2bool, __get_lockkey, LockHeld, DaemonLock, get_session
from rhodecode.lib.helpers import person
from rhodecode.lib.rcmail.smtp_mailer import SmtpMailer
from rhodecode.lib.utils import add_cache, action_logger
from rhodecode.lib.compat import json, OrderedDict
from rhodecode.lib.hooks import log_create_repository

from rhodecode.model.db import Statistics, Repository, User
from rhodecode.model.scm import ScmModel


add_cache(config)

__all__ = ['whoosh_index', 'get_commits_stats',
           'reset_user_password', 'send_email']


def get_logger(cls):
    if CELERY_ON:
        try:
            log = cls.get_logger()
        except:
            log = logging.getLogger(__name__)
    else:
        log = logging.getLogger(__name__)

    return log


@task(ignore_result=True)
@locked_task
@dbsession
def whoosh_index(repo_location, full_index):
    from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
    log = get_logger(whoosh_index)
    DBS = get_session()

    index_location = config['index_dir']
    WhooshIndexingDaemon(index_location=index_location,
                         repo_location=repo_location, sa=DBS)\
        .run(full_index=full_index)


@task(ignore_result=True)
@dbsession
-def get_commits_stats(repo_name, ts_min_y, ts_max_y):
+def get_commits_stats(repo_name, ts_min_y, ts_max_y, recurse_limit=100):
    log = get_logger(get_commits_stats)
    DBS = get_session()
    lockkey = __get_lockkey('get_commits_stats', repo_name, ts_min_y,
                            ts_max_y)
    lockkey_path = config['app_conf']['cache_dir']

    log.info('running task with lockkey %s' % lockkey)

    try:
        lock = l = DaemonLock(file_=jn(lockkey_path, lockkey))

        # for js data compatibility cleans the key for person from '
        akc = lambda k: person(k).replace('"', "")

        co_day_auth_aggr = {}
        commits_by_day_aggregate = {}
        repo = Repository.get_by_repo_name(repo_name)
        if repo is None:
            return True

        repo = repo.scm_instance
        repo_size = repo.count()
        # return if repo have no revisions
        if repo_size < 1:
            lock.release()
            return True

        skip_date_limit = True
        parse_limit = int(config['app_conf'].get('commit_parse_limit'))
        last_rev = None
        last_cs = None
        timegetter = itemgetter('time')

        dbrepo = DBS.query(Repository)\
            .filter(Repository.repo_name == repo_name).scalar()
        cur_stats = DBS.query(Statistics)\
            .filter(Statistics.repository == dbrepo).scalar()

        if cur_stats is not None:
            last_rev = cur_stats.stat_on_revision

        if last_rev == repo.get_changeset().revision and repo_size > 1:
            # pass silently without any work if we're not on first revision or
            # current state of parsing revision(from db marker) is the
            # last revision
            lock.release()
            return True

        if cur_stats:
            commits_by_day_aggregate = OrderedDict(json.loads(
                cur_stats.commit_activity_combined))
            co_day_auth_aggr = json.loads(cur_stats.commit_activity)

        log.debug('starting parsing %s' % parse_limit)
        lmktime = mktime

        last_rev = last_rev + 1 if last_rev >= 0 else 0
        log.debug('Getting revisions from %s to %s' % (
            last_rev, last_rev + parse_limit)
        )
        for cs in repo[last_rev:last_rev + parse_limit]:
            log.debug('parsing %s' % cs)
            last_cs = cs  # remember last parsed changeset
            k = lmktime([cs.date.timetuple()[0], cs.date.timetuple()[1],
                         cs.date.timetuple()[2], 0, 0, 0, 0, 0, 0])

            if akc(cs.author) in co_day_auth_aggr:
                try:
                    l = [timegetter(x) for x in
                         co_day_auth_aggr[akc(cs.author)]['data']]
                    time_pos = l.index(k)
                except ValueError:
                    time_pos = False

                if time_pos >= 0 and time_pos is not False:

                    datadict = \
                        co_day_auth_aggr[akc(cs.author)]['data'][time_pos]

                    datadict["commits"] += 1
                    datadict["added"] += len(cs.added)
                    datadict["changed"] += len(cs.changed)
                    datadict["removed"] += len(cs.removed)

                else:
                    if k >= ts_min_y and k <= ts_max_y or skip_date_limit:

                        datadict = {"time": k,
                                    "commits": 1,
                                    "added": len(cs.added),
                                    "changed": len(cs.changed),
                                    "removed": len(cs.removed),
                                    }
                        co_day_auth_aggr[akc(cs.author)]['data']\
                            .append(datadict)

            else:
                if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
                    co_day_auth_aggr[akc(cs.author)] = {
                        "label": akc(cs.author),
                        "data": [{"time":k,
                                  "commits":1,
                                  "added":len(cs.added),
                                  "changed":len(cs.changed),
                                  "removed":len(cs.removed),
                                  }],
                        "schema": ["commits"],
                    }

            #gather all data by day
            if k in commits_by_day_aggregate:
                commits_by_day_aggregate[k] += 1
            else:
                commits_by_day_aggregate[k] = 1

        overview_data = sorted(commits_by_day_aggregate.items(),
                               key=itemgetter(0))

        if not co_day_auth_aggr:
            co_day_auth_aggr[akc(repo.contact)] = {
                "label": akc(repo.contact),
                "data": [0, 1],
                "schema": ["commits"],
            }

        stats = cur_stats if cur_stats else Statistics()
        stats.commit_activity = json.dumps(co_day_auth_aggr)
        stats.commit_activity_combined = json.dumps(overview_data)

        log.debug('last revison %s' % last_rev)
        leftovers = len(repo.revisions[last_rev:])
        log.debug('revisions to parse %s' % leftovers)

        if last_rev == 0 or leftovers < parse_limit:
            log.debug('getting code trending stats')
            stats.languages = json.dumps(__get_codes_stats(repo_name))

        try:
            stats.repository = dbrepo
            stats.stat_on_revision = last_cs.revision if last_cs else 0
            DBS.add(stats)
            DBS.commit()
        except:
            log.error(traceback.format_exc())
            DBS.rollback()
            lock.release()
            return False

        # final release
        lock.release()

        # execute another task if celery is enabled
-       if len(repo.revisions) > 1 and CELERY_ON:
-           run_task(get_commits_stats, repo_name, ts_min_y, ts_max_y)
+       if len(repo.revisions) > 1 and CELERY_ON and recurse_limit > 0:
+           recurse_limit -= 1
+           run_task(get_commits_stats, repo_name, ts_min_y, ts_max_y,
+                    recurse_limit)
+       if recurse_limit <= 0:
+           log.debug('Breaking recursive mode due to reach of recurse limit')
        return True
    except LockHeld:
        log.info('LockHeld')
        return 'Task with key %s already running' % lockkey

@task(ignore_result=True)
@dbsession
def send_password_link(user_email):
    from rhodecode.model.notification import EmailNotificationModel

    log = get_logger(send_password_link)
    DBS = get_session()

    try:
        user = User.get_by_email(user_email)
        if user:
            log.debug('password reset user found %s' % user)
            link = url('reset_password_confirmation', key=user.api_key,
                       qualified=True)
            reg_type = EmailNotificationModel.TYPE_PASSWORD_RESET
            body = EmailNotificationModel().get_email_tmpl(reg_type,
                                                **{'user':user.short_contact,
                                                   'reset_url':link})
            log.debug('sending email')
            run_task(send_email, user_email,
                     _("password reset link"), body)
            log.info('send new password mail to %s' % user_email)
        else:
            log.debug("password reset email %s not found" % user_email)
    except:
        log.error(traceback.format_exc())
        return False

    return True

@task(ignore_result=True)
@dbsession
def reset_user_password(user_email):
    from rhodecode.lib import auth

    log = get_logger(reset_user_password)
    DBS = get_session()

    try:
        try:
            user = User.get_by_email(user_email)
            new_passwd = auth.PasswordGenerator().gen_password(8,
                             auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
            if user:
                user.password = auth.get_crypt_password(new_passwd)
                user.api_key = auth.generate_api_key(user.username)
                DBS.add(user)
                DBS.commit()
                log.info('change password for %s' % user_email)
            if new_passwd is None:
                raise Exception('unable to generate new password')
        except:
            log.error(traceback.format_exc())
            DBS.rollback()

        run_task(send_email, user_email,
                 'Your new password',
                 'Your new RhodeCode password:%s' % (new_passwd))
        log.info('send new password mail to %s' % user_email)

    except:
        log.error('Failed to update user password')
        log.error(traceback.format_exc())

    return True


@task(ignore_result=True)
@dbsession
def send_email(recipients, subject, body, html_body=''):
    """
    Sends an email with defined parameters from the .ini files.

    :param recipients: list of recipients, it this is empty the defined email
        address from field 'email_to' is used instead
    :param subject: subject of the mail
    :param body: body of the mail
    :param html_body: html version of body
    """
    log = get_logger(send_email)
    DBS = get_session()

    email_config = config
    subject = "%s %s" % (email_config.get('email_prefix', ''), subject)
    if not recipients:
        # if recipients are not defined we send to email_config + all admins
        admins = [u.email for u in User.query()
                  .filter(User.admin == True).all()]
        recipients = [email_config.get('email_to')] + admins

    mail_from = email_config.get('app_email_from', 'RhodeCode')
    user = email_config.get('smtp_username')
    passwd = email_config.get('smtp_password')
    mail_server = email_config.get('smtp_server')
    mail_port = email_config.get('smtp_port')
    tls = str2bool(email_config.get('smtp_use_tls'))
    ssl = str2bool(email_config.get('smtp_use_ssl'))
    debug = str2bool(config.get('debug'))
    smtp_auth = email_config.get('smtp_auth')

    if not mail_server:
        log.error("SMTP mail server not configured - cannot send mail")
        return False

    try:
        m = SmtpMailer(mail_from, user, passwd, mail_server, smtp_auth,
                       mail_port, ssl, tls, debug=debug)
        m.send(recipients, subject, body, html_body)
    except:
        log.error('Mail sending failed')
        log.error(traceback.format_exc())
        return False
    return True


@task(ignore_result=True)
@dbsession
def create_repo_fork(form_data, cur_user):
    """
    Creates a fork of repository using interval VCS methods

    :param form_data:
    :param cur_user:
    """
    from rhodecode.model.repo import RepoModel
    from rhodecode.model.user import UserModel

    log = get_logger(create_repo_fork)
    DBS = get_session()

    base_path = Repository.base_path()
    cur_user = UserModel(DBS)._get_user(cur_user)

    fork_name = form_data['repo_name_full']
    repo_type = form_data['repo_type']
    description = form_data['description']
    owner = cur_user
    private = form_data['private']
    clone_uri = form_data.get('clone_uri')
    repos_group = form_data['repo_group']
    landing_rev = form_data['landing_rev']
    copy_fork_permissions = form_data.get('copy_permissions')
    fork_of = RepoModel(DBS)._get_repo(form_data.get('fork_parent_id'))

    fork_repo = RepoModel(DBS).create_repo(
        fork_name, repo_type, description, owner, private, clone_uri,
        repos_group, landing_rev, just_db=True, fork_of=fork_of,
        copy_fork_permissions=copy_fork_permissions
    )

    update_after_clone = form_data['update_after_clone']

    source_repo_path = os.path.join(base_path, fork_of.repo_name)
    destination_fork_path = os.path.join(base_path, fork_name)

    log.info('creating fork of %s as %s', source_repo_path,
             destination_fork_path)
    backend = get_backend(repo_type)

    if repo_type == 'git':
        r = backend(safe_str(destination_fork_path), create=True,
                    src_url=safe_str(source_repo_path),
                    update_after_clone=update_after_clone,
                    bare=True)
        # add rhodecode hook into this repo
        ScmModel().install_git_hook(repo=r)
    elif repo_type == 'hg':
        r = backend(safe_str(destination_fork_path), create=True,
                    src_url=safe_str(source_repo_path),
                    update_after_clone=update_after_clone)
    else:
        raise Exception('Unknown backend type %s' % repo_type)

    log_create_repository(fork_repo.get_dict(), created_by=cur_user.username)

    action_logger(cur_user, 'user_forked_repo:%s' % fork_name,
                  fork_of.repo_name, '', DBS)

    action_logger(cur_user, 'user_created_fork:%s' % fork_name,
                  fork_name, '', DBS)
    # finally commit at latest possible stage
    DBS.commit()


def __get_codes_stats(repo_name):
    from rhodecode.config.conf import LANGUAGES_EXTENSIONS_MAP
    repo = Repository.get_by_repo_name(repo_name).scm_instance

    tip = repo.get_changeset()
    code_stats = {}

    def aggregate(cs):
        for f in cs[2]:
            ext = lower(f.extension)
            if ext in LANGUAGES_EXTENSIONS_MAP.keys() and not f.is_binary:
                if ext in code_stats:
                    code_stats[ext] += 1
                else:
                    code_stats[ext] = 1

    map(aggregate, tip.walk('/'))

    return code_stats or {}
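One detail worth calling out from get_commits_stats() above: the per-day aggregation key k is the epoch timestamp of midnight on the commit's date, so every commit made on the same calendar day lands in the same bucket. A standalone illustration with a made-up commit date:

# Illustration of the day-bucket key used in get_commits_stats();
# the commit timestamp below is made up for the example.
from time import mktime
from datetime import datetime

cs_date = datetime(2012, 11, 15, 17, 42, 3)  # hypothetical commit time
tt = cs_date.timetuple()
k = mktime((tt[0], tt[1], tt[2], 0, 0, 0, 0, 0, 0))  # midnight of that day

commits_by_day_aggregate = {}
commits_by_day_aggregate[k] = commits_by_day_aggregate.get(k, 0) + 1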