##// END OF EJS Templates
libs: removed utf8 markers
super-admin -
r5054:c54edc4f default
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -1,23 +1,22 b''
1 # -*- coding: utf-8 -*-
2
1
3 # Copyright (C) 2010-2020 RhodeCode GmbH
2 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
3 #
5 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
8 #
7 #
9 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
11 # GNU General Public License for more details.
13 #
12 #
14 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
15 #
17 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
19
21 """
20 """
22 RhodeCode libs
21 RhodeCode libs
23 """
22 """
@@ -1,357 +1,356 b''
1 # -*- coding: utf-8 -*-
2
1
3 # Copyright (C) 2010-2020 RhodeCode GmbH
2 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
3 #
5 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
8 #
7 #
9 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
11 # GNU General Public License for more details.
13 #
12 #
14 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
15 #
17 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
19
21 import logging
20 import logging
22
21
23 from webhelpers2.html.builder import literal
22 from webhelpers2.html.builder import literal
24 from webhelpers2.html.tags import link_to
23 from webhelpers2.html.tags import link_to
25
24
26 from rhodecode.lib.utils2 import AttributeDict
25 from rhodecode.lib.utils2 import AttributeDict
27 from rhodecode.lib.vcs.backends.base import BaseCommit
26 from rhodecode.lib.vcs.backends.base import BaseCommit
28 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
27 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
29
28
30
29
31 log = logging.getLogger(__name__)
30 log = logging.getLogger(__name__)
32
31
33
32
def action_parser(request, user_log, feed=False, parse_cs=False):
    """
    This helper will action_map the specified string action into translated
    fancy names with icons and links

    :param request: request object, used for translation and routing
    :param user_log: user log instance
    :param feed: use output for feeds (no html and fancy icons)
    :param parse_cs: parse Changesets into VCS instances
    :return: list of three zero-argument callbacks:
        [action renderer, action-params renderer, icon renderer]
    """
    if user_log.version == 'v2':
        # new-style audit-log entries carry a pre-formatted action string
        ap = AuditLogParser(request, user_log)
        return ap.callbacks()
    else:
        # old style journal entries
        # NOTE(review): `feed` and `parse_cs` were previously hard-coded to
        # False here, silently discarding the caller's arguments; forward
        # them so feed output and commit parsing actually take effect.
        ap = ActionParser(request, user_log, feed=feed, parse_commits=parse_cs)
        return ap.callbacks()
50
49
51
50
class ActionParser(object):
    """
    Parse old-style (v1) journal entries into renderable pieces.

    ``user_log.action`` has the form ``"<action>"`` or ``"<action>:<params>"``;
    the action is mapped through :attr:`action_map` to a translated label,
    an optional parameter-extractor callback and an icon CSS class.
    """

    commits_limit = 3  # display this amount always
    commits_top_limit = 50  # show up to this amount of commits hidden

    def __init__(self, request, user_log, feed=False, parse_commits=False):
        # :param request: request object providing ``translate``
        # :param user_log: UserLog row (action, repository, ...)
        # :param feed: strip markup for feed output instead of HTML spans
        # :param parse_commits: resolve commit ids to real VCS commit objects
        self.user_log = user_log
        self.feed = feed
        self.parse_commits = parse_commits
        self.request = request

        self.action = user_log.action
        self.action_params = ' '
        # split only on the first ':' — params themselves may contain colons
        x = self.action.split(':', 1)
        if len(x) > 1:
            self.action, self.action_params = x

    def callbacks(self):
        """Return [action-renderer, params-renderer, icon-renderer] callbacks."""
        # NOTE(review): for unknown actions the fallback is the raw action
        # *string*, so action_str[0]/[1] index characters, not tuple items —
        # presumably unknown actions never reach here; confirm with callers.
        action_str = self.action_map.get(self.action, self.action)
        if self.feed:
            # feeds get plain text: drop the highlight brackets entirely
            action = action_str[0].replace('[', '').replace(']', '')
        else:
            # web UI: turn [..] markers into a highlight span
            action = action_str[0]\
                .replace('[', '<span class="journal_highlight">')\
                .replace(']', '</span>')

        action_params_func = _no_params_func
        if callable(action_str[1]):
            action_params_func = action_str[1]

        # returned callbacks we need to call to get
        return [
            lambda: literal(action), action_params_func,
            self.action_parser_icon]

    @property
    def action_map(self):
        # Built per-call because the translator comes from the request.
        _ = self.request.translate
        # action : translated str, callback(extractor), icon
        action_map = {
            'user_deleted_repo': (
                _('[deleted] repository'),
                None, 'icon-trash'),
            'user_created_repo': (
                _('[created] repository'),
                None, 'icon-plus icon-plus-colored'),
            'user_created_fork': (
                _('[created] repository as fork'),
                None, 'icon-code-fork'),
            'user_forked_repo': (
                _('[forked] repository'),
                self.get_fork_name, 'icon-code-fork'),
            'user_updated_repo': (
                _('[updated] repository'),
                None, 'icon-pencil icon-pencil-colored'),
            'user_downloaded_archive': (
                _('[downloaded] archive from repository'),
                self.get_archive_name, 'icon-download-alt'),
            'admin_deleted_repo': (
                _('[delete] repository'),
                None, 'icon-trash'),
            'admin_created_repo': (
                _('[created] repository'),
                None, 'icon-plus icon-plus-colored'),
            'admin_forked_repo': (
                _('[forked] repository'),
                None, 'icon-code-fork icon-fork-colored'),
            'admin_updated_repo': (
                _('[updated] repository'),
                None, 'icon-pencil icon-pencil-colored'),
            'admin_created_user': (
                _('[created] user'),
                self.get_user_name, 'icon-user icon-user-colored'),
            'admin_updated_user': (
                _('[updated] user'),
                self.get_user_name, 'icon-user icon-user-colored'),
            'admin_created_users_group': (
                _('[created] user group'),
                self.get_users_group, 'icon-pencil icon-pencil-colored'),
            'admin_updated_users_group': (
                _('[updated] user group'),
                self.get_users_group, 'icon-pencil icon-pencil-colored'),
            'user_commented_revision': (
                _('[commented] on commit in repository'),
                self.get_cs_links, 'icon-comment icon-comment-colored'),
            'user_commented_pull_request': (
                _('[commented] on pull request for'),
                self.get_pull_request, 'icon-comment icon-comment-colored'),
            'user_closed_pull_request': (
                _('[closed] pull request for'),
                self.get_pull_request, 'icon-check'),
            'user_merged_pull_request': (
                _('[merged] pull request for'),
                self.get_pull_request, 'icon-check'),
            'push': (
                _('[pushed] into'),
                self.get_cs_links, 'icon-arrow-up'),
            'push_local': (
                _('[committed via RhodeCode] into repository'),
                self.get_cs_links, 'icon-pencil icon-pencil-colored'),
            'push_remote': (
                _('[pulled from remote] into repository'),
                self.get_cs_links, 'icon-arrow-up'),
            'pull': (
                _('[pulled] from'),
                None, 'icon-arrow-down'),
            'started_following_repo': (
                _('[started following] repository'),
                None, 'icon-heart icon-heart-colored'),
            'stopped_following_repo': (
                _('[stopped following] repository'),
                None, 'icon-heart-empty icon-heart-colored'),
        }
        return action_map

    def get_fork_name(self):
        """Render a link to the fork named in the action params."""
        from rhodecode.lib import helpers as h
        _ = self.request.translate
        repo_name = self.action_params
        _url = h.route_path('repo_summary', repo_name=repo_name)
        return _('fork name %s') % link_to(self.action_params, _url)

    def get_user_name(self):
        # params carry the plain username
        user_name = self.action_params
        return user_name

    def get_users_group(self):
        # params carry the plain user-group name
        group_name = self.action_params
        return group_name

    def get_pull_request(self):
        """Render a link to the pull request named in the action params."""
        from rhodecode.lib import helpers as h
        _ = self.request.translate
        pull_request_id = self.action_params
        if self.is_deleted():
            # repository row is gone; fall back to the stored name
            repo_name = self.user_log.repository_name
        else:
            repo_name = self.user_log.repository.repo_name
        return link_to(
            _('Pull request #%s') % pull_request_id,
            h.route_path('pullrequest_show', repo_name=repo_name,
                         pull_request_id=pull_request_id))

    def get_archive_name(self):
        # params carry the archive file name
        archive_name = self.action_params
        return archive_name

    def action_parser_icon(self):
        """Render the ``<i>`` icon markup for this entry's action."""
        tmpl = """<i class="%s" alt="%s"></i>"""
        # unknown actions fall back to an empty icon class
        ico = self.action_map.get(self.action, ['', '', ''])[2]
        return literal(tmpl % (ico, self.action))

    def get_cs_links(self):
        """
        Render links for the commits stored in the action params
        (comma-separated ids), plus an "N more" note and a compare-view
        link when there are more commits than :attr:`commits_limit`.
        """
        from rhodecode.lib import helpers as h
        _ = self.request.translate
        if self.is_deleted():
            # repository gone — show the raw ids, nothing to link to
            return self.action_params

        repo_name = self.user_log.repository.repo_name
        commit_ids = self.action_params.split(',')
        commits = self.get_commits(commit_ids)

        link_generator = (
            self.lnk(commit, repo_name)
            for commit in commits[:self.commits_limit])
        commit_links = [" " + ', '.join(link_generator)]
        # ids may be 'op=>name' tokens; use the name part for the range
        _op1, _name1 = _get_op(commit_ids[0])
        _op2, _name2 = _get_op(commit_ids[-1])

        commit_id_range = '%s...%s' % (_name1, _name2)

        compare_view = (
            ' <div class="compare_view tooltip" title="%s">'
            '<a href="%s">%s</a> </div>' % (
                _('Show all combined commits %s->%s') % (
                    commit_ids[0][:12], commit_ids[-1][:12]
                ),
                h.route_path(
                    'repo_commit', repo_name=repo_name,
                    commit_id=commit_id_range), _('compare view')
            )
        )

        if len(commit_ids) > self.commits_limit:
            more_count = len(commit_ids) - self.commits_limit
            commit_links.append(
                _(' and %(num)s more commits') % {'num': more_count}
            )

        if len(commits) > 1:
            commit_links.append(compare_view)
        return ''.join(commit_links)

    def get_commits(self, commit_ids):
        """
        Resolve *commit_ids* (up to :attr:`commits_top_limit`) into commit
        objects when ``parse_commits`` is set, otherwise into lightweight
        AttributeDict stand-ins; unresolvable ids are kept as raw strings.
        """
        commits = []
        # bail out when the list contains only empty strings
        if not [v for v in commit_ids if v != '']:
            return commits

        repo = None
        if self.parse_commits:
            repo = self.user_log.repository.scm_instance()

        for commit_id in commit_ids[:self.commits_top_limit]:
            _op, _name = _get_op(commit_id)

            # we want parsed commits, or new log store format is bad
            if self.parse_commits:
                try:
                    commit = repo.get_commit(commit_id=commit_id)
                    commits.append(commit)
                except CommitDoesNotExistError:
                    log.error(
                        'cannot find commit id %s in this repository',
                        commit_id)
                    # keep the raw id string so the entry still renders
                    commits.append(commit_id)
                    continue
            else:
                fake_commit = AttributeDict({
                    'short_id': commit_id[:12],
                    'raw_id': commit_id,
                    'message': '',
                    'op': _op,
                    'ref_name': _name
                })
                commits.append(fake_commit)

        return commits

    def lnk(self, commit_or_id, repo_name):
        """
        Render a single commit link. *commit_or_id* is a commit object,
        an AttributeDict stand-in, or a raw id string for commits that
        could not be resolved.
        """
        from rhodecode.lib.helpers import tooltip
        from rhodecode.lib import helpers as h
        _ = self.request.translate
        title = ''
        lazy_cs = True
        if isinstance(commit_or_id, (BaseCommit, AttributeDict)):
            lazy_cs = True  # NOTE(review): redundant re-assignment, kept as-is
            if (getattr(commit_or_id, 'op', None) and
                    getattr(commit_or_id, 'ref_name', None)):
                # ref operations (branch delete / tag) get a label, no URL
                lazy_cs = False
                lbl = '?'
                if commit_or_id.op == 'delete_branch':
                    lbl = '%s' % _('Deleted branch: %s') % commit_or_id.ref_name
                    title = ''
                elif commit_or_id.op == 'tag':
                    lbl = '%s' % _('Created tag: %s') % commit_or_id.ref_name
                    title = ''
                _url = '#'

            else:
                lbl = '%s' % (commit_or_id.short_id[:8])
                _url = h.route_path('repo_commit', repo_name=repo_name,
                                    commit_id=commit_or_id.raw_id)
                title = tooltip(commit_or_id.message)
        else:
            # commit cannot be found/striped/removed etc.
            lbl = ('%s' % commit_or_id)[:12]
            _url = '#'
            title = _('Commit not found')
        if self.parse_commits:
            return link_to(lbl, _url, title=title, class_='tooltip')
        # presumably only reached with AttributeDict stand-ins (which always
        # carry raw_id) since raw string ids imply parse_commits=True — a bare
        # string here would raise AttributeError; TODO confirm
        return link_to(lbl, _url, raw_id=commit_or_id.raw_id, repo_name=repo_name,
                       class_='lazy-cs' if lazy_cs else '')

    def is_deleted(self):
        """True when the journal entry's repository row no longer exists."""
        return self.user_log.repository is None
317
316
318
317
class AuditLogParser(object):
    """
    Render new-style (v2) audit-log entries.

    Unlike :class:`ActionParser`, v2 entries carry the final action string
    already, so no action-map lookup or parameter extraction is needed.
    """

    def __init__(self, request, audit_log_entry):
        self.audit_log_entry = audit_log_entry
        self.request = request

    def get_icon(self, action):
        # every audit-log action currently shares one generic icon
        return 'icon-rhodecode'

    def callbacks(self):
        """Return [action-renderer, params-renderer, icon-renderer] callbacks."""
        entry_action = self.audit_log_entry.action

        def render_action():
            # wrap [..] markers in a highlight span, same as the v1 parser
            highlighted = (
                entry_action
                .replace('[', '<span class="journal_highlight">')
                .replace(']', '</span>'))
            return literal(highlighted)

        def render_icon():
            markup = """<i class="%s" alt="%s"></i>"""
            return literal(markup % (self.get_icon(entry_action), entry_action))

        # v2 entries never carry extra params, so the no-op renderer is used
        return [render_action, _no_params_func, render_icon]
346
345
347
346
348 def _no_params_func():
347 def _no_params_func():
349 return ""
348 return ""
350
349
351
350
352 def _get_op(commit_id):
351 def _get_op(commit_id):
353 _op = None
352 _op = None
354 _name = commit_id
353 _name = commit_id
355 if len(commit_id.split('=>')) == 2:
354 if len(commit_id.split('=>')) == 2:
356 _op, _name = commit_id.split('=>')
355 _op, _name = commit_id.split('=>')
357 return _op, _name
356 return _op, _name
@@ -1,305 +1,305 b''
1 # -*- coding: utf-8 -*-
1
2
2
3 # Copyright (C) 2017-2020 RhodeCode GmbH
3 # Copyright (C) 2017-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22 import datetime
22 import datetime
23
23
24 from rhodecode.lib.jsonalchemy import JsonRaw
24 from rhodecode.lib.jsonalchemy import JsonRaw
25 from rhodecode.model import meta
25 from rhodecode.model import meta
26 from rhodecode.model.db import User, UserLog, Repository
26 from rhodecode.model.db import User, UserLog, Repository
27
27
28
28
29 log = logging.getLogger(__name__)
29 log = logging.getLogger(__name__)
30
30
31 # action as key, and expected action_data as value
31 # action as key, and expected action_data as value
32 ACTIONS_V1 = {
32 ACTIONS_V1 = {
33 'user.login.success': {'user_agent': ''},
33 'user.login.success': {'user_agent': ''},
34 'user.login.failure': {'user_agent': ''},
34 'user.login.failure': {'user_agent': ''},
35 'user.logout': {'user_agent': ''},
35 'user.logout': {'user_agent': ''},
36 'user.register': {},
36 'user.register': {},
37 'user.password.reset_request': {},
37 'user.password.reset_request': {},
38 'user.push': {'user_agent': '', 'commit_ids': []},
38 'user.push': {'user_agent': '', 'commit_ids': []},
39 'user.pull': {'user_agent': ''},
39 'user.pull': {'user_agent': ''},
40
40
41 'user.create': {'data': {}},
41 'user.create': {'data': {}},
42 'user.delete': {'old_data': {}},
42 'user.delete': {'old_data': {}},
43 'user.edit': {'old_data': {}},
43 'user.edit': {'old_data': {}},
44 'user.edit.permissions': {},
44 'user.edit.permissions': {},
45 'user.edit.ip.add': {'ip': {}, 'user': {}},
45 'user.edit.ip.add': {'ip': {}, 'user': {}},
46 'user.edit.ip.delete': {'ip': {}, 'user': {}},
46 'user.edit.ip.delete': {'ip': {}, 'user': {}},
47 'user.edit.token.add': {'token': {}, 'user': {}},
47 'user.edit.token.add': {'token': {}, 'user': {}},
48 'user.edit.token.delete': {'token': {}, 'user': {}},
48 'user.edit.token.delete': {'token': {}, 'user': {}},
49 'user.edit.email.add': {'email': ''},
49 'user.edit.email.add': {'email': ''},
50 'user.edit.email.delete': {'email': ''},
50 'user.edit.email.delete': {'email': ''},
51 'user.edit.ssh_key.add': {'token': {}, 'user': {}},
51 'user.edit.ssh_key.add': {'token': {}, 'user': {}},
52 'user.edit.ssh_key.delete': {'token': {}, 'user': {}},
52 'user.edit.ssh_key.delete': {'token': {}, 'user': {}},
53 'user.edit.password_reset.enabled': {},
53 'user.edit.password_reset.enabled': {},
54 'user.edit.password_reset.disabled': {},
54 'user.edit.password_reset.disabled': {},
55
55
56 'user_group.create': {'data': {}},
56 'user_group.create': {'data': {}},
57 'user_group.delete': {'old_data': {}},
57 'user_group.delete': {'old_data': {}},
58 'user_group.edit': {'old_data': {}},
58 'user_group.edit': {'old_data': {}},
59 'user_group.edit.permissions': {},
59 'user_group.edit.permissions': {},
60 'user_group.edit.member.add': {'user': {}},
60 'user_group.edit.member.add': {'user': {}},
61 'user_group.edit.member.delete': {'user': {}},
61 'user_group.edit.member.delete': {'user': {}},
62
62
63 'repo.create': {'data': {}},
63 'repo.create': {'data': {}},
64 'repo.fork': {'data': {}},
64 'repo.fork': {'data': {}},
65 'repo.edit': {'old_data': {}},
65 'repo.edit': {'old_data': {}},
66 'repo.edit.permissions': {},
66 'repo.edit.permissions': {},
67 'repo.edit.permissions.branch': {},
67 'repo.edit.permissions.branch': {},
68 'repo.archive': {'old_data': {}},
68 'repo.archive': {'old_data': {}},
69 'repo.delete': {'old_data': {}},
69 'repo.delete': {'old_data': {}},
70
70
71 'repo.archive.download': {'user_agent': '', 'archive_name': '',
71 'repo.archive.download': {'user_agent': '', 'archive_name': '',
72 'archive_spec': '', 'archive_cached': ''},
72 'archive_spec': '', 'archive_cached': ''},
73
73
74 'repo.permissions.branch_rule.create': {},
74 'repo.permissions.branch_rule.create': {},
75 'repo.permissions.branch_rule.edit': {},
75 'repo.permissions.branch_rule.edit': {},
76 'repo.permissions.branch_rule.delete': {},
76 'repo.permissions.branch_rule.delete': {},
77
77
78 'repo.pull_request.create': '',
78 'repo.pull_request.create': '',
79 'repo.pull_request.edit': '',
79 'repo.pull_request.edit': '',
80 'repo.pull_request.delete': '',
80 'repo.pull_request.delete': '',
81 'repo.pull_request.close': '',
81 'repo.pull_request.close': '',
82 'repo.pull_request.merge': '',
82 'repo.pull_request.merge': '',
83 'repo.pull_request.vote': '',
83 'repo.pull_request.vote': '',
84 'repo.pull_request.comment.create': '',
84 'repo.pull_request.comment.create': '',
85 'repo.pull_request.comment.edit': '',
85 'repo.pull_request.comment.edit': '',
86 'repo.pull_request.comment.delete': '',
86 'repo.pull_request.comment.delete': '',
87
87
88 'repo.pull_request.reviewer.add': '',
88 'repo.pull_request.reviewer.add': '',
89 'repo.pull_request.reviewer.delete': '',
89 'repo.pull_request.reviewer.delete': '',
90
90
91 'repo.pull_request.observer.add': '',
91 'repo.pull_request.observer.add': '',
92 'repo.pull_request.observer.delete': '',
92 'repo.pull_request.observer.delete': '',
93
93
94 'repo.commit.strip': {'commit_id': ''},
94 'repo.commit.strip': {'commit_id': ''},
95 'repo.commit.comment.create': {'data': {}},
95 'repo.commit.comment.create': {'data': {}},
96 'repo.commit.comment.delete': {'data': {}},
96 'repo.commit.comment.delete': {'data': {}},
97 'repo.commit.comment.edit': {'data': {}},
97 'repo.commit.comment.edit': {'data': {}},
98 'repo.commit.vote': '',
98 'repo.commit.vote': '',
99
99
100 'repo.artifact.add': '',
100 'repo.artifact.add': '',
101 'repo.artifact.delete': '',
101 'repo.artifact.delete': '',
102
102
103 'repo_group.create': {'data': {}},
103 'repo_group.create': {'data': {}},
104 'repo_group.edit': {'old_data': {}},
104 'repo_group.edit': {'old_data': {}},
105 'repo_group.edit.permissions': {},
105 'repo_group.edit.permissions': {},
106 'repo_group.delete': {'old_data': {}},
106 'repo_group.delete': {'old_data': {}},
107 }
107 }
108
108
# Active action schema; currently points at the v1 spec above.  Bump this
# alias when the audit payload format changes.
ACTIONS = ACTIONS_V1

# Markers written into action_data['source'] to record where an audit
# entry originated (web UI request vs. API call).
SOURCE_WEB = 'source_web'
SOURCE_API = 'source_api'
113
113
114
114
class UserWrap(object):
    """
    Lightweight stand-in that imitates the AuthUser interface.

    Carries only the attributes the audit logger reads; performs no
    validation and no database access.
    """

    def __init__(self, user_id=None, username=None, ip_addr=None):
        # plain attribute storage, mirroring AuthUser's public fields
        self.user_id, self.username, self.ip_addr = user_id, username, ip_addr
124
124
125
125
class RepoWrap(object):
    """
    Lightweight stand-in for a Repository object, exposing just the two
    attributes the audit logger requires.
    """

    def __init__(self, repo_id=None, repo_name=None):
        # no validation -- callers may supply either id, name, or both
        self.repo_id, self.repo_name = repo_id, repo_name
134
134
135
135
def _store_log(action_name, action_data, user_id, username, user_data,
               ip_address, repository_id, repository_name):
    """Build one unpersisted UserLog row for an audit event.

    Missing action/user payloads fall back to an empty JSON document.
    The caller is responsible for adding the row to a session.
    """
    entry = UserLog()
    entry.version = UserLog.VERSION_2

    # what happened
    entry.action = action_name
    entry.action_data = action_data or JsonRaw('{}')

    # who did it, and from where
    entry.user_ip = ip_address
    entry.user_id = user_id
    entry.username = username
    entry.user_data = user_data or JsonRaw('{}')

    # which repository it concerns (may be None for global events)
    entry.repository_id = repository_id
    entry.repository_name = repository_name

    entry.action_date = datetime.datetime.now()

    return entry
156
156
157
157
def store_web(*args, **kwargs):
    """Record an audit entry tagged as originating from the web UI.

    Thin wrapper around :func:`store` that forces
    ``action_data['source']`` to :data:`SOURCE_WEB`.
    """
    data = dict(kwargs.pop('action_data', {}))
    data['source'] = SOURCE_WEB
    kwargs['action_data'] = data

    return store(*args, **kwargs)
166
166
167
167
def store_api(*args, **kwargs):
    """Record an audit entry tagged as originating from an API call.

    Thin wrapper around :func:`store` that forces
    ``action_data['source']`` to :data:`SOURCE_API`.
    """
    data = dict(kwargs.pop('action_data', {}))
    data['source'] = SOURCE_API
    kwargs['action_data'] = data

    return store(*args, **kwargs)
176
176
177
177
def store(action, user, action_data=None, user_data=None, ip_addr=None,
          repo=None, sa_session=None, commit=False):
    """
    Audit logger for various actions made by users, typically this
    results in a call such::

        from rhodecode.lib import audit_logger

        audit_logger.store(
            'repo.edit', user=self._rhodecode_user)
        audit_logger.store(
            'repo.delete', action_data={'data': repo_data},
            user=audit_logger.UserWrap(username='itried-login', ip_addr='8.8.8.8'))

        # repo action
        audit_logger.store(
            'repo.delete',
            user=audit_logger.UserWrap(username='itried-login', ip_addr='8.8.8.8'),
            repo=audit_logger.RepoWrap(repo_name='some-repo'))

        # repo action, when we know and have the repository object already
        audit_logger.store(
            'repo.delete', action_data={'source': audit_logger.SOURCE_WEB, },
            user=self._rhodecode_user,
            repo=repo_object)

        # alternative wrapper to the above
        audit_logger.store_web(
            'repo.delete', action_data={},
            user=self._rhodecode_user,
            repo=repo_object)

        # without an user ?
        audit_logger.store(
            'user.login.failure',
            user=audit_logger.UserWrap(
                username=self.request.params.get('username'),
                ip_addr=self.request.remote_addr))

    :param action: key from ACTIONS describing what happened
    :param user: AuthUser instance or UserWrap; source of id/username/ip
    :param action_data: JSON-serializable payload stored with the entry
    :param user_data: extra user payload; derived from AuthUser when missing
    :param ip_addr: explicit IP override; falls back to ``user.ip_addr``
    :param repo: Repository or RepoWrap the action concerns, if any
    :param sa_session: SQLAlchemy session to use; defaults to the global one
    :param commit: when True, commit the session after storing the entry

    Never raises: any failure is logged via ``log.exception`` and swallowed
    so that audit logging cannot break the calling request.
    """
    from rhodecode.lib.utils2 import safe_unicode
    from rhodecode.lib.auth import AuthUser

    action_spec = ACTIONS.get(action, None)
    if action_spec is None:
        raise ValueError('Action `{}` is not supported'.format(action))

    if not sa_session:
        sa_session = meta.Session()

    try:
        username = getattr(user, 'username', None)

        user_id = getattr(user, 'user_id', None)
        if not user_id:
            # maybe we have username ? Try to figure user_id from username
            if username:
                user_id = getattr(
                    User.get_by_username(username), 'user_id', None)

        ip_addr = ip_addr or getattr(user, 'ip_addr', None)

        if not user_data:
            # try to get this from the auth user
            if isinstance(user, AuthUser):
                user_data = {
                    'username': user.username,
                    'email': user.email,
                }

        repository_name = getattr(repo, 'repo_name', None)
        repository_id = getattr(repo, 'repo_id', None)
        if not repository_id:
            # maybe we have repo_name ? Try to figure repo_id from repo_name
            if repository_name:
                repository_id = getattr(
                    Repository.get_by_repo_name(repository_name), 'repo_id', None)

        action_name = safe_unicode(action)
        ip_address = safe_unicode(ip_addr)

        with sa_session.no_autoflush:

            user_log = _store_log(
                action_name=action_name,
                action_data=action_data or {},
                user_id=user_id,
                username=username,
                user_data=user_data or {},
                ip_address=ip_address,
                repository_id=repository_id,
                repository_name=repository_name
            )

            sa_session.add(user_log)
            if commit:
                sa_session.commit()
            # entry_id is only populated once the row has been flushed
            entry_id = user_log.entry_id or ''

            update_user_last_activity(sa_session, user_id)

            if commit:
                sa_session.commit()

        log.info('AUDIT[%s]: Logging action: `%s` by user:id:%s[%s] ip:%s',
                 entry_id, action_name, user_id, username, ip_address,
                 extra={"entry_id": entry_id, "action": action_name,
                        "user_id": user_id, "ip": ip_address})

    except Exception:
        log.exception('AUDIT: failed to store audit log')
293
293
294
294
def update_user_last_activity(sa_session, user_id):
    """Stamp ``last_activity`` of *user_id* with the current time.

    Any database error is logged and the session rolled back, so a failed
    activity update never propagates to the caller.
    """
    now = datetime.datetime.now()
    try:
        (sa_session.query(User)
         .filter(User.user_id == user_id)
         .update({"last_activity": now}))
        log.debug(
            'updated user `%s` last activity to:%s', user_id, now)
    except Exception:
        log.exception("Failed last activity update for user_id: %s", user_id)
        sa_session.rollback()
305
305
@@ -1,2533 +1,2532 b''
1 # -*- coding: utf-8 -*-
2
1
3 # Copyright (C) 2010-2020 RhodeCode GmbH
2 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
3 #
5 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
8 #
7 #
9 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
11 # GNU General Public License for more details.
13 #
12 #
14 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
15 #
17 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
19
21 """
20 """
22 authentication and permission libraries
21 authentication and permission libraries
23 """
22 """
24
23
25 import os
24 import os
26
25
27 import colander
26 import colander
28 import time
27 import time
29 import collections
28 import collections
30 import fnmatch
29 import fnmatch
31 import itertools
30 import itertools
32 import logging
31 import logging
33 import random
32 import random
34 import traceback
33 import traceback
35 from functools import wraps
34 from functools import wraps
36
35
37 import ipaddress
36 import ipaddress
38
37
39 from pyramid.httpexceptions import HTTPForbidden, HTTPFound, HTTPNotFound
38 from pyramid.httpexceptions import HTTPForbidden, HTTPFound, HTTPNotFound
40 from sqlalchemy.orm.exc import ObjectDeletedError
39 from sqlalchemy.orm.exc import ObjectDeletedError
41 from sqlalchemy.orm import joinedload
40 from sqlalchemy.orm import joinedload
42 from zope.cachedescriptors.property import Lazy as LazyProperty
41 from zope.cachedescriptors.property import Lazy as LazyProperty
43
42
44 import rhodecode
43 import rhodecode
45 from rhodecode.model import meta
44 from rhodecode.model import meta
46 from rhodecode.model.meta import Session
45 from rhodecode.model.meta import Session
47 from rhodecode.model.user import UserModel
46 from rhodecode.model.user import UserModel
48 from rhodecode.model.db import (
47 from rhodecode.model.db import (
49 false, User, Repository, Permission, UserToPerm, UserGroupToPerm, UserGroupMember,
48 false, User, Repository, Permission, UserToPerm, UserGroupToPerm, UserGroupMember,
50 UserIpMap, UserApiKeys, RepoGroup, UserGroup, UserNotice)
49 UserIpMap, UserApiKeys, RepoGroup, UserGroup, UserNotice)
51 from rhodecode.lib import rc_cache
50 from rhodecode.lib import rc_cache
52 from rhodecode.lib.utils import (
51 from rhodecode.lib.utils import (
53 get_repo_slug, get_repo_group_slug, get_user_group_slug)
52 get_repo_slug, get_repo_group_slug, get_user_group_slug)
54 from rhodecode.lib.type_utils import aslist
53 from rhodecode.lib.type_utils import aslist
55 from rhodecode.lib.hash_utils import sha1, sha256, md5
54 from rhodecode.lib.hash_utils import sha1, sha256, md5
56 from rhodecode.lib.str_utils import ascii_bytes, safe_str, safe_int, safe_bytes
55 from rhodecode.lib.str_utils import ascii_bytes, safe_str, safe_int, safe_bytes
57 from rhodecode.lib.caching_query import FromCache
56 from rhodecode.lib.caching_query import FromCache
58
57
59
58
60 if rhodecode.is_unix:
59 if rhodecode.is_unix:
61 import bcrypt
60 import bcrypt
62
61
log = logging.getLogger(__name__)

# Name of the form field / session key that carries the anti-CSRF token.
csrf_token_key = "csrf_token"
66
65
67
66
class PasswordGenerator(object):
    """
    This is a simple class for generating password from different sets of
    characters
    usage::
        passwd_gen = PasswordGenerator()
        #print 8-letter password containing only big and small letters
        of alphabet
        passwd_gen.gen_password(8, passwd_gen.ALPHABETS_BIG_SMALL)
    """
    ALPHABETS_NUM = r'''1234567890'''
    ALPHABETS_SMALL = r'''qwertyuiopasdfghjklzxcvbnm'''
    ALPHABETS_BIG = r'''QWERTYUIOPASDFGHJKLZXCVBNM'''
    ALPHABETS_SPECIAL = r'''`-=[]\;',./~!@#$%^&*()_+{}|:"<>?'''
    ALPHABETS_FULL = ALPHABETS_BIG + ALPHABETS_SMALL \
        + ALPHABETS_NUM + ALPHABETS_SPECIAL
    ALPHABETS_ALPHANUM = ALPHABETS_BIG + ALPHABETS_SMALL + ALPHABETS_NUM
    ALPHABETS_BIG_SMALL = ALPHABETS_BIG + ALPHABETS_SMALL
    ALPHABETS_ALPHANUM_BIG = ALPHABETS_BIG + ALPHABETS_NUM
    ALPHABETS_ALPHANUM_SMALL = ALPHABETS_SMALL + ALPHABETS_NUM

    def __init__(self, passwd=''):
        # last generated password is kept on the instance as well
        self.passwd = passwd

    def gen_password(self, length, type_=None):
        """Generate, remember and return a random password.

        :param length: number of characters to generate
        :param type_: sequence of candidate characters; defaults to
            :attr:`ALPHABETS_FULL`
        """
        # passwords are security sensitive: use the CSPRNG from `secrets`
        # instead of the predictable `random` module
        import secrets
        if type_ is None:
            type_ = self.ALPHABETS_FULL
        self.passwd = ''.join(secrets.choice(type_) for _ in range(length))
        return self.passwd
97
96
98
97
99 class _RhodeCodeCryptoBase(object):
98 class _RhodeCodeCryptoBase(object):
100 ENC_PREF = None
99 ENC_PREF = None
101
100
102 def hash_create(self, str_):
101 def hash_create(self, str_):
103 """
102 """
104 hash the string using
103 hash the string using
105
104
106 :param str_: password to hash
105 :param str_: password to hash
107 """
106 """
108 raise NotImplementedError
107 raise NotImplementedError
109
108
110 def hash_check_with_upgrade(self, password, hashed):
109 def hash_check_with_upgrade(self, password, hashed):
111 """
110 """
112 Returns tuple in which first element is boolean that states that
111 Returns tuple in which first element is boolean that states that
113 given password matches it's hashed version, and the second is new hash
112 given password matches it's hashed version, and the second is new hash
114 of the password, in case this password should be migrated to new
113 of the password, in case this password should be migrated to new
115 cipher.
114 cipher.
116 """
115 """
117 checked_hash = self.hash_check(password, hashed)
116 checked_hash = self.hash_check(password, hashed)
118 return checked_hash, None
117 return checked_hash, None
119
118
120 def hash_check(self, password, hashed):
119 def hash_check(self, password, hashed):
121 """
120 """
122 Checks matching password with it's hashed value.
121 Checks matching password with it's hashed value.
123
122
124 :param password: password
123 :param password: password
125 :param hashed: password in hashed form
124 :param hashed: password in hashed form
126 """
125 """
127 raise NotImplementedError
126 raise NotImplementedError
128
127
129 def _assert_bytes(self, value):
128 def _assert_bytes(self, value):
130 """
129 """
131 Passing in an `unicode` object can lead to hard to detect issues
130 Passing in an `unicode` object can lead to hard to detect issues
132 if passwords contain non-ascii characters. Doing a type check
131 if passwords contain non-ascii characters. Doing a type check
133 during runtime, so that such mistakes are detected early on.
132 during runtime, so that such mistakes are detected early on.
134 """
133 """
135 if not isinstance(value, str):
134 if not isinstance(value, str):
136 raise TypeError(
135 raise TypeError(
137 "Bytestring required as input, got %r." % (value, ))
136 "Bytestring required as input, got %r." % (value, ))
138
137
139
138
class _RhodeCodeCryptoBCrypt(_RhodeCodeCryptoBase):
    # prefixes that bcrypt hashes with work factor 10 start with
    ENC_PREF = ('$2a$10', '$2b$10')

    def hash_create(self, str_):
        """Return the bcrypt hash of *str_* using work factor 10."""
        self._assert_bytes(str_)
        return bcrypt.hashpw(str_, bcrypt.gensalt(10))

    def hash_check_with_upgrade(self, password, hashed):
        """
        Returns tuple in which first element is boolean that states that
        given password matches it's hashed version, and the second is new hash
        of the password, in case this password should be migrated to new
        cipher.

        This implements special upgrade logic which works like that:
        - check if the given password == bcrypted hash, if yes then we
          properly used password and it was already in bcrypt. Proceed
          without any changes
        - if bcrypt hash check is not working try with sha256. If hash compare
          is ok, it means we using correct but old hashed password. indicate
          hash change and proceed
        """

        new_hash = None

        # regular pw check
        password_match_bcrypt = self.hash_check(password, hashed)

        # now we want to know if the password was maybe from sha256
        # basically calling _RhodeCodeCryptoSha256().hash_check()
        if not password_match_bcrypt:
            if _RhodeCodeCryptoSha256().hash_check(password, hashed):
                new_hash = self.hash_create(password)  # make new bcrypt hash
                password_match_bcrypt = True

        return password_match_bcrypt, new_hash

    def hash_check(self, password, hashed):
        """
        Checks matching password with it's hashed value.

        :param password: password
        :param hashed: password in hashed form
        """
        self._assert_bytes(password)
        try:
            # NOTE(review): `==` is not a constant-time comparison;
            # bcrypt.checkpw would be -- confirm before changing behavior
            return bcrypt.hashpw(password, hashed) == hashed
        except ValueError as e:
            # we're having a invalid salt here probably, we should not crash
            # just return with False as it would be a wrong password.
            log.debug('Failed to check password hash using bcrypt %s',
                      safe_str(e))

        return False
194
193
195
194
class _RhodeCodeCryptoSha256(_RhodeCodeCryptoBase):
    """Legacy backend storing passwords as their plain sha256 digest."""
    ENC_PREF = '_'

    def hash_create(self, str_):
        """Return the sha256 digest of *str_* as the stored hash."""
        self._assert_bytes(str_)
        return sha256(str_)

    def hash_check(self, password, hashed):
        """Return True when sha256(*password*) equals *hashed*.

        :param password: password
        :param hashed: password in hashed form
        """
        self._assert_bytes(password)
        return hashed == sha256(password)
212
211
213
212
class _RhodeCodeCryptoTest(_RhodeCodeCryptoBase):
    """Fast sha1-based backend used only when running the test suite."""
    ENC_PREF = '_'

    def hash_create(self, str_):
        """Return the sha1 digest of *str_* as the stored hash."""
        self._assert_bytes(str_)
        return sha1(str_)

    def hash_check(self, password, hashed):
        """Return True when sha1(*password*) equals *hashed*.

        :param password: password
        :param hashed: password in hashed form
        """
        self._assert_bytes(password)
        return hashed == sha1(password)
230
229
231
230
def crypto_backend():
    """
    Return the matching crypto backend.

    Selection is based on if we run tests or not, we pick sha1-test backend to run
    tests faster since BCRYPT is expensive to calculate
    """
    backend_cls = _RhodeCodeCryptoTest if rhodecode.is_test else _RhodeCodeCryptoBCrypt
    return backend_cls()
245
244
246
245
def get_crypt_password(password):
    """
    Create the hash of `password` with the active crypto backend.

    :param password: The cleartext password.
    :type password: unicode
    """
    return crypto_backend().hash_create(safe_str(password))
256
255
257
256
def check_password(password, hashed):
    """
    Check if the value in `password` matches the hash in `hashed`.

    :param password: The cleartext password.
    :type password: unicode

    :param hashed: The expected hashed version of the password.
    :type hashed: The hash has to be passed in in text representation.
    """
    return crypto_backend().hash_check(safe_str(password), hashed)
270
269
271
270
def generate_auth_token(data, salt=None):
    """
    Generates API KEY from given string

    :param data: seed string (user-specific material) for the token
    :param salt: optional salt; 16 random bytes are generated when omitted
    """
    if salt is None:
        salt = os.urandom(16)
    # `data` typically arrives as `str` while os.urandom() yields bytes;
    # normalize both sides so concatenation cannot raise TypeError on py3
    # (assumes safe_bytes passes bytes through unchanged -- TODO confirm)
    return sha1(safe_bytes(data) + safe_bytes(salt))
280
279
281
280
def get_came_from(request):
    """
    get query_string+path from request sanitized after removing auth_token
    """
    # drop auth_token so it can never leak into a redirect target
    if 'auth_token' in request.GET:
        request.GET.pop('auth_token')

    came_from = request.path
    query = request.query_string
    if query:
        came_from = '{}?{}'.format(came_from, query)

    return came_from
297
296
298
297
class CookieStoreWrapper(object):
    """Uniform ``get`` access over either a plain dict or an AuthUser."""

    def __init__(self, cookie_store):
        self.cookie_store = cookie_store

    def __repr__(self):
        return 'CookieStore<%s>' % (self.cookie_store)

    def get(self, key, other=None):
        """Fetch *key* from the wrapped store, falling back to *other*."""
        store = self.cookie_store
        if isinstance(store, dict):
            return store.get(key, other)
        if isinstance(store, AuthUser):
            # AuthUser exposes its state via instance attributes
            return store.__dict__.get(key, other)
312
311
313
312
def _cached_perms_data(user_id, scope, user_is_admin,
                       user_inherit_default_permissions, explicit, algo,
                       calculate_super_admin):
    """Compute the full permission set for one user.

    Thin, cache-friendly entry point delegating all the work to
    :class:`PermissionCalculator`.
    """
    calculator = PermissionCalculator(
        user_id, scope, user_is_admin, user_inherit_default_permissions,
        explicit, algo, calculate_super_admin)
    return calculator.calculate()
322
321
323
322
class PermOrigin(object):
    # Human-readable markers recording *why* a permission applies; the
    # '%s' placeholders are filled with the concrete user/user-group name.
    SUPER_ADMIN = 'superadmin'
    ARCHIVED = 'archived'

    # repository-level permission origins
    REPO_USER = 'user:%s'
    REPO_USERGROUP = 'usergroup:%s'
    REPO_OWNER = 'repo.owner'
    REPO_DEFAULT = 'repo.default'
    REPO_DEFAULT_NO_INHERIT = 'repo.default.no.inherit'
    REPO_PRIVATE = 'repo.private'

    # repository-group-level permission origins
    REPOGROUP_USER = 'user:%s'
    REPOGROUP_USERGROUP = 'usergroup:%s'
    REPOGROUP_OWNER = 'group.owner'
    REPOGROUP_DEFAULT = 'group.default'
    REPOGROUP_DEFAULT_NO_INHERIT = 'group.default.no.inherit'

    # user-group-level permission origins
    USERGROUP_USER = 'user:%s'
    USERGROUP_USERGROUP = 'usergroup:%s'
    USERGROUP_OWNER = 'usergroup.owner'
    USERGROUP_DEFAULT = 'usergroup.default'
    USERGROUP_DEFAULT_NO_INHERIT = 'usergroup.default.no.inherit'
346
345
347
346
class PermOriginDict(dict):
    """
    A dict that records, per key, the history of (perm, origin, obj_id)
    assignments while reading back like a plain perm mapping.

    `__setitem__` expects a tuple(perm, origin, obj_id)
    `__getitem__` returns only the perm
    `.perm_origin_stack` holds the list of assignments made per key

    >>> perms = PermOriginDict()
    >>> perms['resource'] = 'read', 'default', 1
    >>> perms['resource']
    'read'
    >>> perms['resource'] = 'write', 'admin', 2
    >>> perms['resource']
    'write'
    >>> perms.perm_origin_stack
    {'resource': [('read', 'default', 1), ('write', 'admin', 2)]}
    """

    def __init__(self, *args, **kw):
        super(PermOriginDict, self).__init__(*args, **kw)
        self.perm_origin_stack = collections.OrderedDict()

    def __setitem__(self, key, perm_origin_obj_id):
        if isinstance(perm_origin_obj_id, tuple):
            # regular path: unpack and record the history entry
            perm, origin, obj_id = perm_origin_obj_id
            self.perm_origin_stack.setdefault(key, []).append(
                (perm, origin, obj_id))
        else:
            # bare value (most likely set via pickle): no origin tracking
            perm = perm_origin_obj_id
        dict.__setitem__(self, key, perm)
class BranchPermOriginDict(dict):
    """
    Dedicated branch permissions dict, with tracking of patterns and origins.

    `__setitem__` expects a tuple({pattern: perm}, origin)
    `__getitem__` returns only the {pattern: perm} mapping
    `.perm_origin_stack` keeps the (perm, origin) history per key/pattern

    NOTE: the doctest below is fixed relative to the historic version,
    which passed a bare 3-tuple and would raise ValueError on unpacking.

    >>> perms = BranchPermOriginDict()
    >>> perms['resource'] = {'*pattern': 'read'}, 'default'
    >>> perms['resource']
    {'*pattern': 'read'}
    >>> perms['resource'] = {'*pattern': 'write'}, 'admin'
    >>> perms['resource']
    {'*pattern': 'write'}
    >>> perms.perm_origin_stack
    {'resource': {'*pattern': [('read', 'default'), ('write', 'admin')]}}
    """

    def __init__(self, *args, **kw):
        dict.__init__(self, *args, **kw)
        self.perm_origin_stack = collections.OrderedDict()

    def __setitem__(self, key, pattern_perm_origin):
        # set (most likely via pickle) key:val pair without tuple
        if not isinstance(pattern_perm_origin, tuple):
            pattern_perm = pattern_perm_origin
        else:
            (pattern_perm, origin) = pattern_perm_origin
            # we're passed a {pattern: perm} dict; record each entry in
            # the per-key/per-pattern history stack
            for pattern, perm in pattern_perm.items():
                self.perm_origin_stack.setdefault(key, {})\
                    .setdefault(pattern, []).append((perm, origin))
        dict.__setitem__(self, key, pattern_perm)
417 class PermissionCalculator(object):
416 class PermissionCalculator(object):
418
417
419 def __init__(
418 def __init__(
420 self, user_id, scope, user_is_admin,
419 self, user_id, scope, user_is_admin,
421 user_inherit_default_permissions, explicit, algo,
420 user_inherit_default_permissions, explicit, algo,
422 calculate_super_admin_as_user=False):
421 calculate_super_admin_as_user=False):
423
422
424 self.user_id = user_id
423 self.user_id = user_id
425 self.user_is_admin = user_is_admin
424 self.user_is_admin = user_is_admin
426 self.inherit_default_permissions = user_inherit_default_permissions
425 self.inherit_default_permissions = user_inherit_default_permissions
427 self.explicit = explicit
426 self.explicit = explicit
428 self.algo = algo
427 self.algo = algo
429 self.calculate_super_admin_as_user = calculate_super_admin_as_user
428 self.calculate_super_admin_as_user = calculate_super_admin_as_user
430
429
431 scope = scope or {}
430 scope = scope or {}
432 self.scope_repo_id = scope.get('repo_id')
431 self.scope_repo_id = scope.get('repo_id')
433 self.scope_repo_group_id = scope.get('repo_group_id')
432 self.scope_repo_group_id = scope.get('repo_group_id')
434 self.scope_user_group_id = scope.get('user_group_id')
433 self.scope_user_group_id = scope.get('user_group_id')
435
434
436 self.default_user_id = User.get_default_user(cache=True).user_id
435 self.default_user_id = User.get_default_user(cache=True).user_id
437
436
438 self.permissions_repositories = PermOriginDict()
437 self.permissions_repositories = PermOriginDict()
439 self.permissions_repository_groups = PermOriginDict()
438 self.permissions_repository_groups = PermOriginDict()
440 self.permissions_user_groups = PermOriginDict()
439 self.permissions_user_groups = PermOriginDict()
441 self.permissions_repository_branches = BranchPermOriginDict()
440 self.permissions_repository_branches = BranchPermOriginDict()
442 self.permissions_global = set()
441 self.permissions_global = set()
443
442
444 self.default_repo_perms = Permission.get_default_repo_perms(
443 self.default_repo_perms = Permission.get_default_repo_perms(
445 self.default_user_id, self.scope_repo_id)
444 self.default_user_id, self.scope_repo_id)
446 self.default_repo_groups_perms = Permission.get_default_group_perms(
445 self.default_repo_groups_perms = Permission.get_default_group_perms(
447 self.default_user_id, self.scope_repo_group_id)
446 self.default_user_id, self.scope_repo_group_id)
448 self.default_user_group_perms = \
447 self.default_user_group_perms = \
449 Permission.get_default_user_group_perms(
448 Permission.get_default_user_group_perms(
450 self.default_user_id, self.scope_user_group_id)
449 self.default_user_id, self.scope_user_group_id)
451
450
452 # default branch perms
451 # default branch perms
453 self.default_branch_repo_perms = \
452 self.default_branch_repo_perms = \
454 Permission.get_default_repo_branch_perms(
453 Permission.get_default_repo_branch_perms(
455 self.default_user_id, self.scope_repo_id)
454 self.default_user_id, self.scope_repo_id)
456
455
457 def calculate(self):
456 def calculate(self):
458 if self.user_is_admin and not self.calculate_super_admin_as_user:
457 if self.user_is_admin and not self.calculate_super_admin_as_user:
459 return self._calculate_super_admin_permissions()
458 return self._calculate_super_admin_permissions()
460
459
461 self._calculate_global_default_permissions()
460 self._calculate_global_default_permissions()
462 self._calculate_global_permissions()
461 self._calculate_global_permissions()
463 self._calculate_default_permissions()
462 self._calculate_default_permissions()
464 self._calculate_repository_permissions()
463 self._calculate_repository_permissions()
465 self._calculate_repository_branch_permissions()
464 self._calculate_repository_branch_permissions()
466 self._calculate_repository_group_permissions()
465 self._calculate_repository_group_permissions()
467 self._calculate_user_group_permissions()
466 self._calculate_user_group_permissions()
468 return self._permission_structure()
467 return self._permission_structure()
469
468
470 def _calculate_super_admin_permissions(self):
469 def _calculate_super_admin_permissions(self):
471 """
470 """
472 super-admin user have all default rights for repositories
471 super-admin user have all default rights for repositories
473 and groups set to admin
472 and groups set to admin
474 """
473 """
475 self.permissions_global.add('hg.admin')
474 self.permissions_global.add('hg.admin')
476 self.permissions_global.add('hg.create.write_on_repogroup.true')
475 self.permissions_global.add('hg.create.write_on_repogroup.true')
477
476
478 # repositories
477 # repositories
479 for perm in self.default_repo_perms:
478 for perm in self.default_repo_perms:
480 r_k = perm.UserRepoToPerm.repository.repo_name
479 r_k = perm.UserRepoToPerm.repository.repo_name
481 obj_id = perm.UserRepoToPerm.repository.repo_id
480 obj_id = perm.UserRepoToPerm.repository.repo_id
482 archived = perm.UserRepoToPerm.repository.archived
481 archived = perm.UserRepoToPerm.repository.archived
483 p = 'repository.admin'
482 p = 'repository.admin'
484 self.permissions_repositories[r_k] = p, PermOrigin.SUPER_ADMIN, obj_id
483 self.permissions_repositories[r_k] = p, PermOrigin.SUPER_ADMIN, obj_id
485 # special case for archived repositories, which we block still even for
484 # special case for archived repositories, which we block still even for
486 # super admins
485 # super admins
487 if archived:
486 if archived:
488 p = 'repository.read'
487 p = 'repository.read'
489 self.permissions_repositories[r_k] = p, PermOrigin.ARCHIVED, obj_id
488 self.permissions_repositories[r_k] = p, PermOrigin.ARCHIVED, obj_id
490
489
491 # repository groups
490 # repository groups
492 for perm in self.default_repo_groups_perms:
491 for perm in self.default_repo_groups_perms:
493 rg_k = perm.UserRepoGroupToPerm.group.group_name
492 rg_k = perm.UserRepoGroupToPerm.group.group_name
494 obj_id = perm.UserRepoGroupToPerm.group.group_id
493 obj_id = perm.UserRepoGroupToPerm.group.group_id
495 p = 'group.admin'
494 p = 'group.admin'
496 self.permissions_repository_groups[rg_k] = p, PermOrigin.SUPER_ADMIN, obj_id
495 self.permissions_repository_groups[rg_k] = p, PermOrigin.SUPER_ADMIN, obj_id
497
496
498 # user groups
497 # user groups
499 for perm in self.default_user_group_perms:
498 for perm in self.default_user_group_perms:
500 u_k = perm.UserUserGroupToPerm.user_group.users_group_name
499 u_k = perm.UserUserGroupToPerm.user_group.users_group_name
501 obj_id = perm.UserUserGroupToPerm.user_group.users_group_id
500 obj_id = perm.UserUserGroupToPerm.user_group.users_group_id
502 p = 'usergroup.admin'
501 p = 'usergroup.admin'
503 self.permissions_user_groups[u_k] = p, PermOrigin.SUPER_ADMIN, obj_id
502 self.permissions_user_groups[u_k] = p, PermOrigin.SUPER_ADMIN, obj_id
504
503
505 # branch permissions
504 # branch permissions
506 # since super-admin also can have custom rule permissions
505 # since super-admin also can have custom rule permissions
507 # we *always* need to calculate those inherited from default, and also explicit
506 # we *always* need to calculate those inherited from default, and also explicit
508 self._calculate_default_permissions_repository_branches(
507 self._calculate_default_permissions_repository_branches(
509 user_inherit_object_permissions=False)
508 user_inherit_object_permissions=False)
510 self._calculate_repository_branch_permissions()
509 self._calculate_repository_branch_permissions()
511
510
512 return self._permission_structure()
511 return self._permission_structure()
513
512
514 def _calculate_global_default_permissions(self):
513 def _calculate_global_default_permissions(self):
515 """
514 """
516 global permissions taken from the default user
515 global permissions taken from the default user
517 """
516 """
518 default_global_perms = UserToPerm.query()\
517 default_global_perms = UserToPerm.query()\
519 .filter(UserToPerm.user_id == self.default_user_id)\
518 .filter(UserToPerm.user_id == self.default_user_id)\
520 .options(joinedload(UserToPerm.permission))
519 .options(joinedload(UserToPerm.permission))
521
520
522 for perm in default_global_perms:
521 for perm in default_global_perms:
523 self.permissions_global.add(perm.permission.permission_name)
522 self.permissions_global.add(perm.permission.permission_name)
524
523
525 if self.user_is_admin:
524 if self.user_is_admin:
526 self.permissions_global.add('hg.admin')
525 self.permissions_global.add('hg.admin')
527 self.permissions_global.add('hg.create.write_on_repogroup.true')
526 self.permissions_global.add('hg.create.write_on_repogroup.true')
528
527
529 def _calculate_global_permissions(self):
528 def _calculate_global_permissions(self):
530 """
529 """
531 Set global system permissions with user permissions or permissions
530 Set global system permissions with user permissions or permissions
532 taken from the user groups of the current user.
531 taken from the user groups of the current user.
533
532
534 The permissions include repo creating, repo group creating, forking
533 The permissions include repo creating, repo group creating, forking
535 etc.
534 etc.
536 """
535 """
537
536
538 # now we read the defined permissions and overwrite what we have set
537 # now we read the defined permissions and overwrite what we have set
539 # before those can be configured from groups or users explicitly.
538 # before those can be configured from groups or users explicitly.
540
539
541 # In case we want to extend this list we should make sure
540 # In case we want to extend this list we should make sure
542 # this is in sync with User.DEFAULT_USER_PERMISSIONS definitions
541 # this is in sync with User.DEFAULT_USER_PERMISSIONS definitions
543 from rhodecode.model.permission import PermissionModel
542 from rhodecode.model.permission import PermissionModel
544
543
545 _configurable = frozenset([
544 _configurable = frozenset([
546 PermissionModel.FORKING_DISABLED, PermissionModel.FORKING_ENABLED,
545 PermissionModel.FORKING_DISABLED, PermissionModel.FORKING_ENABLED,
547 'hg.create.none', 'hg.create.repository',
546 'hg.create.none', 'hg.create.repository',
548 'hg.usergroup.create.false', 'hg.usergroup.create.true',
547 'hg.usergroup.create.false', 'hg.usergroup.create.true',
549 'hg.repogroup.create.false', 'hg.repogroup.create.true',
548 'hg.repogroup.create.false', 'hg.repogroup.create.true',
550 'hg.create.write_on_repogroup.false', 'hg.create.write_on_repogroup.true',
549 'hg.create.write_on_repogroup.false', 'hg.create.write_on_repogroup.true',
551 'hg.inherit_default_perms.false', 'hg.inherit_default_perms.true'
550 'hg.inherit_default_perms.false', 'hg.inherit_default_perms.true'
552 ])
551 ])
553
552
554 # USER GROUPS comes first user group global permissions
553 # USER GROUPS comes first user group global permissions
555 user_perms_from_users_groups = Session().query(UserGroupToPerm)\
554 user_perms_from_users_groups = Session().query(UserGroupToPerm)\
556 .options(joinedload(UserGroupToPerm.permission))\
555 .options(joinedload(UserGroupToPerm.permission))\
557 .join((UserGroupMember, UserGroupToPerm.users_group_id ==
556 .join((UserGroupMember, UserGroupToPerm.users_group_id ==
558 UserGroupMember.users_group_id))\
557 UserGroupMember.users_group_id))\
559 .filter(UserGroupMember.user_id == self.user_id)\
558 .filter(UserGroupMember.user_id == self.user_id)\
560 .order_by(UserGroupToPerm.users_group_id)\
559 .order_by(UserGroupToPerm.users_group_id)\
561 .all()
560 .all()
562
561
563 # need to group here by groups since user can be in more than
562 # need to group here by groups since user can be in more than
564 # one group, so we get all groups
563 # one group, so we get all groups
565 _explicit_grouped_perms = [
564 _explicit_grouped_perms = [
566 [x, list(y)] for x, y in
565 [x, list(y)] for x, y in
567 itertools.groupby(user_perms_from_users_groups,
566 itertools.groupby(user_perms_from_users_groups,
568 lambda _x: _x.users_group)]
567 lambda _x: _x.users_group)]
569
568
570 for gr, perms in _explicit_grouped_perms:
569 for gr, perms in _explicit_grouped_perms:
571 # since user can be in multiple groups iterate over them and
570 # since user can be in multiple groups iterate over them and
572 # select the lowest permissions first (more explicit)
571 # select the lowest permissions first (more explicit)
573 # TODO(marcink): do this^^
572 # TODO(marcink): do this^^
574
573
575 # group doesn't inherit default permissions so we actually set them
574 # group doesn't inherit default permissions so we actually set them
576 if not gr.inherit_default_permissions:
575 if not gr.inherit_default_permissions:
577 # NEED TO IGNORE all previously set configurable permissions
576 # NEED TO IGNORE all previously set configurable permissions
578 # and replace them with explicitly set from this user
577 # and replace them with explicitly set from this user
579 # group permissions
578 # group permissions
580 self.permissions_global = self.permissions_global.difference(
579 self.permissions_global = self.permissions_global.difference(
581 _configurable)
580 _configurable)
582 for perm in perms:
581 for perm in perms:
583 self.permissions_global.add(perm.permission.permission_name)
582 self.permissions_global.add(perm.permission.permission_name)
584
583
585 # user explicit global permissions
584 # user explicit global permissions
586 user_perms = Session().query(UserToPerm)\
585 user_perms = Session().query(UserToPerm)\
587 .options(joinedload(UserToPerm.permission))\
586 .options(joinedload(UserToPerm.permission))\
588 .filter(UserToPerm.user_id == self.user_id).all()
587 .filter(UserToPerm.user_id == self.user_id).all()
589
588
590 if not self.inherit_default_permissions:
589 if not self.inherit_default_permissions:
591 # NEED TO IGNORE all configurable permissions and
590 # NEED TO IGNORE all configurable permissions and
592 # replace them with explicitly set from this user permissions
591 # replace them with explicitly set from this user permissions
593 self.permissions_global = self.permissions_global.difference(
592 self.permissions_global = self.permissions_global.difference(
594 _configurable)
593 _configurable)
595 for perm in user_perms:
594 for perm in user_perms:
596 self.permissions_global.add(perm.permission.permission_name)
595 self.permissions_global.add(perm.permission.permission_name)
597
596
598 def _calculate_default_permissions_repositories(self, user_inherit_object_permissions):
597 def _calculate_default_permissions_repositories(self, user_inherit_object_permissions):
599 for perm in self.default_repo_perms:
598 for perm in self.default_repo_perms:
600 r_k = perm.UserRepoToPerm.repository.repo_name
599 r_k = perm.UserRepoToPerm.repository.repo_name
601 obj_id = perm.UserRepoToPerm.repository.repo_id
600 obj_id = perm.UserRepoToPerm.repository.repo_id
602 archived = perm.UserRepoToPerm.repository.archived
601 archived = perm.UserRepoToPerm.repository.archived
603 p = perm.Permission.permission_name
602 p = perm.Permission.permission_name
604 o = PermOrigin.REPO_DEFAULT
603 o = PermOrigin.REPO_DEFAULT
605 self.permissions_repositories[r_k] = p, o, obj_id
604 self.permissions_repositories[r_k] = p, o, obj_id
606
605
607 # if we decide this user isn't inheriting permissions from
606 # if we decide this user isn't inheriting permissions from
608 # default user we set him to .none so only explicit
607 # default user we set him to .none so only explicit
609 # permissions work
608 # permissions work
610 if not user_inherit_object_permissions:
609 if not user_inherit_object_permissions:
611 p = 'repository.none'
610 p = 'repository.none'
612 o = PermOrigin.REPO_DEFAULT_NO_INHERIT
611 o = PermOrigin.REPO_DEFAULT_NO_INHERIT
613 self.permissions_repositories[r_k] = p, o, obj_id
612 self.permissions_repositories[r_k] = p, o, obj_id
614
613
615 if perm.Repository.private and not (
614 if perm.Repository.private and not (
616 perm.Repository.user_id == self.user_id):
615 perm.Repository.user_id == self.user_id):
617 # disable defaults for private repos,
616 # disable defaults for private repos,
618 p = 'repository.none'
617 p = 'repository.none'
619 o = PermOrigin.REPO_PRIVATE
618 o = PermOrigin.REPO_PRIVATE
620 self.permissions_repositories[r_k] = p, o, obj_id
619 self.permissions_repositories[r_k] = p, o, obj_id
621
620
622 elif perm.Repository.user_id == self.user_id:
621 elif perm.Repository.user_id == self.user_id:
623 # set admin if owner
622 # set admin if owner
624 p = 'repository.admin'
623 p = 'repository.admin'
625 o = PermOrigin.REPO_OWNER
624 o = PermOrigin.REPO_OWNER
626 self.permissions_repositories[r_k] = p, o, obj_id
625 self.permissions_repositories[r_k] = p, o, obj_id
627
626
628 if self.user_is_admin:
627 if self.user_is_admin:
629 p = 'repository.admin'
628 p = 'repository.admin'
630 o = PermOrigin.SUPER_ADMIN
629 o = PermOrigin.SUPER_ADMIN
631 self.permissions_repositories[r_k] = p, o, obj_id
630 self.permissions_repositories[r_k] = p, o, obj_id
632
631
633 # finally in case of archived repositories, we downgrade higher
632 # finally in case of archived repositories, we downgrade higher
634 # permissions to read
633 # permissions to read
635 if archived:
634 if archived:
636 current_perm = self.permissions_repositories[r_k]
635 current_perm = self.permissions_repositories[r_k]
637 if current_perm in ['repository.write', 'repository.admin']:
636 if current_perm in ['repository.write', 'repository.admin']:
638 p = 'repository.read'
637 p = 'repository.read'
639 o = PermOrigin.ARCHIVED
638 o = PermOrigin.ARCHIVED
640 self.permissions_repositories[r_k] = p, o, obj_id
639 self.permissions_repositories[r_k] = p, o, obj_id
641
640
642 def _calculate_default_permissions_repository_branches(self, user_inherit_object_permissions):
641 def _calculate_default_permissions_repository_branches(self, user_inherit_object_permissions):
643 for perm in self.default_branch_repo_perms:
642 for perm in self.default_branch_repo_perms:
644
643
645 r_k = perm.UserRepoToPerm.repository.repo_name
644 r_k = perm.UserRepoToPerm.repository.repo_name
646 p = perm.Permission.permission_name
645 p = perm.Permission.permission_name
647 pattern = perm.UserToRepoBranchPermission.branch_pattern
646 pattern = perm.UserToRepoBranchPermission.branch_pattern
648 o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username
647 o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username
649
648
650 if not self.explicit:
649 if not self.explicit:
651 cur_perm = self.permissions_repository_branches.get(r_k)
650 cur_perm = self.permissions_repository_branches.get(r_k)
652 if cur_perm:
651 if cur_perm:
653 cur_perm = cur_perm[pattern]
652 cur_perm = cur_perm[pattern]
654 cur_perm = cur_perm or 'branch.none'
653 cur_perm = cur_perm or 'branch.none'
655
654
656 p = self._choose_permission(p, cur_perm)
655 p = self._choose_permission(p, cur_perm)
657
656
658 # NOTE(marcink): register all pattern/perm instances in this
657 # NOTE(marcink): register all pattern/perm instances in this
659 # special dict that aggregates entries
658 # special dict that aggregates entries
660 self.permissions_repository_branches[r_k] = {pattern: p}, o
659 self.permissions_repository_branches[r_k] = {pattern: p}, o
661
660
662 def _calculate_default_permissions_repository_groups(self, user_inherit_object_permissions):
661 def _calculate_default_permissions_repository_groups(self, user_inherit_object_permissions):
663 for perm in self.default_repo_groups_perms:
662 for perm in self.default_repo_groups_perms:
664 rg_k = perm.UserRepoGroupToPerm.group.group_name
663 rg_k = perm.UserRepoGroupToPerm.group.group_name
665 obj_id = perm.UserRepoGroupToPerm.group.group_id
664 obj_id = perm.UserRepoGroupToPerm.group.group_id
666 p = perm.Permission.permission_name
665 p = perm.Permission.permission_name
667 o = PermOrigin.REPOGROUP_DEFAULT
666 o = PermOrigin.REPOGROUP_DEFAULT
668 self.permissions_repository_groups[rg_k] = p, o, obj_id
667 self.permissions_repository_groups[rg_k] = p, o, obj_id
669
668
670 # if we decide this user isn't inheriting permissions from default
669 # if we decide this user isn't inheriting permissions from default
671 # user we set him to .none so only explicit permissions work
670 # user we set him to .none so only explicit permissions work
672 if not user_inherit_object_permissions:
671 if not user_inherit_object_permissions:
673 p = 'group.none'
672 p = 'group.none'
674 o = PermOrigin.REPOGROUP_DEFAULT_NO_INHERIT
673 o = PermOrigin.REPOGROUP_DEFAULT_NO_INHERIT
675 self.permissions_repository_groups[rg_k] = p, o, obj_id
674 self.permissions_repository_groups[rg_k] = p, o, obj_id
676
675
677 if perm.RepoGroup.user_id == self.user_id:
676 if perm.RepoGroup.user_id == self.user_id:
678 # set admin if owner
677 # set admin if owner
679 p = 'group.admin'
678 p = 'group.admin'
680 o = PermOrigin.REPOGROUP_OWNER
679 o = PermOrigin.REPOGROUP_OWNER
681 self.permissions_repository_groups[rg_k] = p, o, obj_id
680 self.permissions_repository_groups[rg_k] = p, o, obj_id
682
681
683 if self.user_is_admin:
682 if self.user_is_admin:
684 p = 'group.admin'
683 p = 'group.admin'
685 o = PermOrigin.SUPER_ADMIN
684 o = PermOrigin.SUPER_ADMIN
686 self.permissions_repository_groups[rg_k] = p, o, obj_id
685 self.permissions_repository_groups[rg_k] = p, o, obj_id
687
686
688 def _calculate_default_permissions_user_groups(self, user_inherit_object_permissions):
687 def _calculate_default_permissions_user_groups(self, user_inherit_object_permissions):
689 for perm in self.default_user_group_perms:
688 for perm in self.default_user_group_perms:
690 u_k = perm.UserUserGroupToPerm.user_group.users_group_name
689 u_k = perm.UserUserGroupToPerm.user_group.users_group_name
691 obj_id = perm.UserUserGroupToPerm.user_group.users_group_id
690 obj_id = perm.UserUserGroupToPerm.user_group.users_group_id
692 p = perm.Permission.permission_name
691 p = perm.Permission.permission_name
693 o = PermOrigin.USERGROUP_DEFAULT
692 o = PermOrigin.USERGROUP_DEFAULT
694 self.permissions_user_groups[u_k] = p, o, obj_id
693 self.permissions_user_groups[u_k] = p, o, obj_id
695
694
696 # if we decide this user isn't inheriting permissions from default
695 # if we decide this user isn't inheriting permissions from default
697 # user we set him to .none so only explicit permissions work
696 # user we set him to .none so only explicit permissions work
698 if not user_inherit_object_permissions:
697 if not user_inherit_object_permissions:
699 p = 'usergroup.none'
698 p = 'usergroup.none'
700 o = PermOrigin.USERGROUP_DEFAULT_NO_INHERIT
699 o = PermOrigin.USERGROUP_DEFAULT_NO_INHERIT
701 self.permissions_user_groups[u_k] = p, o, obj_id
700 self.permissions_user_groups[u_k] = p, o, obj_id
702
701
703 if perm.UserGroup.user_id == self.user_id:
702 if perm.UserGroup.user_id == self.user_id:
704 # set admin if owner
703 # set admin if owner
705 p = 'usergroup.admin'
704 p = 'usergroup.admin'
706 o = PermOrigin.USERGROUP_OWNER
705 o = PermOrigin.USERGROUP_OWNER
707 self.permissions_user_groups[u_k] = p, o, obj_id
706 self.permissions_user_groups[u_k] = p, o, obj_id
708
707
709 if self.user_is_admin:
708 if self.user_is_admin:
710 p = 'usergroup.admin'
709 p = 'usergroup.admin'
711 o = PermOrigin.SUPER_ADMIN
710 o = PermOrigin.SUPER_ADMIN
712 self.permissions_user_groups[u_k] = p, o, obj_id
711 self.permissions_user_groups[u_k] = p, o, obj_id
713
712
714 def _calculate_default_permissions(self):
713 def _calculate_default_permissions(self):
715 """
714 """
716 Set default user permissions for repositories, repository branches,
715 Set default user permissions for repositories, repository branches,
717 repository groups, user groups taken from the default user.
716 repository groups, user groups taken from the default user.
718
717
719 Calculate inheritance of object permissions based on what we have now
718 Calculate inheritance of object permissions based on what we have now
720 in GLOBAL permissions. We check if .false is in GLOBAL since this is
719 in GLOBAL permissions. We check if .false is in GLOBAL since this is
721 explicitly set. Inherit is the opposite of .false being there.
720 explicitly set. Inherit is the opposite of .false being there.
722
721
723 .. note::
722 .. note::
724
723
725 the syntax is little bit odd but what we need to check here is
724 the syntax is little bit odd but what we need to check here is
726 the opposite of .false permission being in the list so even for
725 the opposite of .false permission being in the list so even for
727 inconsistent state when both .true/.false is there
726 inconsistent state when both .true/.false is there
728 .false is more important
727 .false is more important
729
728
730 """
729 """
731 user_inherit_object_permissions = not ('hg.inherit_default_perms.false'
730 user_inherit_object_permissions = not ('hg.inherit_default_perms.false'
732 in self.permissions_global)
731 in self.permissions_global)
733
732
734 # default permissions inherited from `default` user permissions
733 # default permissions inherited from `default` user permissions
735 self._calculate_default_permissions_repositories(
734 self._calculate_default_permissions_repositories(
736 user_inherit_object_permissions)
735 user_inherit_object_permissions)
737
736
738 self._calculate_default_permissions_repository_branches(
737 self._calculate_default_permissions_repository_branches(
739 user_inherit_object_permissions)
738 user_inherit_object_permissions)
740
739
741 self._calculate_default_permissions_repository_groups(
740 self._calculate_default_permissions_repository_groups(
742 user_inherit_object_permissions)
741 user_inherit_object_permissions)
743
742
744 self._calculate_default_permissions_user_groups(
743 self._calculate_default_permissions_user_groups(
745 user_inherit_object_permissions)
744 user_inherit_object_permissions)
746
745
747 def _calculate_repository_permissions(self):
746 def _calculate_repository_permissions(self):
748 """
747 """
749 Repository access permissions for the current user.
748 Repository access permissions for the current user.
750
749
751 Check if the user is part of user groups for this repository and
750 Check if the user is part of user groups for this repository and
752 fill in the permission from it. `_choose_permission` decides of which
751 fill in the permission from it. `_choose_permission` decides of which
753 permission should be selected based on selected method.
752 permission should be selected based on selected method.
754 """
753 """
755
754
756 # user group for repositories permissions
755 # user group for repositories permissions
757 user_repo_perms_from_user_group = Permission\
756 user_repo_perms_from_user_group = Permission\
758 .get_default_repo_perms_from_user_group(
757 .get_default_repo_perms_from_user_group(
759 self.user_id, self.scope_repo_id)
758 self.user_id, self.scope_repo_id)
760
759
761 multiple_counter = collections.defaultdict(int)
760 multiple_counter = collections.defaultdict(int)
762 for perm in user_repo_perms_from_user_group:
761 for perm in user_repo_perms_from_user_group:
763 r_k = perm.UserGroupRepoToPerm.repository.repo_name
762 r_k = perm.UserGroupRepoToPerm.repository.repo_name
764 obj_id = perm.UserGroupRepoToPerm.repository.repo_id
763 obj_id = perm.UserGroupRepoToPerm.repository.repo_id
765 multiple_counter[r_k] += 1
764 multiple_counter[r_k] += 1
766 p = perm.Permission.permission_name
765 p = perm.Permission.permission_name
767 o = PermOrigin.REPO_USERGROUP % perm.UserGroupRepoToPerm\
766 o = PermOrigin.REPO_USERGROUP % perm.UserGroupRepoToPerm\
768 .users_group.users_group_name
767 .users_group.users_group_name
769
768
770 if multiple_counter[r_k] > 1:
769 if multiple_counter[r_k] > 1:
771 cur_perm = self.permissions_repositories[r_k]
770 cur_perm = self.permissions_repositories[r_k]
772 p = self._choose_permission(p, cur_perm)
771 p = self._choose_permission(p, cur_perm)
773
772
774 self.permissions_repositories[r_k] = p, o, obj_id
773 self.permissions_repositories[r_k] = p, o, obj_id
775
774
776 if perm.Repository.user_id == self.user_id:
775 if perm.Repository.user_id == self.user_id:
777 # set admin if owner
776 # set admin if owner
778 p = 'repository.admin'
777 p = 'repository.admin'
779 o = PermOrigin.REPO_OWNER
778 o = PermOrigin.REPO_OWNER
780 self.permissions_repositories[r_k] = p, o, obj_id
779 self.permissions_repositories[r_k] = p, o, obj_id
781
780
782 if self.user_is_admin:
781 if self.user_is_admin:
783 p = 'repository.admin'
782 p = 'repository.admin'
784 o = PermOrigin.SUPER_ADMIN
783 o = PermOrigin.SUPER_ADMIN
785 self.permissions_repositories[r_k] = p, o, obj_id
784 self.permissions_repositories[r_k] = p, o, obj_id
786
785
787 # user explicit permissions for repositories, overrides any specified
786 # user explicit permissions for repositories, overrides any specified
788 # by the group permission
787 # by the group permission
789 user_repo_perms = Permission.get_default_repo_perms(
788 user_repo_perms = Permission.get_default_repo_perms(
790 self.user_id, self.scope_repo_id)
789 self.user_id, self.scope_repo_id)
791 for perm in user_repo_perms:
790 for perm in user_repo_perms:
792 r_k = perm.UserRepoToPerm.repository.repo_name
791 r_k = perm.UserRepoToPerm.repository.repo_name
793 obj_id = perm.UserRepoToPerm.repository.repo_id
792 obj_id = perm.UserRepoToPerm.repository.repo_id
794 archived = perm.UserRepoToPerm.repository.archived
793 archived = perm.UserRepoToPerm.repository.archived
795 p = perm.Permission.permission_name
794 p = perm.Permission.permission_name
796 o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username
795 o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username
797
796
798 if not self.explicit:
797 if not self.explicit:
799 cur_perm = self.permissions_repositories.get(
798 cur_perm = self.permissions_repositories.get(
800 r_k, 'repository.none')
799 r_k, 'repository.none')
801 p = self._choose_permission(p, cur_perm)
800 p = self._choose_permission(p, cur_perm)
802
801
803 self.permissions_repositories[r_k] = p, o, obj_id
802 self.permissions_repositories[r_k] = p, o, obj_id
804
803
805 if perm.Repository.user_id == self.user_id:
804 if perm.Repository.user_id == self.user_id:
806 # set admin if owner
805 # set admin if owner
807 p = 'repository.admin'
806 p = 'repository.admin'
808 o = PermOrigin.REPO_OWNER
807 o = PermOrigin.REPO_OWNER
809 self.permissions_repositories[r_k] = p, o, obj_id
808 self.permissions_repositories[r_k] = p, o, obj_id
810
809
811 if self.user_is_admin:
810 if self.user_is_admin:
812 p = 'repository.admin'
811 p = 'repository.admin'
813 o = PermOrigin.SUPER_ADMIN
812 o = PermOrigin.SUPER_ADMIN
814 self.permissions_repositories[r_k] = p, o, obj_id
813 self.permissions_repositories[r_k] = p, o, obj_id
815
814
816 # finally in case of archived repositories, we downgrade higher
815 # finally in case of archived repositories, we downgrade higher
817 # permissions to read
816 # permissions to read
818 if archived:
817 if archived:
819 current_perm = self.permissions_repositories[r_k]
818 current_perm = self.permissions_repositories[r_k]
820 if current_perm in ['repository.write', 'repository.admin']:
819 if current_perm in ['repository.write', 'repository.admin']:
821 p = 'repository.read'
820 p = 'repository.read'
822 o = PermOrigin.ARCHIVED
821 o = PermOrigin.ARCHIVED
823 self.permissions_repositories[r_k] = p, o, obj_id
822 self.permissions_repositories[r_k] = p, o, obj_id
824
823
825 def _calculate_repository_branch_permissions(self):
824 def _calculate_repository_branch_permissions(self):
826 # user group for repositories permissions
825 # user group for repositories permissions
827 user_repo_branch_perms_from_user_group = Permission\
826 user_repo_branch_perms_from_user_group = Permission\
828 .get_default_repo_branch_perms_from_user_group(
827 .get_default_repo_branch_perms_from_user_group(
829 self.user_id, self.scope_repo_id)
828 self.user_id, self.scope_repo_id)
830
829
831 multiple_counter = collections.defaultdict(int)
830 multiple_counter = collections.defaultdict(int)
832 for perm in user_repo_branch_perms_from_user_group:
831 for perm in user_repo_branch_perms_from_user_group:
833 r_k = perm.UserGroupRepoToPerm.repository.repo_name
832 r_k = perm.UserGroupRepoToPerm.repository.repo_name
834 p = perm.Permission.permission_name
833 p = perm.Permission.permission_name
835 pattern = perm.UserGroupToRepoBranchPermission.branch_pattern
834 pattern = perm.UserGroupToRepoBranchPermission.branch_pattern
836 o = PermOrigin.REPO_USERGROUP % perm.UserGroupRepoToPerm\
835 o = PermOrigin.REPO_USERGROUP % perm.UserGroupRepoToPerm\
837 .users_group.users_group_name
836 .users_group.users_group_name
838
837
839 multiple_counter[r_k] += 1
838 multiple_counter[r_k] += 1
840 if multiple_counter[r_k] > 1:
839 if multiple_counter[r_k] > 1:
841 cur_perm = self.permissions_repository_branches[r_k][pattern]
840 cur_perm = self.permissions_repository_branches[r_k][pattern]
842 p = self._choose_permission(p, cur_perm)
841 p = self._choose_permission(p, cur_perm)
843
842
844 self.permissions_repository_branches[r_k] = {pattern: p}, o
843 self.permissions_repository_branches[r_k] = {pattern: p}, o
845
844
846 # user explicit branch permissions for repositories, overrides
845 # user explicit branch permissions for repositories, overrides
847 # any specified by the group permission
846 # any specified by the group permission
848 user_repo_branch_perms = Permission.get_default_repo_branch_perms(
847 user_repo_branch_perms = Permission.get_default_repo_branch_perms(
849 self.user_id, self.scope_repo_id)
848 self.user_id, self.scope_repo_id)
850
849
851 for perm in user_repo_branch_perms:
850 for perm in user_repo_branch_perms:
852
851
853 r_k = perm.UserRepoToPerm.repository.repo_name
852 r_k = perm.UserRepoToPerm.repository.repo_name
854 p = perm.Permission.permission_name
853 p = perm.Permission.permission_name
855 pattern = perm.UserToRepoBranchPermission.branch_pattern
854 pattern = perm.UserToRepoBranchPermission.branch_pattern
856 o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username
855 o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username
857
856
858 if not self.explicit:
857 if not self.explicit:
859 cur_perm = self.permissions_repository_branches.get(r_k)
858 cur_perm = self.permissions_repository_branches.get(r_k)
860 if cur_perm:
859 if cur_perm:
861 cur_perm = cur_perm[pattern]
860 cur_perm = cur_perm[pattern]
862 cur_perm = cur_perm or 'branch.none'
861 cur_perm = cur_perm or 'branch.none'
863 p = self._choose_permission(p, cur_perm)
862 p = self._choose_permission(p, cur_perm)
864
863
865 # NOTE(marcink): register all pattern/perm instances in this
864 # NOTE(marcink): register all pattern/perm instances in this
866 # special dict that aggregates entries
865 # special dict that aggregates entries
867 self.permissions_repository_branches[r_k] = {pattern: p}, o
866 self.permissions_repository_branches[r_k] = {pattern: p}, o
868
867
869 def _calculate_repository_group_permissions(self):
868 def _calculate_repository_group_permissions(self):
870 """
869 """
871 Repository group permissions for the current user.
870 Repository group permissions for the current user.
872
871
873 Check if the user is part of user groups for repository groups and
872 Check if the user is part of user groups for repository groups and
874 fill in the permissions from it. `_choose_permission` decides of which
873 fill in the permissions from it. `_choose_permission` decides of which
875 permission should be selected based on selected method.
874 permission should be selected based on selected method.
876 """
875 """
877 # user group for repo groups permissions
876 # user group for repo groups permissions
878 user_repo_group_perms_from_user_group = Permission\
877 user_repo_group_perms_from_user_group = Permission\
879 .get_default_group_perms_from_user_group(
878 .get_default_group_perms_from_user_group(
880 self.user_id, self.scope_repo_group_id)
879 self.user_id, self.scope_repo_group_id)
881
880
882 multiple_counter = collections.defaultdict(int)
881 multiple_counter = collections.defaultdict(int)
883 for perm in user_repo_group_perms_from_user_group:
882 for perm in user_repo_group_perms_from_user_group:
884 rg_k = perm.UserGroupRepoGroupToPerm.group.group_name
883 rg_k = perm.UserGroupRepoGroupToPerm.group.group_name
885 obj_id = perm.UserGroupRepoGroupToPerm.group.group_id
884 obj_id = perm.UserGroupRepoGroupToPerm.group.group_id
886 multiple_counter[rg_k] += 1
885 multiple_counter[rg_k] += 1
887 o = PermOrigin.REPOGROUP_USERGROUP % perm.UserGroupRepoGroupToPerm\
886 o = PermOrigin.REPOGROUP_USERGROUP % perm.UserGroupRepoGroupToPerm\
888 .users_group.users_group_name
887 .users_group.users_group_name
889 p = perm.Permission.permission_name
888 p = perm.Permission.permission_name
890
889
891 if multiple_counter[rg_k] > 1:
890 if multiple_counter[rg_k] > 1:
892 cur_perm = self.permissions_repository_groups[rg_k]
891 cur_perm = self.permissions_repository_groups[rg_k]
893 p = self._choose_permission(p, cur_perm)
892 p = self._choose_permission(p, cur_perm)
894 self.permissions_repository_groups[rg_k] = p, o, obj_id
893 self.permissions_repository_groups[rg_k] = p, o, obj_id
895
894
896 if perm.RepoGroup.user_id == self.user_id:
895 if perm.RepoGroup.user_id == self.user_id:
897 # set admin if owner, even for member of other user group
896 # set admin if owner, even for member of other user group
898 p = 'group.admin'
897 p = 'group.admin'
899 o = PermOrigin.REPOGROUP_OWNER
898 o = PermOrigin.REPOGROUP_OWNER
900 self.permissions_repository_groups[rg_k] = p, o, obj_id
899 self.permissions_repository_groups[rg_k] = p, o, obj_id
901
900
902 if self.user_is_admin:
901 if self.user_is_admin:
903 p = 'group.admin'
902 p = 'group.admin'
904 o = PermOrigin.SUPER_ADMIN
903 o = PermOrigin.SUPER_ADMIN
905 self.permissions_repository_groups[rg_k] = p, o, obj_id
904 self.permissions_repository_groups[rg_k] = p, o, obj_id
906
905
907 # user explicit permissions for repository groups
906 # user explicit permissions for repository groups
908 user_repo_groups_perms = Permission.get_default_group_perms(
907 user_repo_groups_perms = Permission.get_default_group_perms(
909 self.user_id, self.scope_repo_group_id)
908 self.user_id, self.scope_repo_group_id)
910 for perm in user_repo_groups_perms:
909 for perm in user_repo_groups_perms:
911 rg_k = perm.UserRepoGroupToPerm.group.group_name
910 rg_k = perm.UserRepoGroupToPerm.group.group_name
912 obj_id = perm.UserRepoGroupToPerm.group.group_id
911 obj_id = perm.UserRepoGroupToPerm.group.group_id
913 o = PermOrigin.REPOGROUP_USER % perm.UserRepoGroupToPerm\
912 o = PermOrigin.REPOGROUP_USER % perm.UserRepoGroupToPerm\
914 .user.username
913 .user.username
915 p = perm.Permission.permission_name
914 p = perm.Permission.permission_name
916
915
917 if not self.explicit:
916 if not self.explicit:
918 cur_perm = self.permissions_repository_groups.get(rg_k, 'group.none')
917 cur_perm = self.permissions_repository_groups.get(rg_k, 'group.none')
919 p = self._choose_permission(p, cur_perm)
918 p = self._choose_permission(p, cur_perm)
920
919
921 self.permissions_repository_groups[rg_k] = p, o, obj_id
920 self.permissions_repository_groups[rg_k] = p, o, obj_id
922
921
923 if perm.RepoGroup.user_id == self.user_id:
922 if perm.RepoGroup.user_id == self.user_id:
924 # set admin if owner
923 # set admin if owner
925 p = 'group.admin'
924 p = 'group.admin'
926 o = PermOrigin.REPOGROUP_OWNER
925 o = PermOrigin.REPOGROUP_OWNER
927 self.permissions_repository_groups[rg_k] = p, o, obj_id
926 self.permissions_repository_groups[rg_k] = p, o, obj_id
928
927
929 if self.user_is_admin:
928 if self.user_is_admin:
930 p = 'group.admin'
929 p = 'group.admin'
931 o = PermOrigin.SUPER_ADMIN
930 o = PermOrigin.SUPER_ADMIN
932 self.permissions_repository_groups[rg_k] = p, o, obj_id
931 self.permissions_repository_groups[rg_k] = p, o, obj_id
933
932
934 def _calculate_user_group_permissions(self):
933 def _calculate_user_group_permissions(self):
935 """
934 """
936 User group permissions for the current user.
935 User group permissions for the current user.
937 """
936 """
938 # user group for user group permissions
937 # user group for user group permissions
939 user_group_from_user_group = Permission\
938 user_group_from_user_group = Permission\
940 .get_default_user_group_perms_from_user_group(
939 .get_default_user_group_perms_from_user_group(
941 self.user_id, self.scope_user_group_id)
940 self.user_id, self.scope_user_group_id)
942
941
943 multiple_counter = collections.defaultdict(int)
942 multiple_counter = collections.defaultdict(int)
944 for perm in user_group_from_user_group:
943 for perm in user_group_from_user_group:
945 ug_k = perm.UserGroupUserGroupToPerm.target_user_group.users_group_name
944 ug_k = perm.UserGroupUserGroupToPerm.target_user_group.users_group_name
946 obj_id = perm.UserGroupUserGroupToPerm.target_user_group.users_group_id
945 obj_id = perm.UserGroupUserGroupToPerm.target_user_group.users_group_id
947 multiple_counter[ug_k] += 1
946 multiple_counter[ug_k] += 1
948 o = PermOrigin.USERGROUP_USERGROUP % perm.UserGroupUserGroupToPerm\
947 o = PermOrigin.USERGROUP_USERGROUP % perm.UserGroupUserGroupToPerm\
949 .user_group.users_group_name
948 .user_group.users_group_name
950 p = perm.Permission.permission_name
949 p = perm.Permission.permission_name
951
950
952 if multiple_counter[ug_k] > 1:
951 if multiple_counter[ug_k] > 1:
953 cur_perm = self.permissions_user_groups[ug_k]
952 cur_perm = self.permissions_user_groups[ug_k]
954 p = self._choose_permission(p, cur_perm)
953 p = self._choose_permission(p, cur_perm)
955
954
956 self.permissions_user_groups[ug_k] = p, o, obj_id
955 self.permissions_user_groups[ug_k] = p, o, obj_id
957
956
958 if perm.UserGroup.user_id == self.user_id:
957 if perm.UserGroup.user_id == self.user_id:
959 # set admin if owner, even for member of other user group
958 # set admin if owner, even for member of other user group
960 p = 'usergroup.admin'
959 p = 'usergroup.admin'
961 o = PermOrigin.USERGROUP_OWNER
960 o = PermOrigin.USERGROUP_OWNER
962 self.permissions_user_groups[ug_k] = p, o, obj_id
961 self.permissions_user_groups[ug_k] = p, o, obj_id
963
962
964 if self.user_is_admin:
963 if self.user_is_admin:
965 p = 'usergroup.admin'
964 p = 'usergroup.admin'
966 o = PermOrigin.SUPER_ADMIN
965 o = PermOrigin.SUPER_ADMIN
967 self.permissions_user_groups[ug_k] = p, o, obj_id
966 self.permissions_user_groups[ug_k] = p, o, obj_id
968
967
969 # user explicit permission for user groups
968 # user explicit permission for user groups
970 user_user_groups_perms = Permission.get_default_user_group_perms(
969 user_user_groups_perms = Permission.get_default_user_group_perms(
971 self.user_id, self.scope_user_group_id)
970 self.user_id, self.scope_user_group_id)
972 for perm in user_user_groups_perms:
971 for perm in user_user_groups_perms:
973 ug_k = perm.UserUserGroupToPerm.user_group.users_group_name
972 ug_k = perm.UserUserGroupToPerm.user_group.users_group_name
974 obj_id = perm.UserUserGroupToPerm.user_group.users_group_id
973 obj_id = perm.UserUserGroupToPerm.user_group.users_group_id
975 o = PermOrigin.USERGROUP_USER % perm.UserUserGroupToPerm\
974 o = PermOrigin.USERGROUP_USER % perm.UserUserGroupToPerm\
976 .user.username
975 .user.username
977 p = perm.Permission.permission_name
976 p = perm.Permission.permission_name
978
977
979 if not self.explicit:
978 if not self.explicit:
980 cur_perm = self.permissions_user_groups.get(ug_k, 'usergroup.none')
979 cur_perm = self.permissions_user_groups.get(ug_k, 'usergroup.none')
981 p = self._choose_permission(p, cur_perm)
980 p = self._choose_permission(p, cur_perm)
982
981
983 self.permissions_user_groups[ug_k] = p, o, obj_id
982 self.permissions_user_groups[ug_k] = p, o, obj_id
984
983
985 if perm.UserGroup.user_id == self.user_id:
984 if perm.UserGroup.user_id == self.user_id:
986 # set admin if owner
985 # set admin if owner
987 p = 'usergroup.admin'
986 p = 'usergroup.admin'
988 o = PermOrigin.USERGROUP_OWNER
987 o = PermOrigin.USERGROUP_OWNER
989 self.permissions_user_groups[ug_k] = p, o, obj_id
988 self.permissions_user_groups[ug_k] = p, o, obj_id
990
989
991 if self.user_is_admin:
990 if self.user_is_admin:
992 p = 'usergroup.admin'
991 p = 'usergroup.admin'
993 o = PermOrigin.SUPER_ADMIN
992 o = PermOrigin.SUPER_ADMIN
994 self.permissions_user_groups[ug_k] = p, o, obj_id
993 self.permissions_user_groups[ug_k] = p, o, obj_id
995
994
996 def _choose_permission(self, new_perm, cur_perm):
995 def _choose_permission(self, new_perm, cur_perm):
997 new_perm_val = Permission.PERM_WEIGHTS[new_perm]
996 new_perm_val = Permission.PERM_WEIGHTS[new_perm]
998 cur_perm_val = Permission.PERM_WEIGHTS[cur_perm]
997 cur_perm_val = Permission.PERM_WEIGHTS[cur_perm]
999 if self.algo == 'higherwin':
998 if self.algo == 'higherwin':
1000 if new_perm_val > cur_perm_val:
999 if new_perm_val > cur_perm_val:
1001 return new_perm
1000 return new_perm
1002 return cur_perm
1001 return cur_perm
1003 elif self.algo == 'lowerwin':
1002 elif self.algo == 'lowerwin':
1004 if new_perm_val < cur_perm_val:
1003 if new_perm_val < cur_perm_val:
1005 return new_perm
1004 return new_perm
1006 return cur_perm
1005 return cur_perm
1007
1006
1008 def _permission_structure(self):
1007 def _permission_structure(self):
1009 return {
1008 return {
1010 'global': self.permissions_global,
1009 'global': self.permissions_global,
1011 'repositories': self.permissions_repositories,
1010 'repositories': self.permissions_repositories,
1012 'repository_branches': self.permissions_repository_branches,
1011 'repository_branches': self.permissions_repository_branches,
1013 'repositories_groups': self.permissions_repository_groups,
1012 'repositories_groups': self.permissions_repository_groups,
1014 'user_groups': self.permissions_user_groups,
1013 'user_groups': self.permissions_user_groups,
1015 }
1014 }
1016
1015
1017
1016
def allowed_auth_token_access(view_name, auth_token, whitelist=None):
    """
    Check if given controller_name is in whitelist of auth token access
    """
    if not whitelist:
        from rhodecode import CONFIG
        whitelist = aslist(
            CONFIG.get('api_access_controllers_whitelist'), sep=',')

    # translation table from legacy controller names to Pyramid view names
    compat = {
        'ChangesetController:*': 'RepoCommitsView:*',
        'ChangesetController:changeset_patch': 'RepoCommitsView:repo_commit_patch',
        'ChangesetController:changeset_raw': 'RepoCommitsView:repo_commit_raw',
        'FilesController:raw': 'RepoCommitsView:repo_commit_raw',
        'FilesController:archivefile': 'RepoFilesView:repo_archivefile',
        'GistsController:*': 'GistView:*',
    }

    log.debug(
        'Allowed views for AUTH TOKEN access: %s', whitelist)

    matched = False
    for entry in whitelist:
        token_ok = True
        # translate from old Controllers to Pyramid Views
        entry = compat.get(entry, entry)

        if '@' in entry:
            # entry pinned to one specific auth token: `view@token`
            entry, pinned_token = entry.split('@', 1)
            token_ok = auth_token == pinned_token

        if fnmatch.fnmatch(view_name, entry) and token_ok:
            matched = True
            break

    if matched:
        log.debug('view: `%s` matches entry in whitelist: %s',
                  view_name, whitelist)
    else:
        msg = ('view: `%s` does *NOT* match any entry in whitelist: %s'
               % (view_name, whitelist))
        if auth_token:
            # if we use auth token key and don't have access it's a warning
            log.warning(msg)
        else:
            log.debug(msg)

    return matched
1070
1069
1071
1070
1072 class AuthUser(object):
1071 class AuthUser(object):
1073 """
1072 """
1074 A simple object that handles all attributes of user in RhodeCode
1073 A simple object that handles all attributes of user in RhodeCode
1075
1074
1076 It does lookup based on API key,given user, or user present in session
1075 It does lookup based on API key,given user, or user present in session
1077 Then it fills all required information for such user. It also checks if
1076 Then it fills all required information for such user. It also checks if
1078 anonymous access is enabled and if so, it returns default user as logged in
1077 anonymous access is enabled and if so, it returns default user as logged in
1079 """
1078 """
1080 GLOBAL_PERMS = [x[0] for x in Permission.PERMS]
1079 GLOBAL_PERMS = [x[0] for x in Permission.PERMS]
1081 repo_read_perms = ['repository.read', 'repository.admin', 'repository.write']
1080 repo_read_perms = ['repository.read', 'repository.admin', 'repository.write']
1082 repo_group_read_perms = ['group.read', 'group.write', 'group.admin']
1081 repo_group_read_perms = ['group.read', 'group.write', 'group.admin']
1083 user_group_read_perms = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
1082 user_group_read_perms = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
1084
1083
1085 def __init__(self, user_id=None, api_key=None, username=None, ip_addr=None):
1084 def __init__(self, user_id=None, api_key=None, username=None, ip_addr=None):
1086
1085
1087 self.user_id = user_id
1086 self.user_id = user_id
1088 self._api_key = api_key
1087 self._api_key = api_key
1089
1088
1090 self.api_key = None
1089 self.api_key = None
1091 self.username = username
1090 self.username = username
1092 self.ip_addr = ip_addr
1091 self.ip_addr = ip_addr
1093 self.name = ''
1092 self.name = ''
1094 self.lastname = ''
1093 self.lastname = ''
1095 self.first_name = ''
1094 self.first_name = ''
1096 self.last_name = ''
1095 self.last_name = ''
1097 self.email = ''
1096 self.email = ''
1098 self.is_authenticated = False
1097 self.is_authenticated = False
1099 self.admin = False
1098 self.admin = False
1100 self.inherit_default_permissions = False
1099 self.inherit_default_permissions = False
1101 self.password = ''
1100 self.password = ''
1102
1101
1103 self.anonymous_user = None # propagated on propagate_data
1102 self.anonymous_user = None # propagated on propagate_data
1104 self.propagate_data()
1103 self.propagate_data()
1105 self._instance = None
1104 self._instance = None
1106 self._permissions_scoped_cache = {} # used to bind scoped calculation
1105 self._permissions_scoped_cache = {} # used to bind scoped calculation
1107
1106
1108 @LazyProperty
1107 @LazyProperty
1109 def permissions(self):
1108 def permissions(self):
1110 return self.get_perms(user=self, cache=None)
1109 return self.get_perms(user=self, cache=None)
1111
1110
1112 @LazyProperty
1111 @LazyProperty
1113 def permissions_safe(self):
1112 def permissions_safe(self):
1114 """
1113 """
1115 Filtered permissions excluding not allowed repositories
1114 Filtered permissions excluding not allowed repositories
1116 """
1115 """
1117 perms = self.get_perms(user=self, cache=None)
1116 perms = self.get_perms(user=self, cache=None)
1118
1117
1119 perms['repositories'] = {
1118 perms['repositories'] = {
1120 k: v for k, v in perms['repositories'].items()
1119 k: v for k, v in perms['repositories'].items()
1121 if v != 'repository.none'}
1120 if v != 'repository.none'}
1122 perms['repositories_groups'] = {
1121 perms['repositories_groups'] = {
1123 k: v for k, v in perms['repositories_groups'].items()
1122 k: v for k, v in perms['repositories_groups'].items()
1124 if v != 'group.none'}
1123 if v != 'group.none'}
1125 perms['user_groups'] = {
1124 perms['user_groups'] = {
1126 k: v for k, v in perms['user_groups'].items()
1125 k: v for k, v in perms['user_groups'].items()
1127 if v != 'usergroup.none'}
1126 if v != 'usergroup.none'}
1128 perms['repository_branches'] = {
1127 perms['repository_branches'] = {
1129 k: v for k, v in perms['repository_branches'].items()
1128 k: v for k, v in perms['repository_branches'].items()
1130 if v != 'branch.none'}
1129 if v != 'branch.none'}
1131 return perms
1130 return perms
1132
1131
1133 @LazyProperty
1132 @LazyProperty
1134 def permissions_full_details(self):
1133 def permissions_full_details(self):
1135 return self.get_perms(
1134 return self.get_perms(
1136 user=self, cache=None, calculate_super_admin=True)
1135 user=self, cache=None, calculate_super_admin=True)
1137
1136
1138 def permissions_with_scope(self, scope):
1137 def permissions_with_scope(self, scope):
1139 """
1138 """
1140 Call the get_perms function with scoped data. The scope in that function
1139 Call the get_perms function with scoped data. The scope in that function
1141 narrows the SQL calls to the given ID of objects resulting in fetching
1140 narrows the SQL calls to the given ID of objects resulting in fetching
1142 Just particular permission we want to obtain. If scope is an empty dict
1141 Just particular permission we want to obtain. If scope is an empty dict
1143 then it basically narrows the scope to GLOBAL permissions only.
1142 then it basically narrows the scope to GLOBAL permissions only.
1144
1143
1145 :param scope: dict
1144 :param scope: dict
1146 """
1145 """
1147 if 'repo_name' in scope:
1146 if 'repo_name' in scope:
1148 obj = Repository.get_by_repo_name(scope['repo_name'])
1147 obj = Repository.get_by_repo_name(scope['repo_name'])
1149 if obj:
1148 if obj:
1150 scope['repo_id'] = obj.repo_id
1149 scope['repo_id'] = obj.repo_id
1151 _scope = collections.OrderedDict()
1150 _scope = collections.OrderedDict()
1152 _scope['repo_id'] = -1
1151 _scope['repo_id'] = -1
1153 _scope['user_group_id'] = -1
1152 _scope['user_group_id'] = -1
1154 _scope['repo_group_id'] = -1
1153 _scope['repo_group_id'] = -1
1155
1154
1156 for k in sorted(scope.keys()):
1155 for k in sorted(scope.keys()):
1157 _scope[k] = scope[k]
1156 _scope[k] = scope[k]
1158
1157
1159 # store in cache to mimic how the @LazyProperty works,
1158 # store in cache to mimic how the @LazyProperty works,
1160 # the difference here is that we use the unique key calculated
1159 # the difference here is that we use the unique key calculated
1161 # from params and values
1160 # from params and values
1162 return self.get_perms(user=self, cache=None, scope=_scope)
1161 return self.get_perms(user=self, cache=None, scope=_scope)
1163
1162
1164 def get_instance(self):
1163 def get_instance(self):
1165 return User.get(self.user_id)
1164 return User.get(self.user_id)
1166
1165
1167 def propagate_data(self):
1166 def propagate_data(self):
1168 """
1167 """
1169 Fills in user data and propagates values to this instance. Maps fetched
1168 Fills in user data and propagates values to this instance. Maps fetched
1170 user attributes to this class instance attributes
1169 user attributes to this class instance attributes
1171 """
1170 """
1172 log.debug('AuthUser: starting data propagation for new potential user')
1171 log.debug('AuthUser: starting data propagation for new potential user')
1173 user_model = UserModel()
1172 user_model = UserModel()
1174 anon_user = self.anonymous_user = User.get_default_user(cache=True)
1173 anon_user = self.anonymous_user = User.get_default_user(cache=True)
1175 is_user_loaded = False
1174 is_user_loaded = False
1176
1175
1177 # lookup by userid
1176 # lookup by userid
1178 if self.user_id is not None and self.user_id != anon_user.user_id:
1177 if self.user_id is not None and self.user_id != anon_user.user_id:
1179 log.debug('Trying Auth User lookup by USER ID: `%s`', self.user_id)
1178 log.debug('Trying Auth User lookup by USER ID: `%s`', self.user_id)
1180 is_user_loaded = user_model.fill_data(self, user_id=self.user_id)
1179 is_user_loaded = user_model.fill_data(self, user_id=self.user_id)
1181
1180
1182 # try go get user by api key
1181 # try go get user by api key
1183 elif self._api_key and self._api_key != anon_user.api_key:
1182 elif self._api_key and self._api_key != anon_user.api_key:
1184 log.debug('Trying Auth User lookup by API KEY: `...%s`', self._api_key[-4:])
1183 log.debug('Trying Auth User lookup by API KEY: `...%s`', self._api_key[-4:])
1185 is_user_loaded = user_model.fill_data(self, api_key=self._api_key)
1184 is_user_loaded = user_model.fill_data(self, api_key=self._api_key)
1186
1185
1187 # lookup by username
1186 # lookup by username
1188 elif self.username:
1187 elif self.username:
1189 log.debug('Trying Auth User lookup by USER NAME: `%s`', self.username)
1188 log.debug('Trying Auth User lookup by USER NAME: `%s`', self.username)
1190 is_user_loaded = user_model.fill_data(self, username=self.username)
1189 is_user_loaded = user_model.fill_data(self, username=self.username)
1191 else:
1190 else:
1192 log.debug('No data in %s that could been used to log in', self)
1191 log.debug('No data in %s that could been used to log in', self)
1193
1192
1194 if not is_user_loaded:
1193 if not is_user_loaded:
1195 log.debug(
1194 log.debug(
1196 'Failed to load user. Fallback to default user %s', anon_user)
1195 'Failed to load user. Fallback to default user %s', anon_user)
1197 # if we cannot authenticate user try anonymous
1196 # if we cannot authenticate user try anonymous
1198 if anon_user.active:
1197 if anon_user.active:
1199 log.debug('default user is active, using it as a session user')
1198 log.debug('default user is active, using it as a session user')
1200 user_model.fill_data(self, user_id=anon_user.user_id)
1199 user_model.fill_data(self, user_id=anon_user.user_id)
1201 # then we set this user is logged in
1200 # then we set this user is logged in
1202 self.is_authenticated = True
1201 self.is_authenticated = True
1203 else:
1202 else:
1204 log.debug('default user is NOT active')
1203 log.debug('default user is NOT active')
1205 # in case of disabled anonymous user we reset some of the
1204 # in case of disabled anonymous user we reset some of the
1206 # parameters so such user is "corrupted", skipping the fill_data
1205 # parameters so such user is "corrupted", skipping the fill_data
1207 for attr in ['user_id', 'username', 'admin', 'active']:
1206 for attr in ['user_id', 'username', 'admin', 'active']:
1208 setattr(self, attr, None)
1207 setattr(self, attr, None)
1209 self.is_authenticated = False
1208 self.is_authenticated = False
1210
1209
1211 if not self.username:
1210 if not self.username:
1212 self.username = 'None'
1211 self.username = 'None'
1213
1212
1214 log.debug('AuthUser: propagated user is now %s', self)
1213 log.debug('AuthUser: propagated user is now %s', self)
1215
1214
1216 def get_perms(self, user, scope=None, explicit=True, algo='higherwin',
1215 def get_perms(self, user, scope=None, explicit=True, algo='higherwin',
1217 calculate_super_admin=False, cache=None):
1216 calculate_super_admin=False, cache=None):
1218 """
1217 """
1219 Fills user permission attribute with permissions taken from database
1218 Fills user permission attribute with permissions taken from database
1220 works for permissions given for repositories, and for permissions that
1219 works for permissions given for repositories, and for permissions that
1221 are granted to groups
1220 are granted to groups
1222
1221
1223 :param user: instance of User object from database
1222 :param user: instance of User object from database
1224 :param explicit: In case there are permissions both for user and a group
1223 :param explicit: In case there are permissions both for user and a group
1225 that user is part of, explicit flag will defiine if user will
1224 that user is part of, explicit flag will defiine if user will
1226 explicitly override permissions from group, if it's False it will
1225 explicitly override permissions from group, if it's False it will
1227 make decision based on the algo
1226 make decision based on the algo
1228 :param algo: algorithm to decide what permission should be choose if
1227 :param algo: algorithm to decide what permission should be choose if
1229 it's multiple defined, eg user in two different groups. It also
1228 it's multiple defined, eg user in two different groups. It also
1230 decides if explicit flag is turned off how to specify the permission
1229 decides if explicit flag is turned off how to specify the permission
1231 for case when user is in a group + have defined separate permission
1230 for case when user is in a group + have defined separate permission
1232 :param calculate_super_admin: calculate permissions for super-admin in the
1231 :param calculate_super_admin: calculate permissions for super-admin in the
1233 same way as for regular user without speedups
1232 same way as for regular user without speedups
1234 :param cache: Use caching for calculation, None = let the cache backend decide
1233 :param cache: Use caching for calculation, None = let the cache backend decide
1235 """
1234 """
1236 user_id = user.user_id
1235 user_id = user.user_id
1237 user_is_admin = user.is_admin
1236 user_is_admin = user.is_admin
1238
1237
1239 # inheritance of global permissions like create repo/fork repo etc
1238 # inheritance of global permissions like create repo/fork repo etc
1240 user_inherit_default_permissions = user.inherit_default_permissions
1239 user_inherit_default_permissions = user.inherit_default_permissions
1241
1240
1242 cache_seconds = safe_int(
1241 cache_seconds = safe_int(
1243 rhodecode.CONFIG.get('rc_cache.cache_perms.expiration_time'))
1242 rhodecode.CONFIG.get('rc_cache.cache_perms.expiration_time'))
1244
1243
1245 if cache is None:
1244 if cache is None:
1246 # let the backend cache decide
1245 # let the backend cache decide
1247 cache_on = cache_seconds > 0
1246 cache_on = cache_seconds > 0
1248 else:
1247 else:
1249 cache_on = cache
1248 cache_on = cache
1250
1249
1251 log.debug(
1250 log.debug(
1252 'Computing PERMISSION tree for user %s scope `%s` '
1251 'Computing PERMISSION tree for user %s scope `%s` '
1253 'with caching: %s[TTL: %ss]', user, scope, cache_on, cache_seconds or 0)
1252 'with caching: %s[TTL: %ss]', user, scope, cache_on, cache_seconds or 0)
1254
1253
1255 cache_namespace_uid = 'cache_user_auth.{}'.format(user_id)
1254 cache_namespace_uid = 'cache_user_auth.{}'.format(user_id)
1256 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
1255 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
1257
1256
1258 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
1257 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
1259 condition=cache_on)
1258 condition=cache_on)
1260 def compute_perm_tree(cache_name, cache_ver,
1259 def compute_perm_tree(cache_name, cache_ver,
1261 user_id, scope, user_is_admin,user_inherit_default_permissions,
1260 user_id, scope, user_is_admin,user_inherit_default_permissions,
1262 explicit, algo, calculate_super_admin):
1261 explicit, algo, calculate_super_admin):
1263 return _cached_perms_data(
1262 return _cached_perms_data(
1264 user_id, scope, user_is_admin, user_inherit_default_permissions,
1263 user_id, scope, user_is_admin, user_inherit_default_permissions,
1265 explicit, algo, calculate_super_admin)
1264 explicit, algo, calculate_super_admin)
1266
1265
1267 start = time.time()
1266 start = time.time()
1268 result = compute_perm_tree(
1267 result = compute_perm_tree(
1269 'permissions', 'v1', user_id, scope, user_is_admin,
1268 'permissions', 'v1', user_id, scope, user_is_admin,
1270 user_inherit_default_permissions, explicit, algo,
1269 user_inherit_default_permissions, explicit, algo,
1271 calculate_super_admin)
1270 calculate_super_admin)
1272
1271
1273 result_repr = []
1272 result_repr = []
1274 for k in result:
1273 for k in result:
1275 result_repr.append((k, len(result[k])))
1274 result_repr.append((k, len(result[k])))
1276 total = time.time() - start
1275 total = time.time() - start
1277 log.debug('PERMISSION tree for user %s computed in %.4fs: %s',
1276 log.debug('PERMISSION tree for user %s computed in %.4fs: %s',
1278 user, total, result_repr)
1277 user, total, result_repr)
1279
1278
1280 return result
1279 return result
1281
1280
1282 @property
1281 @property
1283 def is_default(self):
1282 def is_default(self):
1284 return self.username == User.DEFAULT_USER
1283 return self.username == User.DEFAULT_USER
1285
1284
1286 @property
1285 @property
1287 def is_admin(self):
1286 def is_admin(self):
1288 return self.admin
1287 return self.admin
1289
1288
1290 @property
1289 @property
1291 def is_user_object(self):
1290 def is_user_object(self):
1292 return self.user_id is not None
1291 return self.user_id is not None
1293
1292
1294 @property
1293 @property
1295 def repositories_admin(self):
1294 def repositories_admin(self):
1296 """
1295 """
1297 Returns list of repositories you're an admin of
1296 Returns list of repositories you're an admin of
1298 """
1297 """
1299 return [
1298 return [
1300 x[0] for x in self.permissions['repositories'].items()
1299 x[0] for x in self.permissions['repositories'].items()
1301 if x[1] == 'repository.admin']
1300 if x[1] == 'repository.admin']
1302
1301
1303 @property
1302 @property
1304 def repository_groups_admin(self):
1303 def repository_groups_admin(self):
1305 """
1304 """
1306 Returns list of repository groups you're an admin of
1305 Returns list of repository groups you're an admin of
1307 """
1306 """
1308 return [
1307 return [
1309 x[0] for x in self.permissions['repositories_groups'].items()
1308 x[0] for x in self.permissions['repositories_groups'].items()
1310 if x[1] == 'group.admin']
1309 if x[1] == 'group.admin']
1311
1310
1312 @property
1311 @property
1313 def user_groups_admin(self):
1312 def user_groups_admin(self):
1314 """
1313 """
1315 Returns list of user groups you're an admin of
1314 Returns list of user groups you're an admin of
1316 """
1315 """
1317 return [
1316 return [
1318 x[0] for x in self.permissions['user_groups'].items()
1317 x[0] for x in self.permissions['user_groups'].items()
1319 if x[1] == 'usergroup.admin']
1318 if x[1] == 'usergroup.admin']
1320
1319
1321 def repo_acl_ids_from_stack(self, perms=None, prefix_filter=None, cache=False):
1320 def repo_acl_ids_from_stack(self, perms=None, prefix_filter=None, cache=False):
1322 if not perms:
1321 if not perms:
1323 perms = AuthUser.repo_read_perms
1322 perms = AuthUser.repo_read_perms
1324 allowed_ids = []
1323 allowed_ids = []
1325 for k, stack_data in self.permissions['repositories'].perm_origin_stack.items():
1324 for k, stack_data in self.permissions['repositories'].perm_origin_stack.items():
1326 perm, origin, obj_id = stack_data[-1] # last item is the current permission
1325 perm, origin, obj_id = stack_data[-1] # last item is the current permission
1327 if prefix_filter and not k.startswith(prefix_filter):
1326 if prefix_filter and not k.startswith(prefix_filter):
1328 continue
1327 continue
1329 if perm in perms:
1328 if perm in perms:
1330 allowed_ids.append(obj_id)
1329 allowed_ids.append(obj_id)
1331 return allowed_ids
1330 return allowed_ids
1332
1331
1333 def repo_acl_ids(self, perms=None, name_filter=None, cache=False):
1332 def repo_acl_ids(self, perms=None, name_filter=None, cache=False):
1334 """
1333 """
1335 Returns list of repository ids that user have access to based on given
1334 Returns list of repository ids that user have access to based on given
1336 perms. The cache flag should be only used in cases that are used for
1335 perms. The cache flag should be only used in cases that are used for
1337 display purposes, NOT IN ANY CASE for permission checks.
1336 display purposes, NOT IN ANY CASE for permission checks.
1338 """
1337 """
1339 from rhodecode.model.scm import RepoList
1338 from rhodecode.model.scm import RepoList
1340 if not perms:
1339 if not perms:
1341 perms = AuthUser.repo_read_perms
1340 perms = AuthUser.repo_read_perms
1342
1341
1343 if not isinstance(perms, list):
1342 if not isinstance(perms, list):
1344 raise ValueError('perms parameter must be a list got {} instead'.format(perms))
1343 raise ValueError('perms parameter must be a list got {} instead'.format(perms))
1345
1344
1346 def _cached_repo_acl(perm_def, _name_filter):
1345 def _cached_repo_acl(perm_def, _name_filter):
1347 qry = Repository.query()
1346 qry = Repository.query()
1348 if _name_filter:
1347 if _name_filter:
1349 ilike_expression = '%{}%'.format(_name_filter)
1348 ilike_expression = '%{}%'.format(_name_filter)
1350 qry = qry.filter(
1349 qry = qry.filter(
1351 Repository.repo_name.ilike(ilike_expression))
1350 Repository.repo_name.ilike(ilike_expression))
1352
1351
1353 return [x.repo_id for x in
1352 return [x.repo_id for x in
1354 RepoList(qry, perm_set=perm_def, extra_kwargs={'user': self})]
1353 RepoList(qry, perm_set=perm_def, extra_kwargs={'user': self})]
1355
1354
1356 log.debug('Computing REPO ACL IDS user %s', self)
1355 log.debug('Computing REPO ACL IDS user %s', self)
1357
1356
1358 cache_namespace_uid = 'cache_user_repo_acl_ids.{}'.format(self.user_id)
1357 cache_namespace_uid = 'cache_user_repo_acl_ids.{}'.format(self.user_id)
1359 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
1358 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
1360
1359
1361 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache)
1360 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache)
1362 def compute_repo_acl_ids(cache_ver, user_id, perm_def, _name_filter):
1361 def compute_repo_acl_ids(cache_ver, user_id, perm_def, _name_filter):
1363 return _cached_repo_acl(perm_def, _name_filter)
1362 return _cached_repo_acl(perm_def, _name_filter)
1364
1363
1365 start = time.time()
1364 start = time.time()
1366 result = compute_repo_acl_ids('v1', self.user_id, perms, name_filter)
1365 result = compute_repo_acl_ids('v1', self.user_id, perms, name_filter)
1367 total = time.time() - start
1366 total = time.time() - start
1368 log.debug('REPO ACL IDS for user %s computed in %.4fs', self, total)
1367 log.debug('REPO ACL IDS for user %s computed in %.4fs', self, total)
1369
1368
1370 return result
1369 return result
1371
1370
1372 def repo_group_acl_ids_from_stack(self, perms=None, prefix_filter=None, cache=False):
1371 def repo_group_acl_ids_from_stack(self, perms=None, prefix_filter=None, cache=False):
1373 if not perms:
1372 if not perms:
1374 perms = AuthUser.repo_group_read_perms
1373 perms = AuthUser.repo_group_read_perms
1375 allowed_ids = []
1374 allowed_ids = []
1376 for k, stack_data in self.permissions['repositories_groups'].perm_origin_stack.items():
1375 for k, stack_data in self.permissions['repositories_groups'].perm_origin_stack.items():
1377 perm, origin, obj_id = stack_data[-1] # last item is the current permission
1376 perm, origin, obj_id = stack_data[-1] # last item is the current permission
1378 if prefix_filter and not k.startswith(prefix_filter):
1377 if prefix_filter and not k.startswith(prefix_filter):
1379 continue
1378 continue
1380 if perm in perms:
1379 if perm in perms:
1381 allowed_ids.append(obj_id)
1380 allowed_ids.append(obj_id)
1382 return allowed_ids
1381 return allowed_ids
1383
1382
1384 def repo_group_acl_ids(self, perms=None, name_filter=None, cache=False):
1383 def repo_group_acl_ids(self, perms=None, name_filter=None, cache=False):
1385 """
1384 """
1386 Returns list of repository group ids that user have access to based on given
1385 Returns list of repository group ids that user have access to based on given
1387 perms. The cache flag should be only used in cases that are used for
1386 perms. The cache flag should be only used in cases that are used for
1388 display purposes, NOT IN ANY CASE for permission checks.
1387 display purposes, NOT IN ANY CASE for permission checks.
1389 """
1388 """
1390 from rhodecode.model.scm import RepoGroupList
1389 from rhodecode.model.scm import RepoGroupList
1391 if not perms:
1390 if not perms:
1392 perms = AuthUser.repo_group_read_perms
1391 perms = AuthUser.repo_group_read_perms
1393
1392
1394 if not isinstance(perms, list):
1393 if not isinstance(perms, list):
1395 raise ValueError('perms parameter must be a list got {} instead'.format(perms))
1394 raise ValueError('perms parameter must be a list got {} instead'.format(perms))
1396
1395
1397 def _cached_repo_group_acl(perm_def, _name_filter):
1396 def _cached_repo_group_acl(perm_def, _name_filter):
1398 qry = RepoGroup.query()
1397 qry = RepoGroup.query()
1399 if _name_filter:
1398 if _name_filter:
1400 ilike_expression = '%{}%'.format(_name_filter)
1399 ilike_expression = '%{}%'.format(_name_filter)
1401 qry = qry.filter(
1400 qry = qry.filter(
1402 RepoGroup.group_name.ilike(ilike_expression))
1401 RepoGroup.group_name.ilike(ilike_expression))
1403
1402
1404 return [x.group_id for x in
1403 return [x.group_id for x in
1405 RepoGroupList(qry, perm_set=perm_def, extra_kwargs={'user': self})]
1404 RepoGroupList(qry, perm_set=perm_def, extra_kwargs={'user': self})]
1406
1405
1407 log.debug('Computing REPO GROUP ACL IDS user %s', self)
1406 log.debug('Computing REPO GROUP ACL IDS user %s', self)
1408
1407
1409 cache_namespace_uid = 'cache_user_repo_group_acl_ids.{}'.format(self.user_id)
1408 cache_namespace_uid = 'cache_user_repo_group_acl_ids.{}'.format(self.user_id)
1410 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
1409 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
1411
1410
1412 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache)
1411 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache)
1413 def compute_repo_group_acl_ids(cache_ver, user_id, perm_def, _name_filter):
1412 def compute_repo_group_acl_ids(cache_ver, user_id, perm_def, _name_filter):
1414 return _cached_repo_group_acl(perm_def, _name_filter)
1413 return _cached_repo_group_acl(perm_def, _name_filter)
1415
1414
1416 start = time.time()
1415 start = time.time()
1417 result = compute_repo_group_acl_ids('v1', self.user_id, perms, name_filter)
1416 result = compute_repo_group_acl_ids('v1', self.user_id, perms, name_filter)
1418 total = time.time() - start
1417 total = time.time() - start
1419 log.debug('REPO GROUP ACL IDS for user %s computed in %.4fs', self, total)
1418 log.debug('REPO GROUP ACL IDS for user %s computed in %.4fs', self, total)
1420
1419
1421 return result
1420 return result
1422
1421
1423 def user_group_acl_ids_from_stack(self, perms=None, cache=False):
1422 def user_group_acl_ids_from_stack(self, perms=None, cache=False):
1424 if not perms:
1423 if not perms:
1425 perms = AuthUser.user_group_read_perms
1424 perms = AuthUser.user_group_read_perms
1426 allowed_ids = []
1425 allowed_ids = []
1427 for k, stack_data in self.permissions['user_groups'].perm_origin_stack.items():
1426 for k, stack_data in self.permissions['user_groups'].perm_origin_stack.items():
1428 perm, origin, obj_id = stack_data[-1] # last item is the current permission
1427 perm, origin, obj_id = stack_data[-1] # last item is the current permission
1429 if perm in perms:
1428 if perm in perms:
1430 allowed_ids.append(obj_id)
1429 allowed_ids.append(obj_id)
1431 return allowed_ids
1430 return allowed_ids
1432
1431
1433 def user_group_acl_ids(self, perms=None, name_filter=None, cache=False):
1432 def user_group_acl_ids(self, perms=None, name_filter=None, cache=False):
1434 """
1433 """
1435 Returns list of user group ids that user have access to based on given
1434 Returns list of user group ids that user have access to based on given
1436 perms. The cache flag should be only used in cases that are used for
1435 perms. The cache flag should be only used in cases that are used for
1437 display purposes, NOT IN ANY CASE for permission checks.
1436 display purposes, NOT IN ANY CASE for permission checks.
1438 """
1437 """
1439 from rhodecode.model.scm import UserGroupList
1438 from rhodecode.model.scm import UserGroupList
1440 if not perms:
1439 if not perms:
1441 perms = AuthUser.user_group_read_perms
1440 perms = AuthUser.user_group_read_perms
1442
1441
1443 if not isinstance(perms, list):
1442 if not isinstance(perms, list):
1444 raise ValueError('perms parameter must be a list got {} instead'.format(perms))
1443 raise ValueError('perms parameter must be a list got {} instead'.format(perms))
1445
1444
1446 def _cached_user_group_acl(perm_def, _name_filter):
1445 def _cached_user_group_acl(perm_def, _name_filter):
1447 qry = UserGroup.query()
1446 qry = UserGroup.query()
1448 if _name_filter:
1447 if _name_filter:
1449 ilike_expression = '%{}%'.format(_name_filter)
1448 ilike_expression = '%{}%'.format(_name_filter)
1450 qry = qry.filter(
1449 qry = qry.filter(
1451 UserGroup.users_group_name.ilike(ilike_expression))
1450 UserGroup.users_group_name.ilike(ilike_expression))
1452
1451
1453 return [x.users_group_id for x in
1452 return [x.users_group_id for x in
1454 UserGroupList(qry, perm_set=perm_def, extra_kwargs={'user': self})]
1453 UserGroupList(qry, perm_set=perm_def, extra_kwargs={'user': self})]
1455
1454
1456 log.debug('Computing USER GROUP ACL IDS user %s', self)
1455 log.debug('Computing USER GROUP ACL IDS user %s', self)
1457
1456
1458 cache_namespace_uid = 'cache_user_user_group_acl_ids.{}'.format(self.user_id)
1457 cache_namespace_uid = 'cache_user_user_group_acl_ids.{}'.format(self.user_id)
1459 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
1458 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
1460
1459
1461 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache)
1460 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache)
1462 def compute_user_group_acl_ids(cache_ver, user_id, perm_def, _name_filter):
1461 def compute_user_group_acl_ids(cache_ver, user_id, perm_def, _name_filter):
1463 return _cached_user_group_acl(perm_def, _name_filter)
1462 return _cached_user_group_acl(perm_def, _name_filter)
1464
1463
1465 start = time.time()
1464 start = time.time()
1466 result = compute_user_group_acl_ids('v1', self.user_id, perms, name_filter)
1465 result = compute_user_group_acl_ids('v1', self.user_id, perms, name_filter)
1467 total = time.time() - start
1466 total = time.time() - start
1468 log.debug('USER GROUP ACL IDS for user %s computed in %.4fs', self, total)
1467 log.debug('USER GROUP ACL IDS for user %s computed in %.4fs', self, total)
1469
1468
1470 return result
1469 return result
1471
1470
1472 @property
1471 @property
1473 def ip_allowed(self):
1472 def ip_allowed(self):
1474 """
1473 """
1475 Checks if ip_addr used in constructor is allowed from defined list of
1474 Checks if ip_addr used in constructor is allowed from defined list of
1476 allowed ip_addresses for user
1475 allowed ip_addresses for user
1477
1476
1478 :returns: boolean, True if ip is in allowed ip range
1477 :returns: boolean, True if ip is in allowed ip range
1479 """
1478 """
1480 # check IP
1479 # check IP
1481 inherit = self.inherit_default_permissions
1480 inherit = self.inherit_default_permissions
1482 return AuthUser.check_ip_allowed(self.user_id, self.ip_addr,
1481 return AuthUser.check_ip_allowed(self.user_id, self.ip_addr,
1483 inherit_from_default=inherit)
1482 inherit_from_default=inherit)
1484
1483
1485 @property
1484 @property
1486 def personal_repo_group(self):
1485 def personal_repo_group(self):
1487 return RepoGroup.get_user_personal_repo_group(self.user_id)
1486 return RepoGroup.get_user_personal_repo_group(self.user_id)
1488
1487
1489 @LazyProperty
1488 @LazyProperty
1490 def feed_token(self):
1489 def feed_token(self):
1491 return self.get_instance().feed_token
1490 return self.get_instance().feed_token
1492
1491
1493 @LazyProperty
1492 @LazyProperty
1494 def artifact_token(self):
1493 def artifact_token(self):
1495 return self.get_instance().artifact_token
1494 return self.get_instance().artifact_token
1496
1495
1497 @classmethod
1496 @classmethod
1498 def check_ip_allowed(cls, user_id, ip_addr, inherit_from_default):
1497 def check_ip_allowed(cls, user_id, ip_addr, inherit_from_default):
1499 allowed_ips = AuthUser.get_allowed_ips(
1498 allowed_ips = AuthUser.get_allowed_ips(
1500 user_id, cache=True, inherit_from_default=inherit_from_default)
1499 user_id, cache=True, inherit_from_default=inherit_from_default)
1501 if check_ip_access(source_ip=ip_addr, allowed_ips=allowed_ips):
1500 if check_ip_access(source_ip=ip_addr, allowed_ips=allowed_ips):
1502 log.debug('IP:%s for user %s is in range of %s',
1501 log.debug('IP:%s for user %s is in range of %s',
1503 ip_addr, user_id, allowed_ips)
1502 ip_addr, user_id, allowed_ips)
1504 return True
1503 return True
1505 else:
1504 else:
1506 log.info('Access for IP:%s forbidden for user %s, '
1505 log.info('Access for IP:%s forbidden for user %s, '
1507 'not in %s', ip_addr, user_id, allowed_ips,
1506 'not in %s', ip_addr, user_id, allowed_ips,
1508 extra={"ip": ip_addr, "user_id": user_id})
1507 extra={"ip": ip_addr, "user_id": user_id})
1509 return False
1508 return False
1510
1509
1511 def get_branch_permissions(self, repo_name, perms=None):
1510 def get_branch_permissions(self, repo_name, perms=None):
1512 perms = perms or self.permissions_with_scope({'repo_name': repo_name})
1511 perms = perms or self.permissions_with_scope({'repo_name': repo_name})
1513 branch_perms = perms.get('repository_branches', {})
1512 branch_perms = perms.get('repository_branches', {})
1514 if not branch_perms:
1513 if not branch_perms:
1515 return {}
1514 return {}
1516 repo_branch_perms = branch_perms.get(repo_name)
1515 repo_branch_perms = branch_perms.get(repo_name)
1517 return repo_branch_perms or {}
1516 return repo_branch_perms or {}
1518
1517
1519 def get_rule_and_branch_permission(self, repo_name, branch_name):
1518 def get_rule_and_branch_permission(self, repo_name, branch_name):
1520 """
1519 """
1521 Check if this AuthUser has defined any permissions for branches. If any of
1520 Check if this AuthUser has defined any permissions for branches. If any of
1522 the rules match in order, we return the matching permissions
1521 the rules match in order, we return the matching permissions
1523 """
1522 """
1524
1523
1525 rule = default_perm = ''
1524 rule = default_perm = ''
1526
1525
1527 repo_branch_perms = self.get_branch_permissions(repo_name=repo_name)
1526 repo_branch_perms = self.get_branch_permissions(repo_name=repo_name)
1528 if not repo_branch_perms:
1527 if not repo_branch_perms:
1529 return rule, default_perm
1528 return rule, default_perm
1530
1529
1531 # now calculate the permissions
1530 # now calculate the permissions
1532 for pattern, branch_perm in repo_branch_perms.items():
1531 for pattern, branch_perm in repo_branch_perms.items():
1533 if fnmatch.fnmatch(branch_name, pattern):
1532 if fnmatch.fnmatch(branch_name, pattern):
1534 rule = '`{}`=>{}'.format(pattern, branch_perm)
1533 rule = '`{}`=>{}'.format(pattern, branch_perm)
1535 return rule, branch_perm
1534 return rule, branch_perm
1536
1535
1537 return rule, default_perm
1536 return rule, default_perm
1538
1537
1539 def get_notice_messages(self):
1538 def get_notice_messages(self):
1540
1539
1541 notice_level = 'notice-error'
1540 notice_level = 'notice-error'
1542 notice_messages = []
1541 notice_messages = []
1543 if self.is_default:
1542 if self.is_default:
1544 return [], notice_level
1543 return [], notice_level
1545
1544
1546 notices = UserNotice.query()\
1545 notices = UserNotice.query()\
1547 .filter(UserNotice.user_id == self.user_id)\
1546 .filter(UserNotice.user_id == self.user_id)\
1548 .filter(UserNotice.notice_read == false())\
1547 .filter(UserNotice.notice_read == false())\
1549 .all()
1548 .all()
1550
1549
1551 try:
1550 try:
1552 for entry in notices:
1551 for entry in notices:
1553
1552
1554 msg = {
1553 msg = {
1555 'msg_id': entry.user_notice_id,
1554 'msg_id': entry.user_notice_id,
1556 'level': entry.notification_level,
1555 'level': entry.notification_level,
1557 'subject': entry.notice_subject,
1556 'subject': entry.notice_subject,
1558 'body': entry.notice_body,
1557 'body': entry.notice_body,
1559 }
1558 }
1560 notice_messages.append(msg)
1559 notice_messages.append(msg)
1561
1560
1562 log.debug('Got user %s %s messages', self, len(notice_messages))
1561 log.debug('Got user %s %s messages', self, len(notice_messages))
1563
1562
1564 levels = [x['level'] for x in notice_messages]
1563 levels = [x['level'] for x in notice_messages]
1565 notice_level = 'notice-error' if 'error' in levels else 'notice-warning'
1564 notice_level = 'notice-error' if 'error' in levels else 'notice-warning'
1566 except Exception:
1565 except Exception:
1567 pass
1566 pass
1568
1567
1569 return notice_messages, notice_level
1568 return notice_messages, notice_level
1570
1569
1571 def __repr__(self):
1570 def __repr__(self):
1572 return self.repr_user(self.user_id, self.username, self.ip_addr, self.is_authenticated)
1571 return self.repr_user(self.user_id, self.username, self.ip_addr, self.is_authenticated)
1573
1572
1574 def set_authenticated(self, authenticated=True):
1573 def set_authenticated(self, authenticated=True):
1575 if self.user_id != self.anonymous_user.user_id:
1574 if self.user_id != self.anonymous_user.user_id:
1576 self.is_authenticated = authenticated
1575 self.is_authenticated = authenticated
1577
1576
1578 def get_cookie_store(self):
1577 def get_cookie_store(self):
1579 return {
1578 return {
1580 'username': self.username,
1579 'username': self.username,
1581 'password': md5(safe_bytes(self.password or '')),
1580 'password': md5(safe_bytes(self.password or '')),
1582 'user_id': self.user_id,
1581 'user_id': self.user_id,
1583 'is_authenticated': self.is_authenticated
1582 'is_authenticated': self.is_authenticated
1584 }
1583 }
1585
1584
1586 @classmethod
1585 @classmethod
1587 def repr_user(cls, user_id=0, username='ANONYMOUS', ip='0.0.0.0', is_authenticated=False):
1586 def repr_user(cls, user_id=0, username='ANONYMOUS', ip='0.0.0.0', is_authenticated=False):
1588 tmpl = "<AuthUser('id:{}[{}] ip:{} auth:{}')>"
1587 tmpl = "<AuthUser('id:{}[{}] ip:{} auth:{}')>"
1589 return tmpl.format(user_id, username, ip, is_authenticated)
1588 return tmpl.format(user_id, username, ip, is_authenticated)
1590
1589
1591 @classmethod
1590 @classmethod
1592 def from_cookie_store(cls, cookie_store):
1591 def from_cookie_store(cls, cookie_store):
1593 """
1592 """
1594 Creates AuthUser from a cookie store
1593 Creates AuthUser from a cookie store
1595
1594
1596 :param cls:
1595 :param cls:
1597 :param cookie_store:
1596 :param cookie_store:
1598 """
1597 """
1599 user_id = cookie_store.get('user_id')
1598 user_id = cookie_store.get('user_id')
1600 username = cookie_store.get('username')
1599 username = cookie_store.get('username')
1601 api_key = cookie_store.get('api_key')
1600 api_key = cookie_store.get('api_key')
1602 return AuthUser(user_id, api_key, username)
1601 return AuthUser(user_id, api_key, username)
1603
1602
@classmethod
def get_allowed_ips(cls, user_id, cache=False, inherit_from_default=False):
    """
    Collect the set of allowed IP addresses/ranges for a user.

    :param user_id: user to look up; falsy values skip the per-user query
    :param cache: when True, run both queries through the short SQL cache
    :param inherit_from_default: also merge in the default user's IP rules
    :return: set of IP address strings, or the allow-all set
        (``0.0.0.0/0`` / ``::/0``) when no rules are defined at all
    """
    allowed = set()

    def _collect(ip_rows):
        # since we use heavy caching sometimes it happens that we get
        # deleted objects here, we just skip them
        for row in ip_rows:
            try:
                allowed.add(row.ip_addr)
            except ObjectDeletedError:
                pass

    if inherit_from_default:
        def_user_id = User.get_default_user(cache=True).user_id
        default_ips = UserIpMap.query().filter(UserIpMap.user_id == def_user_id)
        if cache:
            default_ips = default_ips.options(
                FromCache("sql_cache_short", "get_user_ips_default"))
        # populate from default user
        _collect(default_ips)

    # NOTE:(marcink) we don't want to load any rules for empty
    # user_id which is the case of access of non logged users when anonymous
    # access is disabled
    if user_id:
        user_ips = UserIpMap.query().filter(UserIpMap.user_id == user_id)
        if cache:
            user_ips = user_ips.options(
                FromCache("sql_cache_short", "get_user_ips_%s" % user_id))
        _collect(user_ips)

    # same value the original set comprehension produced
    return allowed or {'0.0.0.0/0', '::/0'}
1642
1641
1643
1642
def set_available_permissions(settings):
    """
    This function will propagate pyramid settings with all available defined
    permission given in db. We don't want to check each time from db for new
    permissions since adding a new permission also requires application restart
    ie. to decorate new views with the newly created permission

    :param settings: current pyramid registry.settings
    """
    log.debug('auth: getting information about all available permissions')
    try:
        db_session = meta.Session
        permission_names = [
            perm.permission_name
            for perm in db_session.query(Permission).all()]
        # setdefault: never clobber a value that is already configured
        settings.setdefault('available_permissions', permission_names)
        log.debug('auth: set available permissions')
    except Exception:
        log.exception('Failed to fetch permissions from the database.')
        raise
1664
1663
1665
1664
def get_csrf_token(session, force_new=False, save_if_missing=True):
    """
    Return the current authentication token, creating one if one doesn't
    already exist and the save_if_missing flag is present.

    :param session: pass in the pyramid session, else we use the global ones
    :param force_new: force to re-generate the token and store it in session
    :param save_if_missing: save the newly generated token if it's missing in
        session
    """
    # NOTE(marcink): probably should be replaced with below one from pyramid 1.9
    # from pyramid.csrf import get_csrf_token

    token_absent = csrf_token_key not in session
    if force_new or (token_absent and save_if_missing):
        fresh_token = sha1(ascii_bytes(str(random.getrandbits(128))))
        session[csrf_token_key] = fresh_token
        # beaker-style sessions need an explicit save call
        save = getattr(session, 'save', None)
        if save is not None:
            save()
    return session.get(csrf_token_key)
1685
1684
1686
1685
def get_request(perm_class_instance):
    """Return the active pyramid request from the threadlocal registry."""
    from pyramid.threadlocal import get_current_request
    return get_current_request()
1691
1690
1692
1691
1693 # CHECK DECORATORS
1692 # CHECK DECORATORS
class CSRFRequired(object):
    """
    Decorator for authenticating a form

    This decorator uses an authorization token stored in the client's
    session for prevention of certain Cross-site request forgery (CSRF)
    attacks (See
    http://en.wikipedia.org/wiki/Cross-site_request_forgery for more
    information).

    For use with the ``secure_form`` helper functions.
    """
    def __init__(self, token=csrf_token_key, header='X-CSRF-Token', except_methods=None):
        # :param token: name of the POST field carrying the CSRF token
        # :param header: fallback request header carrying the token
        # :param except_methods: iterable of HTTP methods exempt from the check
        self.token = token
        self.header = header
        self.except_methods = except_methods or []

    def __call__(self, func):
        return get_cython_compat_decorator(self.__wrapper, func)

    def _get_csrf(self, _request):
        # POST field takes precedence; header is the fallback
        return _request.POST.get(self.token, _request.headers.get(self.header))

    def check_csrf(self, _request, cur_token):
        # token must be supplied AND match the session token
        supplied_token = self._get_csrf(_request)
        return supplied_token and supplied_token == cur_token

    def _get_request(self):
        return get_request(self)

    def __wrapper(self, func, *fargs, **fkwargs):
        request = self._get_request()

        if request.method in self.except_methods:
            return func(*fargs, **fkwargs)

        cur_token = get_csrf_token(request.session, save_if_missing=False)
        if self.check_csrf(request, cur_token):
            # strip the token from POST so views never see it
            if request.POST.get(self.token):
                del request.POST[self.token]
            return func(*fargs, **fkwargs)
        else:
            reason = 'token-missing'
            supplied_token = self._get_csrf(request)
            if supplied_token and cur_token != supplied_token:
                # FIX: the original `cur_token or ''[:6]` sliced the empty
                # string literal (slicing binds tighter than `or`), so the
                # FULL tokens were written to the log. Parenthesize to slice
                # the token itself and only log a 6-char prefix.
                reason = 'token-mismatch [%s:%s]' % (
                    (cur_token or '')[:6], (supplied_token or '')[:6])

            csrf_message = \
                ("Cross-site request forgery detected, request denied. See "
                 "http://en.wikipedia.org/wiki/Cross-site_request_forgery for "
                 "more information.")
            # log.warn is a deprecated alias; use warning() with lazy %-args
            log.warning(
                'Cross-site request forgery detected, request %r DENIED: %s '
                'REMOTE_ADDR:%s, HEADERS:%s',
                request, reason, request.remote_addr, request.headers)

            raise HTTPForbidden(explanation=csrf_message)
1752
1751
1753
1752
class LoginRequired(object):
    """
    Must be logged in to execute this function else
    redirect to login page

    :param auth_token_access: if enabled this checks only for valid auth token
        and grants access based on valid token
    """
    def __init__(self, auth_token_access=None):
        self.auth_token_access = auth_token_access
        if self.auth_token_access:
            # validate roles early so a bad decorator fails at import time
            valid_type = set(self.auth_token_access).intersection(set(UserApiKeys.ROLES))
            if not valid_type:
                # FIX: message typo "on of" -> "one of"
                raise ValueError('auth_token_access must be one of {}, got {}'.format(
                    UserApiKeys.ROLES, auth_token_access))

    def __call__(self, func):
        return get_cython_compat_decorator(self.__wrapper, func)

    def _get_request(self):
        return get_request(self)

    def __wrapper(self, func, *fargs, **fkwargs):
        from rhodecode.lib import helpers as h
        cls = fargs[0]
        user = cls._rhodecode_user
        request = self._get_request()
        _ = request.translate

        # "ViewClass:view_func" identifier used for whitelist checks + logs
        loc = "%s:%s" % (cls.__class__.__name__, func.__name__)
        log.debug('Starting login restriction checks for user: %s', user)
        # check if our IP is allowed
        ip_access_valid = True
        if not user.ip_allowed:
            h.flash(h.literal(_('IP {} not allowed'.format(user.ip_addr))),
                    category='warning')
            ip_access_valid = False

        # we used stored token that is extract from GET or URL param (if any)
        _auth_token = request.user_auth_token

        # check if we used an AUTH_TOKEN and it's a valid one
        # defined white-list of controllers which API access will be enabled
        whitelist = None
        if self.auth_token_access:
            # since this location is allowed by @LoginRequired decorator it's our
            # only whitelist
            whitelist = [loc]
        auth_token_access_valid = allowed_auth_token_access(
            loc, whitelist=whitelist, auth_token=_auth_token)

        # explicit controller is enabled or API is in our whitelist
        if auth_token_access_valid:
            log.debug('Checking AUTH TOKEN access for %s', cls)
            db_user = user.get_instance()

            if db_user:
                if self.auth_token_access:
                    roles = self.auth_token_access
                else:
                    roles = [UserApiKeys.ROLE_HTTP]
                log.debug('AUTH TOKEN: checking auth for user %s and roles %s',
                          db_user, roles)
                token_match = db_user.authenticate_by_token(
                    _auth_token, roles=roles)
            else:
                log.debug('Unable to fetch db instance for auth user: %s', user)
                token_match = False

            if _auth_token and token_match:
                auth_token_access_valid = True
                # only ever log a short suffix of the token
                log.debug('AUTH TOKEN ****%s is VALID', _auth_token[-4:])
            else:
                auth_token_access_valid = False
                if not _auth_token:
                    log.debug("AUTH TOKEN *NOT* present in request")
                else:
                    log.warning("AUTH TOKEN ****%s *NOT* valid", _auth_token[-4:])

        log.debug('Checking if %s is authenticated @ %s', user.username, loc)
        reason = 'RHODECODE_AUTH' if user.is_authenticated \
            else 'AUTH_TOKEN_AUTH'

        if ip_access_valid and (
                user.is_authenticated or auth_token_access_valid):
            log.info('user %s authenticating with:%s IS authenticated on func %s',
                     user, reason, loc)

            return func(*fargs, **fkwargs)
        else:
            log.warning(
                'user %s authenticating with:%s NOT authenticated on '
                'func: %s: IP_ACCESS:%s AUTH_TOKEN_ACCESS:%s',
                user, reason, loc, ip_access_valid, auth_token_access_valid)
            # we preserve the get PARAM
            came_from = get_came_from(request)

            log.debug('redirecting to login page with %s', came_from)
            raise HTTPFound(
                h.route_path('login', _query={'came_from': came_from}))
1854
1853
1855
1854
class NotAnonymous(object):
    """
    Must be logged in to execute this function else
    redirect to login page
    """

    def __call__(self, func):
        return get_cython_compat_decorator(self.__wrapper, func)

    def _get_request(self):
        return get_request(self)

    def __wrapper(self, func, *fargs, **fkwargs):
        import rhodecode.lib.helpers as h
        view_cls = fargs[0]
        self.user = view_cls._rhodecode_user
        request = self._get_request()
        _ = request.translate
        log.debug('Checking if user is not anonymous @%s', view_cls)

        # guard clause: registered users proceed straight to the view
        if self.user.username != User.DEFAULT_USER:
            return func(*fargs, **fkwargs)

        # anonymous (default) user: flash a warning and bounce to login
        came_from = get_came_from(request)
        h.flash(_('You need to be a registered user to '
                  'perform this action'),
                category='warning')
        raise HTTPFound(
            h.route_path('login', _query={'came_from': came_from}))
1887
1886
1888
1887
class PermsDecorator(object):
    """
    Base class for controller decorators, we extract the current user from
    the class itself, which has it stored in base controllers
    """

    def __init__(self, *required_perms):
        self.required_perms = set(required_perms)

    def __call__(self, func):
        return get_cython_compat_decorator(self.__wrapper, func)

    def _get_request(self):
        return get_request(self)

    def __wrapper(self, func, *fargs, **fkwargs):
        import rhodecode.lib.helpers as h
        view_cls = fargs[0]
        _user = view_cls._rhodecode_user
        request = self._get_request()
        _ = request.translate

        log.debug('checking %s permissions %s for %s %s',
                  self.__class__.__name__, self.required_perms, view_cls, _user)

        # early return on the success path
        if self.check_permissions(_user):
            log.debug('Permission granted for %s %s', view_cls, _user)
            return func(*fargs, **fkwargs)

        log.debug('Permission denied for %s %s', view_cls, _user)
        if _user.username == User.DEFAULT_USER:
            # anonymous visitor: send to the login page, preserving target
            came_from = get_came_from(self._get_request())
            h.flash(_('You need to be signed in to view this page'),
                    category='warning')
            raise HTTPFound(
                h.route_path('login', _query={'came_from': came_from}))

        # redirect with 404 to prevent resource discovery
        raise HTTPNotFound()

    def check_permissions(self, user):
        """Dummy function for overriding"""
        raise NotImplementedError(
            'You have to write this function in child class')
1937
1936
1938
1937
class HasPermissionAllDecorator(PermsDecorator):
    """
    Checks for access permission for all given predicates. All of them
    have to be meet in order to fulfill the request
    """

    def check_permissions(self, user):
        # every required permission must be among the user's global perms
        global_perms = user.permissions_with_scope({})['global']
        return self.required_perms.issubset(global_perms)
1950
1949
1951
1950
class HasPermissionAnyDecorator(PermsDecorator):
    """
    Checks for access permission for any of given predicates. In order to
    fulfill the request any of predicates must be meet
    """

    def check_permissions(self, user):
        # at least one required permission among the user's global perms
        global_perms = user.permissions_with_scope({})['global']
        return not self.required_perms.isdisjoint(global_perms)
1963
1962
1964
1963
class HasRepoPermissionAllDecorator(PermsDecorator):
    """
    Checks for access permission for all given predicates for specific
    repository. All of them have to be meet in order to fulfill the request
    """

    def _get_repo_name(self):
        return get_repo_slug(self._get_request())

    def check_permissions(self, user):
        perms = user.permissions
        repo_name = self._get_repo_name()

        try:
            # the user holds exactly one permission per repo
            user_perms = {perms['repositories'][repo_name]}
        except KeyError:
            log.debug('cannot locate repo with name: `%s` in permissions defs',
                      repo_name)
            return False

        log.debug('checking `%s` permissions for repo `%s`',
                  user_perms, repo_name)
        return self.required_perms.issubset(user_perms)
1990
1989
1991
1990
class HasRepoPermissionAnyDecorator(PermsDecorator):
    """
    Checks for access permission for any of given predicates for specific
    repository. In order to fulfill the request any of predicates must be meet
    """

    def _get_repo_name(self):
        return get_repo_slug(self._get_request())

    def check_permissions(self, user):
        perms = user.permissions
        repo_name = self._get_repo_name()

        try:
            # the user holds exactly one permission per repo
            user_perms = {perms['repositories'][repo_name]}
        except KeyError:
            log.debug(
                'cannot locate repo with name: `%s` in permissions defs',
                repo_name)
            return False

        log.debug('checking `%s` permissions for repo `%s`',
                  user_perms, repo_name)
        return not self.required_perms.isdisjoint(user_perms)
2018
2017
2019
2018
class HasRepoGroupPermissionAllDecorator(PermsDecorator):
    """
    Checks for access permission for all given predicates for specific
    repository group. All of them have to be meet in order to
    fulfill the request
    """

    def _get_repo_group_name(self):
        return get_repo_group_slug(self._get_request())

    def check_permissions(self, user):
        perms = user.permissions
        group_name = self._get_repo_group_name()
        try:
            # the user holds exactly one permission per repo group
            user_perms = {perms['repositories_groups'][group_name]}
        except KeyError:
            log.debug(
                'cannot locate repo group with name: `%s` in permissions defs',
                group_name)
            return False

        log.debug('checking `%s` permissions for repo group `%s`',
                  user_perms, group_name)
        return self.required_perms.issubset(user_perms)
2046
2045
2047
2046
class HasRepoGroupPermissionAnyDecorator(PermsDecorator):
    """
    Checks for access permission for any of given predicates for specific
    repository group. In order to fulfill the request any
    of predicates must be met
    """

    def _get_repo_group_name(self):
        return get_repo_group_slug(self._get_request())

    def check_permissions(self, user):
        perms = user.permissions
        group_name = self._get_repo_group_name()

        try:
            # the user holds exactly one permission per repo group
            user_perms = {perms['repositories_groups'][group_name]}
        except KeyError:
            log.debug(
                'cannot locate repo group with name: `%s` in permissions defs',
                group_name)
            return False

        log.debug('checking `%s` permissions for repo group `%s`',
                  user_perms, group_name)
        return not self.required_perms.isdisjoint(user_perms)
2075
2074
2076
2075
class HasUserGroupPermissionAllDecorator(PermsDecorator):
    """
    Checks for access permission for all given predicates for specific
    user group. All of them have to be meet in order to fulfill the request
    """

    def _get_user_group_name(self):
        return get_user_group_slug(self._get_request())

    def check_permissions(self, user):
        perms = user.permissions
        group_name = self._get_user_group_name()
        try:
            # the user holds exactly one permission per user group
            user_perms = {perms['user_groups'][group_name]}
        except KeyError:
            return False

        return self.required_perms.issubset(user_perms)
2097
2096
2098
2097
class HasUserGroupPermissionAnyDecorator(PermsDecorator):
    """
    Checks for access permission for any of given predicates for specific
    user group. In order to fulfill the request any of predicates must be meet
    """

    def _get_user_group_name(self):
        return get_user_group_slug(self._get_request())

    def check_permissions(self, user):
        perms = user.permissions
        group_name = self._get_user_group_name()
        try:
            # the user holds exactly one permission per user group
            user_perms = {perms['user_groups'][group_name]}
        except KeyError:
            return False

        return not self.required_perms.isdisjoint(user_perms)
2119
2118
2120
2119
2121 # CHECK FUNCTIONS
2120 # CHECK FUNCTIONS
2122 class PermsFunction(object):
2121 class PermsFunction(object):
2123 """Base function for other check functions"""
2122 """Base function for other check functions"""
2124
2123
2125 def __init__(self, *perms):
2124 def __init__(self, *perms):
2126 self.required_perms = set(perms)
2125 self.required_perms = set(perms)
2127 self.repo_name = None
2126 self.repo_name = None
2128 self.repo_group_name = None
2127 self.repo_group_name = None
2129 self.user_group_name = None
2128 self.user_group_name = None
2130
2129
2131 def __bool__(self):
2130 def __bool__(self):
2132 import inspect
2131 import inspect
2133 frame = inspect.currentframe()
2132 frame = inspect.currentframe()
2134 stack_trace = traceback.format_stack(frame)
2133 stack_trace = traceback.format_stack(frame)
2135 log.error('Checking bool value on a class instance of perm '
2134 log.error('Checking bool value on a class instance of perm '
2136 'function is not allowed: %s', ''.join(stack_trace))
2135 'function is not allowed: %s', ''.join(stack_trace))
2137 # rather than throwing errors, here we always return False so if by
2136 # rather than throwing errors, here we always return False so if by
2138 # accident someone checks truth for just an instance it will always end
2137 # accident someone checks truth for just an instance it will always end
2139 # up in returning False
2138 # up in returning False
2140 return False
2139 return False
2141 __nonzero__ = __bool__
2140 __nonzero__ = __bool__
2142
2141
2143 def __call__(self, check_location='', user=None):
2142 def __call__(self, check_location='', user=None):
2144 if not user:
2143 if not user:
2145 log.debug('Using user attribute from global request')
2144 log.debug('Using user attribute from global request')
2146 request = self._get_request()
2145 request = self._get_request()
2147 user = request.user
2146 user = request.user
2148
2147
2149 # init auth user if not already given
2148 # init auth user if not already given
2150 if not isinstance(user, AuthUser):
2149 if not isinstance(user, AuthUser):
2151 log.debug('Wrapping user %s into AuthUser', user)
2150 log.debug('Wrapping user %s into AuthUser', user)
2152 user = AuthUser(user.user_id)
2151 user = AuthUser(user.user_id)
2153
2152
2154 cls_name = self.__class__.__name__
2153 cls_name = self.__class__.__name__
2155 check_scope = self._get_check_scope(cls_name)
2154 check_scope = self._get_check_scope(cls_name)
2156 check_location = check_location or 'unspecified location'
2155 check_location = check_location or 'unspecified location'
2157
2156
2158 log.debug('checking cls:%s %s usr:%s %s @ %s', cls_name,
2157 log.debug('checking cls:%s %s usr:%s %s @ %s', cls_name,
2159 self.required_perms, user, check_scope, check_location)
2158 self.required_perms, user, check_scope, check_location)
2160 if not user:
2159 if not user:
2161 log.warning('Empty user given for permission check')
2160 log.warning('Empty user given for permission check')
2162 return False
2161 return False
2163
2162
2164 if self.check_permissions(user):
2163 if self.check_permissions(user):
2165 log.debug('Permission to repo:`%s` GRANTED for user:`%s` @ %s',
2164 log.debug('Permission to repo:`%s` GRANTED for user:`%s` @ %s',
2166 check_scope, user, check_location)
2165 check_scope, user, check_location)
2167 return True
2166 return True
2168
2167
2169 else:
2168 else:
2170 log.debug('Permission to repo:`%s` DENIED for user:`%s` @ %s',
2169 log.debug('Permission to repo:`%s` DENIED for user:`%s` @ %s',
2171 check_scope, user, check_location)
2170 check_scope, user, check_location)
2172 return False
2171 return False
2173
2172
2174 def _get_request(self):
2173 def _get_request(self):
2175 return get_request(self)
2174 return get_request(self)
2176
2175
2177 def _get_check_scope(self, cls_name):
2176 def _get_check_scope(self, cls_name):
2178 return {
2177 return {
2179 'HasPermissionAll': 'GLOBAL',
2178 'HasPermissionAll': 'GLOBAL',
2180 'HasPermissionAny': 'GLOBAL',
2179 'HasPermissionAny': 'GLOBAL',
2181 'HasRepoPermissionAll': 'repo:%s' % self.repo_name,
2180 'HasRepoPermissionAll': 'repo:%s' % self.repo_name,
2182 'HasRepoPermissionAny': 'repo:%s' % self.repo_name,
2181 'HasRepoPermissionAny': 'repo:%s' % self.repo_name,
2183 'HasRepoGroupPermissionAll': 'repo_group:%s' % self.repo_group_name,
2182 'HasRepoGroupPermissionAll': 'repo_group:%s' % self.repo_group_name,
2184 'HasRepoGroupPermissionAny': 'repo_group:%s' % self.repo_group_name,
2183 'HasRepoGroupPermissionAny': 'repo_group:%s' % self.repo_group_name,
2185 'HasUserGroupPermissionAll': 'user_group:%s' % self.user_group_name,
2184 'HasUserGroupPermissionAll': 'user_group:%s' % self.user_group_name,
2186 'HasUserGroupPermissionAny': 'user_group:%s' % self.user_group_name,
2185 'HasUserGroupPermissionAny': 'user_group:%s' % self.user_group_name,
2187 }.get(cls_name, '?:%s' % cls_name)
2186 }.get(cls_name, '?:%s' % cls_name)
2188
2187
2189 def check_permissions(self, user):
2188 def check_permissions(self, user):
2190 """Dummy function for overriding"""
2189 """Dummy function for overriding"""
2191 raise Exception('You have to write this function in child class')
2190 raise Exception('You have to write this function in child class')
2192
2191
2193
2192
2194 class HasPermissionAll(PermsFunction):
2193 class HasPermissionAll(PermsFunction):
2195 def check_permissions(self, user):
2194 def check_permissions(self, user):
2196 perms = user.permissions_with_scope({})
2195 perms = user.permissions_with_scope({})
2197 if self.required_perms.issubset(perms.get('global')):
2196 if self.required_perms.issubset(perms.get('global')):
2198 return True
2197 return True
2199 return False
2198 return False
2200
2199
2201
2200
2202 class HasPermissionAny(PermsFunction):
2201 class HasPermissionAny(PermsFunction):
2203 def check_permissions(self, user):
2202 def check_permissions(self, user):
2204 perms = user.permissions_with_scope({})
2203 perms = user.permissions_with_scope({})
2205 if self.required_perms.intersection(perms.get('global')):
2204 if self.required_perms.intersection(perms.get('global')):
2206 return True
2205 return True
2207 return False
2206 return False
2208
2207
2209
2208
2210 class HasRepoPermissionAll(PermsFunction):
2209 class HasRepoPermissionAll(PermsFunction):
2211 def __call__(self, repo_name=None, check_location='', user=None):
2210 def __call__(self, repo_name=None, check_location='', user=None):
2212 self.repo_name = repo_name
2211 self.repo_name = repo_name
2213 return super(HasRepoPermissionAll, self).__call__(check_location, user)
2212 return super(HasRepoPermissionAll, self).__call__(check_location, user)
2214
2213
2215 def _get_repo_name(self):
2214 def _get_repo_name(self):
2216 if not self.repo_name:
2215 if not self.repo_name:
2217 _request = self._get_request()
2216 _request = self._get_request()
2218 self.repo_name = get_repo_slug(_request)
2217 self.repo_name = get_repo_slug(_request)
2219 return self.repo_name
2218 return self.repo_name
2220
2219
2221 def check_permissions(self, user):
2220 def check_permissions(self, user):
2222 self.repo_name = self._get_repo_name()
2221 self.repo_name = self._get_repo_name()
2223 perms = user.permissions
2222 perms = user.permissions
2224 try:
2223 try:
2225 user_perms = {perms['repositories'][self.repo_name]}
2224 user_perms = {perms['repositories'][self.repo_name]}
2226 except KeyError:
2225 except KeyError:
2227 return False
2226 return False
2228 if self.required_perms.issubset(user_perms):
2227 if self.required_perms.issubset(user_perms):
2229 return True
2228 return True
2230 return False
2229 return False
2231
2230
2232
2231
2233 class HasRepoPermissionAny(PermsFunction):
2232 class HasRepoPermissionAny(PermsFunction):
2234 def __call__(self, repo_name=None, check_location='', user=None):
2233 def __call__(self, repo_name=None, check_location='', user=None):
2235 self.repo_name = repo_name
2234 self.repo_name = repo_name
2236 return super(HasRepoPermissionAny, self).__call__(check_location, user)
2235 return super(HasRepoPermissionAny, self).__call__(check_location, user)
2237
2236
2238 def _get_repo_name(self):
2237 def _get_repo_name(self):
2239 if not self.repo_name:
2238 if not self.repo_name:
2240 _request = self._get_request()
2239 _request = self._get_request()
2241 self.repo_name = get_repo_slug(_request)
2240 self.repo_name = get_repo_slug(_request)
2242 return self.repo_name
2241 return self.repo_name
2243
2242
2244 def check_permissions(self, user):
2243 def check_permissions(self, user):
2245 self.repo_name = self._get_repo_name()
2244 self.repo_name = self._get_repo_name()
2246 perms = user.permissions
2245 perms = user.permissions
2247 try:
2246 try:
2248 user_perms = {perms['repositories'][self.repo_name]}
2247 user_perms = {perms['repositories'][self.repo_name]}
2249 except KeyError:
2248 except KeyError:
2250 return False
2249 return False
2251 if self.required_perms.intersection(user_perms):
2250 if self.required_perms.intersection(user_perms):
2252 return True
2251 return True
2253 return False
2252 return False
2254
2253
2255
2254
2256 class HasRepoGroupPermissionAny(PermsFunction):
2255 class HasRepoGroupPermissionAny(PermsFunction):
2257 def __call__(self, group_name=None, check_location='', user=None):
2256 def __call__(self, group_name=None, check_location='', user=None):
2258 self.repo_group_name = group_name
2257 self.repo_group_name = group_name
2259 return super(HasRepoGroupPermissionAny, self).__call__(check_location, user)
2258 return super(HasRepoGroupPermissionAny, self).__call__(check_location, user)
2260
2259
2261 def check_permissions(self, user):
2260 def check_permissions(self, user):
2262 perms = user.permissions
2261 perms = user.permissions
2263 try:
2262 try:
2264 user_perms = {perms['repositories_groups'][self.repo_group_name]}
2263 user_perms = {perms['repositories_groups'][self.repo_group_name]}
2265 except KeyError:
2264 except KeyError:
2266 return False
2265 return False
2267 if self.required_perms.intersection(user_perms):
2266 if self.required_perms.intersection(user_perms):
2268 return True
2267 return True
2269 return False
2268 return False
2270
2269
2271
2270
2272 class HasRepoGroupPermissionAll(PermsFunction):
2271 class HasRepoGroupPermissionAll(PermsFunction):
2273 def __call__(self, group_name=None, check_location='', user=None):
2272 def __call__(self, group_name=None, check_location='', user=None):
2274 self.repo_group_name = group_name
2273 self.repo_group_name = group_name
2275 return super(HasRepoGroupPermissionAll, self).__call__(check_location, user)
2274 return super(HasRepoGroupPermissionAll, self).__call__(check_location, user)
2276
2275
2277 def check_permissions(self, user):
2276 def check_permissions(self, user):
2278 perms = user.permissions
2277 perms = user.permissions
2279 try:
2278 try:
2280 user_perms = {perms['repositories_groups'][self.repo_group_name]}
2279 user_perms = {perms['repositories_groups'][self.repo_group_name]}
2281 except KeyError:
2280 except KeyError:
2282 return False
2281 return False
2283 if self.required_perms.issubset(user_perms):
2282 if self.required_perms.issubset(user_perms):
2284 return True
2283 return True
2285 return False
2284 return False
2286
2285
2287
2286
2288 class HasUserGroupPermissionAny(PermsFunction):
2287 class HasUserGroupPermissionAny(PermsFunction):
2289 def __call__(self, user_group_name=None, check_location='', user=None):
2288 def __call__(self, user_group_name=None, check_location='', user=None):
2290 self.user_group_name = user_group_name
2289 self.user_group_name = user_group_name
2291 return super(HasUserGroupPermissionAny, self).__call__(check_location, user)
2290 return super(HasUserGroupPermissionAny, self).__call__(check_location, user)
2292
2291
2293 def check_permissions(self, user):
2292 def check_permissions(self, user):
2294 perms = user.permissions
2293 perms = user.permissions
2295 try:
2294 try:
2296 user_perms = {perms['user_groups'][self.user_group_name]}
2295 user_perms = {perms['user_groups'][self.user_group_name]}
2297 except KeyError:
2296 except KeyError:
2298 return False
2297 return False
2299 if self.required_perms.intersection(user_perms):
2298 if self.required_perms.intersection(user_perms):
2300 return True
2299 return True
2301 return False
2300 return False
2302
2301
2303
2302
2304 class HasUserGroupPermissionAll(PermsFunction):
2303 class HasUserGroupPermissionAll(PermsFunction):
2305 def __call__(self, user_group_name=None, check_location='', user=None):
2304 def __call__(self, user_group_name=None, check_location='', user=None):
2306 self.user_group_name = user_group_name
2305 self.user_group_name = user_group_name
2307 return super(HasUserGroupPermissionAll, self).__call__(check_location, user)
2306 return super(HasUserGroupPermissionAll, self).__call__(check_location, user)
2308
2307
2309 def check_permissions(self, user):
2308 def check_permissions(self, user):
2310 perms = user.permissions
2309 perms = user.permissions
2311 try:
2310 try:
2312 user_perms = {perms['user_groups'][self.user_group_name]}
2311 user_perms = {perms['user_groups'][self.user_group_name]}
2313 except KeyError:
2312 except KeyError:
2314 return False
2313 return False
2315 if self.required_perms.issubset(user_perms):
2314 if self.required_perms.issubset(user_perms):
2316 return True
2315 return True
2317 return False
2316 return False
2318
2317
2319
2318
2320 # SPECIAL VERSION TO HANDLE MIDDLEWARE AUTH
2319 # SPECIAL VERSION TO HANDLE MIDDLEWARE AUTH
2321 class HasPermissionAnyMiddleware(object):
2320 class HasPermissionAnyMiddleware(object):
2322 def __init__(self, *perms):
2321 def __init__(self, *perms):
2323 self.required_perms = set(perms)
2322 self.required_perms = set(perms)
2324
2323
2325 def __call__(self, auth_user, repo_name):
2324 def __call__(self, auth_user, repo_name):
2326 # # repo_name MUST be unicode, since we handle keys in permission
2325 # # repo_name MUST be unicode, since we handle keys in permission
2327 # # dict by unicode
2326 # # dict by unicode
2328 #TODO: verify
2327 #TODO: verify
2329 # repo_name = safe_unicode(repo_name)
2328 # repo_name = safe_unicode(repo_name)
2330
2329
2331 log.debug(
2330 log.debug(
2332 'Checking VCS protocol permissions %s for user:%s repo:`%s`',
2331 'Checking VCS protocol permissions %s for user:%s repo:`%s`',
2333 self.required_perms, auth_user, repo_name)
2332 self.required_perms, auth_user, repo_name)
2334
2333
2335 if self.check_permissions(auth_user, repo_name):
2334 if self.check_permissions(auth_user, repo_name):
2336 log.debug('Permission to repo:`%s` GRANTED for user:%s @ %s',
2335 log.debug('Permission to repo:`%s` GRANTED for user:%s @ %s',
2337 repo_name, auth_user, 'PermissionMiddleware')
2336 repo_name, auth_user, 'PermissionMiddleware')
2338 return True
2337 return True
2339
2338
2340 else:
2339 else:
2341 log.debug('Permission to repo:`%s` DENIED for user:%s @ %s',
2340 log.debug('Permission to repo:`%s` DENIED for user:%s @ %s',
2342 repo_name, auth_user, 'PermissionMiddleware')
2341 repo_name, auth_user, 'PermissionMiddleware')
2343 return False
2342 return False
2344
2343
2345 def check_permissions(self, user, repo_name):
2344 def check_permissions(self, user, repo_name):
2346 perms = user.permissions_with_scope({'repo_name': repo_name})
2345 perms = user.permissions_with_scope({'repo_name': repo_name})
2347
2346
2348 try:
2347 try:
2349 user_perms = {perms['repositories'][repo_name]}
2348 user_perms = {perms['repositories'][repo_name]}
2350 except Exception:
2349 except Exception:
2351 log.exception('Error while accessing user permissions')
2350 log.exception('Error while accessing user permissions')
2352 return False
2351 return False
2353
2352
2354 if self.required_perms.intersection(user_perms):
2353 if self.required_perms.intersection(user_perms):
2355 return True
2354 return True
2356 return False
2355 return False
2357
2356
2358
2357
2359 # SPECIAL VERSION TO HANDLE API AUTH
2358 # SPECIAL VERSION TO HANDLE API AUTH
2360 class _BaseApiPerm(object):
2359 class _BaseApiPerm(object):
2361 def __init__(self, *perms):
2360 def __init__(self, *perms):
2362 self.required_perms = set(perms)
2361 self.required_perms = set(perms)
2363
2362
2364 def __call__(self, check_location=None, user=None, repo_name=None,
2363 def __call__(self, check_location=None, user=None, repo_name=None,
2365 group_name=None, user_group_name=None):
2364 group_name=None, user_group_name=None):
2366 cls_name = self.__class__.__name__
2365 cls_name = self.__class__.__name__
2367 check_scope = 'global:%s' % (self.required_perms,)
2366 check_scope = 'global:%s' % (self.required_perms,)
2368 if repo_name:
2367 if repo_name:
2369 check_scope += ', repo_name:%s' % (repo_name,)
2368 check_scope += ', repo_name:%s' % (repo_name,)
2370
2369
2371 if group_name:
2370 if group_name:
2372 check_scope += ', repo_group_name:%s' % (group_name,)
2371 check_scope += ', repo_group_name:%s' % (group_name,)
2373
2372
2374 if user_group_name:
2373 if user_group_name:
2375 check_scope += ', user_group_name:%s' % (user_group_name,)
2374 check_scope += ', user_group_name:%s' % (user_group_name,)
2376
2375
2377 log.debug('checking cls:%s %s %s @ %s',
2376 log.debug('checking cls:%s %s %s @ %s',
2378 cls_name, self.required_perms, check_scope, check_location)
2377 cls_name, self.required_perms, check_scope, check_location)
2379 if not user:
2378 if not user:
2380 log.debug('Empty User passed into arguments')
2379 log.debug('Empty User passed into arguments')
2381 return False
2380 return False
2382
2381
2383 # process user
2382 # process user
2384 if not isinstance(user, AuthUser):
2383 if not isinstance(user, AuthUser):
2385 user = AuthUser(user.user_id)
2384 user = AuthUser(user.user_id)
2386 if not check_location:
2385 if not check_location:
2387 check_location = 'unspecified'
2386 check_location = 'unspecified'
2388 if self.check_permissions(user.permissions, repo_name, group_name,
2387 if self.check_permissions(user.permissions, repo_name, group_name,
2389 user_group_name):
2388 user_group_name):
2390 log.debug('Permission to repo:`%s` GRANTED for user:`%s` @ %s',
2389 log.debug('Permission to repo:`%s` GRANTED for user:`%s` @ %s',
2391 check_scope, user, check_location)
2390 check_scope, user, check_location)
2392 return True
2391 return True
2393
2392
2394 else:
2393 else:
2395 log.debug('Permission to repo:`%s` DENIED for user:`%s` @ %s',
2394 log.debug('Permission to repo:`%s` DENIED for user:`%s` @ %s',
2396 check_scope, user, check_location)
2395 check_scope, user, check_location)
2397 return False
2396 return False
2398
2397
2399 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2398 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2400 user_group_name=None):
2399 user_group_name=None):
2401 """
2400 """
2402 implement in child class should return True if permissions are ok,
2401 implement in child class should return True if permissions are ok,
2403 False otherwise
2402 False otherwise
2404
2403
2405 :param perm_defs: dict with permission definitions
2404 :param perm_defs: dict with permission definitions
2406 :param repo_name: repo name
2405 :param repo_name: repo name
2407 """
2406 """
2408 raise NotImplementedError()
2407 raise NotImplementedError()
2409
2408
2410
2409
2411 class HasPermissionAllApi(_BaseApiPerm):
2410 class HasPermissionAllApi(_BaseApiPerm):
2412 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2411 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2413 user_group_name=None):
2412 user_group_name=None):
2414 if self.required_perms.issubset(perm_defs.get('global')):
2413 if self.required_perms.issubset(perm_defs.get('global')):
2415 return True
2414 return True
2416 return False
2415 return False
2417
2416
2418
2417
2419 class HasPermissionAnyApi(_BaseApiPerm):
2418 class HasPermissionAnyApi(_BaseApiPerm):
2420 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2419 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2421 user_group_name=None):
2420 user_group_name=None):
2422 if self.required_perms.intersection(perm_defs.get('global')):
2421 if self.required_perms.intersection(perm_defs.get('global')):
2423 return True
2422 return True
2424 return False
2423 return False
2425
2424
2426
2425
2427 class HasRepoPermissionAllApi(_BaseApiPerm):
2426 class HasRepoPermissionAllApi(_BaseApiPerm):
2428 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2427 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2429 user_group_name=None):
2428 user_group_name=None):
2430 try:
2429 try:
2431 _user_perms = {perm_defs['repositories'][repo_name]}
2430 _user_perms = {perm_defs['repositories'][repo_name]}
2432 except KeyError:
2431 except KeyError:
2433 log.warning(traceback.format_exc())
2432 log.warning(traceback.format_exc())
2434 return False
2433 return False
2435 if self.required_perms.issubset(_user_perms):
2434 if self.required_perms.issubset(_user_perms):
2436 return True
2435 return True
2437 return False
2436 return False
2438
2437
2439
2438
2440 class HasRepoPermissionAnyApi(_BaseApiPerm):
2439 class HasRepoPermissionAnyApi(_BaseApiPerm):
2441 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2440 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2442 user_group_name=None):
2441 user_group_name=None):
2443 try:
2442 try:
2444 _user_perms = {perm_defs['repositories'][repo_name]}
2443 _user_perms = {perm_defs['repositories'][repo_name]}
2445 except KeyError:
2444 except KeyError:
2446 log.warning(traceback.format_exc())
2445 log.warning(traceback.format_exc())
2447 return False
2446 return False
2448 if self.required_perms.intersection(_user_perms):
2447 if self.required_perms.intersection(_user_perms):
2449 return True
2448 return True
2450 return False
2449 return False
2451
2450
2452
2451
2453 class HasRepoGroupPermissionAnyApi(_BaseApiPerm):
2452 class HasRepoGroupPermissionAnyApi(_BaseApiPerm):
2454 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2453 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2455 user_group_name=None):
2454 user_group_name=None):
2456 try:
2455 try:
2457 _user_perms = {perm_defs['repositories_groups'][group_name]}
2456 _user_perms = {perm_defs['repositories_groups'][group_name]}
2458 except KeyError:
2457 except KeyError:
2459 log.warning(traceback.format_exc())
2458 log.warning(traceback.format_exc())
2460 return False
2459 return False
2461 if self.required_perms.intersection(_user_perms):
2460 if self.required_perms.intersection(_user_perms):
2462 return True
2461 return True
2463 return False
2462 return False
2464
2463
2465
2464
2466 class HasRepoGroupPermissionAllApi(_BaseApiPerm):
2465 class HasRepoGroupPermissionAllApi(_BaseApiPerm):
2467 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2466 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2468 user_group_name=None):
2467 user_group_name=None):
2469 try:
2468 try:
2470 _user_perms = {perm_defs['repositories_groups'][group_name]}
2469 _user_perms = {perm_defs['repositories_groups'][group_name]}
2471 except KeyError:
2470 except KeyError:
2472 log.warning(traceback.format_exc())
2471 log.warning(traceback.format_exc())
2473 return False
2472 return False
2474 if self.required_perms.issubset(_user_perms):
2473 if self.required_perms.issubset(_user_perms):
2475 return True
2474 return True
2476 return False
2475 return False
2477
2476
2478
2477
2479 class HasUserGroupPermissionAnyApi(_BaseApiPerm):
2478 class HasUserGroupPermissionAnyApi(_BaseApiPerm):
2480 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2479 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2481 user_group_name=None):
2480 user_group_name=None):
2482 try:
2481 try:
2483 _user_perms = {perm_defs['user_groups'][user_group_name]}
2482 _user_perms = {perm_defs['user_groups'][user_group_name]}
2484 except KeyError:
2483 except KeyError:
2485 log.warning(traceback.format_exc())
2484 log.warning(traceback.format_exc())
2486 return False
2485 return False
2487 if self.required_perms.intersection(_user_perms):
2486 if self.required_perms.intersection(_user_perms):
2488 return True
2487 return True
2489 return False
2488 return False
2490
2489
2491
2490
2492 def check_ip_access(source_ip, allowed_ips=None):
2491 def check_ip_access(source_ip, allowed_ips=None):
2493 """
2492 """
2494 Checks if source_ip is a subnet of any of allowed_ips.
2493 Checks if source_ip is a subnet of any of allowed_ips.
2495
2494
2496 :param source_ip:
2495 :param source_ip:
2497 :param allowed_ips: list of allowed ips together with mask
2496 :param allowed_ips: list of allowed ips together with mask
2498 """
2497 """
2499 log.debug('checking if ip:%s is subnet of %s', source_ip, allowed_ips)
2498 log.debug('checking if ip:%s is subnet of %s', source_ip, allowed_ips)
2500 source_ip_address = ipaddress.ip_address(source_ip)
2499 source_ip_address = ipaddress.ip_address(source_ip)
2501 if isinstance(allowed_ips, (tuple, list, set)):
2500 if isinstance(allowed_ips, (tuple, list, set)):
2502 for ip in allowed_ips:
2501 for ip in allowed_ips:
2503 #TODO: verify
2502 #TODO: verify
2504 #ip = safe_unicode(ip)
2503 #ip = safe_unicode(ip)
2505 try:
2504 try:
2506 network_address = ipaddress.ip_network(ip, strict=False)
2505 network_address = ipaddress.ip_network(ip, strict=False)
2507 if source_ip_address in network_address:
2506 if source_ip_address in network_address:
2508 log.debug('IP %s is network %s', source_ip_address, network_address)
2507 log.debug('IP %s is network %s', source_ip_address, network_address)
2509 return True
2508 return True
2510 # for any case we cannot determine the IP, don't crash just
2509 # for any case we cannot determine the IP, don't crash just
2511 # skip it and log as error, we want to say forbidden still when
2510 # skip it and log as error, we want to say forbidden still when
2512 # sending bad IP
2511 # sending bad IP
2513 except Exception:
2512 except Exception:
2514 log.error(traceback.format_exc())
2513 log.error(traceback.format_exc())
2515 continue
2514 continue
2516 return False
2515 return False
2517
2516
2518
2517
2519 def get_cython_compat_decorator(wrapper, func):
2518 def get_cython_compat_decorator(wrapper, func):
2520 """
2519 """
2521 Creates a cython compatible decorator. The previously used
2520 Creates a cython compatible decorator. The previously used
2522 decorator.decorator() function seems to be incompatible with cython.
2521 decorator.decorator() function seems to be incompatible with cython.
2523
2522
2524 :param wrapper: __wrapper method of the decorator class
2523 :param wrapper: __wrapper method of the decorator class
2525 :param func: decorated function
2524 :param func: decorated function
2526 """
2525 """
2527 @wraps(func)
2526 @wraps(func)
2528 def local_wrapper(*args, **kwds):
2527 def local_wrapper(*args, **kwds):
2529 return wrapper(func, *args, **kwds)
2528 return wrapper(func, *args, **kwds)
2530 local_wrapper.__wrapped__ = func
2529 local_wrapper.__wrapped__ = func
2531 return local_wrapper
2530 return local_wrapper
2532
2531
2533
2532
@@ -1,611 +1,610 b''
1 # -*- coding: utf-8 -*-
2
1
3 # Copyright (C) 2010-2020 RhodeCode GmbH
2 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
3 #
5 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
8 #
7 #
9 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
11 # GNU General Public License for more details.
13 #
12 #
14 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
15 #
17 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
19
21 """
20 """
22 The base Controller API
21 The base Controller API
23 Provides the BaseController class for subclassing. And usage in different
22 Provides the BaseController class for subclassing. And usage in different
24 controllers
23 controllers
25 """
24 """
26
25
27 import logging
26 import logging
28 import socket
27 import socket
29
28
30 import markupsafe
29 import markupsafe
31 import ipaddress
30 import ipaddress
32
31
33 from paste.auth.basic import AuthBasicAuthenticator
32 from paste.auth.basic import AuthBasicAuthenticator
34 from paste.httpexceptions import HTTPUnauthorized, HTTPForbidden, get_exception
33 from paste.httpexceptions import HTTPUnauthorized, HTTPForbidden, get_exception
35 from paste.httpheaders import WWW_AUTHENTICATE, AUTHORIZATION
34 from paste.httpheaders import WWW_AUTHENTICATE, AUTHORIZATION
36
35
37 import rhodecode
36 import rhodecode
38 from rhodecode.authentication.base import VCS_TYPE
37 from rhodecode.authentication.base import VCS_TYPE
39 from rhodecode.lib import auth, utils2
38 from rhodecode.lib import auth, utils2
40 from rhodecode.lib import helpers as h
39 from rhodecode.lib import helpers as h
41 from rhodecode.lib.auth import AuthUser, CookieStoreWrapper
40 from rhodecode.lib.auth import AuthUser, CookieStoreWrapper
42 from rhodecode.lib.exceptions import UserCreationError
41 from rhodecode.lib.exceptions import UserCreationError
43 from rhodecode.lib.utils import (password_changed, get_enabled_hook_classes)
42 from rhodecode.lib.utils import (password_changed, get_enabled_hook_classes)
44 from rhodecode.lib.utils2 import (
43 from rhodecode.lib.utils2 import (
45 str2bool, safe_unicode, AttributeDict, safe_int, sha1, aslist, safe_str)
44 str2bool, safe_unicode, AttributeDict, safe_int, sha1, aslist, safe_str)
46 from rhodecode.model.db import Repository, User, ChangesetComment, UserBookmark
45 from rhodecode.model.db import Repository, User, ChangesetComment, UserBookmark
47 from rhodecode.model.notification import NotificationModel
46 from rhodecode.model.notification import NotificationModel
48 from rhodecode.model.settings import VcsSettingsModel, SettingsModel
47 from rhodecode.model.settings import VcsSettingsModel, SettingsModel
49
48
50 log = logging.getLogger(__name__)
49 log = logging.getLogger(__name__)
51
50
52
51
53 def _filter_proxy(ip):
52 def _filter_proxy(ip):
54 """
53 """
55 Passed in IP addresses in HEADERS can be in a special format of multiple
54 Passed in IP addresses in HEADERS can be in a special format of multiple
56 ips. Those comma separated IPs are passed from various proxies in the
55 ips. Those comma separated IPs are passed from various proxies in the
57 chain of request processing. The left-most being the original client.
56 chain of request processing. The left-most being the original client.
58 We only care about the first IP which came from the org. client.
57 We only care about the first IP which came from the org. client.
59
58
60 :param ip: ip string from headers
59 :param ip: ip string from headers
61 """
60 """
62 if ',' in ip:
61 if ',' in ip:
63 _ips = ip.split(',')
62 _ips = ip.split(',')
64 _first_ip = _ips[0].strip()
63 _first_ip = _ips[0].strip()
65 log.debug('Got multiple IPs %s, using %s', ','.join(_ips), _first_ip)
64 log.debug('Got multiple IPs %s, using %s', ','.join(_ips), _first_ip)
66 return _first_ip
65 return _first_ip
67 return ip
66 return ip
68
67
69
68
70 def _filter_port(ip):
69 def _filter_port(ip):
71 """
70 """
72 Removes a port from ip, there are 4 main cases to handle here.
71 Removes a port from ip, there are 4 main cases to handle here.
73 - ipv4 eg. 127.0.0.1
72 - ipv4 eg. 127.0.0.1
74 - ipv6 eg. ::1
73 - ipv6 eg. ::1
75 - ipv4+port eg. 127.0.0.1:8080
74 - ipv4+port eg. 127.0.0.1:8080
76 - ipv6+port eg. [::1]:8080
75 - ipv6+port eg. [::1]:8080
77
76
78 :param ip:
77 :param ip:
79 """
78 """
80 def is_ipv6(ip_addr):
79 def is_ipv6(ip_addr):
81 if hasattr(socket, 'inet_pton'):
80 if hasattr(socket, 'inet_pton'):
82 try:
81 try:
83 socket.inet_pton(socket.AF_INET6, ip_addr)
82 socket.inet_pton(socket.AF_INET6, ip_addr)
84 except socket.error:
83 except socket.error:
85 return False
84 return False
86 else:
85 else:
87 # fallback to ipaddress
86 # fallback to ipaddress
88 try:
87 try:
89 ipaddress.IPv6Address(safe_str(ip_addr))
88 ipaddress.IPv6Address(safe_str(ip_addr))
90 except Exception:
89 except Exception:
91 return False
90 return False
92 return True
91 return True
93
92
94 if ':' not in ip: # must be ipv4 pure ip
93 if ':' not in ip: # must be ipv4 pure ip
95 return ip
94 return ip
96
95
97 if '[' in ip and ']' in ip: # ipv6 with port
96 if '[' in ip and ']' in ip: # ipv6 with port
98 return ip.split(']')[0][1:].lower()
97 return ip.split(']')[0][1:].lower()
99
98
100 # must be ipv6 or ipv4 with port
99 # must be ipv6 or ipv4 with port
101 if is_ipv6(ip):
100 if is_ipv6(ip):
102 return ip
101 return ip
103 else:
102 else:
104 ip, _port = ip.split(':')[:2] # means ipv4+port
103 ip, _port = ip.split(':')[:2] # means ipv4+port
105 return ip
104 return ip
106
105
107
106
def get_ip_addr(environ):
    """
    Resolve the client IP from the WSGI environ.

    Proxy headers take precedence (X-Real-IP first, then X-Forwarded-For),
    falling back to REMOTE_ADDR; each candidate is normalized to strip
    proxy chains and port suffixes.
    """
    def _clean(raw_ip):
        return _filter_port(_filter_proxy(raw_ip))

    for header in ('HTTP_X_REAL_IP', 'HTTP_X_FORWARDED_FOR'):
        candidate = environ.get(header)
        if candidate:
            return _clean(candidate)

    return _clean(environ.get('REMOTE_ADDR', '0.0.0.0'))
124
123
125
124
def get_server_ip_addr(environ, log_errors=True):
    """
    Resolve the server's IP address from ``SERVER_NAME``.

    :param environ: WSGI environ dict
    :param log_errors: when True, failed lookups are logged (non-fatally)
    :return: resolved IP string, or the raw hostname when resolution fails
    """
    hostname = environ.get('SERVER_NAME')
    try:
        resolved = socket.gethostbyname(hostname)
    except Exception as e:
        if log_errors:
            # in some cases this lookup is not possible, and we don't want to
            # make it an exception in logs
            log.exception('Could not retrieve server ip address: %s', e)
        return hostname
    return resolved
136
135
137
136
def get_server_port(environ):
    """Return the server port (``SERVER_PORT``) from the WSGI environ."""
    return environ.get('SERVER_PORT', None)
140
139
141
140
def get_access_path(environ):
    """
    Return the request path (``PATH_INFO``), preferring the original
    request the pylons middleware may have stored in the environ.
    """
    org_req = environ.get('pylons.original_request')
    if org_req:
        return org_req.environ.get('PATH_INFO')
    return environ.get('PATH_INFO')
148
147
149
148
def get_user_agent(environ):
    """Return the client's ``User-Agent`` header, if any."""
    return environ.get('HTTP_USER_AGENT', None)
152
151
153
152
def vcs_operation_context(
        environ, repo_name, username, action, scm, check_locking=True,
        is_shadow_repo=False, check_branch_perms=False, detect_force_push=False):
    """
    Generate the context for a vcs operation, e.g. push or pull.

    This context is passed over the layers so that hooks triggered by the
    vcs operation know details like the user, the user's IP address etc.

    :param environ: WSGI environ of the incoming request
    :param repo_name: name of the repository the operation targets
    :param username: name of the acting user (may be the anonymous default)
    :param action: vcs action string (e.g. push/pull)
    :param scm: vcs backend identifier
    :param check_locking: Allows to switch of the computation of the locking
        data. This serves mainly the need of the simplevcs middleware to be
        able to disable this for certain operations.
    :param is_shadow_repo: flag forwarded to hooks for shadow-repo operations
    :param check_branch_perms: flag forwarded to hooks for branch permission checks
    :param detect_force_push: flag forwarded to hooks for force-push detection
    :return: dict of scm data consumed by the hook layers
    """
    # Tri-state value: False: unlock, None: nothing, True: lock
    make_lock = None
    locked_by = [None, None, None]
    is_anonymous = username == User.DEFAULT_USER
    user = User.get_by_username(username)
    if not is_anonymous and check_locking:
        # locking is only evaluated for real users, and only when requested
        log.debug('Checking locking on repository "%s"', repo_name)
        repo = Repository.get_by_repo_name(repo_name)
        make_lock, __, locked_by = repo.get_locking_state(
            action, user.user_id)
    user_id = user.user_id
    settings_model = VcsSettingsModel(repo=repo_name)
    ui_settings = settings_model.get_ui_settings()

    # NOTE(marcink): This should be also in sync with
    # rhodecode/apps/ssh_support/lib/backends/base.py:update_environment scm_data
    # the '/' ui key holds the repository store path, if configured
    store = [x for x in ui_settings if x.key == '/']
    repo_store = ''
    if store:
        repo_store = store[0].value

    scm_data = {
        'ip': get_ip_addr(environ),
        'username': username,
        'user_id': user_id,
        'action': action,
        'repository': repo_name,
        'scm': scm,
        'config': rhodecode.CONFIG['__file__'],
        'repo_store': repo_store,
        'make_lock': make_lock,
        'locked_by': locked_by,
        'server_url': utils2.get_server_url(environ),
        'user_agent': get_user_agent(environ),
        'hooks': get_enabled_hook_classes(ui_settings),
        'is_shadow_repo': is_shadow_repo,
        'detect_force_push': detect_force_push,
        'check_branch_perms': check_branch_perms,
    }
    return scm_data
208
207
209
208
class BasicAuth(AuthBasicAuthenticator):
    """
    HTTP Basic authenticator for vcs operations, extending paste's
    AuthBasicAuthenticator with RhodeCode specifics: a configurable
    alternative HTTP response code for failed auth, and an auth function
    that receives the registry and ACL repo name.
    """

    def __init__(self, realm, authfunc, registry, auth_http_code=None,
                 initial_call_detection=False, acl_repo_name=None, rc_realm=''):
        self.realm = realm
        self.rc_realm = rc_realm
        # True until the first real authentication attempt was executed;
        # the alternative auth_http_code is only used after that
        self.initial_call = initial_call_detection
        self.authfunc = authfunc
        self.registry = registry
        self.acl_repo_name = acl_repo_name
        self._rc_auth_http_code = auth_http_code

    def _get_response_from_code(self, http_code):
        """Map a numeric HTTP code to a paste exception class, HTTPForbidden on failure."""
        try:
            return get_exception(safe_int(http_code))
        except Exception:
            log.exception('Failed to fetch response for code %s', http_code)
            return HTTPForbidden

    def get_rc_realm(self):
        """Return the RhodeCode realm as a plain string."""
        return safe_str(self.rc_realm)

    def build_authentication(self):
        """Build the 401 (or configured alternative) challenge response."""
        head = WWW_AUTHENTICATE.tuples('Basic realm="%s"' % self.realm)
        if self._rc_auth_http_code and not self.initial_call:
            # return alternative HTTP code if alternative http return code
            # is specified in RhodeCode config, but ONLY if it's not the
            # FIRST call
            custom_response_klass = self._get_response_from_code(
                self._rc_auth_http_code)
            return custom_response_klass(headers=head)
        return HTTPUnauthorized(headers=head)

    def authenticate(self, environ):
        """
        Authenticate the request from its Authorization header.

        :return: dict with ``username`` and ``auth_data`` on success, or a
            paste HTTP exception (challenge response) on failure.
        """
        import base64  # local import; not guaranteed at module level here

        authorization = AUTHORIZATION(environ)
        if not authorization:
            return self.build_authentication()
        (authmeth, auth) = authorization.split(' ', 1)
        if 'basic' != authmeth.lower():
            return self.build_authentication()
        try:
            # str.decode('base64') is Python 2 only; on Python 3 the
            # credentials must go through the base64 module and then be
            # decoded back to text explicitly.
            auth = base64.b64decode(auth.strip()).decode('utf-8')
        except (TypeError, ValueError, UnicodeDecodeError):
            # malformed base64/utf-8 payload -> issue the challenge again
            return self.build_authentication()
        _parts = auth.split(':', 1)
        if len(_parts) == 2:
            username, password = _parts
            auth_data = self.authfunc(
                username, password, environ, VCS_TYPE,
                registry=self.registry, acl_repo_name=self.acl_repo_name)
            if auth_data:
                return {'username': username, 'auth_data': auth_data}
            if username and password:
                # we mark that we actually executed authentication once, at
                # that point we can use the alternative auth code
                self.initial_call = False

        return self.build_authentication()

    __call__ = authenticate
267
266
268
267
def calculate_version_hash(config):
    """
    Derive a short (8 char) hash unique per instance secret and RhodeCode
    version, so it stays consistent without leaking the version itself.
    """
    secret = config.get('beaker.session.secret', '')
    return sha1(secret + rhodecode.__version__)[:8]
273
272
274
273
def get_current_lang(request):
    """
    Return the language code for the current request.

    Tries the legacy pylons translation machinery first and falls back to
    the pyramid locale attributes on the request.

    :param request: pyramid request object
    """
    # NOTE(marcink): remove after pyramid move
    try:
        return translation.get_lang()[0]
    except Exception:
        # `translation` may already be gone (pyramid) or have no language
        # set; fall through to the pyramid locale below. Previously a bare
        # `except:` was used, which also swallowed SystemExit and
        # KeyboardInterrupt.
        pass

    return getattr(request, '_LOCALE_', request.locale_name)
283
282
284
283
def attach_context_attributes(context, request, user_id=None, is_api=None):
    """
    Attach variables into template context called `c`.

    Populates visual options, INI settings, session-backed user
    preferences, the JS template context, CSRF token and notification /
    bookmark data, then attaches the whole call context to the request.

    :param context: template context object (``c``) to populate
    :param request: pyramid request object
    :param user_id: optional user id used for notifications/bookmarks
    :param is_api: optional flag marking an API call; see NOTE below
    """
    config = request.registry.settings

    rc_config = SettingsModel().get_all_settings(cache=True, from_request=False)
    context.rc_config = rc_config
    context.rhodecode_version = rhodecode.__version__
    context.rhodecode_edition = config.get('rhodecode.edition')
    context.rhodecode_edition_id = config.get('rhodecode.edition_id')
    # unique secret + version does not leak the version but keep consistency
    context.rhodecode_version_hash = calculate_version_hash(config)

    # Default language set for the incoming request
    context.language = get_current_lang(request)

    # Visual options
    context.visual = AttributeDict({})

    # DB stored Visual Items
    context.visual.show_public_icon = str2bool(
        rc_config.get('rhodecode_show_public_icon'))
    context.visual.show_private_icon = str2bool(
        rc_config.get('rhodecode_show_private_icon'))
    context.visual.stylify_metatags = str2bool(
        rc_config.get('rhodecode_stylify_metatags'))
    context.visual.dashboard_items = safe_int(
        rc_config.get('rhodecode_dashboard_items', 100))
    context.visual.admin_grid_items = safe_int(
        rc_config.get('rhodecode_admin_grid_items', 100))
    context.visual.show_revision_number = str2bool(
        rc_config.get('rhodecode_show_revision_number', True))
    context.visual.show_sha_length = safe_int(
        rc_config.get('rhodecode_show_sha_length', 100))
    context.visual.repository_fields = str2bool(
        rc_config.get('rhodecode_repository_fields'))
    context.visual.show_version = str2bool(
        rc_config.get('rhodecode_show_version'))
    context.visual.use_gravatar = str2bool(
        rc_config.get('rhodecode_use_gravatar'))
    context.visual.gravatar_url = rc_config.get('rhodecode_gravatar_url')
    context.visual.default_renderer = rc_config.get(
        'rhodecode_markup_renderer', 'rst')
    context.visual.comment_types = ChangesetComment.COMMENT_TYPES
    context.visual.rhodecode_support_url = \
        rc_config.get('rhodecode_support_url') or h.route_url('rhodecode_support')

    context.visual.affected_files_cut_off = 60

    context.pre_code = rc_config.get('rhodecode_pre_code')
    context.post_code = rc_config.get('rhodecode_post_code')
    context.rhodecode_name = rc_config.get('rhodecode_title')
    context.default_encodings = aslist(config.get('default_encoding'), sep=',')
    # if we have specified default_encoding in the request, it has more
    # priority
    if request.GET.get('default_encoding'):
        context.default_encodings.insert(0, request.GET.get('default_encoding'))
    context.clone_uri_tmpl = rc_config.get('rhodecode_clone_uri_tmpl')
    context.clone_uri_id_tmpl = rc_config.get('rhodecode_clone_uri_id_tmpl')
    context.clone_uri_ssh_tmpl = rc_config.get('rhodecode_clone_uri_ssh_tmpl')

    # INI stored
    context.labs_active = str2bool(
        config.get('labs_settings_active', 'false'))
    context.ssh_enabled = str2bool(
        config.get('ssh.generate_authorized_keyfile', 'false'))
    context.ssh_key_generator_enabled = str2bool(
        config.get('ssh.enable_ui_key_generator', 'true'))

    context.visual.allow_repo_location_change = str2bool(
        config.get('allow_repo_location_change', True))
    context.visual.allow_custom_hooks_settings = str2bool(
        config.get('allow_custom_hooks_settings', True))
    context.debug_style = str2bool(config.get('debug_style', False))

    context.rhodecode_instanceid = config.get('instance_id')

    context.visual.cut_off_limit_diff = safe_int(
        config.get('cut_off_limit_diff'))
    context.visual.cut_off_limit_file = safe_int(
        config.get('cut_off_limit_file'))

    context.license = AttributeDict({})
    context.license.hide_license_info = str2bool(
        config.get('license.hide_license_info', False))

    # AppEnlight
    context.appenlight_enabled = config.get('appenlight', False)
    context.appenlight_api_public_key = config.get(
        'appenlight.api_public_key', '')
    context.appenlight_server_url = config.get('appenlight.server_url', '')

    # only accept the two known diffmode values from the query string
    diffmode = {
        "unified": "unified",
        "sideside": "sideside"
    }.get(request.GET.get('diffmode'))

    # NOTE(review): this overwrites an explicitly passed is_api value and
    # leaves the default None untouched — presumably `is_api is None` was
    # intended; confirm against callers before changing.
    if is_api is not None:
        is_api = hasattr(request, 'rpc_user')
    session_attrs = {
        # defaults
        "clone_url_format": "http",
        "diffmode": "sideside",
        "license_fingerprint": request.session.get('license_fingerprint')
    }

    if not is_api:
        # don't access pyramid session for API calls
        if diffmode and diffmode != request.session.get('rc_user_session_attr.diffmode'):
            request.session['rc_user_session_attr.diffmode'] = diffmode

        # session settings per user

        for k, v in request.session.items():
            pref = 'rc_user_session_attr.'
            if k and k.startswith(pref):
                # strip the session-key prefix before exposing to templates
                k = k[len(pref):]
                session_attrs[k] = v

    context.user_session_attrs = session_attrs

    # JS template context
    context.template_context = {
        'repo_name': None,
        'repo_type': None,
        'repo_landing_commit': None,
        'rhodecode_user': {
            'username': None,
            'email': None,
            'notification_status': False
        },
        'session_attrs': session_attrs,
        'visual': {
            'default_renderer': None
        },
        'commit_data': {
            'commit_id': None
        },
        'pull_request_data': {'pull_request_id': None},
        'timeago': {
            'refresh_time': 120 * 1000,
            'cutoff_limit': 1000 * 60 * 60 * 24 * 7
        },
        'pyramid_dispatch': {

        },
        'extra': {'plugins': {}}
    }
    # END CONFIG VARS
    if is_api:
        # API calls carry no CSRF token
        csrf_token = None
    else:
        csrf_token = auth.get_csrf_token(session=request.session)

    context.csrf_token = csrf_token
    context.backends = rhodecode.BACKENDS.keys()

    unread_count = 0
    user_bookmark_list = []
    if user_id:
        unread_count = NotificationModel().get_unread_cnt_for_user(user_id)
        user_bookmark_list = UserBookmark.get_bookmarks_for_user(user_id)
    context.unread_notifications = unread_count
    context.bookmark_items = user_bookmark_list

    # web case
    if hasattr(request, 'user'):
        context.auth_user = request.user
        context.rhodecode_user = request.user

    # api case
    if hasattr(request, 'rpc_user'):
        context.auth_user = request.rpc_user
        context.rhodecode_user = request.rpc_user

    # attach the whole call context to the request
    request.set_call_context(context)
463
462
464
463
465 def get_auth_user(request):
464 def get_auth_user(request):
466 environ = request.environ
465 environ = request.environ
467 session = request.session
466 session = request.session
468
467
469 ip_addr = get_ip_addr(environ)
468 ip_addr = get_ip_addr(environ)
470
469
471 # make sure that we update permissions each time we call controller
470 # make sure that we update permissions each time we call controller
472 _auth_token = (
471 _auth_token = (
473 # ?auth_token=XXX
472 # ?auth_token=XXX
474 request.GET.get('auth_token', '')
473 request.GET.get('auth_token', '')
475 # ?api_key=XXX !LEGACY
474 # ?api_key=XXX !LEGACY
476 or request.GET.get('api_key', '')
475 or request.GET.get('api_key', '')
477 # or headers....
476 # or headers....
478 or request.headers.get('X-Rc-Auth-Token', '')
477 or request.headers.get('X-Rc-Auth-Token', '')
479 )
478 )
480 if not _auth_token and request.matchdict:
479 if not _auth_token and request.matchdict:
481 url_auth_token = request.matchdict.get('_auth_token')
480 url_auth_token = request.matchdict.get('_auth_token')
482 _auth_token = url_auth_token
481 _auth_token = url_auth_token
483 if _auth_token:
482 if _auth_token:
484 log.debug('Using URL extracted auth token `...%s`', _auth_token[-4:])
483 log.debug('Using URL extracted auth token `...%s`', _auth_token[-4:])
485
484
486 if _auth_token:
485 if _auth_token:
487 # when using API_KEY we assume user exists, and
486 # when using API_KEY we assume user exists, and
488 # doesn't need auth based on cookies.
487 # doesn't need auth based on cookies.
489 auth_user = AuthUser(api_key=_auth_token, ip_addr=ip_addr)
488 auth_user = AuthUser(api_key=_auth_token, ip_addr=ip_addr)
490 authenticated = False
489 authenticated = False
491 else:
490 else:
492 cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
491 cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
493 try:
492 try:
494 auth_user = AuthUser(user_id=cookie_store.get('user_id', None),
493 auth_user = AuthUser(user_id=cookie_store.get('user_id', None),
495 ip_addr=ip_addr)
494 ip_addr=ip_addr)
496 except UserCreationError as e:
495 except UserCreationError as e:
497 h.flash(e, 'error')
496 h.flash(e, 'error')
498 # container auth or other auth functions that create users
497 # container auth or other auth functions that create users
499 # on the fly can throw this exception signaling that there's
498 # on the fly can throw this exception signaling that there's
500 # issue with user creation, explanation should be provided
499 # issue with user creation, explanation should be provided
501 # in Exception itself. We then create a simple blank
500 # in Exception itself. We then create a simple blank
502 # AuthUser
501 # AuthUser
503 auth_user = AuthUser(ip_addr=ip_addr)
502 auth_user = AuthUser(ip_addr=ip_addr)
504
503
505 # in case someone changes a password for user it triggers session
504 # in case someone changes a password for user it triggers session
506 # flush and forces a re-login
505 # flush and forces a re-login
507 if password_changed(auth_user, session):
506 if password_changed(auth_user, session):
508 session.invalidate()
507 session.invalidate()
509 cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
508 cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
510 auth_user = AuthUser(ip_addr=ip_addr)
509 auth_user = AuthUser(ip_addr=ip_addr)
511
510
512 authenticated = cookie_store.get('is_authenticated')
511 authenticated = cookie_store.get('is_authenticated')
513
512
514 if not auth_user.is_authenticated and auth_user.is_user_object:
513 if not auth_user.is_authenticated and auth_user.is_user_object:
515 # user is not authenticated and not empty
514 # user is not authenticated and not empty
516 auth_user.set_authenticated(authenticated)
515 auth_user.set_authenticated(authenticated)
517
516
518 return auth_user, _auth_token
517 return auth_user, _auth_token
519
518
520
519
521 def h_filter(s):
520 def h_filter(s):
522 """
521 """
523 Custom filter for Mako templates. Mako by standard uses `markupsafe.escape`
522 Custom filter for Mako templates. Mako by standard uses `markupsafe.escape`
524 we wrap this with additional functionality that converts None to empty
523 we wrap this with additional functionality that converts None to empty
525 strings
524 strings
526 """
525 """
527 if s is None:
526 if s is None:
528 return markupsafe.Markup()
527 return markupsafe.Markup()
529 return markupsafe.escape(s)
528 return markupsafe.escape(s)
530
529
531
530
532 def add_events_routes(config):
531 def add_events_routes(config):
533 """
532 """
534 Adds routing that can be used in events. Because some events are triggered
533 Adds routing that can be used in events. Because some events are triggered
535 outside of pyramid context, we need to bootstrap request with some
534 outside of pyramid context, we need to bootstrap request with some
536 routing registered
535 routing registered
537 """
536 """
538
537
539 from rhodecode.apps._base import ADMIN_PREFIX
538 from rhodecode.apps._base import ADMIN_PREFIX
540
539
541 config.add_route(name='home', pattern='/')
540 config.add_route(name='home', pattern='/')
542 config.add_route(name='main_page_repos_data', pattern='/_home_repos')
541 config.add_route(name='main_page_repos_data', pattern='/_home_repos')
543 config.add_route(name='main_page_repo_groups_data', pattern='/_home_repo_groups')
542 config.add_route(name='main_page_repo_groups_data', pattern='/_home_repo_groups')
544
543
545 config.add_route(name='login', pattern=ADMIN_PREFIX + '/login')
544 config.add_route(name='login', pattern=ADMIN_PREFIX + '/login')
546 config.add_route(name='logout', pattern=ADMIN_PREFIX + '/logout')
545 config.add_route(name='logout', pattern=ADMIN_PREFIX + '/logout')
547 config.add_route(name='repo_summary', pattern='/{repo_name}')
546 config.add_route(name='repo_summary', pattern='/{repo_name}')
548 config.add_route(name='repo_summary_explicit', pattern='/{repo_name}/summary')
547 config.add_route(name='repo_summary_explicit', pattern='/{repo_name}/summary')
549 config.add_route(name='repo_group_home', pattern='/{repo_group_name}')
548 config.add_route(name='repo_group_home', pattern='/{repo_group_name}')
550
549
551 config.add_route(name='pullrequest_show',
550 config.add_route(name='pullrequest_show',
552 pattern='/{repo_name}/pull-request/{pull_request_id}')
551 pattern='/{repo_name}/pull-request/{pull_request_id}')
553 config.add_route(name='pull_requests_global',
552 config.add_route(name='pull_requests_global',
554 pattern='/pull-request/{pull_request_id}')
553 pattern='/pull-request/{pull_request_id}')
555
554
556 config.add_route(name='repo_commit',
555 config.add_route(name='repo_commit',
557 pattern='/{repo_name}/changeset/{commit_id}')
556 pattern='/{repo_name}/changeset/{commit_id}')
558 config.add_route(name='repo_files',
557 config.add_route(name='repo_files',
559 pattern='/{repo_name}/files/{commit_id}/{f_path}')
558 pattern='/{repo_name}/files/{commit_id}/{f_path}')
560
559
561 config.add_route(name='hovercard_user',
560 config.add_route(name='hovercard_user',
562 pattern='/_hovercard/user/{user_id}')
561 pattern='/_hovercard/user/{user_id}')
563
562
564 config.add_route(name='hovercard_user_group',
563 config.add_route(name='hovercard_user_group',
565 pattern='/_hovercard/user_group/{user_group_id}')
564 pattern='/_hovercard/user_group/{user_group_id}')
566
565
567 config.add_route(name='hovercard_pull_request',
566 config.add_route(name='hovercard_pull_request',
568 pattern='/_hovercard/pull_request/{pull_request_id}')
567 pattern='/_hovercard/pull_request/{pull_request_id}')
569
568
570 config.add_route(name='hovercard_repo_commit',
569 config.add_route(name='hovercard_repo_commit',
571 pattern='/_hovercard/commit/{repo_name}/{commit_id}')
570 pattern='/_hovercard/commit/{repo_name}/{commit_id}')
572
571
573
572
574 def bootstrap_config(request, registry_name='RcTestRegistry'):
573 def bootstrap_config(request, registry_name='RcTestRegistry'):
575 import pyramid.testing
574 import pyramid.testing
576 registry = pyramid.testing.Registry(registry_name)
575 registry = pyramid.testing.Registry(registry_name)
577
576
578 config = pyramid.testing.setUp(registry=registry, request=request)
577 config = pyramid.testing.setUp(registry=registry, request=request)
579
578
580 # allow pyramid lookup in testing
579 # allow pyramid lookup in testing
581 config.include('pyramid_mako')
580 config.include('pyramid_mako')
582 config.include('rhodecode.lib.rc_beaker')
581 config.include('rhodecode.lib.rc_beaker')
583 config.include('rhodecode.lib.rc_cache')
582 config.include('rhodecode.lib.rc_cache')
584
583
585 add_events_routes(config)
584 add_events_routes(config)
586
585
587 return config
586 return config
588
587
589
588
590 def bootstrap_request(**kwargs):
589 def bootstrap_request(**kwargs):
591 """
590 """
592 Returns a thin version of Request Object that is used in non-web context like testing/celery
591 Returns a thin version of Request Object that is used in non-web context like testing/celery
593 """
592 """
594
593
595 import pyramid.testing
594 import pyramid.testing
596 from rhodecode.lib.request import ThinRequest as _ThinRequest
595 from rhodecode.lib.request import ThinRequest as _ThinRequest
597
596
598 class ThinRequest(_ThinRequest):
597 class ThinRequest(_ThinRequest):
599 application_url = kwargs.pop('application_url', 'http://example.com')
598 application_url = kwargs.pop('application_url', 'http://example.com')
600 host = kwargs.pop('host', 'example.com:80')
599 host = kwargs.pop('host', 'example.com:80')
601 domain = kwargs.pop('domain', 'example.com')
600 domain = kwargs.pop('domain', 'example.com')
602
601
603 class ThinSession(pyramid.testing.DummySession):
602 class ThinSession(pyramid.testing.DummySession):
604 def save(*arg, **kw):
603 def save(*arg, **kw):
605 pass
604 pass
606
605
607 request = ThinRequest(**kwargs)
606 request = ThinRequest(**kwargs)
608 request.session = ThinSession()
607 request.session = ThinSession()
609
608
610 return request
609 return request
611
610
@@ -1,249 +1,248 b''
1 # -*- coding: utf-8 -*-
2
1
3 # Copyright (C) 2010-2020 RhodeCode GmbH
2 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
3 #
5 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
8 #
7 #
9 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
11 # GNU General Public License for more details.
13 #
12 #
14 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
15 #
17 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
19
21 """caching_query.py
20 """caching_query.py
22
21
23 Represent functions and classes
22 Represent functions and classes
24 which allow the usage of Dogpile caching with SQLAlchemy.
23 which allow the usage of Dogpile caching with SQLAlchemy.
25 Introduces a query option called FromCache.
24 Introduces a query option called FromCache.
26
25
27 .. versionchanged:: 1.4 the caching approach has been altered to work
26 .. versionchanged:: 1.4 the caching approach has been altered to work
28 based on a session event.
27 based on a session event.
29
28
30
29
31 The three new concepts introduced here are:
30 The three new concepts introduced here are:
32
31
33 * ORMCache - an extension for an ORM :class:`.Session`
32 * ORMCache - an extension for an ORM :class:`.Session`
34 retrieves results in/from dogpile.cache.
33 retrieves results in/from dogpile.cache.
35 * FromCache - a query option that establishes caching
34 * FromCache - a query option that establishes caching
36 parameters on a Query
35 parameters on a Query
37 * RelationshipCache - a variant of FromCache which is specific
36 * RelationshipCache - a variant of FromCache which is specific
38 to a query invoked during a lazy load.
37 to a query invoked during a lazy load.
39
38
40 The rest of what's here are standard SQLAlchemy and
39 The rest of what's here are standard SQLAlchemy and
41 dogpile.cache constructs.
40 dogpile.cache constructs.
42
41
43 """
42 """
44 from dogpile.cache.api import NO_VALUE
43 from dogpile.cache.api import NO_VALUE
45
44
46 from sqlalchemy import event
45 from sqlalchemy import event
47 from sqlalchemy.orm import loading
46 from sqlalchemy.orm import loading
48 from sqlalchemy.orm.interfaces import UserDefinedOption
47 from sqlalchemy.orm.interfaces import UserDefinedOption
49
48
50
49
51 DEFAULT_REGION = "sql_cache_short"
50 DEFAULT_REGION = "sql_cache_short"
52
51
53
52
54 class ORMCache:
53 class ORMCache:
55
54
56 """An add-on for an ORM :class:`.Session` optionally loads full results
55 """An add-on for an ORM :class:`.Session` optionally loads full results
57 from a dogpile cache region.
56 from a dogpile cache region.
58
57
59 cache = ORMCache(regions={})
58 cache = ORMCache(regions={})
60 cache.listen_on_session(Session)
59 cache.listen_on_session(Session)
61
60
62 """
61 """
63
62
64 def __init__(self, regions):
63 def __init__(self, regions):
65 self.cache_regions = regions or self._get_region()
64 self.cache_regions = regions or self._get_region()
66 self._statement_cache = {}
65 self._statement_cache = {}
67
66
68 @classmethod
67 @classmethod
69 def _get_region(cls):
68 def _get_region(cls):
70 from rhodecode.lib.rc_cache import region_meta
69 from rhodecode.lib.rc_cache import region_meta
71 return region_meta.dogpile_cache_regions
70 return region_meta.dogpile_cache_regions
72
71
73 def listen_on_session(self, session_factory):
72 def listen_on_session(self, session_factory):
74 event.listen(session_factory, "do_orm_execute", self._do_orm_execute)
73 event.listen(session_factory, "do_orm_execute", self._do_orm_execute)
75
74
76 def _do_orm_execute(self, orm_context):
75 def _do_orm_execute(self, orm_context):
77
76
78 for opt in orm_context.user_defined_options:
77 for opt in orm_context.user_defined_options:
79 if isinstance(opt, RelationshipCache):
78 if isinstance(opt, RelationshipCache):
80 opt = opt._process_orm_context(orm_context)
79 opt = opt._process_orm_context(orm_context)
81 if opt is None:
80 if opt is None:
82 continue
81 continue
83
82
84 if isinstance(opt, FromCache):
83 if isinstance(opt, FromCache):
85 dogpile_region = self.cache_regions[opt.region]
84 dogpile_region = self.cache_regions[opt.region]
86
85
87 if opt.cache_key:
86 if opt.cache_key:
88 our_cache_key = f'SQL_CACHE_{opt.cache_key}'
87 our_cache_key = f'SQL_CACHE_{opt.cache_key}'
89 else:
88 else:
90 our_cache_key = opt._generate_cache_key(
89 our_cache_key = opt._generate_cache_key(
91 orm_context.statement, orm_context.parameters, self
90 orm_context.statement, orm_context.parameters, self
92 )
91 )
93
92
94 if opt.ignore_expiration:
93 if opt.ignore_expiration:
95 cached_value = dogpile_region.get(
94 cached_value = dogpile_region.get(
96 our_cache_key,
95 our_cache_key,
97 expiration_time=opt.expiration_time,
96 expiration_time=opt.expiration_time,
98 ignore_expiration=opt.ignore_expiration,
97 ignore_expiration=opt.ignore_expiration,
99 )
98 )
100 else:
99 else:
101
100
102 def createfunc():
101 def createfunc():
103 return orm_context.invoke_statement().freeze()
102 return orm_context.invoke_statement().freeze()
104
103
105 cached_value = dogpile_region.get_or_create(
104 cached_value = dogpile_region.get_or_create(
106 our_cache_key,
105 our_cache_key,
107 createfunc,
106 createfunc,
108 expiration_time=opt.expiration_time,
107 expiration_time=opt.expiration_time,
109 )
108 )
110
109
111 if cached_value is NO_VALUE:
110 if cached_value is NO_VALUE:
112 # keyerror? this is bigger than a keyerror...
111 # keyerror? this is bigger than a keyerror...
113 raise KeyError()
112 raise KeyError()
114
113
115 orm_result = loading.merge_frozen_result(
114 orm_result = loading.merge_frozen_result(
116 orm_context.session,
115 orm_context.session,
117 orm_context.statement,
116 orm_context.statement,
118 cached_value,
117 cached_value,
119 load=False,
118 load=False,
120 )
119 )
121 return orm_result()
120 return orm_result()
122
121
123 else:
122 else:
124 return None
123 return None
125
124
126 def invalidate(self, statement, parameters, opt):
125 def invalidate(self, statement, parameters, opt):
127 """Invalidate the cache value represented by a statement."""
126 """Invalidate the cache value represented by a statement."""
128
127
129 statement = statement.__clause_element__()
128 statement = statement.__clause_element__()
130
129
131 dogpile_region = self.cache_regions[opt.region]
130 dogpile_region = self.cache_regions[opt.region]
132
131
133 cache_key = opt._generate_cache_key(statement, parameters, self)
132 cache_key = opt._generate_cache_key(statement, parameters, self)
134
133
135 dogpile_region.delete(cache_key)
134 dogpile_region.delete(cache_key)
136
135
137
136
138 class FromCache(UserDefinedOption):
137 class FromCache(UserDefinedOption):
139 """Specifies that a Query should load results from a cache."""
138 """Specifies that a Query should load results from a cache."""
140
139
141 propagate_to_loaders = False
140 propagate_to_loaders = False
142
141
143 def __init__(
142 def __init__(
144 self,
143 self,
145 region=DEFAULT_REGION,
144 region=DEFAULT_REGION,
146 cache_key=None,
145 cache_key=None,
147 expiration_time=None,
146 expiration_time=None,
148 ignore_expiration=False,
147 ignore_expiration=False,
149 ):
148 ):
150 """Construct a new FromCache.
149 """Construct a new FromCache.
151
150
152 :param region: the cache region. Should be a
151 :param region: the cache region. Should be a
153 region configured in the dictionary of dogpile
152 region configured in the dictionary of dogpile
154 regions.
153 regions.
155
154
156 :param cache_key: optional. A string cache key
155 :param cache_key: optional. A string cache key
157 that will serve as the key to the query. Use this
156 that will serve as the key to the query. Use this
158 if your query has a huge amount of parameters (such
157 if your query has a huge amount of parameters (such
159 as when using in_()) which correspond more simply to
158 as when using in_()) which correspond more simply to
160 some other identifier.
159 some other identifier.
161
160
162 """
161 """
163 self.region = region
162 self.region = region
164 self.cache_key = cache_key
163 self.cache_key = cache_key
165 self.expiration_time = expiration_time
164 self.expiration_time = expiration_time
166 self.ignore_expiration = ignore_expiration
165 self.ignore_expiration = ignore_expiration
167
166
168 # this is not needed as of SQLAlchemy 1.4.28;
167 # this is not needed as of SQLAlchemy 1.4.28;
169 # UserDefinedOption classes no longer participate in the SQL
168 # UserDefinedOption classes no longer participate in the SQL
170 # compilation cache key
169 # compilation cache key
171 def _gen_cache_key(self, anon_map, bindparams):
170 def _gen_cache_key(self, anon_map, bindparams):
172 return None
171 return None
173
172
174 def _generate_cache_key(self, statement, parameters, orm_cache):
173 def _generate_cache_key(self, statement, parameters, orm_cache):
175 """generate a cache key with which to key the results of a statement.
174 """generate a cache key with which to key the results of a statement.
176
175
177 This leverages the use of the SQL compilation cache key which is
176 This leverages the use of the SQL compilation cache key which is
178 repurposed as a SQL results key.
177 repurposed as a SQL results key.
179
178
180 """
179 """
181 statement_cache_key = statement._generate_cache_key()
180 statement_cache_key = statement._generate_cache_key()
182
181
183 key = statement_cache_key.to_offline_string(
182 key = statement_cache_key.to_offline_string(
184 orm_cache._statement_cache, statement, parameters
183 orm_cache._statement_cache, statement, parameters
185 ) + repr(self.cache_key)
184 ) + repr(self.cache_key)
186 # print("here's our key...%s" % key)
185 # print("here's our key...%s" % key)
187 return key
186 return key
188
187
189
188
190 class RelationshipCache(FromCache):
189 class RelationshipCache(FromCache):
191 """Specifies that a Query as called within a "lazy load"
190 """Specifies that a Query as called within a "lazy load"
192 should load results from a cache."""
191 should load results from a cache."""
193
192
194 propagate_to_loaders = True
193 propagate_to_loaders = True
195
194
196 def __init__(
195 def __init__(
197 self,
196 self,
198 attribute,
197 attribute,
199 region=DEFAULT_REGION,
198 region=DEFAULT_REGION,
200 cache_key=None,
199 cache_key=None,
201 expiration_time=None,
200 expiration_time=None,
202 ignore_expiration=False,
201 ignore_expiration=False,
203 ):
202 ):
204 """Construct a new RelationshipCache.
203 """Construct a new RelationshipCache.
205
204
206 :param attribute: A Class.attribute which
205 :param attribute: A Class.attribute which
207 indicates a particular class relationship() whose
206 indicates a particular class relationship() whose
208 lazy loader should be pulled from the cache.
207 lazy loader should be pulled from the cache.
209
208
210 :param region: name of the cache region.
209 :param region: name of the cache region.
211
210
212 :param cache_key: optional. A string cache key
211 :param cache_key: optional. A string cache key
213 that will serve as the key to the query, bypassing
212 that will serve as the key to the query, bypassing
214 the usual means of forming a key from the Query itself.
213 the usual means of forming a key from the Query itself.
215
214
216 """
215 """
217 self.region = region
216 self.region = region
218 self.cache_key = cache_key
217 self.cache_key = cache_key
219 self.expiration_time = expiration_time
218 self.expiration_time = expiration_time
220 self.ignore_expiration = ignore_expiration
219 self.ignore_expiration = ignore_expiration
221 self._relationship_options = {
220 self._relationship_options = {
222 (attribute.property.parent.class_, attribute.property.key): self
221 (attribute.property.parent.class_, attribute.property.key): self
223 }
222 }
224
223
225 def _process_orm_context(self, orm_context):
224 def _process_orm_context(self, orm_context):
226 current_path = orm_context.loader_strategy_path
225 current_path = orm_context.loader_strategy_path
227
226
228 if current_path:
227 if current_path:
229 mapper, prop = current_path[-2:]
228 mapper, prop = current_path[-2:]
230 key = prop.key
229 key = prop.key
231
230
232 for cls in mapper.class_.__mro__:
231 for cls in mapper.class_.__mro__:
233 if (cls, key) in self._relationship_options:
232 if (cls, key) in self._relationship_options:
234 relationship_option = self._relationship_options[
233 relationship_option = self._relationship_options[
235 (cls, key)
234 (cls, key)
236 ]
235 ]
237 return relationship_option
236 return relationship_option
238
237
239 def and_(self, option):
238 def and_(self, option):
240 """Chain another RelationshipCache option to this one.
239 """Chain another RelationshipCache option to this one.
241
240
242 While many RelationshipCache objects can be specified on a single
241 While many RelationshipCache objects can be specified on a single
243 Query separately, chaining them together allows for a more efficient
242 Query separately, chaining them together allows for a more efficient
244 lookup during load.
243 lookup during load.
245
244
246 """
245 """
247 self._relationship_options.update(option._relationship_options)
246 self._relationship_options.update(option._relationship_options)
248 return self
247 return self
249
248
@@ -1,95 +1,94 b''
1 # -*- coding: utf-8 -*-
2
1
3 # Copyright (C) 2010-2020 RhodeCode GmbH
2 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
3 #
5 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
8 #
7 #
9 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
11 # GNU General Public License for more details.
13 #
12 #
14 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
15 #
17 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
19
21 import socket
20 import socket
22 import logging
21 import logging
23
22
24 import rhodecode
23 import rhodecode
25 from zope.cachedescriptors.property import Lazy as LazyProperty
24 from zope.cachedescriptors.property import Lazy as LazyProperty
26 from rhodecode.lib.celerylib.loader import (
25 from rhodecode.lib.celerylib.loader import (
27 celery_app, RequestContextTask, get_logger)
26 celery_app, RequestContextTask, get_logger)
28 from rhodecode.lib.statsd_client import StatsdClient
27 from rhodecode.lib.statsd_client import StatsdClient
29
28
30 async_task = celery_app.task
29 async_task = celery_app.task
31
30
32
31
33 log = logging.getLogger(__name__)
32 log = logging.getLogger(__name__)
34
33
35
34
36 class ResultWrapper(object):
35 class ResultWrapper(object):
37 def __init__(self, task):
36 def __init__(self, task):
38 self.task = task
37 self.task = task
39
38
40 @LazyProperty
39 @LazyProperty
41 def result(self):
40 def result(self):
42 return self.task
41 return self.task
43
42
44
43
45 def run_task(task, *args, **kwargs):
44 def run_task(task, *args, **kwargs):
46 import celery
45 import celery
47 log.debug('Got task `%s` for execution, celery mode enabled:%s', task, rhodecode.CELERY_ENABLED)
46 log.debug('Got task `%s` for execution, celery mode enabled:%s', task, rhodecode.CELERY_ENABLED)
48 if task is None:
47 if task is None:
49 raise ValueError('Got non-existing task for execution')
48 raise ValueError('Got non-existing task for execution')
50
49
51 exec_mode = 'sync'
50 exec_mode = 'sync'
52 allow_async = True
51 allow_async = True
53
52
54 # if we're already in a celery task, don't allow async execution again
53 # if we're already in a celery task, don't allow async execution again
55 # e.g task within task
54 # e.g task within task
56 in_task = celery.current_task
55 in_task = celery.current_task
57 if in_task:
56 if in_task:
58 log.debug('This task in in context of another task: %s, not allowing another async execution', in_task)
57 log.debug('This task in in context of another task: %s, not allowing another async execution', in_task)
59 allow_async = False
58 allow_async = False
60 if kwargs.pop('allow_subtask', False):
59 if kwargs.pop('allow_subtask', False):
61 log.debug('Forced async by allow_async=True flag')
60 log.debug('Forced async by allow_async=True flag')
62 allow_async = True
61 allow_async = True
63
62
64 t = None
63 t = None
65 if rhodecode.CELERY_ENABLED and allow_async:
64 if rhodecode.CELERY_ENABLED and allow_async:
66
65
67 try:
66 try:
68 t = task.apply_async(args=args, kwargs=kwargs)
67 t = task.apply_async(args=args, kwargs=kwargs)
69 log.debug('executing task %s:%s in async mode', t.task_id, task)
68 log.debug('executing task %s:%s in async mode', t.task_id, task)
70 except socket.error as e:
69 except socket.error as e:
71 if isinstance(e, IOError) and e.errno == 111:
70 if isinstance(e, IOError) and e.errno == 111:
72 log.error('Unable to connect to celeryd `%s`. Sync execution', e)
71 log.error('Unable to connect to celeryd `%s`. Sync execution', e)
73 else:
72 else:
74 log.exception("Exception while connecting to celeryd.")
73 log.exception("Exception while connecting to celeryd.")
75 except KeyError as e:
74 except KeyError as e:
76 log.error('Unable to connect to celeryd `%s`. Sync execution', e)
75 log.error('Unable to connect to celeryd `%s`. Sync execution', e)
77 except Exception as e:
76 except Exception as e:
78 log.exception(
77 log.exception(
79 "Exception while trying to run task asynchronous. "
78 "Exception while trying to run task asynchronous. "
80 "Fallback to sync execution.")
79 "Fallback to sync execution.")
81
80
82 else:
81 else:
83 log.debug('executing task %s:%s in sync mode', 'TASK', task)
82 log.debug('executing task %s:%s in sync mode', 'TASK', task)
84 statsd = StatsdClient.statsd
83 statsd = StatsdClient.statsd
85 if statsd:
84 if statsd:
86 task_repr = getattr(task, 'name', task)
85 task_repr = getattr(task, 'name', task)
87 statsd.incr('rhodecode_celery_task_total', tags=[
86 statsd.incr('rhodecode_celery_task_total', tags=[
88 'task:{}'.format(task_repr),
87 'task:{}'.format(task_repr),
89 'mode:sync'
88 'mode:sync'
90 ])
89 ])
91
90
92 # we got async task, return it after statsd call
91 # we got async task, return it after statsd call
93 if t:
92 if t:
94 return t
93 return t
95 return ResultWrapper(task(*args, **kwargs))
94 return ResultWrapper(task(*args, **kwargs))
@@ -1,356 +1,355 b''
1 # -*- coding: utf-8 -*-
2
1
3 # Copyright (C) 2010-2020 RhodeCode GmbH
2 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
3 #
5 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
8 #
7 #
9 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
11 # GNU General Public License for more details.
13 #
12 #
14 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
15 #
17 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 """
19 """
21 Celery loader, run with::
20 Celery loader, run with::
22
21
23 celery worker \
22 celery worker \
24 --task-events \
23 --task-events \
25 --beat \
24 --beat \
26 --autoscale=20,2 \
25 --autoscale=20,2 \
27 --max-tasks-per-child 1 \
26 --max-tasks-per-child 1 \
28 --app rhodecode.lib.celerylib.loader \
27 --app rhodecode.lib.celerylib.loader \
29 --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler \
28 --scheduler rhodecode.lib.celerylib.scheduler.RcScheduler \
30 --loglevel DEBUG --ini=.dev/dev.ini
29 --loglevel DEBUG --ini=.dev/dev.ini
31 """
30 """
32 import os
31 import os
33 import logging
32 import logging
34 import importlib
33 import importlib
35
34
36 from celery import Celery
35 from celery import Celery
37 from celery import signals
36 from celery import signals
38 from celery import Task
37 from celery import Task
39 from celery import exceptions # pragma: no cover
38 from celery import exceptions # pragma: no cover
40 from kombu.serialization import register
39 from kombu.serialization import register
41
40
42 import rhodecode
41 import rhodecode
43
42
44 from rhodecode.lib.statsd_client import StatsdClient
43 from rhodecode.lib.statsd_client import StatsdClient
45 from rhodecode.lib.celerylib.utils import parse_ini_vars, ping_db
44 from rhodecode.lib.celerylib.utils import parse_ini_vars, ping_db
46 from rhodecode.lib.ext_json import json
45 from rhodecode.lib.ext_json import json
47 from rhodecode.lib.pyramid_utils import bootstrap, setup_logging
46 from rhodecode.lib.pyramid_utils import bootstrap, setup_logging
48 from rhodecode.lib.utils2 import str2bool
47 from rhodecode.lib.utils2 import str2bool
49 from rhodecode.model import meta
48 from rhodecode.model import meta
50
49
51
50
52 register('json_ext', json.dumps, json.loads,
51 register('json_ext', json.dumps, json.loads,
53 content_type='application/x-json-ext',
52 content_type='application/x-json-ext',
54 content_encoding='utf-8')
53 content_encoding='utf-8')
55
54
56 log = logging.getLogger('celery.rhodecode.loader')
55 log = logging.getLogger('celery.rhodecode.loader')
57
56
58
57
59 imports = ['rhodecode.lib.celerylib.tasks']
58 imports = ['rhodecode.lib.celerylib.tasks']
60
59
61 try:
60 try:
62 # try if we have EE tasks available
61 # try if we have EE tasks available
63 importlib.import_module('rc_ee')
62 importlib.import_module('rc_ee')
64 imports.append('rc_ee.lib.celerylib.tasks')
63 imports.append('rc_ee.lib.celerylib.tasks')
65 except ImportError:
64 except ImportError:
66 pass
65 pass
67
66
68
67
69 base_celery_config = {
68 base_celery_config = {
70 'result_backend': 'rpc://',
69 'result_backend': 'rpc://',
71 'result_expires': 60 * 60 * 24,
70 'result_expires': 60 * 60 * 24,
72 'result_persistent': True,
71 'result_persistent': True,
73 'imports': imports,
72 'imports': imports,
74 'worker_max_tasks_per_child': 20,
73 'worker_max_tasks_per_child': 20,
75 'accept_content': ['json_ext', 'json'],
74 'accept_content': ['json_ext', 'json'],
76 'task_serializer': 'json_ext',
75 'task_serializer': 'json_ext',
77 'result_serializer': 'json_ext',
76 'result_serializer': 'json_ext',
78 'worker_hijack_root_logger': False,
77 'worker_hijack_root_logger': False,
79 'database_table_names': {
78 'database_table_names': {
80 'task': 'beat_taskmeta',
79 'task': 'beat_taskmeta',
81 'group': 'beat_groupmeta',
80 'group': 'beat_groupmeta',
82 }
81 }
83 }
82 }
84
83
85
84
86 def add_preload_arguments(parser):
85 def add_preload_arguments(parser):
87 parser.add_argument(
86 parser.add_argument(
88 '--ini', default=None,
87 '--ini', default=None,
89 help='Path to ini configuration file.'
88 help='Path to ini configuration file.'
90 )
89 )
91 parser.add_argument(
90 parser.add_argument(
92 '--ini-var', default=None,
91 '--ini-var', default=None,
93 help='Comma separated list of key=value to pass to ini.'
92 help='Comma separated list of key=value to pass to ini.'
94 )
93 )
95
94
96
95
97 def get_logger(obj):
96 def get_logger(obj):
98 custom_log = logging.getLogger(
97 custom_log = logging.getLogger(
99 'rhodecode.task.{}'.format(obj.__class__.__name__))
98 'rhodecode.task.{}'.format(obj.__class__.__name__))
100
99
101 if rhodecode.CELERY_ENABLED:
100 if rhodecode.CELERY_ENABLED:
102 try:
101 try:
103 custom_log = obj.get_logger()
102 custom_log = obj.get_logger()
104 except Exception:
103 except Exception:
105 pass
104 pass
106
105
107 return custom_log
106 return custom_log
108
107
109
108
110 # init main celery app
109 # init main celery app
111 celery_app = Celery()
110 celery_app = Celery()
112 celery_app.user_options['preload'].add(add_preload_arguments)
111 celery_app.user_options['preload'].add(add_preload_arguments)
113
112
114
113
115 @signals.setup_logging.connect
114 @signals.setup_logging.connect
116 def setup_logging_callback(**kwargs):
115 def setup_logging_callback(**kwargs):
117 ini_file = celery_app.conf['RC_INI_FILE']
116 ini_file = celery_app.conf['RC_INI_FILE']
118 setup_logging(ini_file)
117 setup_logging(ini_file)
119
118
120
119
121 @signals.user_preload_options.connect
120 @signals.user_preload_options.connect
122 def on_preload_parsed(options, **kwargs):
121 def on_preload_parsed(options, **kwargs):
123
122
124 ini_file = options['ini']
123 ini_file = options['ini']
125 ini_vars = options['ini_var']
124 ini_vars = options['ini_var']
126
125
127 if ini_file is None:
126 if ini_file is None:
128 print('You must provide the --ini argument to start celery')
127 print('You must provide the --ini argument to start celery')
129 exit(-1)
128 exit(-1)
130
129
131 options = None
130 options = None
132 if ini_vars is not None:
131 if ini_vars is not None:
133 options = parse_ini_vars(ini_vars)
132 options = parse_ini_vars(ini_vars)
134
133
135 celery_app.conf['RC_INI_FILE'] = ini_file
134 celery_app.conf['RC_INI_FILE'] = ini_file
136 celery_app.conf['RC_INI_OPTIONS'] = options
135 celery_app.conf['RC_INI_OPTIONS'] = options
137 setup_logging(ini_file)
136 setup_logging(ini_file)
138
137
139
138
140 def _init_celery(app_type=''):
139 def _init_celery(app_type=''):
141 from rhodecode.config.middleware import get_celery_config
140 from rhodecode.config.middleware import get_celery_config
142
141
143 log.debug('Bootstrapping RhodeCode application for %s...', app_type)
142 log.debug('Bootstrapping RhodeCode application for %s...', app_type)
144
143
145 ini_file = celery_app.conf['RC_INI_FILE']
144 ini_file = celery_app.conf['RC_INI_FILE']
146 options = celery_app.conf['RC_INI_OPTIONS']
145 options = celery_app.conf['RC_INI_OPTIONS']
147
146
148 env = None
147 env = None
149 try:
148 try:
150 env = bootstrap(ini_file, options=options)
149 env = bootstrap(ini_file, options=options)
151 except Exception:
150 except Exception:
152 log.exception('Failed to bootstrap RhodeCode APP')
151 log.exception('Failed to bootstrap RhodeCode APP')
153
152
154 if not env:
153 if not env:
155 raise EnvironmentError(
154 raise EnvironmentError(
156 'Failed to load pyramid ENV. '
155 'Failed to load pyramid ENV. '
157 'Probably there is another error present that prevents from running pyramid app')
156 'Probably there is another error present that prevents from running pyramid app')
158
157
159 log.debug('Got Pyramid ENV: %s', env)
158 log.debug('Got Pyramid ENV: %s', env)
160
159
161 celery_settings = get_celery_config(env['registry'].settings)
160 celery_settings = get_celery_config(env['registry'].settings)
162
161
163 setup_celery_app(
162 setup_celery_app(
164 app=env['app'], root=env['root'], request=env['request'],
163 app=env['app'], root=env['root'], request=env['request'],
165 registry=env['registry'], closer=env['closer'],
164 registry=env['registry'], closer=env['closer'],
166 celery_settings=celery_settings)
165 celery_settings=celery_settings)
167
166
168
167
169 @signals.celeryd_init.connect
168 @signals.celeryd_init.connect
170 def on_celeryd_init(sender=None, conf=None, **kwargs):
169 def on_celeryd_init(sender=None, conf=None, **kwargs):
171 _init_celery('celery worker')
170 _init_celery('celery worker')
172
171
173 # fix the global flag even if it's disabled via .ini file because this
172 # fix the global flag even if it's disabled via .ini file because this
174 # is a worker code that doesn't need this to be disabled.
173 # is a worker code that doesn't need this to be disabled.
175 rhodecode.CELERY_ENABLED = True
174 rhodecode.CELERY_ENABLED = True
176
175
177
176
178 @signals.beat_init.connect
177 @signals.beat_init.connect
179 def on_beat_init(sender=None, conf=None, **kwargs):
178 def on_beat_init(sender=None, conf=None, **kwargs):
180 _init_celery('celery beat')
179 _init_celery('celery beat')
181
180
182
181
183 @signals.task_prerun.connect
182 @signals.task_prerun.connect
184 def task_prerun_signal(task_id, task, args, **kwargs):
183 def task_prerun_signal(task_id, task, args, **kwargs):
185 ping_db()
184 ping_db()
186 statsd = StatsdClient.statsd
185 statsd = StatsdClient.statsd
187 if statsd:
186 if statsd:
188 task_repr = getattr(task, 'name', task)
187 task_repr = getattr(task, 'name', task)
189 statsd.incr('rhodecode_celery_task_total', tags=[
188 statsd.incr('rhodecode_celery_task_total', tags=[
190 'task:{}'.format(task_repr),
189 'task:{}'.format(task_repr),
191 'mode:async'
190 'mode:async'
192 ])
191 ])
193
192
194
193
195 @signals.task_success.connect
194 @signals.task_success.connect
196 def task_success_signal(result, **kwargs):
195 def task_success_signal(result, **kwargs):
197 meta.Session.commit()
196 meta.Session.commit()
198 closer = celery_app.conf['PYRAMID_CLOSER']
197 closer = celery_app.conf['PYRAMID_CLOSER']
199 if closer:
198 if closer:
200 closer()
199 closer()
201
200
202
201
203
202
204 @signals.task_retry.connect
203 @signals.task_retry.connect
205 def task_retry_signal(
204 def task_retry_signal(
206 request, reason, einfo, **kwargs):
205 request, reason, einfo, **kwargs):
207 meta.Session.remove()
206 meta.Session.remove()
208 closer = celery_app.conf['PYRAMID_CLOSER']
207 closer = celery_app.conf['PYRAMID_CLOSER']
209 if closer:
208 if closer:
210 closer()
209 closer()
211
210
212
211
213 @signals.task_failure.connect
212 @signals.task_failure.connect
214 def task_failure_signal(
213 def task_failure_signal(
215 task_id, exception, args, kwargs, traceback, einfo, **kargs):
214 task_id, exception, args, kwargs, traceback, einfo, **kargs):
216
215
217 log.error('Task: %s failed !! exc_info: %s', task_id, einfo)
216 log.error('Task: %s failed !! exc_info: %s', task_id, einfo)
218 from rhodecode.lib.exc_tracking import store_exception
217 from rhodecode.lib.exc_tracking import store_exception
219 from rhodecode.lib.statsd_client import StatsdClient
218 from rhodecode.lib.statsd_client import StatsdClient
220
219
221 meta.Session.remove()
220 meta.Session.remove()
222
221
223 # simulate sys.exc_info()
222 # simulate sys.exc_info()
224 exc_info = (einfo.type, einfo.exception, einfo.tb)
223 exc_info = (einfo.type, einfo.exception, einfo.tb)
225 store_exception(id(exc_info), exc_info, prefix='rhodecode-celery')
224 store_exception(id(exc_info), exc_info, prefix='rhodecode-celery')
226 statsd = StatsdClient.statsd
225 statsd = StatsdClient.statsd
227 if statsd:
226 if statsd:
228 exc_type = "{}.{}".format(einfo.__class__.__module__, einfo.__class__.__name__)
227 exc_type = "{}.{}".format(einfo.__class__.__module__, einfo.__class__.__name__)
229 statsd.incr('rhodecode_exception_total',
228 statsd.incr('rhodecode_exception_total',
230 tags=["exc_source:celery", "type:{}".format(exc_type)])
229 tags=["exc_source:celery", "type:{}".format(exc_type)])
231
230
232 closer = celery_app.conf['PYRAMID_CLOSER']
231 closer = celery_app.conf['PYRAMID_CLOSER']
233 if closer:
232 if closer:
234 closer()
233 closer()
235
234
236
235
237 @signals.task_revoked.connect
236 @signals.task_revoked.connect
238 def task_revoked_signal(
237 def task_revoked_signal(
239 request, terminated, signum, expired, **kwargs):
238 request, terminated, signum, expired, **kwargs):
240 closer = celery_app.conf['PYRAMID_CLOSER']
239 closer = celery_app.conf['PYRAMID_CLOSER']
241 if closer:
240 if closer:
242 closer()
241 closer()
243
242
244
243
245 class UNSET(object):
244 class UNSET(object):
246 pass
245 pass
247
246
248
247
249 _unset = UNSET()
248 _unset = UNSET()
250
249
251
250
252 def set_celery_conf(app=_unset, root=_unset, request=_unset, registry=_unset, closer=_unset):
251 def set_celery_conf(app=_unset, root=_unset, request=_unset, registry=_unset, closer=_unset):
253
252
254 if request is not UNSET:
253 if request is not UNSET:
255 celery_app.conf.update({'PYRAMID_REQUEST': request})
254 celery_app.conf.update({'PYRAMID_REQUEST': request})
256
255
257 if registry is not UNSET:
256 if registry is not UNSET:
258 celery_app.conf.update({'PYRAMID_REGISTRY': registry})
257 celery_app.conf.update({'PYRAMID_REGISTRY': registry})
259
258
260
259
261 def setup_celery_app(app, root, request, registry, closer, celery_settings):
260 def setup_celery_app(app, root, request, registry, closer, celery_settings):
262 log.debug('Got custom celery conf: %s', celery_settings)
261 log.debug('Got custom celery conf: %s', celery_settings)
263 celery_config = base_celery_config
262 celery_config = base_celery_config
264 celery_config.update({
263 celery_config.update({
265 # store celerybeat scheduler db where the .ini file is
264 # store celerybeat scheduler db where the .ini file is
266 'beat_schedule_filename': registry.settings['celerybeat-schedule.path'],
265 'beat_schedule_filename': registry.settings['celerybeat-schedule.path'],
267 })
266 })
268
267
269 celery_config.update(celery_settings)
268 celery_config.update(celery_settings)
270 celery_app.config_from_object(celery_config)
269 celery_app.config_from_object(celery_config)
271
270
272 celery_app.conf.update({'PYRAMID_APP': app})
271 celery_app.conf.update({'PYRAMID_APP': app})
273 celery_app.conf.update({'PYRAMID_ROOT': root})
272 celery_app.conf.update({'PYRAMID_ROOT': root})
274 celery_app.conf.update({'PYRAMID_REQUEST': request})
273 celery_app.conf.update({'PYRAMID_REQUEST': request})
275 celery_app.conf.update({'PYRAMID_REGISTRY': registry})
274 celery_app.conf.update({'PYRAMID_REGISTRY': registry})
276 celery_app.conf.update({'PYRAMID_CLOSER': closer})
275 celery_app.conf.update({'PYRAMID_CLOSER': closer})
277
276
278
277
279 def configure_celery(config, celery_settings):
278 def configure_celery(config, celery_settings):
280 """
279 """
281 Helper that is called from our application creation logic. It gives
280 Helper that is called from our application creation logic. It gives
282 connection info into running webapp and allows execution of tasks from
281 connection info into running webapp and allows execution of tasks from
283 RhodeCode itself
282 RhodeCode itself
284 """
283 """
285 # store some globals into rhodecode
284 # store some globals into rhodecode
286 rhodecode.CELERY_ENABLED = str2bool(
285 rhodecode.CELERY_ENABLED = str2bool(
287 config.registry.settings.get('use_celery'))
286 config.registry.settings.get('use_celery'))
288 if rhodecode.CELERY_ENABLED:
287 if rhodecode.CELERY_ENABLED:
289 log.info('Configuring celery based on `%s` settings', celery_settings)
288 log.info('Configuring celery based on `%s` settings', celery_settings)
290 setup_celery_app(
289 setup_celery_app(
291 app=None, root=None, request=None, registry=config.registry,
290 app=None, root=None, request=None, registry=config.registry,
292 closer=None, celery_settings=celery_settings)
291 closer=None, celery_settings=celery_settings)
293
292
294
293
295 def maybe_prepare_env(req):
294 def maybe_prepare_env(req):
296 environ = {}
295 environ = {}
297 try:
296 try:
298 environ.update({
297 environ.update({
299 'PATH_INFO': req.environ['PATH_INFO'],
298 'PATH_INFO': req.environ['PATH_INFO'],
300 'SCRIPT_NAME': req.environ['SCRIPT_NAME'],
299 'SCRIPT_NAME': req.environ['SCRIPT_NAME'],
301 'HTTP_HOST': req.environ.get('HTTP_HOST', req.environ['SERVER_NAME']),
300 'HTTP_HOST': req.environ.get('HTTP_HOST', req.environ['SERVER_NAME']),
302 'SERVER_NAME': req.environ['SERVER_NAME'],
301 'SERVER_NAME': req.environ['SERVER_NAME'],
303 'SERVER_PORT': req.environ['SERVER_PORT'],
302 'SERVER_PORT': req.environ['SERVER_PORT'],
304 'wsgi.url_scheme': req.environ['wsgi.url_scheme'],
303 'wsgi.url_scheme': req.environ['wsgi.url_scheme'],
305 })
304 })
306 except Exception:
305 except Exception:
307 pass
306 pass
308
307
309 return environ
308 return environ
310
309
311
310
312 class RequestContextTask(Task):
311 class RequestContextTask(Task):
313 """
312 """
314 This is a celery task which will create a rhodecode app instance context
313 This is a celery task which will create a rhodecode app instance context
315 for the task, patch pyramid with the original request
314 for the task, patch pyramid with the original request
316 that created the task and also add the user to the context.
315 that created the task and also add the user to the context.
317 """
316 """
318
317
319 def apply_async(self, args=None, kwargs=None, task_id=None, producer=None,
318 def apply_async(self, args=None, kwargs=None, task_id=None, producer=None,
320 link=None, link_error=None, shadow=None, **options):
319 link=None, link_error=None, shadow=None, **options):
321 """ queue the job to run (we are in web request context here) """
320 """ queue the job to run (we are in web request context here) """
322 from rhodecode.lib.base import get_ip_addr
321 from rhodecode.lib.base import get_ip_addr
323
322
324 req = self.app.conf['PYRAMID_REQUEST']
323 req = self.app.conf['PYRAMID_REQUEST']
325 if not req:
324 if not req:
326 raise ValueError('celery_app.conf is having empty PYRAMID_REQUEST key')
325 raise ValueError('celery_app.conf is having empty PYRAMID_REQUEST key')
327
326
328 log.debug('Running Task with class: %s. Request Class: %s',
327 log.debug('Running Task with class: %s. Request Class: %s',
329 self.__class__, req.__class__)
328 self.__class__, req.__class__)
330
329
331 user_id = 0
330 user_id = 0
332
331
333 # web case
332 # web case
334 if hasattr(req, 'user'):
333 if hasattr(req, 'user'):
335 user_id = req.user.user_id
334 user_id = req.user.user_id
336
335
337 # api case
336 # api case
338 elif hasattr(req, 'rpc_user'):
337 elif hasattr(req, 'rpc_user'):
339 user_id = req.rpc_user.user_id
338 user_id = req.rpc_user.user_id
340
339
341 # we hook into kwargs since it is the only way to pass our data to
340 # we hook into kwargs since it is the only way to pass our data to
342 # the celery worker
341 # the celery worker
343 environ = maybe_prepare_env(req)
342 environ = maybe_prepare_env(req)
344 options['headers'] = options.get('headers', {})
343 options['headers'] = options.get('headers', {})
345 options['headers'].update({
344 options['headers'].update({
346 'rhodecode_proxy_data': {
345 'rhodecode_proxy_data': {
347 'environ': environ,
346 'environ': environ,
348 'auth_user': {
347 'auth_user': {
349 'ip_addr': get_ip_addr(req.environ),
348 'ip_addr': get_ip_addr(req.environ),
350 'user_id': user_id
349 'user_id': user_id
351 },
350 },
352 }
351 }
353 })
352 })
354
353
355 return super(RequestContextTask, self).apply_async(
354 return super(RequestContextTask, self).apply_async(
356 args, kwargs, task_id, producer, link, link_error, shadow, **options)
355 args, kwargs, task_id, producer, link, link_error, shadow, **options)
@@ -1,60 +1,59 b''
1 # -*- coding: utf-8 -*-
2
1
3 # Copyright (C) 2010-2020 RhodeCode GmbH
2 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
3 #
5 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
8 #
7 #
9 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
11 # GNU General Public License for more details.
13 #
12 #
14 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
15 #
17 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 import logging
19 import logging
21 import importlib
20 import importlib
22
21
23 from celery.beat import (
22 from celery.beat import (
24 PersistentScheduler, ScheduleEntry as CeleryScheduleEntry)
23 PersistentScheduler, ScheduleEntry as CeleryScheduleEntry)
25
24
26 log = logging.getLogger(__name__)
25 log = logging.getLogger(__name__)
27
26
28
27
29 class FileScheduleEntry(CeleryScheduleEntry):
28 class FileScheduleEntry(CeleryScheduleEntry):
30 def __init__(self, name=None, task=None, last_run_at=None,
29 def __init__(self, name=None, task=None, last_run_at=None,
31 total_run_count=None, schedule=None, args=(), kwargs=None,
30 total_run_count=None, schedule=None, args=(), kwargs=None,
32 options=None, relative=False, app=None, **_kwargs):
31 options=None, relative=False, app=None, **_kwargs):
33 kwargs = kwargs or {}
32 kwargs = kwargs or {}
34 options = options or {}
33 options = options or {}
35
34
36 # because our custom loader passes in some variables that the original
35 # because our custom loader passes in some variables that the original
37 # function doesn't expect, we have this thin wrapper
36 # function doesn't expect, we have this thin wrapper
38
37
39 super(FileScheduleEntry, self).__init__(
38 super(FileScheduleEntry, self).__init__(
40 name=name, task=task, last_run_at=last_run_at,
39 name=name, task=task, last_run_at=last_run_at,
41 total_run_count=total_run_count, schedule=schedule, args=args,
40 total_run_count=total_run_count, schedule=schedule, args=args,
42 kwargs=kwargs, options=options, relative=relative, app=app)
41 kwargs=kwargs, options=options, relative=relative, app=app)
43
42
44
43
45 class FileScheduler(PersistentScheduler):
44 class FileScheduler(PersistentScheduler):
46 """CE base scheduler"""
45 """CE base scheduler"""
47 Entry = FileScheduleEntry
46 Entry = FileScheduleEntry
48
47
49 def setup_schedule(self):
48 def setup_schedule(self):
50 log.info("setup_schedule called")
49 log.info("setup_schedule called")
51 super(FileScheduler, self).setup_schedule()
50 super(FileScheduler, self).setup_schedule()
52
51
53
52
54 try:
53 try:
55 # try if we have EE scheduler available
54 # try if we have EE scheduler available
56 module = importlib.import_module('rc_ee.lib.celerylib.scheduler')
55 module = importlib.import_module('rc_ee.lib.celerylib.scheduler')
57 RcScheduler = module.RcScheduler
56 RcScheduler = module.RcScheduler
58 except ImportError:
57 except ImportError:
59 # fallback to CE scheduler
58 # fallback to CE scheduler
60 RcScheduler = FileScheduler
59 RcScheduler = FileScheduler
@@ -1,414 +1,414 b''
1 # -*- coding: utf-8 -*-
1
2
2
3 # Copyright (C) 2012-2020 RhodeCode GmbH
3 # Copyright (C) 2012-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 RhodeCode task modules, containing all task that suppose to be run
22 RhodeCode task modules, containing all task that suppose to be run
23 by celery daemon
23 by celery daemon
24 """
24 """
25
25
26 import os
26 import os
27 import time
27 import time
28
28
29 from pyramid_mailer.mailer import Mailer
29 from pyramid_mailer.mailer import Mailer
30 from pyramid_mailer.message import Message
30 from pyramid_mailer.message import Message
31 from email.utils import formatdate
31 from email.utils import formatdate
32
32
33 import rhodecode
33 import rhodecode
34 from rhodecode.lib import audit_logger
34 from rhodecode.lib import audit_logger
35 from rhodecode.lib.celerylib import get_logger, async_task, RequestContextTask, run_task
35 from rhodecode.lib.celerylib import get_logger, async_task, RequestContextTask, run_task
36 from rhodecode.lib import hooks_base
36 from rhodecode.lib import hooks_base
37 from rhodecode.lib.utils2 import safe_int, str2bool, aslist
37 from rhodecode.lib.utils2 import safe_int, str2bool, aslist
38 from rhodecode.lib.statsd_client import StatsdClient
38 from rhodecode.lib.statsd_client import StatsdClient
39 from rhodecode.model.db import (
39 from rhodecode.model.db import (
40 Session, IntegrityError, true, Repository, RepoGroup, User)
40 Session, IntegrityError, true, Repository, RepoGroup, User)
41 from rhodecode.model.permission import PermissionModel
41 from rhodecode.model.permission import PermissionModel
42
42
43
43
44 @async_task(ignore_result=True, base=RequestContextTask)
44 @async_task(ignore_result=True, base=RequestContextTask)
45 def send_email(recipients, subject, body='', html_body='', email_config=None,
45 def send_email(recipients, subject, body='', html_body='', email_config=None,
46 extra_headers=None):
46 extra_headers=None):
47 """
47 """
48 Sends an email with defined parameters from the .ini files.
48 Sends an email with defined parameters from the .ini files.
49
49
50 :param recipients: list of recipients, it this is empty the defined email
50 :param recipients: list of recipients, it this is empty the defined email
51 address from field 'email_to' is used instead
51 address from field 'email_to' is used instead
52 :param subject: subject of the mail
52 :param subject: subject of the mail
53 :param body: body of the mail
53 :param body: body of the mail
54 :param html_body: html version of body
54 :param html_body: html version of body
55 :param email_config: specify custom configuration for mailer
55 :param email_config: specify custom configuration for mailer
56 :param extra_headers: specify custom headers
56 :param extra_headers: specify custom headers
57 """
57 """
58 log = get_logger(send_email)
58 log = get_logger(send_email)
59
59
60 email_config = email_config or rhodecode.CONFIG
60 email_config = email_config or rhodecode.CONFIG
61
61
62 mail_server = email_config.get('smtp_server') or None
62 mail_server = email_config.get('smtp_server') or None
63 if mail_server is None:
63 if mail_server is None:
64 log.error("SMTP server information missing. Sending email failed. "
64 log.error("SMTP server information missing. Sending email failed. "
65 "Make sure that `smtp_server` variable is configured "
65 "Make sure that `smtp_server` variable is configured "
66 "inside the .ini file")
66 "inside the .ini file")
67 return False
67 return False
68
68
69 subject = "%s %s" % (email_config.get('email_prefix', ''), subject)
69 subject = "%s %s" % (email_config.get('email_prefix', ''), subject)
70
70
71 if recipients:
71 if recipients:
72 if isinstance(recipients, str):
72 if isinstance(recipients, str):
73 recipients = recipients.split(',')
73 recipients = recipients.split(',')
74 else:
74 else:
75 # if recipients are not defined we send to email_config + all admins
75 # if recipients are not defined we send to email_config + all admins
76 admins = []
76 admins = []
77 for u in User.query().filter(User.admin == true()).all():
77 for u in User.query().filter(User.admin == true()).all():
78 if u.email:
78 if u.email:
79 admins.append(u.email)
79 admins.append(u.email)
80 recipients = []
80 recipients = []
81 config_email = email_config.get('email_to')
81 config_email = email_config.get('email_to')
82 if config_email:
82 if config_email:
83 recipients += [config_email]
83 recipients += [config_email]
84 recipients += admins
84 recipients += admins
85
85
86 # translate our LEGACY config into the one that pyramid_mailer supports
86 # translate our LEGACY config into the one that pyramid_mailer supports
87 email_conf = dict(
87 email_conf = dict(
88 host=mail_server,
88 host=mail_server,
89 port=email_config.get('smtp_port', 25),
89 port=email_config.get('smtp_port', 25),
90 username=email_config.get('smtp_username'),
90 username=email_config.get('smtp_username'),
91 password=email_config.get('smtp_password'),
91 password=email_config.get('smtp_password'),
92
92
93 tls=str2bool(email_config.get('smtp_use_tls')),
93 tls=str2bool(email_config.get('smtp_use_tls')),
94 ssl=str2bool(email_config.get('smtp_use_ssl')),
94 ssl=str2bool(email_config.get('smtp_use_ssl')),
95
95
96 # SSL key file
96 # SSL key file
97 # keyfile='',
97 # keyfile='',
98
98
99 # SSL certificate file
99 # SSL certificate file
100 # certfile='',
100 # certfile='',
101
101
102 # Location of maildir
102 # Location of maildir
103 # queue_path='',
103 # queue_path='',
104
104
105 default_sender=email_config.get('app_email_from', 'RhodeCode-noreply@rhodecode.com'),
105 default_sender=email_config.get('app_email_from', 'RhodeCode-noreply@rhodecode.com'),
106
106
107 debug=str2bool(email_config.get('smtp_debug')),
107 debug=str2bool(email_config.get('smtp_debug')),
108 # /usr/sbin/sendmail Sendmail executable
108 # /usr/sbin/sendmail Sendmail executable
109 # sendmail_app='',
109 # sendmail_app='',
110
110
111 # {sendmail_app} -t -i -f {sender} Template for sendmail execution
111 # {sendmail_app} -t -i -f {sender} Template for sendmail execution
112 # sendmail_template='',
112 # sendmail_template='',
113 )
113 )
114
114
115 if extra_headers is None:
115 if extra_headers is None:
116 extra_headers = {}
116 extra_headers = {}
117
117
118 extra_headers.setdefault('Date', formatdate(time.time()))
118 extra_headers.setdefault('Date', formatdate(time.time()))
119
119
120 if 'thread_ids' in extra_headers:
120 if 'thread_ids' in extra_headers:
121 thread_ids = extra_headers.pop('thread_ids')
121 thread_ids = extra_headers.pop('thread_ids')
122 extra_headers['References'] = ' '.join('<{}>'.format(t) for t in thread_ids)
122 extra_headers['References'] = ' '.join('<{}>'.format(t) for t in thread_ids)
123
123
124 try:
124 try:
125 mailer = Mailer(**email_conf)
125 mailer = Mailer(**email_conf)
126
126
127 message = Message(subject=subject,
127 message = Message(subject=subject,
128 sender=email_conf['default_sender'],
128 sender=email_conf['default_sender'],
129 recipients=recipients,
129 recipients=recipients,
130 body=body, html=html_body,
130 body=body, html=html_body,
131 extra_headers=extra_headers)
131 extra_headers=extra_headers)
132 mailer.send_immediately(message)
132 mailer.send_immediately(message)
133 statsd = StatsdClient.statsd
133 statsd = StatsdClient.statsd
134 if statsd:
134 if statsd:
135 statsd.incr('rhodecode_email_sent_total')
135 statsd.incr('rhodecode_email_sent_total')
136
136
137 except Exception:
137 except Exception:
138 log.exception('Mail sending failed')
138 log.exception('Mail sending failed')
139 return False
139 return False
140 return True
140 return True
141
141
142
142
143 @async_task(ignore_result=True, base=RequestContextTask)
143 @async_task(ignore_result=True, base=RequestContextTask)
144 def create_repo(form_data, cur_user):
144 def create_repo(form_data, cur_user):
145 from rhodecode.model.repo import RepoModel
145 from rhodecode.model.repo import RepoModel
146 from rhodecode.model.user import UserModel
146 from rhodecode.model.user import UserModel
147 from rhodecode.model.scm import ScmModel
147 from rhodecode.model.scm import ScmModel
148 from rhodecode.model.settings import SettingsModel
148 from rhodecode.model.settings import SettingsModel
149
149
150 log = get_logger(create_repo)
150 log = get_logger(create_repo)
151
151
152 cur_user = UserModel()._get_user(cur_user)
152 cur_user = UserModel()._get_user(cur_user)
153 owner = cur_user
153 owner = cur_user
154
154
155 repo_name = form_data['repo_name']
155 repo_name = form_data['repo_name']
156 repo_name_full = form_data['repo_name_full']
156 repo_name_full = form_data['repo_name_full']
157 repo_type = form_data['repo_type']
157 repo_type = form_data['repo_type']
158 description = form_data['repo_description']
158 description = form_data['repo_description']
159 private = form_data['repo_private']
159 private = form_data['repo_private']
160 clone_uri = form_data.get('clone_uri')
160 clone_uri = form_data.get('clone_uri')
161 repo_group = safe_int(form_data['repo_group'])
161 repo_group = safe_int(form_data['repo_group'])
162 copy_fork_permissions = form_data.get('copy_permissions')
162 copy_fork_permissions = form_data.get('copy_permissions')
163 copy_group_permissions = form_data.get('repo_copy_permissions')
163 copy_group_permissions = form_data.get('repo_copy_permissions')
164 fork_of = form_data.get('fork_parent_id')
164 fork_of = form_data.get('fork_parent_id')
165 state = form_data.get('repo_state', Repository.STATE_PENDING)
165 state = form_data.get('repo_state', Repository.STATE_PENDING)
166
166
167 # repo creation defaults, private and repo_type are filled in form
167 # repo creation defaults, private and repo_type are filled in form
168 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
168 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
169 enable_statistics = form_data.get(
169 enable_statistics = form_data.get(
170 'enable_statistics', defs.get('repo_enable_statistics'))
170 'enable_statistics', defs.get('repo_enable_statistics'))
171 enable_locking = form_data.get(
171 enable_locking = form_data.get(
172 'enable_locking', defs.get('repo_enable_locking'))
172 'enable_locking', defs.get('repo_enable_locking'))
173 enable_downloads = form_data.get(
173 enable_downloads = form_data.get(
174 'enable_downloads', defs.get('repo_enable_downloads'))
174 'enable_downloads', defs.get('repo_enable_downloads'))
175
175
176 # set landing rev based on default branches for SCM
176 # set landing rev based on default branches for SCM
177 landing_ref, _label = ScmModel.backend_landing_ref(repo_type)
177 landing_ref, _label = ScmModel.backend_landing_ref(repo_type)
178
178
179 try:
179 try:
180 RepoModel()._create_repo(
180 RepoModel()._create_repo(
181 repo_name=repo_name_full,
181 repo_name=repo_name_full,
182 repo_type=repo_type,
182 repo_type=repo_type,
183 description=description,
183 description=description,
184 owner=owner,
184 owner=owner,
185 private=private,
185 private=private,
186 clone_uri=clone_uri,
186 clone_uri=clone_uri,
187 repo_group=repo_group,
187 repo_group=repo_group,
188 landing_rev=landing_ref,
188 landing_rev=landing_ref,
189 fork_of=fork_of,
189 fork_of=fork_of,
190 copy_fork_permissions=copy_fork_permissions,
190 copy_fork_permissions=copy_fork_permissions,
191 copy_group_permissions=copy_group_permissions,
191 copy_group_permissions=copy_group_permissions,
192 enable_statistics=enable_statistics,
192 enable_statistics=enable_statistics,
193 enable_locking=enable_locking,
193 enable_locking=enable_locking,
194 enable_downloads=enable_downloads,
194 enable_downloads=enable_downloads,
195 state=state
195 state=state
196 )
196 )
197 Session().commit()
197 Session().commit()
198
198
199 # now create this repo on Filesystem
199 # now create this repo on Filesystem
200 RepoModel()._create_filesystem_repo(
200 RepoModel()._create_filesystem_repo(
201 repo_name=repo_name,
201 repo_name=repo_name,
202 repo_type=repo_type,
202 repo_type=repo_type,
203 repo_group=RepoModel()._get_repo_group(repo_group),
203 repo_group=RepoModel()._get_repo_group(repo_group),
204 clone_uri=clone_uri,
204 clone_uri=clone_uri,
205 )
205 )
206 repo = Repository.get_by_repo_name(repo_name_full)
206 repo = Repository.get_by_repo_name(repo_name_full)
207 hooks_base.create_repository(created_by=owner.username, **repo.get_dict())
207 hooks_base.create_repository(created_by=owner.username, **repo.get_dict())
208
208
209 # update repo commit caches initially
209 # update repo commit caches initially
210 repo.update_commit_cache()
210 repo.update_commit_cache()
211
211
212 # set new created state
212 # set new created state
213 repo.set_state(Repository.STATE_CREATED)
213 repo.set_state(Repository.STATE_CREATED)
214 repo_id = repo.repo_id
214 repo_id = repo.repo_id
215 repo_data = repo.get_api_data()
215 repo_data = repo.get_api_data()
216
216
217 audit_logger.store(
217 audit_logger.store(
218 'repo.create', action_data={'data': repo_data},
218 'repo.create', action_data={'data': repo_data},
219 user=cur_user,
219 user=cur_user,
220 repo=audit_logger.RepoWrap(repo_name=repo_name, repo_id=repo_id))
220 repo=audit_logger.RepoWrap(repo_name=repo_name, repo_id=repo_id))
221
221
222 Session().commit()
222 Session().commit()
223
223
224 PermissionModel().trigger_permission_flush()
224 PermissionModel().trigger_permission_flush()
225
225
226 except Exception as e:
226 except Exception as e:
227 log.warning('Exception occurred when creating repository, '
227 log.warning('Exception occurred when creating repository, '
228 'doing cleanup...', exc_info=True)
228 'doing cleanup...', exc_info=True)
229 if isinstance(e, IntegrityError):
229 if isinstance(e, IntegrityError):
230 Session().rollback()
230 Session().rollback()
231
231
232 # rollback things manually !
232 # rollback things manually !
233 repo = Repository.get_by_repo_name(repo_name_full)
233 repo = Repository.get_by_repo_name(repo_name_full)
234 if repo:
234 if repo:
235 Repository.delete(repo.repo_id)
235 Repository.delete(repo.repo_id)
236 Session().commit()
236 Session().commit()
237 RepoModel()._delete_filesystem_repo(repo)
237 RepoModel()._delete_filesystem_repo(repo)
238 log.info('Cleanup of repo %s finished', repo_name_full)
238 log.info('Cleanup of repo %s finished', repo_name_full)
239 raise
239 raise
240
240
241 return True
241 return True
242
242
243
243
244 @async_task(ignore_result=True, base=RequestContextTask)
244 @async_task(ignore_result=True, base=RequestContextTask)
245 def create_repo_fork(form_data, cur_user):
245 def create_repo_fork(form_data, cur_user):
246 """
246 """
247 Creates a fork of repository using internal VCS methods
247 Creates a fork of repository using internal VCS methods
248 """
248 """
249 from rhodecode.model.repo import RepoModel
249 from rhodecode.model.repo import RepoModel
250 from rhodecode.model.user import UserModel
250 from rhodecode.model.user import UserModel
251
251
252 log = get_logger(create_repo_fork)
252 log = get_logger(create_repo_fork)
253
253
254 cur_user = UserModel()._get_user(cur_user)
254 cur_user = UserModel()._get_user(cur_user)
255 owner = cur_user
255 owner = cur_user
256
256
257 repo_name = form_data['repo_name'] # fork in this case
257 repo_name = form_data['repo_name'] # fork in this case
258 repo_name_full = form_data['repo_name_full']
258 repo_name_full = form_data['repo_name_full']
259 repo_type = form_data['repo_type']
259 repo_type = form_data['repo_type']
260 description = form_data['description']
260 description = form_data['description']
261 private = form_data['private']
261 private = form_data['private']
262 clone_uri = form_data.get('clone_uri')
262 clone_uri = form_data.get('clone_uri')
263 repo_group = safe_int(form_data['repo_group'])
263 repo_group = safe_int(form_data['repo_group'])
264 landing_ref = form_data['landing_rev']
264 landing_ref = form_data['landing_rev']
265 copy_fork_permissions = form_data.get('copy_permissions')
265 copy_fork_permissions = form_data.get('copy_permissions')
266 fork_id = safe_int(form_data.get('fork_parent_id'))
266 fork_id = safe_int(form_data.get('fork_parent_id'))
267
267
268 try:
268 try:
269 fork_of = RepoModel()._get_repo(fork_id)
269 fork_of = RepoModel()._get_repo(fork_id)
270 RepoModel()._create_repo(
270 RepoModel()._create_repo(
271 repo_name=repo_name_full,
271 repo_name=repo_name_full,
272 repo_type=repo_type,
272 repo_type=repo_type,
273 description=description,
273 description=description,
274 owner=owner,
274 owner=owner,
275 private=private,
275 private=private,
276 clone_uri=clone_uri,
276 clone_uri=clone_uri,
277 repo_group=repo_group,
277 repo_group=repo_group,
278 landing_rev=landing_ref,
278 landing_rev=landing_ref,
279 fork_of=fork_of,
279 fork_of=fork_of,
280 copy_fork_permissions=copy_fork_permissions
280 copy_fork_permissions=copy_fork_permissions
281 )
281 )
282
282
283 Session().commit()
283 Session().commit()
284
284
285 base_path = Repository.base_path()
285 base_path = Repository.base_path()
286 source_repo_path = os.path.join(base_path, fork_of.repo_name)
286 source_repo_path = os.path.join(base_path, fork_of.repo_name)
287
287
288 # now create this repo on Filesystem
288 # now create this repo on Filesystem
289 RepoModel()._create_filesystem_repo(
289 RepoModel()._create_filesystem_repo(
290 repo_name=repo_name,
290 repo_name=repo_name,
291 repo_type=repo_type,
291 repo_type=repo_type,
292 repo_group=RepoModel()._get_repo_group(repo_group),
292 repo_group=RepoModel()._get_repo_group(repo_group),
293 clone_uri=source_repo_path,
293 clone_uri=source_repo_path,
294 )
294 )
295 repo = Repository.get_by_repo_name(repo_name_full)
295 repo = Repository.get_by_repo_name(repo_name_full)
296 hooks_base.create_repository(created_by=owner.username, **repo.get_dict())
296 hooks_base.create_repository(created_by=owner.username, **repo.get_dict())
297
297
298 # update repo commit caches initially
298 # update repo commit caches initially
299 config = repo._config
299 config = repo._config
300 config.set('extensions', 'largefiles', '')
300 config.set('extensions', 'largefiles', '')
301 repo.update_commit_cache(config=config)
301 repo.update_commit_cache(config=config)
302
302
303 # set new created state
303 # set new created state
304 repo.set_state(Repository.STATE_CREATED)
304 repo.set_state(Repository.STATE_CREATED)
305
305
306 repo_id = repo.repo_id
306 repo_id = repo.repo_id
307 repo_data = repo.get_api_data()
307 repo_data = repo.get_api_data()
308 audit_logger.store(
308 audit_logger.store(
309 'repo.fork', action_data={'data': repo_data},
309 'repo.fork', action_data={'data': repo_data},
310 user=cur_user,
310 user=cur_user,
311 repo=audit_logger.RepoWrap(repo_name=repo_name, repo_id=repo_id))
311 repo=audit_logger.RepoWrap(repo_name=repo_name, repo_id=repo_id))
312
312
313 Session().commit()
313 Session().commit()
314 except Exception as e:
314 except Exception as e:
315 log.warning('Exception occurred when forking repository, '
315 log.warning('Exception occurred when forking repository, '
316 'doing cleanup...', exc_info=True)
316 'doing cleanup...', exc_info=True)
317 if isinstance(e, IntegrityError):
317 if isinstance(e, IntegrityError):
318 Session().rollback()
318 Session().rollback()
319
319
320 # rollback things manually !
320 # rollback things manually !
321 repo = Repository.get_by_repo_name(repo_name_full)
321 repo = Repository.get_by_repo_name(repo_name_full)
322 if repo:
322 if repo:
323 Repository.delete(repo.repo_id)
323 Repository.delete(repo.repo_id)
324 Session().commit()
324 Session().commit()
325 RepoModel()._delete_filesystem_repo(repo)
325 RepoModel()._delete_filesystem_repo(repo)
326 log.info('Cleanup of repo %s finished', repo_name_full)
326 log.info('Cleanup of repo %s finished', repo_name_full)
327 raise
327 raise
328
328
329 return True
329 return True
330
330
331
331
332 @async_task(ignore_result=True, base=RequestContextTask)
332 @async_task(ignore_result=True, base=RequestContextTask)
333 def repo_maintenance(repoid):
333 def repo_maintenance(repoid):
334 from rhodecode.lib import repo_maintenance as repo_maintenance_lib
334 from rhodecode.lib import repo_maintenance as repo_maintenance_lib
335 log = get_logger(repo_maintenance)
335 log = get_logger(repo_maintenance)
336 repo = Repository.get_by_id_or_repo_name(repoid)
336 repo = Repository.get_by_id_or_repo_name(repoid)
337 if repo:
337 if repo:
338 maintenance = repo_maintenance_lib.RepoMaintenance()
338 maintenance = repo_maintenance_lib.RepoMaintenance()
339 tasks = maintenance.get_tasks_for_repo(repo)
339 tasks = maintenance.get_tasks_for_repo(repo)
340 log.debug('Executing %s tasks on repo `%s`', tasks, repoid)
340 log.debug('Executing %s tasks on repo `%s`', tasks, repoid)
341 executed_types = maintenance.execute(repo)
341 executed_types = maintenance.execute(repo)
342 log.debug('Got execution results %s', executed_types)
342 log.debug('Got execution results %s', executed_types)
343 else:
343 else:
344 log.debug('Repo `%s` not found or without a clone_url', repoid)
344 log.debug('Repo `%s` not found or without a clone_url', repoid)
345
345
346
346
347 @async_task(ignore_result=True, base=RequestContextTask)
347 @async_task(ignore_result=True, base=RequestContextTask)
348 def check_for_update(send_email_notification=True, email_recipients=None):
348 def check_for_update(send_email_notification=True, email_recipients=None):
349 from rhodecode.model.update import UpdateModel
349 from rhodecode.model.update import UpdateModel
350 from rhodecode.model.notification import EmailNotificationModel
350 from rhodecode.model.notification import EmailNotificationModel
351
351
352 log = get_logger(check_for_update)
352 log = get_logger(check_for_update)
353 update_url = UpdateModel().get_update_url()
353 update_url = UpdateModel().get_update_url()
354 cur_ver = rhodecode.__version__
354 cur_ver = rhodecode.__version__
355
355
356 try:
356 try:
357 data = UpdateModel().get_update_data(update_url)
357 data = UpdateModel().get_update_data(update_url)
358
358
359 current_ver = UpdateModel().get_stored_version(fallback=cur_ver)
359 current_ver = UpdateModel().get_stored_version(fallback=cur_ver)
360 latest_ver = data['versions'][0]['version']
360 latest_ver = data['versions'][0]['version']
361 UpdateModel().store_version(latest_ver)
361 UpdateModel().store_version(latest_ver)
362
362
363 if send_email_notification:
363 if send_email_notification:
364 log.debug('Send email notification is enabled. '
364 log.debug('Send email notification is enabled. '
365 'Current RhodeCode version: %s, latest known: %s', current_ver, latest_ver)
365 'Current RhodeCode version: %s, latest known: %s', current_ver, latest_ver)
366 if UpdateModel().is_outdated(current_ver, latest_ver):
366 if UpdateModel().is_outdated(current_ver, latest_ver):
367
367
368 email_kwargs = {
368 email_kwargs = {
369 'current_ver': current_ver,
369 'current_ver': current_ver,
370 'latest_ver': latest_ver,
370 'latest_ver': latest_ver,
371 }
371 }
372
372
373 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
373 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
374 EmailNotificationModel.TYPE_UPDATE_AVAILABLE, **email_kwargs)
374 EmailNotificationModel.TYPE_UPDATE_AVAILABLE, **email_kwargs)
375
375
376 email_recipients = aslist(email_recipients, sep=',') or \
376 email_recipients = aslist(email_recipients, sep=',') or \
377 [user.email for user in User.get_all_super_admins()]
377 [user.email for user in User.get_all_super_admins()]
378 run_task(send_email, email_recipients, subject,
378 run_task(send_email, email_recipients, subject,
379 email_body_plaintext, email_body)
379 email_body_plaintext, email_body)
380
380
381 except Exception:
381 except Exception:
382 log.exception('Failed to check for update')
382 log.exception('Failed to check for update')
383 raise
383 raise
384
384
385
385
386 def sync_last_update_for_objects(*args, **kwargs):
386 def sync_last_update_for_objects(*args, **kwargs):
387 skip_repos = kwargs.get('skip_repos')
387 skip_repos = kwargs.get('skip_repos')
388 if not skip_repos:
388 if not skip_repos:
389 repos = Repository.query() \
389 repos = Repository.query() \
390 .order_by(Repository.group_id.asc())
390 .order_by(Repository.group_id.asc())
391
391
392 for repo in repos:
392 for repo in repos:
393 repo.update_commit_cache()
393 repo.update_commit_cache()
394
394
395 skip_groups = kwargs.get('skip_groups')
395 skip_groups = kwargs.get('skip_groups')
396 if not skip_groups:
396 if not skip_groups:
397 repo_groups = RepoGroup.query() \
397 repo_groups = RepoGroup.query() \
398 .filter(RepoGroup.group_parent_id == None)
398 .filter(RepoGroup.group_parent_id == None)
399
399
400 for root_gr in repo_groups:
400 for root_gr in repo_groups:
401 for repo_gr in reversed(root_gr.recursive_groups()):
401 for repo_gr in reversed(root_gr.recursive_groups()):
402 repo_gr.update_commit_cache()
402 repo_gr.update_commit_cache()
403
403
404
404
405 @async_task(ignore_result=True, base=RequestContextTask)
405 @async_task(ignore_result=True, base=RequestContextTask)
406 def sync_last_update(*args, **kwargs):
406 def sync_last_update(*args, **kwargs):
407 sync_last_update_for_objects(*args, **kwargs)
407 sync_last_update_for_objects(*args, **kwargs)
408
408
409
409
410 @async_task(ignore_result=False)
410 @async_task(ignore_result=False)
411 def beat_check(*args, **kwargs):
411 def beat_check(*args, **kwargs):
412 log = get_logger(beat_check)
412 log = get_logger(beat_check)
413 log.info('%r: Got args: %r and kwargs %r', beat_check, args, kwargs)
413 log.info('%r: Got args: %r and kwargs %r', beat_check, args, kwargs)
414 return time.time()
414 return time.time()
@@ -1,141 +1,140 b''
1 # -*- coding: utf-8 -*-
2
1
3 # Copyright (C) 2010-2020 RhodeCode GmbH
2 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
3 #
5 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
8 #
7 #
9 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
11 # GNU General Public License for more details.
13 #
12 #
14 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
15 #
17 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
19
21 import os
20 import os
22 import json
21 import json
23 import logging
22 import logging
24 import datetime
23 import datetime
25 import time
24 import time
26
25
27 from functools import partial
26 from functools import partial
28
27
29 import configparser
28 import configparser
30 from celery.result import AsyncResult
29 from celery.result import AsyncResult
31 import celery.loaders.base
30 import celery.loaders.base
32 import celery.schedules
31 import celery.schedules
33
32
34 log = logging.getLogger(__name__)
33 log = logging.getLogger(__name__)
35
34
36
35
37 def get_task_id(task):
36 def get_task_id(task):
38 task_id = None
37 task_id = None
39 if isinstance(task, AsyncResult):
38 if isinstance(task, AsyncResult):
40 task_id = task.task_id
39 task_id = task.task_id
41
40
42 return task_id
41 return task_id
43
42
44
43
45 def crontab(value):
44 def crontab(value):
46 return celery.schedules.crontab(**value)
45 return celery.schedules.crontab(**value)
47
46
48
47
49 def timedelta(value):
48 def timedelta(value):
50 return datetime.timedelta(**value)
49 return datetime.timedelta(**value)
51
50
52
51
53 def safe_json(get, section, key):
52 def safe_json(get, section, key):
54 value = ''
53 value = ''
55 try:
54 try:
56 value = get(key)
55 value = get(key)
57 json_value = json.loads(value)
56 json_value = json.loads(value)
58 except ValueError:
57 except ValueError:
59 msg = 'The %s=%s is not valid json in section %s' % (
58 msg = 'The %s=%s is not valid json in section %s' % (
60 key, value, section
59 key, value, section
61 )
60 )
62 raise ValueError(msg)
61 raise ValueError(msg)
63
62
64 return json_value
63 return json_value
65
64
66
65
67 def raw_2_schedule(schedule_value, schedule_type):
66 def raw_2_schedule(schedule_value, schedule_type):
68 schedule_type_map = {
67 schedule_type_map = {
69 'crontab': crontab,
68 'crontab': crontab,
70 'timedelta': timedelta,
69 'timedelta': timedelta,
71 'integer': int
70 'integer': int
72 }
71 }
73 scheduler_cls = schedule_type_map.get(schedule_type)
72 scheduler_cls = schedule_type_map.get(schedule_type)
74
73
75 if scheduler_cls is None:
74 if scheduler_cls is None:
76 raise ValueError(
75 raise ValueError(
77 'schedule type %s in section is invalid' % (
76 'schedule type %s in section is invalid' % (
78 schedule_type,
77 schedule_type,
79 )
78 )
80 )
79 )
81 try:
80 try:
82 schedule = scheduler_cls(schedule_value)
81 schedule = scheduler_cls(schedule_value)
83 except TypeError:
82 except TypeError:
84 log.exception('Failed to compose a schedule from value: %r', schedule_value)
83 log.exception('Failed to compose a schedule from value: %r', schedule_value)
85 schedule = None
84 schedule = None
86 return schedule
85 return schedule
87
86
88
87
89 def get_beat_config(parser, section):
88 def get_beat_config(parser, section):
90
89
91 get = partial(parser.get, section)
90 get = partial(parser.get, section)
92 has_option = partial(parser.has_option, section)
91 has_option = partial(parser.has_option, section)
93
92
94 schedule_type = get('type')
93 schedule_type = get('type')
95 schedule_value = safe_json(get, section, 'schedule')
94 schedule_value = safe_json(get, section, 'schedule')
96
95
97 config = {
96 config = {
98 'schedule_type': schedule_type,
97 'schedule_type': schedule_type,
99 'schedule_value': schedule_value,
98 'schedule_value': schedule_value,
100 'task': get('task'),
99 'task': get('task'),
101 }
100 }
102 schedule = raw_2_schedule(schedule_value, schedule_type)
101 schedule = raw_2_schedule(schedule_value, schedule_type)
103 if schedule:
102 if schedule:
104 config['schedule'] = schedule
103 config['schedule'] = schedule
105
104
106 if has_option('args'):
105 if has_option('args'):
107 config['args'] = safe_json(get, section, 'args')
106 config['args'] = safe_json(get, section, 'args')
108
107
109 if has_option('kwargs'):
108 if has_option('kwargs'):
110 config['kwargs'] = safe_json(get, section, 'kwargs')
109 config['kwargs'] = safe_json(get, section, 'kwargs')
111
110
112 if has_option('force_update'):
111 if has_option('force_update'):
113 config['force_update'] = get('force_update')
112 config['force_update'] = get('force_update')
114
113
115 return config
114 return config
116
115
117
116
118 def parse_ini_vars(ini_vars):
117 def parse_ini_vars(ini_vars):
119 options = {}
118 options = {}
120 for pairs in ini_vars.split(','):
119 for pairs in ini_vars.split(','):
121 key, value = pairs.split('=')
120 key, value = pairs.split('=')
122 options[key] = value
121 options[key] = value
123 return options
122 return options
124
123
125
124
126 def ping_db():
125 def ping_db():
127 from rhodecode.model import meta
126 from rhodecode.model import meta
128 from rhodecode.model.db import DbMigrateVersion
127 from rhodecode.model.db import DbMigrateVersion
129 log.info('Testing DB connection...')
128 log.info('Testing DB connection...')
130
129
131 for test in range(10):
130 for test in range(10):
132 try:
131 try:
133 scalar = DbMigrateVersion.query().scalar()
132 scalar = DbMigrateVersion.query().scalar()
134 log.debug('DB PING %s@%s', scalar, scalar.version)
133 log.debug('DB PING %s@%s', scalar, scalar.version)
135 break
134 break
136 except Exception:
135 except Exception:
137 retry = 1
136 retry = 1
138 log.debug('DB not ready, next try in %ss', retry)
137 log.debug('DB not ready, next try in %ss', retry)
139 time.sleep(retry)
138 time.sleep(retry)
140 finally:
139 finally:
141 meta.Session.remove()
140 meta.Session.remove()
@@ -1,372 +1,372 b''
1 # -*- coding: utf-8 -*-
1
2
2
3 # Copyright (C) 2016-2020 RhodeCode GmbH
3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22 import itsdangerous
22 import itsdangerous
23 import logging
23 import logging
24 import requests
24 import requests
25 import datetime
25 import datetime
26
26
27 from dogpile.util.readwrite_lock import ReadWriteMutex
27 from dogpile.util.readwrite_lock import ReadWriteMutex
28 from pyramid.threadlocal import get_current_registry
28 from pyramid.threadlocal import get_current_registry
29
29
30 import rhodecode.lib.helpers as h
30 import rhodecode.lib.helpers as h
31 from rhodecode.lib.auth import HasRepoPermissionAny
31 from rhodecode.lib.auth import HasRepoPermissionAny
32 from rhodecode.lib.ext_json import json
32 from rhodecode.lib.ext_json import json
33 from rhodecode.model.db import User
33 from rhodecode.model.db import User
34 from rhodecode.lib.str_utils import ascii_str
34 from rhodecode.lib.str_utils import ascii_str
35 from rhodecode.lib.hash_utils import sha1_safe
35 from rhodecode.lib.hash_utils import sha1_safe
36
36
37 log = logging.getLogger(__name__)
37 log = logging.getLogger(__name__)
38
38
39 LOCK = ReadWriteMutex()
39 LOCK = ReadWriteMutex()
40
40
41 USER_STATE_PUBLIC_KEYS = [
41 USER_STATE_PUBLIC_KEYS = [
42 'id', 'username', 'first_name', 'last_name',
42 'id', 'username', 'first_name', 'last_name',
43 'icon_link', 'display_name', 'display_link']
43 'icon_link', 'display_name', 'display_link']
44
44
45
45
46 class ChannelstreamException(Exception):
46 class ChannelstreamException(Exception):
47 pass
47 pass
48
48
49
49
50 class ChannelstreamConnectionException(ChannelstreamException):
50 class ChannelstreamConnectionException(ChannelstreamException):
51 pass
51 pass
52
52
53
53
54 class ChannelstreamPermissionException(ChannelstreamException):
54 class ChannelstreamPermissionException(ChannelstreamException):
55 pass
55 pass
56
56
57
57
58 def get_channelstream_server_url(config, endpoint):
58 def get_channelstream_server_url(config, endpoint):
59 return 'http://{}{}'.format(config['server'], endpoint)
59 return 'http://{}{}'.format(config['server'], endpoint)
60
60
61
61
62 def channelstream_request(config, payload, endpoint, raise_exc=True):
62 def channelstream_request(config, payload, endpoint, raise_exc=True):
63 signer = itsdangerous.TimestampSigner(config['secret'])
63 signer = itsdangerous.TimestampSigner(config['secret'])
64 sig_for_server = signer.sign(endpoint)
64 sig_for_server = signer.sign(endpoint)
65 secret_headers = {'x-channelstream-secret': sig_for_server,
65 secret_headers = {'x-channelstream-secret': sig_for_server,
66 'x-channelstream-endpoint': endpoint,
66 'x-channelstream-endpoint': endpoint,
67 'Content-Type': 'application/json'}
67 'Content-Type': 'application/json'}
68 req_url = get_channelstream_server_url(config, endpoint)
68 req_url = get_channelstream_server_url(config, endpoint)
69
69
70 log.debug('Sending a channelstream request to endpoint: `%s`', req_url)
70 log.debug('Sending a channelstream request to endpoint: `%s`', req_url)
71 response = None
71 response = None
72 try:
72 try:
73 response = requests.post(req_url, data=json.dumps(payload),
73 response = requests.post(req_url, data=json.dumps(payload),
74 headers=secret_headers).json()
74 headers=secret_headers).json()
75 except requests.ConnectionError:
75 except requests.ConnectionError:
76 log.exception('ConnectionError occurred for endpoint %s', req_url)
76 log.exception('ConnectionError occurred for endpoint %s', req_url)
77 if raise_exc:
77 if raise_exc:
78 raise ChannelstreamConnectionException(req_url)
78 raise ChannelstreamConnectionException(req_url)
79 except Exception:
79 except Exception:
80 log.exception('Exception related to Channelstream happened')
80 log.exception('Exception related to Channelstream happened')
81 if raise_exc:
81 if raise_exc:
82 raise ChannelstreamConnectionException()
82 raise ChannelstreamConnectionException()
83 log.debug('Got channelstream response: %s', response)
83 log.debug('Got channelstream response: %s', response)
84 return response
84 return response
85
85
86
86
87 def get_user_data(user_id):
87 def get_user_data(user_id):
88 user = User.get(user_id)
88 user = User.get(user_id)
89 return {
89 return {
90 'id': user.user_id,
90 'id': user.user_id,
91 'username': user.username,
91 'username': user.username,
92 'first_name': user.first_name,
92 'first_name': user.first_name,
93 'last_name': user.last_name,
93 'last_name': user.last_name,
94 'icon_link': h.gravatar_url(user.email, 60),
94 'icon_link': h.gravatar_url(user.email, 60),
95 'display_name': h.person(user, 'username_or_name_or_email'),
95 'display_name': h.person(user, 'username_or_name_or_email'),
96 'display_link': h.link_to_user(user),
96 'display_link': h.link_to_user(user),
97 'notifications': user.user_data.get('notification_status', True)
97 'notifications': user.user_data.get('notification_status', True)
98 }
98 }
99
99
100
100
101 def broadcast_validator(channel_name):
101 def broadcast_validator(channel_name):
102 """ checks if user can access the broadcast channel """
102 """ checks if user can access the broadcast channel """
103 if channel_name == 'broadcast':
103 if channel_name == 'broadcast':
104 return True
104 return True
105
105
106
106
107 def repo_validator(channel_name):
107 def repo_validator(channel_name):
108 """ checks if user can access the broadcast channel """
108 """ checks if user can access the broadcast channel """
109 channel_prefix = '/repo$'
109 channel_prefix = '/repo$'
110 if channel_name.startswith(channel_prefix):
110 if channel_name.startswith(channel_prefix):
111 elements = channel_name[len(channel_prefix):].split('$')
111 elements = channel_name[len(channel_prefix):].split('$')
112 repo_name = elements[0]
112 repo_name = elements[0]
113 can_access = HasRepoPermissionAny(
113 can_access = HasRepoPermissionAny(
114 'repository.read',
114 'repository.read',
115 'repository.write',
115 'repository.write',
116 'repository.admin')(repo_name)
116 'repository.admin')(repo_name)
117 log.debug(
117 log.debug(
118 'permission check for %s channel resulted in %s',
118 'permission check for %s channel resulted in %s',
119 repo_name, can_access)
119 repo_name, can_access)
120 if can_access:
120 if can_access:
121 return True
121 return True
122 return False
122 return False
123
123
124
124
125 def check_channel_permissions(channels, plugin_validators, should_raise=True):
125 def check_channel_permissions(channels, plugin_validators, should_raise=True):
126 valid_channels = []
126 valid_channels = []
127
127
128 validators = [broadcast_validator, repo_validator]
128 validators = [broadcast_validator, repo_validator]
129 if plugin_validators:
129 if plugin_validators:
130 validators.extend(plugin_validators)
130 validators.extend(plugin_validators)
131 for channel_name in channels:
131 for channel_name in channels:
132 is_valid = False
132 is_valid = False
133 for validator in validators:
133 for validator in validators:
134 if validator(channel_name):
134 if validator(channel_name):
135 is_valid = True
135 is_valid = True
136 break
136 break
137 if is_valid:
137 if is_valid:
138 valid_channels.append(channel_name)
138 valid_channels.append(channel_name)
139 else:
139 else:
140 if should_raise:
140 if should_raise:
141 raise ChannelstreamPermissionException()
141 raise ChannelstreamPermissionException()
142 return valid_channels
142 return valid_channels
143
143
144
144
145 def get_channels_info(self, channels):
145 def get_channels_info(self, channels):
146 payload = {'channels': channels}
146 payload = {'channels': channels}
147 # gather persistence info
147 # gather persistence info
148 return channelstream_request(self._config(), payload, '/info')
148 return channelstream_request(self._config(), payload, '/info')
149
149
150
150
151 def parse_channels_info(info_result, include_channel_info=None):
151 def parse_channels_info(info_result, include_channel_info=None):
152 """
152 """
153 Returns data that contains only secure information that can be
153 Returns data that contains only secure information that can be
154 presented to clients
154 presented to clients
155 """
155 """
156 include_channel_info = include_channel_info or []
156 include_channel_info = include_channel_info or []
157
157
158 user_state_dict = {}
158 user_state_dict = {}
159 for userinfo in info_result['users']:
159 for userinfo in info_result['users']:
160 user_state_dict[userinfo['user']] = {
160 user_state_dict[userinfo['user']] = {
161 k: v for k, v in list(userinfo['state'].items())
161 k: v for k, v in list(userinfo['state'].items())
162 if k in USER_STATE_PUBLIC_KEYS
162 if k in USER_STATE_PUBLIC_KEYS
163 }
163 }
164
164
165 channels_info = {}
165 channels_info = {}
166
166
167 for c_name, c_info in list(info_result['channels'].items()):
167 for c_name, c_info in list(info_result['channels'].items()):
168 if c_name not in include_channel_info:
168 if c_name not in include_channel_info:
169 continue
169 continue
170 connected_list = []
170 connected_list = []
171 for username in c_info['users']:
171 for username in c_info['users']:
172 connected_list.append({
172 connected_list.append({
173 'user': username,
173 'user': username,
174 'state': user_state_dict[username]
174 'state': user_state_dict[username]
175 })
175 })
176 channels_info[c_name] = {'users': connected_list,
176 channels_info[c_name] = {'users': connected_list,
177 'history': c_info['history']}
177 'history': c_info['history']}
178
178
179 return channels_info
179 return channels_info
180
180
181
181
182 def log_filepath(history_location, channel_name):
182 def log_filepath(history_location, channel_name):
183
183
184 channel_hash = ascii_str(sha1_safe(channel_name))
184 channel_hash = ascii_str(sha1_safe(channel_name))
185 filename = f'{channel_hash}.log'
185 filename = f'{channel_hash}.log'
186 filepath = os.path.join(history_location, filename)
186 filepath = os.path.join(history_location, filename)
187 return filepath
187 return filepath
188
188
189
189
190 def read_history(history_location, channel_name):
190 def read_history(history_location, channel_name):
191 filepath = log_filepath(history_location, channel_name)
191 filepath = log_filepath(history_location, channel_name)
192 if not os.path.exists(filepath):
192 if not os.path.exists(filepath):
193 return []
193 return []
194 history_lines_limit = -100
194 history_lines_limit = -100
195 history = []
195 history = []
196 with open(filepath, 'rb') as f:
196 with open(filepath, 'rb') as f:
197 for line in f.readlines()[history_lines_limit:]:
197 for line in f.readlines()[history_lines_limit:]:
198 try:
198 try:
199 history.append(json.loads(line))
199 history.append(json.loads(line))
200 except Exception:
200 except Exception:
201 log.exception('Failed to load history')
201 log.exception('Failed to load history')
202 return history
202 return history
203
203
204
204
205 def update_history_from_logs(config, channels, payload):
205 def update_history_from_logs(config, channels, payload):
206 history_location = config.get('history.location')
206 history_location = config.get('history.location')
207 for channel in channels:
207 for channel in channels:
208 history = read_history(history_location, channel)
208 history = read_history(history_location, channel)
209 payload['channels_info'][channel]['history'] = history
209 payload['channels_info'][channel]['history'] = history
210
210
211
211
212 def write_history(config, message):
212 def write_history(config, message):
213 """ writes a message to a base64encoded filename """
213 """ writes a message to a base64encoded filename """
214 history_location = config.get('history.location')
214 history_location = config.get('history.location')
215 if not os.path.exists(history_location):
215 if not os.path.exists(history_location):
216 return
216 return
217 try:
217 try:
218 LOCK.acquire_write_lock()
218 LOCK.acquire_write_lock()
219 filepath = log_filepath(history_location, message['channel'])
219 filepath = log_filepath(history_location, message['channel'])
220 json_message = json.dumps(message)
220 json_message = json.dumps(message)
221 with open(filepath, 'ab') as f:
221 with open(filepath, 'ab') as f:
222 f.write(json_message)
222 f.write(json_message)
223 f.write('\n')
223 f.write('\n')
224 finally:
224 finally:
225 LOCK.release_write_lock()
225 LOCK.release_write_lock()
226
226
227
227
228 def get_connection_validators(registry):
228 def get_connection_validators(registry):
229 validators = []
229 validators = []
230 for k, config in list(registry.rhodecode_plugins.items()):
230 for k, config in list(registry.rhodecode_plugins.items()):
231 validator = config.get('channelstream', {}).get('connect_validator')
231 validator = config.get('channelstream', {}).get('connect_validator')
232 if validator:
232 if validator:
233 validators.append(validator)
233 validators.append(validator)
234 return validators
234 return validators
235
235
236
236
237 def get_channelstream_config(registry=None):
237 def get_channelstream_config(registry=None):
238 if not registry:
238 if not registry:
239 registry = get_current_registry()
239 registry = get_current_registry()
240
240
241 rhodecode_plugins = getattr(registry, 'rhodecode_plugins', {})
241 rhodecode_plugins = getattr(registry, 'rhodecode_plugins', {})
242 channelstream_config = rhodecode_plugins.get('channelstream', {})
242 channelstream_config = rhodecode_plugins.get('channelstream', {})
243 return channelstream_config
243 return channelstream_config
244
244
245
245
246 def post_message(channel, message, username, registry=None):
246 def post_message(channel, message, username, registry=None):
247 channelstream_config = get_channelstream_config(registry)
247 channelstream_config = get_channelstream_config(registry)
248 if not channelstream_config.get('enabled'):
248 if not channelstream_config.get('enabled'):
249 return
249 return
250
250
251 message_obj = message
251 message_obj = message
252 if isinstance(message, str):
252 if isinstance(message, str):
253 message_obj = {
253 message_obj = {
254 'message': message,
254 'message': message,
255 'level': 'success',
255 'level': 'success',
256 'topic': '/notifications'
256 'topic': '/notifications'
257 }
257 }
258
258
259 log.debug('Channelstream: sending notification to channel %s', channel)
259 log.debug('Channelstream: sending notification to channel %s', channel)
260 payload = {
260 payload = {
261 'type': 'message',
261 'type': 'message',
262 'timestamp': datetime.datetime.utcnow(),
262 'timestamp': datetime.datetime.utcnow(),
263 'user': 'system',
263 'user': 'system',
264 'exclude_users': [username],
264 'exclude_users': [username],
265 'channel': channel,
265 'channel': channel,
266 'message': message_obj
266 'message': message_obj
267 }
267 }
268
268
269 try:
269 try:
270 return channelstream_request(
270 return channelstream_request(
271 channelstream_config, [payload], '/message',
271 channelstream_config, [payload], '/message',
272 raise_exc=False)
272 raise_exc=False)
273 except ChannelstreamException:
273 except ChannelstreamException:
274 log.exception('Failed to send channelstream data')
274 log.exception('Failed to send channelstream data')
275 raise
275 raise
276
276
277
277
278 def _reload_link(label):
278 def _reload_link(label):
279 return (
279 return (
280 '<a onclick="window.location.reload()">'
280 '<a onclick="window.location.reload()">'
281 '<strong>{}</strong>'
281 '<strong>{}</strong>'
282 '</a>'.format(label)
282 '</a>'.format(label)
283 )
283 )
284
284
285
285
286 def pr_channel(pull_request):
286 def pr_channel(pull_request):
287 repo_name = pull_request.target_repo.repo_name
287 repo_name = pull_request.target_repo.repo_name
288 pull_request_id = pull_request.pull_request_id
288 pull_request_id = pull_request.pull_request_id
289 channel = '/repo${}$/pr/{}'.format(repo_name, pull_request_id)
289 channel = '/repo${}$/pr/{}'.format(repo_name, pull_request_id)
290 log.debug('Getting pull-request channelstream broadcast channel: %s', channel)
290 log.debug('Getting pull-request channelstream broadcast channel: %s', channel)
291 return channel
291 return channel
292
292
293
293
294 def comment_channel(repo_name, commit_obj=None, pull_request_obj=None):
294 def comment_channel(repo_name, commit_obj=None, pull_request_obj=None):
295 channel = None
295 channel = None
296 if commit_obj:
296 if commit_obj:
297 channel = '/repo${}$/commit/{}'.format(
297 channel = '/repo${}$/commit/{}'.format(
298 repo_name, commit_obj.raw_id
298 repo_name, commit_obj.raw_id
299 )
299 )
300 elif pull_request_obj:
300 elif pull_request_obj:
301 channel = '/repo${}$/pr/{}'.format(
301 channel = '/repo${}$/pr/{}'.format(
302 repo_name, pull_request_obj.pull_request_id
302 repo_name, pull_request_obj.pull_request_id
303 )
303 )
304 log.debug('Getting comment channelstream broadcast channel: %s', channel)
304 log.debug('Getting comment channelstream broadcast channel: %s', channel)
305
305
306 return channel
306 return channel
307
307
308
308
309 def pr_update_channelstream_push(request, pr_broadcast_channel, user, msg, **kwargs):
309 def pr_update_channelstream_push(request, pr_broadcast_channel, user, msg, **kwargs):
310 """
310 """
311 Channel push on pull request update
311 Channel push on pull request update
312 """
312 """
313 if not pr_broadcast_channel:
313 if not pr_broadcast_channel:
314 return
314 return
315
315
316 _ = request.translate
316 _ = request.translate
317
317
318 message = '{} {}'.format(
318 message = '{} {}'.format(
319 msg,
319 msg,
320 _reload_link(_(' Reload page to load changes')))
320 _reload_link(_(' Reload page to load changes')))
321
321
322 message_obj = {
322 message_obj = {
323 'message': message,
323 'message': message,
324 'level': 'success',
324 'level': 'success',
325 'topic': '/notifications'
325 'topic': '/notifications'
326 }
326 }
327
327
328 post_message(
328 post_message(
329 pr_broadcast_channel, message_obj, user.username,
329 pr_broadcast_channel, message_obj, user.username,
330 registry=request.registry)
330 registry=request.registry)
331
331
332
332
333 def comment_channelstream_push(request, comment_broadcast_channel, user, msg, **kwargs):
333 def comment_channelstream_push(request, comment_broadcast_channel, user, msg, **kwargs):
334 """
334 """
335 Channelstream push on comment action, on commit, or pull-request
335 Channelstream push on comment action, on commit, or pull-request
336 """
336 """
337 if not comment_broadcast_channel:
337 if not comment_broadcast_channel:
338 return
338 return
339
339
340 _ = request.translate
340 _ = request.translate
341
341
342 comment_data = kwargs.pop('comment_data', {})
342 comment_data = kwargs.pop('comment_data', {})
343 user_data = kwargs.pop('user_data', {})
343 user_data = kwargs.pop('user_data', {})
344 comment_id = list(comment_data.keys())[0] if comment_data else ''
344 comment_id = list(comment_data.keys())[0] if comment_data else ''
345
345
346 message = '<strong>{}</strong> {} #{}'.format(
346 message = '<strong>{}</strong> {} #{}'.format(
347 user.username,
347 user.username,
348 msg,
348 msg,
349 comment_id,
349 comment_id,
350 )
350 )
351
351
352 message_obj = {
352 message_obj = {
353 'message': message,
353 'message': message,
354 'level': 'success',
354 'level': 'success',
355 'topic': '/notifications'
355 'topic': '/notifications'
356 }
356 }
357
357
358 post_message(
358 post_message(
359 comment_broadcast_channel, message_obj, user.username,
359 comment_broadcast_channel, message_obj, user.username,
360 registry=request.registry)
360 registry=request.registry)
361
361
362 message_obj = {
362 message_obj = {
363 'message': None,
363 'message': None,
364 'user': user.username,
364 'user': user.username,
365 'comment_id': comment_id,
365 'comment_id': comment_id,
366 'comment_data': comment_data,
366 'comment_data': comment_data,
367 'user_data': user_data,
367 'user_data': user_data,
368 'topic': '/comment'
368 'topic': '/comment'
369 }
369 }
370 post_message(
370 post_message(
371 comment_broadcast_channel, message_obj, user.username,
371 comment_broadcast_channel, message_obj, user.username,
372 registry=request.registry)
372 registry=request.registry)
@@ -1,797 +1,797 b''
1 # -*- coding: utf-8 -*-
1
2
2
3 # Copyright (C) 2011-2020 RhodeCode GmbH
3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22 import difflib
22 import difflib
23 from itertools import groupby
23 from itertools import groupby
24
24
25 from pygments import lex
25 from pygments import lex
26 from pygments.formatters.html import _get_ttype_class as pygment_token_class
26 from pygments.formatters.html import _get_ttype_class as pygment_token_class
27 from pygments.lexers.special import TextLexer, Token
27 from pygments.lexers.special import TextLexer, Token
28 from pygments.lexers import get_lexer_by_name
28 from pygments.lexers import get_lexer_by_name
29
29
30 from rhodecode.lib.helpers import (
30 from rhodecode.lib.helpers import (
31 get_lexer_for_filenode, html_escape, get_custom_lexer)
31 get_lexer_for_filenode, html_escape, get_custom_lexer)
32 from rhodecode.lib.utils2 import AttributeDict, StrictAttributeDict, safe_unicode
32 from rhodecode.lib.utils2 import AttributeDict, StrictAttributeDict, safe_unicode
33 from rhodecode.lib.vcs.nodes import FileNode
33 from rhodecode.lib.vcs.nodes import FileNode
34 from rhodecode.lib.vcs.exceptions import VCSError, NodeDoesNotExistError
34 from rhodecode.lib.vcs.exceptions import VCSError, NodeDoesNotExistError
35 from rhodecode.lib.diff_match_patch import diff_match_patch
35 from rhodecode.lib.diff_match_patch import diff_match_patch
36 from rhodecode.lib.diffs import LimitedDiffContainer, DEL_FILENODE, BIN_FILENODE
36 from rhodecode.lib.diffs import LimitedDiffContainer, DEL_FILENODE, BIN_FILENODE
37
37
38
38
39 plain_text_lexer = get_lexer_by_name(
39 plain_text_lexer = get_lexer_by_name(
40 'text', stripall=False, stripnl=False, ensurenl=False)
40 'text', stripall=False, stripnl=False, ensurenl=False)
41
41
42
42
43 log = logging.getLogger(__name__)
43 log = logging.getLogger(__name__)
44
44
45
45
46 def filenode_as_lines_tokens(filenode, lexer=None):
46 def filenode_as_lines_tokens(filenode, lexer=None):
47 org_lexer = lexer
47 org_lexer = lexer
48 lexer = lexer or get_lexer_for_filenode(filenode)
48 lexer = lexer or get_lexer_for_filenode(filenode)
49 log.debug('Generating file node pygment tokens for %s, %s, org_lexer:%s',
49 log.debug('Generating file node pygment tokens for %s, %s, org_lexer:%s',
50 lexer, filenode, org_lexer)
50 lexer, filenode, org_lexer)
51 content = filenode.content
51 content = filenode.content
52 tokens = tokenize_string(content, lexer)
52 tokens = tokenize_string(content, lexer)
53 lines = split_token_stream(tokens, content)
53 lines = split_token_stream(tokens, content)
54 rv = list(lines)
54 rv = list(lines)
55 return rv
55 return rv
56
56
57
57
58 def tokenize_string(content, lexer):
58 def tokenize_string(content, lexer):
59 """
59 """
60 Use pygments to tokenize some content based on a lexer
60 Use pygments to tokenize some content based on a lexer
61 ensuring all original new lines and whitespace is preserved
61 ensuring all original new lines and whitespace is preserved
62 """
62 """
63
63
64 lexer.stripall = False
64 lexer.stripall = False
65 lexer.stripnl = False
65 lexer.stripnl = False
66 lexer.ensurenl = False
66 lexer.ensurenl = False
67
67
68 if isinstance(lexer, TextLexer):
68 if isinstance(lexer, TextLexer):
69 lexed = [(Token.Text, content)]
69 lexed = [(Token.Text, content)]
70 else:
70 else:
71 lexed = lex(content, lexer)
71 lexed = lex(content, lexer)
72
72
73 for token_type, token_text in lexed:
73 for token_type, token_text in lexed:
74 yield pygment_token_class(token_type), token_text
74 yield pygment_token_class(token_type), token_text
75
75
76
76
77 def split_token_stream(tokens, content):
77 def split_token_stream(tokens, content):
78 """
78 """
79 Take a list of (TokenType, text) tuples and split them by a string
79 Take a list of (TokenType, text) tuples and split them by a string
80
80
81 split_token_stream([(TEXT, 'some\ntext'), (TEXT, 'more\n')])
81 split_token_stream([(TEXT, 'some\ntext'), (TEXT, 'more\n')])
82 [(TEXT, 'some'), (TEXT, 'text'),
82 [(TEXT, 'some'), (TEXT, 'text'),
83 (TEXT, 'more'), (TEXT, 'text')]
83 (TEXT, 'more'), (TEXT, 'text')]
84 """
84 """
85
85
86 token_buffer = []
86 token_buffer = []
87 for token_class, token_text in tokens:
87 for token_class, token_text in tokens:
88 parts = token_text.split('\n')
88 parts = token_text.split('\n')
89 for part in parts[:-1]:
89 for part in parts[:-1]:
90 token_buffer.append((token_class, part))
90 token_buffer.append((token_class, part))
91 yield token_buffer
91 yield token_buffer
92 token_buffer = []
92 token_buffer = []
93
93
94 token_buffer.append((token_class, parts[-1]))
94 token_buffer.append((token_class, parts[-1]))
95
95
96 if token_buffer:
96 if token_buffer:
97 yield token_buffer
97 yield token_buffer
98 elif content:
98 elif content:
99 # this is a special case, we have the content, but tokenization didn't produce
99 # this is a special case, we have the content, but tokenization didn't produce
100 # any results. THis can happen if know file extensions like .css have some bogus
100 # any results. THis can happen if know file extensions like .css have some bogus
101 # unicode content without any newline characters
101 # unicode content without any newline characters
102 yield [(pygment_token_class(Token.Text), content)]
102 yield [(pygment_token_class(Token.Text), content)]
103
103
104
104
105 def filenode_as_annotated_lines_tokens(filenode):
105 def filenode_as_annotated_lines_tokens(filenode):
106 """
106 """
107 Take a file node and return a list of annotations => lines, if no annotation
107 Take a file node and return a list of annotations => lines, if no annotation
108 is found, it will be None.
108 is found, it will be None.
109
109
110 eg:
110 eg:
111
111
112 [
112 [
113 (annotation1, [
113 (annotation1, [
114 (1, line1_tokens_list),
114 (1, line1_tokens_list),
115 (2, line2_tokens_list),
115 (2, line2_tokens_list),
116 ]),
116 ]),
117 (annotation2, [
117 (annotation2, [
118 (3, line1_tokens_list),
118 (3, line1_tokens_list),
119 ]),
119 ]),
120 (None, [
120 (None, [
121 (4, line1_tokens_list),
121 (4, line1_tokens_list),
122 ]),
122 ]),
123 (annotation1, [
123 (annotation1, [
124 (5, line1_tokens_list),
124 (5, line1_tokens_list),
125 (6, line2_tokens_list),
125 (6, line2_tokens_list),
126 ])
126 ])
127 ]
127 ]
128 """
128 """
129
129
130 commit_cache = {} # cache commit_getter lookups
130 commit_cache = {} # cache commit_getter lookups
131
131
132 def _get_annotation(commit_id, commit_getter):
132 def _get_annotation(commit_id, commit_getter):
133 if commit_id not in commit_cache:
133 if commit_id not in commit_cache:
134 commit_cache[commit_id] = commit_getter()
134 commit_cache[commit_id] = commit_getter()
135 return commit_cache[commit_id]
135 return commit_cache[commit_id]
136
136
137 annotation_lookup = {
137 annotation_lookup = {
138 line_no: _get_annotation(commit_id, commit_getter)
138 line_no: _get_annotation(commit_id, commit_getter)
139 for line_no, commit_id, commit_getter, line_content
139 for line_no, commit_id, commit_getter, line_content
140 in filenode.annotate
140 in filenode.annotate
141 }
141 }
142
142
143 annotations_lines = ((annotation_lookup.get(line_no), line_no, tokens)
143 annotations_lines = ((annotation_lookup.get(line_no), line_no, tokens)
144 for line_no, tokens
144 for line_no, tokens
145 in enumerate(filenode_as_lines_tokens(filenode), 1))
145 in enumerate(filenode_as_lines_tokens(filenode), 1))
146
146
147 grouped_annotations_lines = groupby(annotations_lines, lambda x: x[0])
147 grouped_annotations_lines = groupby(annotations_lines, lambda x: x[0])
148
148
149 for annotation, group in grouped_annotations_lines:
149 for annotation, group in grouped_annotations_lines:
150 yield (
150 yield (
151 annotation, [(line_no, tokens)
151 annotation, [(line_no, tokens)
152 for (_, line_no, tokens) in group]
152 for (_, line_no, tokens) in group]
153 )
153 )
154
154
155
155
def render_tokenstream(tokenstream):
    """
    Render a rolled-up token stream as an HTML string.

    Each class-group becomes one ``<span>`` (with a ``class`` attribute when
    the token class is non-empty) and each op inside it is wrapped in the
    corresponding tag (e.g. ``<ins>``/``<del>``); the token text itself is
    HTML-escaped.
    """
    parts = []
    emit = parts.append

    for token_class, token_ops_texts in rollup_tokenstream(tokenstream):
        # open one span per class group; omit the class attr when empty
        emit('<span class="%s">' % token_class if token_class else '<span>')

        for op_tag, token_text in token_ops_texts:
            if op_tag:
                emit('<%s>' % op_tag)

            # NOTE(marcink): in some cases of mixed encodings, we might run into
            # troubles in the html_escape, in this case we say unicode force on
            # token_text that would ensure "correct" data even with the cost of
            # rendered
            try:
                escaped = html_escape(token_text)
            except TypeError:
                escaped = html_escape(safe_unicode(token_text))

            # TODO: dan: investigate showing hidden characters like
            # space/nl/tab via <sp>/<nl>/<tab> wrapper elements
            emit(escaped)

            if op_tag:
                emit('</%s>' % op_tag)

        emit('</span>')

    return ''.join(parts)
192
192
193
193
def rollup_tokenstream(tokenstream):
    """
    Group a token stream of the format:

        ('class', 'op', 'text')
        or
        ('class', 'text')

    into

        [('class1',
            [('op1', 'text'),
             ('op2', 'text')]),
         ('class2',
            [('op3', 'text')])]

    This is used to get the minimal tags necessary when
    rendering to html eg for a token stream ie.

    <span class="A"><ins>he</ins>llo</span>
    vs
    <span class="A"><ins>he</ins></span><span class="A">llo</span>

    If a 2 tuple is passed in, the output op will be an empty string.

    eg:

    >>> rollup_tokenstream([('classA', '', 'h'),
                            ('classA', 'del', 'ell'),
                            ('classA', '', 'o'),
                            ('classB', '', ' '),
                            ('classA', '', 'the'),
                            ('classA', '', 're'),
                            ])

    [('classA', [('', 'h'), ('del', 'ell'), ('', 'o')],
     ('classB', [('', ' ')],
     ('classA', [('', 'there')]]

    """
    # normalize 2-tuples (class, text) into 3-tuples with an empty op
    if tokenstream and len(tokenstream[0]) == 2:
        tokenstream = ((tok[0], '', tok[1]) for tok in tokenstream)

    rolled = []
    # outer groupby: consecutive runs sharing a token class
    for t_class, class_run in groupby(tokenstream, lambda tok: tok[0]):
        ops = []
        # inner groupby: consecutive runs sharing an op; their texts merge
        for t_op, op_run in groupby(class_run, lambda tok: tok[1]):
            ops.append((t_op, ''.join(tok[2] for tok in op_run)))
        rolled.append((t_class, ops))
    return rolled
247
247
248
248
def tokens_diff(old_tokens, new_tokens, use_diff_match_patch=True):
    """
    Converts a list of (token_class, token_text) tuples to a list of
    (token_class, token_op, token_text) tuples where token_op is one of
    ('ins', 'del', '')

    :param old_tokens: list of (token_class, token_text) tuples of old line
    :param new_tokens: list of (token_class, token_text) tuples of new line
    :param use_diff_match_patch: boolean, will use google's diff match patch
        library which has options to 'smooth' out the character by character
        differences making nicer ins/del blocks
    :return: (old_tokens_result, new_tokens_result, similarity) — the two
        annotated token lists plus the difflib similarity ratio of the lines
    """

    old_tokens_result = []
    new_tokens_result = []

    # whole-line similarity on the concatenated plain text; used both as an
    # early-out gate below and returned to the caller
    similarity = difflib.SequenceMatcher(None,
        ''.join(token_text for token_class, token_text in old_tokens),
        ''.join(token_text for token_class, token_text in new_tokens)
    ).ratio()

    if similarity < 0.6:  # return, the blocks are too different
        # no intra-line diff is attempted: every token goes out unchanged
        # (empty op) on its own side
        for token_class, token_text in old_tokens:
            old_tokens_result.append((token_class, '', token_text))
        for token_class, token_text in new_tokens:
            new_tokens_result.append((token_class, '', token_text))
        return old_tokens_result, new_tokens_result, similarity

    # first pass: diff at whole-token granularity (on token texts only)
    token_sequence_matcher = difflib.SequenceMatcher(None,
        [x[1] for x in old_tokens],
        [x[1] for x in new_tokens])

    for tag, o1, o2, n1, n2 in token_sequence_matcher.get_opcodes():
        # check the differences by token block types first to give a more
        # nicer "block" level replacement vs character diffs

        if tag == 'equal':
            for token_class, token_text in old_tokens[o1:o2]:
                old_tokens_result.append((token_class, '', token_text))
            for token_class, token_text in new_tokens[n1:n2]:
                new_tokens_result.append((token_class, '', token_text))
        elif tag == 'delete':
            for token_class, token_text in old_tokens[o1:o2]:
                old_tokens_result.append((token_class, 'del', token_text))
        elif tag == 'insert':
            for token_class, token_text in new_tokens[n1:n2]:
                new_tokens_result.append((token_class, 'ins', token_text))
        elif tag == 'replace':
            # if same type token blocks must be replaced, do a diff on the
            # characters in the token blocks to show individual changes

            # explode the replaced ranges into one (class, char) tuple per
            # character so the char-level diff can preserve each char's class
            old_char_tokens = []
            new_char_tokens = []
            for token_class, token_text in old_tokens[o1:o2]:
                for char in token_text:
                    old_char_tokens.append((token_class, char))

            for token_class, token_text in new_tokens[n1:n2]:
                for char in token_text:
                    new_char_tokens.append((token_class, char))

            old_string = ''.join([token_text for
                token_class, token_text in old_char_tokens])
            new_string = ''.join([token_text for
                token_class, token_text in new_char_tokens])

            char_sequence = difflib.SequenceMatcher(
                None, old_string, new_string)
            copcodes = char_sequence.get_opcodes()
            obuffer, nbuffer = [], []

            if use_diff_match_patch:
                dmp = diff_match_patch()
                dmp.Diff_EditCost = 11  # TODO: dan: extract this to a setting
                reps = dmp.diff_main(old_string, new_string)
                dmp.diff_cleanupEfficiency(reps)

                # walk the dmp ops keeping cursors a (old side) and b (new
                # side) in sync, so each emitted char is tagged with the
                # token class it originally came from
                a, b = 0, 0
                for op, rep in reps:
                    l = len(rep)
                    if op == 0:
                        # equal run: both cursors advance
                        for i, c in enumerate(rep):
                            obuffer.append((old_char_tokens[a+i][0], '', c))
                            nbuffer.append((new_char_tokens[b+i][0], '', c))
                        a += l
                        b += l
                    elif op == -1:
                        # deletion: present only on the old side
                        for i, c in enumerate(rep):
                            obuffer.append((old_char_tokens[a+i][0], 'del', c))
                        a += l
                    elif op == 1:
                        # insertion: present only on the new side
                        for i, c in enumerate(rep):
                            nbuffer.append((new_char_tokens[b+i][0], 'ins', c))
                        b += l
            else:
                # difflib fallback: same idea, driven by the char opcodes;
                # 'replace' is rendered as del-on-old plus ins-on-new
                for ctag, co1, co2, cn1, cn2 in copcodes:
                    if ctag == 'equal':
                        for token_class, token_text in old_char_tokens[co1:co2]:
                            obuffer.append((token_class, '', token_text))
                        for token_class, token_text in new_char_tokens[cn1:cn2]:
                            nbuffer.append((token_class, '', token_text))
                    elif ctag == 'delete':
                        for token_class, token_text in old_char_tokens[co1:co2]:
                            obuffer.append((token_class, 'del', token_text))
                    elif ctag == 'insert':
                        for token_class, token_text in new_char_tokens[cn1:cn2]:
                            nbuffer.append((token_class, 'ins', token_text))
                    elif ctag == 'replace':
                        for token_class, token_text in old_char_tokens[co1:co2]:
                            obuffer.append((token_class, 'del', token_text))
                        for token_class, token_text in new_char_tokens[cn1:cn2]:
                            nbuffer.append((token_class, 'ins', token_text))

            old_tokens_result.extend(obuffer)
            new_tokens_result.extend(nbuffer)

    return old_tokens_result, new_tokens_result, similarity
366
366
367
367
def diffset_node_getter(commit):
    """
    Build a filename -> node lookup bound to *commit*.

    The returned callable resolves a path via ``commit.get_node`` and maps
    missing paths to ``None`` instead of raising.
    """
    def _resolve(fname):
        # absent files are reported as None so callers can treat them
        # uniformly (e.g. added/deleted sides of a diff)
        try:
            node = commit.get_node(fname)
        except NodeDoesNotExistError:
            node = None
        return node

    return _resolve
376
376
377
377
378 class DiffSet(object):
378 class DiffSet(object):
379 """
379 """
380 An object for parsing the diff result from diffs.DiffProcessor and
380 An object for parsing the diff result from diffs.DiffProcessor and
381 adding highlighting, side by side/unified renderings and line diffs
381 adding highlighting, side by side/unified renderings and line diffs
382 """
382 """
383
383
    # Highlighting strategies, selected via the ``highlight_mode`` argument
    # of the constructor; they trade highlight accuracy for speed.
    HL_REAL = 'REAL' # highlights using original file, slow
    HL_FAST = 'FAST' # highlights using just the line, fast but not correct
                     # in the case of multiline code
    HL_NONE = 'NONE' # no highlighting, fastest
388
388
389 def __init__(self, highlight_mode=HL_REAL, repo_name=None,
389 def __init__(self, highlight_mode=HL_REAL, repo_name=None,
390 source_repo_name=None,
390 source_repo_name=None,
391 source_node_getter=lambda filename: None,
391 source_node_getter=lambda filename: None,
392 target_repo_name=None,
392 target_repo_name=None,
393 target_node_getter=lambda filename: None,
393 target_node_getter=lambda filename: None,
394 source_nodes=None, target_nodes=None,
394 source_nodes=None, target_nodes=None,
395 # files over this size will use fast highlighting
395 # files over this size will use fast highlighting
396 max_file_size_limit=150 * 1024,
396 max_file_size_limit=150 * 1024,
397 ):
397 ):
398
398
399 self.highlight_mode = highlight_mode
399 self.highlight_mode = highlight_mode
400 self.highlighted_filenodes = {
400 self.highlighted_filenodes = {
401 'before': {},
401 'before': {},
402 'after': {}
402 'after': {}
403 }
403 }
404 self.source_node_getter = source_node_getter
404 self.source_node_getter = source_node_getter
405 self.target_node_getter = target_node_getter
405 self.target_node_getter = target_node_getter
406 self.source_nodes = source_nodes or {}
406 self.source_nodes = source_nodes or {}
407 self.target_nodes = target_nodes or {}
407 self.target_nodes = target_nodes or {}
408 self.repo_name = repo_name
408 self.repo_name = repo_name
409 self.target_repo_name = target_repo_name or repo_name
409 self.target_repo_name = target_repo_name or repo_name
410 self.source_repo_name = source_repo_name or repo_name
410 self.source_repo_name = source_repo_name or repo_name
411 self.max_file_size_limit = max_file_size_limit
411 self.max_file_size_limit = max_file_size_limit
412
412
    def render_patchset(self, patchset, source_ref=None, target_ref=None):
        """
        Render a whole patchset (as produced by diffs.DiffProcessor) into a
        diffset AttributeDict holding one filediff per patch plus aggregate
        counters (changed files, lines added/deleted).

        :param patchset: iterable of patch dicts; may be a
            LimitedDiffContainer when the diff was truncated
        :param source_ref: source commit ref, stored on the result
        :param target_ref: target commit ref, stored on the result
        :return: AttributeDict with ``files``, ``file_stats`` and counters
        """
        diffset = AttributeDict(dict(
            lines_added=0,
            lines_deleted=0,
            changed_files=0,
            files=[],
            file_stats={},
            limited_diff=isinstance(patchset, LimitedDiffContainer),
            repo_name=self.repo_name,
            target_repo_name=self.target_repo_name,
            source_repo_name=self.source_repo_name,
            source_ref=source_ref,
            target_ref=target_ref,
        ))
        for patch in patchset:
            diffset.file_stats[patch['filename']] = patch['stats']
            filediff = self.render_patch(patch)
            # each filediff carries a slim, strict snapshot of its parent
            # diffset's identifying fields
            filediff.diffset = StrictAttributeDict(dict(
                source_ref=diffset.source_ref,
                target_ref=diffset.target_ref,
                repo_name=diffset.repo_name,
                source_repo_name=diffset.source_repo_name,
                target_repo_name=diffset.target_repo_name,
            ))
            diffset.files.append(filediff)
            diffset.changed_files += 1
            # binary patches carry no meaningful added/deleted line counts
            if not patch['stats']['binary']:
                diffset.lines_added += patch['stats']['added']
                diffset.lines_deleted += patch['stats']['deleted']

        return diffset
444
444
    # class-level cache shared by all DiffSet instances: maps filename to the
    # lexer chosen for it (populated by _get_lexer_for_filename)
    _lexer_cache = {}
446
446
447 def _get_lexer_for_filename(self, filename, filenode=None):
447 def _get_lexer_for_filename(self, filename, filenode=None):
448 # cached because we might need to call it twice for source/target
448 # cached because we might need to call it twice for source/target
449 if filename not in self._lexer_cache:
449 if filename not in self._lexer_cache:
450 if filenode:
450 if filenode:
451 lexer = filenode.lexer
451 lexer = filenode.lexer
452 extension = filenode.extension
452 extension = filenode.extension
453 else:
453 else:
454 lexer = FileNode.get_lexer(filename=filename)
454 lexer = FileNode.get_lexer(filename=filename)
455 extension = filename.split('.')[-1]
455 extension = filename.split('.')[-1]
456
456
457 lexer = get_custom_lexer(extension) or lexer
457 lexer = get_custom_lexer(extension) or lexer
458 self._lexer_cache[filename] = lexer
458 self._lexer_cache[filename] = lexer
459 return self._lexer_cache[filename]
459 return self._lexer_cache[filename]
460
460
    def render_patch(self, patch):
        """
        Render a single patch dict into a filediff AttributeDict: resolves
        source/target filenodes and lexers according to the highlight mode,
        then parses every hunk. Patches with no chunks get a synthetic
        ``hunk_ops`` hunk so their operation line can still be commented on.

        :param patch: one patch dict as produced by diffs.DiffProcessor
        :return: AttributeDict describing the file diff (paths, filenodes,
            modes, hunks, raw_id, ...)
        """
        log.debug('rendering diff for %r', patch['filename'])

        source_filename = patch['original_filename']
        target_filename = patch['filename']

        source_lexer = plain_text_lexer
        target_lexer = plain_text_lexer

        if not patch['stats']['binary']:
            # empty chunk list forces no-highlight mode for this file
            node_hl_mode = self.HL_NONE if patch['chunks'] == [] else None
            hl_mode = node_hl_mode or self.highlight_mode

            if hl_mode == self.HL_REAL:
                # resolve the real filenodes (slow) only for the sides the
                # operation actually touches: D/M have a source, A/M a target
                if (source_filename and patch['operation'] in ('D', 'M')
                    and source_filename not in self.source_nodes):
                    self.source_nodes[source_filename] = (
                        self.source_node_getter(source_filename))

                if (target_filename and patch['operation'] in ('A', 'M')
                    and target_filename not in self.target_nodes):
                    self.target_nodes[target_filename] = (
                        self.target_node_getter(target_filename))

            elif hl_mode == self.HL_FAST:
                source_lexer = self._get_lexer_for_filename(source_filename)
                target_lexer = self._get_lexer_for_filename(target_filename)

        # fall back to the bare filename string when no node was resolved
        source_file = self.source_nodes.get(source_filename, source_filename)
        target_file = self.target_nodes.get(target_filename, target_filename)
        raw_id_uid = ''
        if self.source_nodes.get(source_filename):
            raw_id_uid = self.source_nodes[source_filename].commit.raw_id

        if not raw_id_uid and self.target_nodes.get(target_filename):
            # in case this is a new file we only have it in target
            raw_id_uid = self.target_nodes[target_filename].commit.raw_id

        source_filenode, target_filenode = None, None

        # TODO: dan: FileNode.lexer works on the content of the file - which
        # can be slow - issue #4289 explains a lexer clean up - which once
        # done can allow caching a lexer for a filenode to avoid the file lookup
        if isinstance(source_file, FileNode):
            source_filenode = source_file
            #source_lexer = source_file.lexer
            source_lexer = self._get_lexer_for_filename(source_filename)
            source_file.lexer = source_lexer

        if isinstance(target_file, FileNode):
            target_filenode = target_file
            #target_lexer = target_file.lexer
            target_lexer = self._get_lexer_for_filename(target_filename)
            target_file.lexer = target_lexer

        source_file_path, target_file_path = None, None

        # '/dev/null' marks a missing side (added/removed file)
        if source_filename != '/dev/null':
            source_file_path = source_filename
        if target_filename != '/dev/null':
            target_file_path = target_filename

        source_file_type = source_lexer.name
        target_file_type = target_lexer.name

        filediff = AttributeDict({
            'source_file_path': source_file_path,
            'target_file_path': target_file_path,
            'source_filenode': source_filenode,
            'target_filenode': target_filenode,
            # NOTE(review): source/target file types look swapped here
            # (source gets target_file_type and vice versa) — confirm whether
            # this cross-assignment is intentional before relying on it
            'source_file_type': target_file_type,
            'target_file_type': source_file_type,
            'patch': {'filename': patch['filename'], 'stats': patch['stats']},
            'operation': patch['operation'],
            'source_mode': patch['stats']['old_mode'],
            'target_mode': patch['stats']['new_mode'],
            'limited_diff': patch['is_limited_diff'],
            'hunks': [],
            'hunk_ops': None,
            'diffset': self,
            'raw_id': raw_id_uid,
        })

        # the first chunk holds header/ops info, real hunks start at index 1
        file_chunks = patch['chunks'][1:]
        for i, hunk in enumerate(file_chunks, 1):
            hunkbit = self.parse_hunk(hunk, source_file, target_file)
            hunkbit.source_file_path = source_file_path
            hunkbit.target_file_path = target_file_path
            hunkbit.index = i
            filediff.hunks.append(hunkbit)

        # Simulate hunk on OPS type line which doesn't really contain any diff
        # this allows commenting on those
        if not file_chunks:
            actions = []
            for op_id, op_text in filediff.patch['stats']['ops'].items():
                if op_id == DEL_FILENODE:
                    actions.append('file was removed')
                elif op_id == BIN_FILENODE:
                    actions.append('binary diff hidden')
                else:
                    actions.append(safe_unicode(op_text))
            # NOTE(review): due to precedence the '+' binds before 'or', so the
            # left operand is always truthy and 'UNDEFINED_ACTION' can never
            # be selected — confirm intended behavior
            action_line = 'NO CONTENT: ' + \
                          ', '.join(actions) or 'UNDEFINED_ACTION'

            hunk_ops = {'source_length': 0, 'source_start': 0,
                        'lines': [
                            {'new_lineno': 0, 'old_lineno': 1,
                             'action': 'unmod-no-hl', 'line': action_line}
                        ],
                        'section_header': '', 'target_start': 1, 'target_length': 1}

            hunkbit = self.parse_hunk(hunk_ops, source_file, target_file)
            hunkbit.source_file_path = source_file_path
            hunkbit.target_file_path = target_file_path
            filediff.hunk_ops = hunkbit
        return filediff
578
578
    def parse_hunk(self, hunk, source_file, target_file):
        """
        Parse a single hunk dict into an AttributeDict with its header
        metadata plus fully rendered lines, in both side-by-side
        (``sideside``) and unified (``unified``) form.

        :param hunk: hunk dict with 'source_start'/'source_length'/
            'target_start'/'target_length'/'section_header'/'lines'
        :param source_file: FileNode or filename for the old side
        :param target_file: FileNode or filename for the new side
        """
        result = AttributeDict(dict(
            source_start=hunk['source_start'],
            source_length=hunk['source_length'],
            target_start=hunk['target_start'],
            target_length=hunk['target_length'],
            section_header=hunk['section_header'],
            lines=[],
        ))
        # pending runs of removed (before) and added (after) lines; a context
        # line flushes the accumulated run through parse_lines so del/add
        # blocks get paired up for intra-line diffing
        before, after = [], []

        for line in hunk['lines']:
            if line['action'] in ['unmod', 'unmod-no-hl']:
                no_hl = line['action'] == 'unmod-no-hl'
                # flush the pending del/add run, then carry the context line
                # on both sides
                result.lines.extend(
                    self.parse_lines(before, after, source_file, target_file, no_hl=no_hl))
                after.append(line)
                before.append(line)
            elif line['action'] == 'add':
                after.append(line)
            elif line['action'] == 'del':
                before.append(line)
            elif line['action'] == 'old-no-nl':
                # "\ No newline at end of file" markers stay on their side
                before.append(line)
            elif line['action'] == 'new-no-nl':
                after.append(line)

        # final flush; highlight is skipped only when every buffered line is
        # a no-highlight context line
        all_actions = [x['action'] for x in after] + [x['action'] for x in before]
        no_hl = {x for x in all_actions} == {'unmod-no-hl'}
        result.lines.extend(
            self.parse_lines(before, after, source_file, target_file, no_hl=no_hl))
        # NOTE(marcink): we must keep list() call here so we can cache the result...
        result.unified = list(self.as_unified(result.lines))
        result.sideside = result.lines

        return result
615
615
616 def parse_lines(self, before_lines, after_lines, source_file, target_file,
616 def parse_lines(self, before_lines, after_lines, source_file, target_file,
617 no_hl=False):
617 no_hl=False):
618 # TODO: dan: investigate doing the diff comparison and fast highlighting
618 # TODO: dan: investigate doing the diff comparison and fast highlighting
619 # on the entire before and after buffered block lines rather than by
619 # on the entire before and after buffered block lines rather than by
620 # line, this means we can get better 'fast' highlighting if the context
620 # line, this means we can get better 'fast' highlighting if the context
621 # allows it - eg.
621 # allows it - eg.
622 # line 4: """
622 # line 4: """
623 # line 5: this gets highlighted as a string
623 # line 5: this gets highlighted as a string
624 # line 6: """
624 # line 6: """
625
625
626 lines = []
626 lines = []
627
627
628 before_newline = AttributeDict()
628 before_newline = AttributeDict()
629 after_newline = AttributeDict()
629 after_newline = AttributeDict()
630 if before_lines and before_lines[-1]['action'] == 'old-no-nl':
630 if before_lines and before_lines[-1]['action'] == 'old-no-nl':
631 before_newline_line = before_lines.pop(-1)
631 before_newline_line = before_lines.pop(-1)
632 before_newline.content = '\n {}'.format(
632 before_newline.content = '\n {}'.format(
633 render_tokenstream(
633 render_tokenstream(
634 [(x[0], '', x[1])
634 [(x[0], '', x[1])
635 for x in [('nonl', before_newline_line['line'])]]))
635 for x in [('nonl', before_newline_line['line'])]]))
636
636
637 if after_lines and after_lines[-1]['action'] == 'new-no-nl':
637 if after_lines and after_lines[-1]['action'] == 'new-no-nl':
638 after_newline_line = after_lines.pop(-1)
638 after_newline_line = after_lines.pop(-1)
639 after_newline.content = '\n {}'.format(
639 after_newline.content = '\n {}'.format(
640 render_tokenstream(
640 render_tokenstream(
641 [(x[0], '', x[1])
641 [(x[0], '', x[1])
642 for x in [('nonl', after_newline_line['line'])]]))
642 for x in [('nonl', after_newline_line['line'])]]))
643
643
644 while before_lines or after_lines:
644 while before_lines or after_lines:
645 before, after = None, None
645 before, after = None, None
646 before_tokens, after_tokens = None, None
646 before_tokens, after_tokens = None, None
647
647
648 if before_lines:
648 if before_lines:
649 before = before_lines.pop(0)
649 before = before_lines.pop(0)
650 if after_lines:
650 if after_lines:
651 after = after_lines.pop(0)
651 after = after_lines.pop(0)
652
652
653 original = AttributeDict()
653 original = AttributeDict()
654 modified = AttributeDict()
654 modified = AttributeDict()
655
655
656 if before:
656 if before:
657 if before['action'] == 'old-no-nl':
657 if before['action'] == 'old-no-nl':
658 before_tokens = [('nonl', before['line'])]
658 before_tokens = [('nonl', before['line'])]
659 else:
659 else:
660 before_tokens = self.get_line_tokens(
660 before_tokens = self.get_line_tokens(
661 line_text=before['line'], line_number=before['old_lineno'],
661 line_text=before['line'], line_number=before['old_lineno'],
662 input_file=source_file, no_hl=no_hl, source='before')
662 input_file=source_file, no_hl=no_hl, source='before')
663 original.lineno = before['old_lineno']
663 original.lineno = before['old_lineno']
664 original.content = before['line']
664 original.content = before['line']
665 original.action = self.action_to_op(before['action'])
665 original.action = self.action_to_op(before['action'])
666
666
667 original.get_comment_args = (
667 original.get_comment_args = (
668 source_file, 'o', before['old_lineno'])
668 source_file, 'o', before['old_lineno'])
669
669
670 if after:
670 if after:
671 if after['action'] == 'new-no-nl':
671 if after['action'] == 'new-no-nl':
672 after_tokens = [('nonl', after['line'])]
672 after_tokens = [('nonl', after['line'])]
673 else:
673 else:
674 after_tokens = self.get_line_tokens(
674 after_tokens = self.get_line_tokens(
675 line_text=after['line'], line_number=after['new_lineno'],
675 line_text=after['line'], line_number=after['new_lineno'],
676 input_file=target_file, no_hl=no_hl, source='after')
676 input_file=target_file, no_hl=no_hl, source='after')
677 modified.lineno = after['new_lineno']
677 modified.lineno = after['new_lineno']
678 modified.content = after['line']
678 modified.content = after['line']
679 modified.action = self.action_to_op(after['action'])
679 modified.action = self.action_to_op(after['action'])
680
680
681 modified.get_comment_args = (target_file, 'n', after['new_lineno'])
681 modified.get_comment_args = (target_file, 'n', after['new_lineno'])
682
682
683 # diff the lines
683 # diff the lines
684 if before_tokens and after_tokens:
684 if before_tokens and after_tokens:
685 o_tokens, m_tokens, similarity = tokens_diff(
685 o_tokens, m_tokens, similarity = tokens_diff(
686 before_tokens, after_tokens)
686 before_tokens, after_tokens)
687 original.content = render_tokenstream(o_tokens)
687 original.content = render_tokenstream(o_tokens)
688 modified.content = render_tokenstream(m_tokens)
688 modified.content = render_tokenstream(m_tokens)
689 elif before_tokens:
689 elif before_tokens:
690 original.content = render_tokenstream(
690 original.content = render_tokenstream(
691 [(x[0], '', x[1]) for x in before_tokens])
691 [(x[0], '', x[1]) for x in before_tokens])
692 elif after_tokens:
692 elif after_tokens:
693 modified.content = render_tokenstream(
693 modified.content = render_tokenstream(
694 [(x[0], '', x[1]) for x in after_tokens])
694 [(x[0], '', x[1]) for x in after_tokens])
695
695
696 if not before_lines and before_newline:
696 if not before_lines and before_newline:
697 original.content += before_newline.content
697 original.content += before_newline.content
698 before_newline = None
698 before_newline = None
699 if not after_lines and after_newline:
699 if not after_lines and after_newline:
700 modified.content += after_newline.content
700 modified.content += after_newline.content
701 after_newline = None
701 after_newline = None
702
702
703 lines.append(AttributeDict({
703 lines.append(AttributeDict({
704 'original': original,
704 'original': original,
705 'modified': modified,
705 'modified': modified,
706 }))
706 }))
707
707
708 return lines
708 return lines
709
709
710 def get_line_tokens(self, line_text, line_number, input_file=None, no_hl=False, source=''):
710 def get_line_tokens(self, line_text, line_number, input_file=None, no_hl=False, source=''):
711 filenode = None
711 filenode = None
712 filename = None
712 filename = None
713
713
714 if isinstance(input_file, str):
714 if isinstance(input_file, str):
715 filename = input_file
715 filename = input_file
716 elif isinstance(input_file, FileNode):
716 elif isinstance(input_file, FileNode):
717 filenode = input_file
717 filenode = input_file
718 filename = input_file.unicode_path
718 filename = input_file.unicode_path
719
719
720 hl_mode = self.HL_NONE if no_hl else self.highlight_mode
720 hl_mode = self.HL_NONE if no_hl else self.highlight_mode
721 if hl_mode == self.HL_REAL and filenode:
721 if hl_mode == self.HL_REAL and filenode:
722 lexer = self._get_lexer_for_filename(filename)
722 lexer = self._get_lexer_for_filename(filename)
723 file_size_allowed = input_file.size < self.max_file_size_limit
723 file_size_allowed = input_file.size < self.max_file_size_limit
724 if line_number and file_size_allowed:
724 if line_number and file_size_allowed:
725 return self.get_tokenized_filenode_line(input_file, line_number, lexer, source)
725 return self.get_tokenized_filenode_line(input_file, line_number, lexer, source)
726
726
727 if hl_mode in (self.HL_REAL, self.HL_FAST) and filename:
727 if hl_mode in (self.HL_REAL, self.HL_FAST) and filename:
728 lexer = self._get_lexer_for_filename(filename)
728 lexer = self._get_lexer_for_filename(filename)
729 return list(tokenize_string(line_text, lexer))
729 return list(tokenize_string(line_text, lexer))
730
730
731 return list(tokenize_string(line_text, plain_text_lexer))
731 return list(tokenize_string(line_text, plain_text_lexer))
732
732
733 def get_tokenized_filenode_line(self, filenode, line_number, lexer=None, source=''):
733 def get_tokenized_filenode_line(self, filenode, line_number, lexer=None, source=''):
734
734
735 def tokenize(_filenode):
735 def tokenize(_filenode):
736 self.highlighted_filenodes[source][filenode] = filenode_as_lines_tokens(filenode, lexer)
736 self.highlighted_filenodes[source][filenode] = filenode_as_lines_tokens(filenode, lexer)
737
737
738 if filenode not in self.highlighted_filenodes[source]:
738 if filenode not in self.highlighted_filenodes[source]:
739 tokenize(filenode)
739 tokenize(filenode)
740
740
741 try:
741 try:
742 return self.highlighted_filenodes[source][filenode][line_number - 1]
742 return self.highlighted_filenodes[source][filenode][line_number - 1]
743 except Exception:
743 except Exception:
744 log.exception('diff rendering error')
744 log.exception('diff rendering error')
745 return [('', 'L{}: rhodecode diff rendering error'.format(line_number))]
745 return [('', 'L{}: rhodecode diff rendering error'.format(line_number))]
746
746
747 def action_to_op(self, action):
747 def action_to_op(self, action):
748 return {
748 return {
749 'add': '+',
749 'add': '+',
750 'del': '-',
750 'del': '-',
751 'unmod': ' ',
751 'unmod': ' ',
752 'unmod-no-hl': ' ',
752 'unmod-no-hl': ' ',
753 'old-no-nl': ' ',
753 'old-no-nl': ' ',
754 'new-no-nl': ' ',
754 'new-no-nl': ' ',
755 }.get(action, action)
755 }.get(action, action)
756
756
757 def as_unified(self, lines):
757 def as_unified(self, lines):
758 """
758 """
759 Return a generator that yields the lines of a diff in unified order
759 Return a generator that yields the lines of a diff in unified order
760 """
760 """
761 def generator():
761 def generator():
762 buf = []
762 buf = []
763 for line in lines:
763 for line in lines:
764
764
765 if buf and not line.original or line.original.action == ' ':
765 if buf and not line.original or line.original.action == ' ':
766 for b in buf:
766 for b in buf:
767 yield b
767 yield b
768 buf = []
768 buf = []
769
769
770 if line.original:
770 if line.original:
771 if line.original.action == ' ':
771 if line.original.action == ' ':
772 yield (line.original.lineno, line.modified.lineno,
772 yield (line.original.lineno, line.modified.lineno,
773 line.original.action, line.original.content,
773 line.original.action, line.original.content,
774 line.original.get_comment_args)
774 line.original.get_comment_args)
775 continue
775 continue
776
776
777 if line.original.action == '-':
777 if line.original.action == '-':
778 yield (line.original.lineno, None,
778 yield (line.original.lineno, None,
779 line.original.action, line.original.content,
779 line.original.action, line.original.content,
780 line.original.get_comment_args)
780 line.original.get_comment_args)
781
781
782 if line.modified.action == '+':
782 if line.modified.action == '+':
783 buf.append((
783 buf.append((
784 None, line.modified.lineno,
784 None, line.modified.lineno,
785 line.modified.action, line.modified.content,
785 line.modified.action, line.modified.content,
786 line.modified.get_comment_args))
786 line.modified.get_comment_args))
787 continue
787 continue
788
788
789 if line.modified:
789 if line.modified:
790 yield (None, line.modified.lineno,
790 yield (None, line.modified.lineno,
791 line.modified.action, line.modified.content,
791 line.modified.action, line.modified.content,
792 line.modified.get_comment_args)
792 line.modified.get_comment_args)
793
793
794 for b in buf:
794 for b in buf:
795 yield b
795 yield b
796
796
797 return generator()
797 return generator()
@@ -1,30 +1,30 b''
1 # -*- coding: utf-8 -*-
1
2
2
3 # Copyright (C) 2016-2020 RhodeCode GmbH
3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 def strip_whitespace(value):
22 def strip_whitespace(value):
23 """
23 """
24 Removes leading/trailing whitespace, newlines, and tabs from the value.
24 Removes leading/trailing whitespace, newlines, and tabs from the value.
25 Implements the `colander.interface.Preparer` interface.
25 Implements the `colander.interface.Preparer` interface.
26 """
26 """
27 if isinstance(value, str):
27 if isinstance(value, str):
28 return value.strip(' \t\n\r')
28 return value.strip(' \t\n\r')
29 else:
29 else:
30 return value
30 return value
@@ -1,51 +1,50 b''
1 # -*- coding: utf-8 -*-
2
1
3 # Copyright (C) 2010-2020 RhodeCode GmbH
2 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
3 #
5 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
8 #
7 #
9 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
11 # GNU General Public License for more details.
13 #
12 #
14 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
15 #
17 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
19
21 """
20 """
22 Legacy support for old logging formatter names.
21 Legacy support for old logging formatter names.
23
22
24 .. deprecated:: 3.2.0
23 .. deprecated:: 3.2.0
25
24
26 Got replaced by rhodecode.lib.logging_formatter. This module stays for a
25 Got replaced by rhodecode.lib.logging_formatter. This module stays for a
27 few versions to ease the migration of INI files.
26 few versions to ease the migration of INI files.
28
27
29 """
28 """
30
29
31 import warnings
30 import warnings
32
31
33 from rhodecode.lib import logging_formatter
32 from rhodecode.lib import logging_formatter
34
33
35
34
36 def _deprecated_formatter(name):
35 def _deprecated_formatter(name):
37 BaseFormatter = getattr(logging_formatter, name)
36 BaseFormatter = getattr(logging_formatter, name)
38
37
39 class LegacyFormatter(BaseFormatter):
38 class LegacyFormatter(BaseFormatter):
40
39
41 def __init__(self, *args, **kwargs):
40 def __init__(self, *args, **kwargs):
42 warnings.warn(
41 warnings.warn(
43 "Use rhodecode.lib.logging_formatter.%s instead." % name,
42 "Use rhodecode.lib.logging_formatter.%s instead." % name,
44 DeprecationWarning)
43 DeprecationWarning)
45 BaseFormatter.__init__(self, *args, **kwargs)
44 BaseFormatter.__init__(self, *args, **kwargs)
46
45
47 return LegacyFormatter
46 return LegacyFormatter
48
47
49
48
50 ColorFormatter = _deprecated_formatter('ColorFormatter')
49 ColorFormatter = _deprecated_formatter('ColorFormatter')
51 ColorFormatterSql = _deprecated_formatter('ColorFormatterSql')
50 ColorFormatterSql = _deprecated_formatter('ColorFormatterSql')
@@ -1,79 +1,78 b''
1 # -*- coding: utf-8 -*-
2
1
3 # Copyright (C) 2010-2020 RhodeCode GmbH
2 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
3 #
5 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
8 #
7 #
9 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
11 # GNU General Public License for more details.
13 #
12 #
14 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
15 #
17 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
19
21 """
20 """
22 Provides utilities around date and time handling
21 Provides utilities around date and time handling
23 """
22 """
24
23
25 import datetime
24 import datetime
26 import time
25 import time
27
26
28
27
29 def makedate():
28 def makedate():
30 lt = time.localtime()
29 lt = time.localtime()
31 if lt[8] == 1 and time.daylight:
30 if lt[8] == 1 and time.daylight:
32 tz = time.altzone
31 tz = time.altzone
33 else:
32 else:
34 tz = time.timezone
33 tz = time.timezone
35 return time.mktime(lt), tz
34 return time.mktime(lt), tz
36
35
37
36
38 def utcdate_fromtimestamp(unixts, tzoffset=0):
37 def utcdate_fromtimestamp(unixts, tzoffset=0):
39 """
38 """
40 Makes a local datetime object out of unix timestamp
39 Makes a local datetime object out of unix timestamp
41
40
42 :param unixts:
41 :param unixts:
43 :param tzoffset:
42 :param tzoffset:
44 """
43 """
45
44
46 return datetime.datetime.utcfromtimestamp(float(unixts))
45 return datetime.datetime.utcfromtimestamp(float(unixts))
47
46
48
47
49 def date_astimestamp(value):
48 def date_astimestamp(value):
50 """
49 """
51 Convert a given `datetime.datetime` into a `float` like `time.time`
50 Convert a given `datetime.datetime` into a `float` like `time.time`
52 """
51 """
53 return time.mktime(value.timetuple()) + value.microsecond / 1E6
52 return time.mktime(value.timetuple()) + value.microsecond / 1E6
54
53
55
54
56 def date_to_timestamp_plus_offset(value):
55 def date_to_timestamp_plus_offset(value):
57 """
56 """
58 Convert a given `datetime.datetime` into a unix timestamp and offset.
57 Convert a given `datetime.datetime` into a unix timestamp and offset.
59 """
58 """
60 # TODO: johbo: The time handling looks quite fragile here since we mix
59 # TODO: johbo: The time handling looks quite fragile here since we mix
61 # system time zones with naive datetime instances.
60 # system time zones with naive datetime instances.
62 if value is None:
61 if value is None:
63 value = time.time()
62 value = time.time()
64 elif isinstance(value, datetime.datetime):
63 elif isinstance(value, datetime.datetime):
65 assert not is_aware(value), (
64 assert not is_aware(value), (
66 "This code is not prepared to handle aware datetime instances")
65 "This code is not prepared to handle aware datetime instances")
67 value = date_astimestamp(value)
66 value = date_astimestamp(value)
68 return (value, time.timezone)
67 return (value, time.timezone)
69
68
70
69
71 def is_aware(value):
70 def is_aware(value):
72 """
71 """
73 Determines if a given datetime.time is aware.
72 Determines if a given datetime.time is aware.
74
73
75 The logic is described in Python's docs:
74 The logic is described in Python's docs:
76 http://docs.python.org/library/datetime.html#datetime.tzinfo
75 http://docs.python.org/library/datetime.html#datetime.tzinfo
77 """
76 """
78 return (value.tzinfo is not None
77 return (value.tzinfo is not None
79 and value.tzinfo.utcoffset(value) is not None)
78 and value.tzinfo.utcoffset(value) is not None)
@@ -1,680 +1,679 b''
1 # -*- coding: utf-8 -*-
2
1
3 # Copyright (C) 2010-2020 RhodeCode GmbH
2 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
3 #
5 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
8 #
7 #
9 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
11 # GNU General Public License for more details.
13 #
12 #
14 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
15 #
17 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
19
21 """
20 """
22 Database creation, and setup module for RhodeCode Enterprise. Used for creation
21 Database creation, and setup module for RhodeCode Enterprise. Used for creation
23 of database as well as for migration operations
22 of database as well as for migration operations
24 """
23 """
25
24
26 import os
25 import os
27 import sys
26 import sys
28 import time
27 import time
29 import uuid
28 import uuid
30 import logging
29 import logging
31 import getpass
30 import getpass
32 from os.path import dirname as dn, join as jn
31 from os.path import dirname as dn, join as jn
33
32
34 from sqlalchemy.engine import create_engine
33 from sqlalchemy.engine import create_engine
35
34
36 from rhodecode import __dbversion__
35 from rhodecode import __dbversion__
37 from rhodecode.model import init_model
36 from rhodecode.model import init_model
38 from rhodecode.model.user import UserModel
37 from rhodecode.model.user import UserModel
39 from rhodecode.model.db import (
38 from rhodecode.model.db import (
40 User, Permission, RhodeCodeUi, RhodeCodeSetting, UserToPerm,
39 User, Permission, RhodeCodeUi, RhodeCodeSetting, UserToPerm,
41 DbMigrateVersion, RepoGroup, UserRepoGroupToPerm, CacheKey, Repository)
40 DbMigrateVersion, RepoGroup, UserRepoGroupToPerm, CacheKey, Repository)
42 from rhodecode.model.meta import Session, Base
41 from rhodecode.model.meta import Session, Base
43 from rhodecode.model.permission import PermissionModel
42 from rhodecode.model.permission import PermissionModel
44 from rhodecode.model.repo import RepoModel
43 from rhodecode.model.repo import RepoModel
45 from rhodecode.model.repo_group import RepoGroupModel
44 from rhodecode.model.repo_group import RepoGroupModel
46 from rhodecode.model.settings import SettingsModel
45 from rhodecode.model.settings import SettingsModel
47
46
48
47
49 log = logging.getLogger(__name__)
48 log = logging.getLogger(__name__)
50
49
51
50
52 def notify(msg):
51 def notify(msg):
53 """
52 """
54 Notification for migrations messages
53 Notification for migrations messages
55 """
54 """
56 ml = len(msg) + (4 * 2)
55 ml = len(msg) + (4 * 2)
57 print(('\n%s\n*** %s ***\n%s' % ('*' * ml, msg, '*' * ml)).upper())
56 print(('\n%s\n*** %s ***\n%s' % ('*' * ml, msg, '*' * ml)).upper())
58
57
59
58
60 class DbManage(object):
59 class DbManage(object):
61
60
62 def __init__(self, log_sql, dbconf, root, tests=False,
61 def __init__(self, log_sql, dbconf, root, tests=False,
63 SESSION=None, cli_args=None):
62 SESSION=None, cli_args=None):
64 self.dbname = dbconf.split('/')[-1]
63 self.dbname = dbconf.split('/')[-1]
65 self.tests = tests
64 self.tests = tests
66 self.root = root
65 self.root = root
67 self.dburi = dbconf
66 self.dburi = dbconf
68 self.log_sql = log_sql
67 self.log_sql = log_sql
69 self.cli_args = cli_args or {}
68 self.cli_args = cli_args or {}
70 self.init_db(SESSION=SESSION)
69 self.init_db(SESSION=SESSION)
71 self.ask_ok = self.get_ask_ok_func(self.cli_args.get('force_ask'))
70 self.ask_ok = self.get_ask_ok_func(self.cli_args.get('force_ask'))
72
71
73 def db_exists(self):
72 def db_exists(self):
74 if not self.sa:
73 if not self.sa:
75 self.init_db()
74 self.init_db()
76 try:
75 try:
77 self.sa.query(RhodeCodeUi)\
76 self.sa.query(RhodeCodeUi)\
78 .filter(RhodeCodeUi.ui_key == '/')\
77 .filter(RhodeCodeUi.ui_key == '/')\
79 .scalar()
78 .scalar()
80 return True
79 return True
81 except Exception:
80 except Exception:
82 return False
81 return False
83 finally:
82 finally:
84 self.sa.rollback()
83 self.sa.rollback()
85
84
86 def get_ask_ok_func(self, param):
85 def get_ask_ok_func(self, param):
87 if param not in [None]:
86 if param not in [None]:
88 # return a function lambda that has a default set to param
87 # return a function lambda that has a default set to param
89 return lambda *args, **kwargs: param
88 return lambda *args, **kwargs: param
90 else:
89 else:
91 from rhodecode.lib.utils import ask_ok
90 from rhodecode.lib.utils import ask_ok
92 return ask_ok
91 return ask_ok
93
92
94 def init_db(self, SESSION=None):
93 def init_db(self, SESSION=None):
95 if SESSION:
94 if SESSION:
96 self.sa = SESSION
95 self.sa = SESSION
97 else:
96 else:
98 # init new sessions
97 # init new sessions
99 engine = create_engine(self.dburi, echo=self.log_sql)
98 engine = create_engine(self.dburi, echo=self.log_sql)
100 init_model(engine)
99 init_model(engine)
101 self.sa = Session()
100 self.sa = Session()
102
101
103 def create_tables(self, override=False):
102 def create_tables(self, override=False):
104 """
103 """
105 Create a auth database
104 Create a auth database
106 """
105 """
107
106
108 log.info("Existing database with the same name is going to be destroyed.")
107 log.info("Existing database with the same name is going to be destroyed.")
109 log.info("Setup command will run DROP ALL command on that database.")
108 log.info("Setup command will run DROP ALL command on that database.")
110 if self.tests:
109 if self.tests:
111 destroy = True
110 destroy = True
112 else:
111 else:
113 destroy = self.ask_ok('Are you sure that you want to destroy the old database? [y/n]')
112 destroy = self.ask_ok('Are you sure that you want to destroy the old database? [y/n]')
114 if not destroy:
113 if not destroy:
115 log.info('db tables bootstrap: Nothing done.')
114 log.info('db tables bootstrap: Nothing done.')
116 sys.exit(0)
115 sys.exit(0)
117 if destroy:
116 if destroy:
118 Base.metadata.drop_all()
117 Base.metadata.drop_all()
119
118
120 checkfirst = not override
119 checkfirst = not override
121 Base.metadata.create_all(checkfirst=checkfirst)
120 Base.metadata.create_all(checkfirst=checkfirst)
122 log.info('Created tables for %s', self.dbname)
121 log.info('Created tables for %s', self.dbname)
123
122
124 def set_db_version(self):
123 def set_db_version(self):
125 ver = DbMigrateVersion()
124 ver = DbMigrateVersion()
126 ver.version = __dbversion__
125 ver.version = __dbversion__
127 ver.repository_id = 'rhodecode_db_migrations'
126 ver.repository_id = 'rhodecode_db_migrations'
128 ver.repository_path = 'versions'
127 ver.repository_path = 'versions'
129 self.sa.add(ver)
128 self.sa.add(ver)
130 log.info('db version set to: %s', __dbversion__)
129 log.info('db version set to: %s', __dbversion__)
131
130
132 def run_post_migration_tasks(self):
131 def run_post_migration_tasks(self):
133 """
132 """
134 Run various tasks before actually doing migrations
133 Run various tasks before actually doing migrations
135 """
134 """
136 # delete cache keys on each upgrade
135 # delete cache keys on each upgrade
137 total = CacheKey.query().count()
136 total = CacheKey.query().count()
138 log.info("Deleting (%s) cache keys now...", total)
137 log.info("Deleting (%s) cache keys now...", total)
139 CacheKey.delete_all_cache()
138 CacheKey.delete_all_cache()
140
139
141 def upgrade(self, version=None):
140 def upgrade(self, version=None):
142 """
141 """
143 Upgrades given database schema to given revision following
142 Upgrades given database schema to given revision following
144 all needed steps, to perform the upgrade
143 all needed steps, to perform the upgrade
145
144
146 """
145 """
147
146
148 from rhodecode.lib.dbmigrate.migrate.versioning import api
147 from rhodecode.lib.dbmigrate.migrate.versioning import api
149 from rhodecode.lib.dbmigrate.migrate.exceptions import \
148 from rhodecode.lib.dbmigrate.migrate.exceptions import \
150 DatabaseNotControlledError
149 DatabaseNotControlledError
151
150
152 if 'sqlite' in self.dburi:
151 if 'sqlite' in self.dburi:
153 print(
152 print(
154 '********************** WARNING **********************\n'
153 '********************** WARNING **********************\n'
155 'Make sure your version of sqlite is at least 3.7.X. \n'
154 'Make sure your version of sqlite is at least 3.7.X. \n'
156 'Earlier versions are known to fail on some migrations\n'
155 'Earlier versions are known to fail on some migrations\n'
157 '*****************************************************\n')
156 '*****************************************************\n')
158
157
159 upgrade = self.ask_ok(
158 upgrade = self.ask_ok(
160 'You are about to perform a database upgrade. Make '
159 'You are about to perform a database upgrade. Make '
161 'sure you have backed up your database. '
160 'sure you have backed up your database. '
162 'Continue ? [y/n]')
161 'Continue ? [y/n]')
163 if not upgrade:
162 if not upgrade:
164 log.info('No upgrade performed')
163 log.info('No upgrade performed')
165 sys.exit(0)
164 sys.exit(0)
166
165
167 repository_path = jn(dn(dn(dn(os.path.realpath(__file__)))),
166 repository_path = jn(dn(dn(dn(os.path.realpath(__file__)))),
168 'rhodecode/lib/dbmigrate')
167 'rhodecode/lib/dbmigrate')
169 db_uri = self.dburi
168 db_uri = self.dburi
170
169
171 if version:
170 if version:
172 DbMigrateVersion.set_version(version)
171 DbMigrateVersion.set_version(version)
173
172
174 try:
173 try:
175 curr_version = api.db_version(db_uri, repository_path)
174 curr_version = api.db_version(db_uri, repository_path)
176 msg = ('Found current database db_uri under version '
175 msg = ('Found current database db_uri under version '
177 'control with version {}'.format(curr_version))
176 'control with version {}'.format(curr_version))
178
177
179 except (RuntimeError, DatabaseNotControlledError):
178 except (RuntimeError, DatabaseNotControlledError):
180 curr_version = 1
179 curr_version = 1
181 msg = ('Current database is not under version control. Setting '
180 msg = ('Current database is not under version control. Setting '
182 'as version %s' % curr_version)
181 'as version %s' % curr_version)
183 api.version_control(db_uri, repository_path, curr_version)
182 api.version_control(db_uri, repository_path, curr_version)
184
183
185 notify(msg)
184 notify(msg)
186
185
187
186
188 if curr_version == __dbversion__:
187 if curr_version == __dbversion__:
189 log.info('This database is already at the newest version')
188 log.info('This database is already at the newest version')
190 sys.exit(0)
189 sys.exit(0)
191
190
192 upgrade_steps = range(curr_version + 1, __dbversion__ + 1)
191 upgrade_steps = range(curr_version + 1, __dbversion__ + 1)
193 notify('attempting to upgrade database from '
192 notify('attempting to upgrade database from '
194 'version %s to version %s' % (curr_version, __dbversion__))
193 'version %s to version %s' % (curr_version, __dbversion__))
195
194
196 # CALL THE PROPER ORDER OF STEPS TO PERFORM FULL UPGRADE
195 # CALL THE PROPER ORDER OF STEPS TO PERFORM FULL UPGRADE
197 _step = None
196 _step = None
198 for step in upgrade_steps:
197 for step in upgrade_steps:
199 notify('performing upgrade step %s' % step)
198 notify('performing upgrade step %s' % step)
200 time.sleep(0.5)
199 time.sleep(0.5)
201
200
202 api.upgrade(db_uri, repository_path, step)
201 api.upgrade(db_uri, repository_path, step)
203 self.sa.rollback()
202 self.sa.rollback()
204 notify('schema upgrade for step %s completed' % (step,))
203 notify('schema upgrade for step %s completed' % (step,))
205
204
206 _step = step
205 _step = step
207
206
208 self.run_post_migration_tasks()
207 self.run_post_migration_tasks()
209 notify('upgrade to version %s successful' % _step)
208 notify('upgrade to version %s successful' % _step)
210
209
211 def fix_repo_paths(self):
210 def fix_repo_paths(self):
212 """
211 """
213 Fixes an old RhodeCode version path into new one without a '*'
212 Fixes an old RhodeCode version path into new one without a '*'
214 """
213 """
215
214
216 paths = self.sa.query(RhodeCodeUi)\
215 paths = self.sa.query(RhodeCodeUi)\
217 .filter(RhodeCodeUi.ui_key == '/')\
216 .filter(RhodeCodeUi.ui_key == '/')\
218 .scalar()
217 .scalar()
219
218
220 paths.ui_value = paths.ui_value.replace('*', '')
219 paths.ui_value = paths.ui_value.replace('*', '')
221
220
222 try:
221 try:
223 self.sa.add(paths)
222 self.sa.add(paths)
224 self.sa.commit()
223 self.sa.commit()
225 except Exception:
224 except Exception:
226 self.sa.rollback()
225 self.sa.rollback()
227 raise
226 raise
228
227
229 def fix_default_user(self):
228 def fix_default_user(self):
230 """
229 """
231 Fixes an old default user with some 'nicer' default values,
230 Fixes an old default user with some 'nicer' default values,
232 used mostly for anonymous access
231 used mostly for anonymous access
233 """
232 """
234 def_user = self.sa.query(User)\
233 def_user = self.sa.query(User)\
235 .filter(User.username == User.DEFAULT_USER)\
234 .filter(User.username == User.DEFAULT_USER)\
236 .one()
235 .one()
237
236
238 def_user.name = 'Anonymous'
237 def_user.name = 'Anonymous'
239 def_user.lastname = 'User'
238 def_user.lastname = 'User'
240 def_user.email = User.DEFAULT_USER_EMAIL
239 def_user.email = User.DEFAULT_USER_EMAIL
241
240
242 try:
241 try:
243 self.sa.add(def_user)
242 self.sa.add(def_user)
244 self.sa.commit()
243 self.sa.commit()
245 except Exception:
244 except Exception:
246 self.sa.rollback()
245 self.sa.rollback()
247 raise
246 raise
248
247
249 def fix_settings(self):
248 def fix_settings(self):
250 """
249 """
251 Fixes rhodecode settings and adds ga_code key for google analytics
250 Fixes rhodecode settings and adds ga_code key for google analytics
252 """
251 """
253
252
254 hgsettings3 = RhodeCodeSetting('ga_code', '')
253 hgsettings3 = RhodeCodeSetting('ga_code', '')
255
254
256 try:
255 try:
257 self.sa.add(hgsettings3)
256 self.sa.add(hgsettings3)
258 self.sa.commit()
257 self.sa.commit()
259 except Exception:
258 except Exception:
260 self.sa.rollback()
259 self.sa.rollback()
261 raise
260 raise
262
261
263 def create_admin_and_prompt(self):
262 def create_admin_and_prompt(self):
264
263
265 # defaults
264 # defaults
266 defaults = self.cli_args
265 defaults = self.cli_args
267 username = defaults.get('username')
266 username = defaults.get('username')
268 password = defaults.get('password')
267 password = defaults.get('password')
269 email = defaults.get('email')
268 email = defaults.get('email')
270
269
271 if username is None:
270 if username is None:
272 username = eval(input('Specify admin username:'))
271 username = eval(input('Specify admin username:'))
273 if password is None:
272 if password is None:
274 password = self._get_admin_password()
273 password = self._get_admin_password()
275 if not password:
274 if not password:
276 # second try
275 # second try
277 password = self._get_admin_password()
276 password = self._get_admin_password()
278 if not password:
277 if not password:
279 sys.exit()
278 sys.exit()
280 if email is None:
279 if email is None:
281 email = eval(input('Specify admin email:'))
280 email = eval(input('Specify admin email:'))
282 api_key = self.cli_args.get('api_key')
281 api_key = self.cli_args.get('api_key')
283 self.create_user(username, password, email, True,
282 self.create_user(username, password, email, True,
284 strict_creation_check=False,
283 strict_creation_check=False,
285 api_key=api_key)
284 api_key=api_key)
286
285
287 def _get_admin_password(self):
286 def _get_admin_password(self):
288 password = getpass.getpass('Specify admin password '
287 password = getpass.getpass('Specify admin password '
289 '(min 6 chars):')
288 '(min 6 chars):')
290 confirm = getpass.getpass('Confirm password:')
289 confirm = getpass.getpass('Confirm password:')
291
290
292 if password != confirm:
291 if password != confirm:
293 log.error('passwords mismatch')
292 log.error('passwords mismatch')
294 return False
293 return False
295 if len(password) < 6:
294 if len(password) < 6:
296 log.error('password is too short - use at least 6 characters')
295 log.error('password is too short - use at least 6 characters')
297 return False
296 return False
298
297
299 return password
298 return password
300
299
301 def create_test_admin_and_users(self):
300 def create_test_admin_and_users(self):
302 log.info('creating admin and regular test users')
301 log.info('creating admin and regular test users')
303 from rhodecode.tests import TEST_USER_ADMIN_LOGIN, \
302 from rhodecode.tests import TEST_USER_ADMIN_LOGIN, \
304 TEST_USER_ADMIN_PASS, TEST_USER_ADMIN_EMAIL, \
303 TEST_USER_ADMIN_PASS, TEST_USER_ADMIN_EMAIL, \
305 TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS, \
304 TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS, \
306 TEST_USER_REGULAR_EMAIL, TEST_USER_REGULAR2_LOGIN, \
305 TEST_USER_REGULAR_EMAIL, TEST_USER_REGULAR2_LOGIN, \
307 TEST_USER_REGULAR2_PASS, TEST_USER_REGULAR2_EMAIL
306 TEST_USER_REGULAR2_PASS, TEST_USER_REGULAR2_EMAIL
308
307
309 self.create_user(TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS,
308 self.create_user(TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS,
310 TEST_USER_ADMIN_EMAIL, True, api_key=True)
309 TEST_USER_ADMIN_EMAIL, True, api_key=True)
311
310
312 self.create_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS,
311 self.create_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS,
313 TEST_USER_REGULAR_EMAIL, False, api_key=True)
312 TEST_USER_REGULAR_EMAIL, False, api_key=True)
314
313
315 self.create_user(TEST_USER_REGULAR2_LOGIN, TEST_USER_REGULAR2_PASS,
314 self.create_user(TEST_USER_REGULAR2_LOGIN, TEST_USER_REGULAR2_PASS,
316 TEST_USER_REGULAR2_EMAIL, False, api_key=True)
315 TEST_USER_REGULAR2_EMAIL, False, api_key=True)
317
316
318 def create_ui_settings(self, repo_store_path):
317 def create_ui_settings(self, repo_store_path):
319 """
318 """
320 Creates ui settings, fills out hooks
319 Creates ui settings, fills out hooks
321 and disables dotencode
320 and disables dotencode
322 """
321 """
323 settings_model = SettingsModel(sa=self.sa)
322 settings_model = SettingsModel(sa=self.sa)
324 from rhodecode.lib.vcs.backends.hg import largefiles_store
323 from rhodecode.lib.vcs.backends.hg import largefiles_store
325 from rhodecode.lib.vcs.backends.git import lfs_store
324 from rhodecode.lib.vcs.backends.git import lfs_store
326
325
327 # Build HOOKS
326 # Build HOOKS
328 hooks = [
327 hooks = [
329 (RhodeCodeUi.HOOK_REPO_SIZE, 'python:vcsserver.hooks.repo_size'),
328 (RhodeCodeUi.HOOK_REPO_SIZE, 'python:vcsserver.hooks.repo_size'),
330
329
331 # HG
330 # HG
332 (RhodeCodeUi.HOOK_PRE_PULL, 'python:vcsserver.hooks.pre_pull'),
331 (RhodeCodeUi.HOOK_PRE_PULL, 'python:vcsserver.hooks.pre_pull'),
333 (RhodeCodeUi.HOOK_PULL, 'python:vcsserver.hooks.log_pull_action'),
332 (RhodeCodeUi.HOOK_PULL, 'python:vcsserver.hooks.log_pull_action'),
334 (RhodeCodeUi.HOOK_PRE_PUSH, 'python:vcsserver.hooks.pre_push'),
333 (RhodeCodeUi.HOOK_PRE_PUSH, 'python:vcsserver.hooks.pre_push'),
335 (RhodeCodeUi.HOOK_PRETX_PUSH, 'python:vcsserver.hooks.pre_push'),
334 (RhodeCodeUi.HOOK_PRETX_PUSH, 'python:vcsserver.hooks.pre_push'),
336 (RhodeCodeUi.HOOK_PUSH, 'python:vcsserver.hooks.log_push_action'),
335 (RhodeCodeUi.HOOK_PUSH, 'python:vcsserver.hooks.log_push_action'),
337 (RhodeCodeUi.HOOK_PUSH_KEY, 'python:vcsserver.hooks.key_push'),
336 (RhodeCodeUi.HOOK_PUSH_KEY, 'python:vcsserver.hooks.key_push'),
338
337
339 ]
338 ]
340
339
341 for key, value in hooks:
340 for key, value in hooks:
342 hook_obj = settings_model.get_ui_by_key(key)
341 hook_obj = settings_model.get_ui_by_key(key)
343 hooks2 = hook_obj if hook_obj else RhodeCodeUi()
342 hooks2 = hook_obj if hook_obj else RhodeCodeUi()
344 hooks2.ui_section = 'hooks'
343 hooks2.ui_section = 'hooks'
345 hooks2.ui_key = key
344 hooks2.ui_key = key
346 hooks2.ui_value = value
345 hooks2.ui_value = value
347 self.sa.add(hooks2)
346 self.sa.add(hooks2)
348
347
349 # enable largefiles
348 # enable largefiles
350 largefiles = RhodeCodeUi()
349 largefiles = RhodeCodeUi()
351 largefiles.ui_section = 'extensions'
350 largefiles.ui_section = 'extensions'
352 largefiles.ui_key = 'largefiles'
351 largefiles.ui_key = 'largefiles'
353 largefiles.ui_value = ''
352 largefiles.ui_value = ''
354 self.sa.add(largefiles)
353 self.sa.add(largefiles)
355
354
356 # set default largefiles cache dir, defaults to
355 # set default largefiles cache dir, defaults to
357 # /repo_store_location/.cache/largefiles
356 # /repo_store_location/.cache/largefiles
358 largefiles = RhodeCodeUi()
357 largefiles = RhodeCodeUi()
359 largefiles.ui_section = 'largefiles'
358 largefiles.ui_section = 'largefiles'
360 largefiles.ui_key = 'usercache'
359 largefiles.ui_key = 'usercache'
361 largefiles.ui_value = largefiles_store(repo_store_path)
360 largefiles.ui_value = largefiles_store(repo_store_path)
362
361
363 self.sa.add(largefiles)
362 self.sa.add(largefiles)
364
363
365 # set default lfs cache dir, defaults to
364 # set default lfs cache dir, defaults to
366 # /repo_store_location/.cache/lfs_store
365 # /repo_store_location/.cache/lfs_store
367 lfsstore = RhodeCodeUi()
366 lfsstore = RhodeCodeUi()
368 lfsstore.ui_section = 'vcs_git_lfs'
367 lfsstore.ui_section = 'vcs_git_lfs'
369 lfsstore.ui_key = 'store_location'
368 lfsstore.ui_key = 'store_location'
370 lfsstore.ui_value = lfs_store(repo_store_path)
369 lfsstore.ui_value = lfs_store(repo_store_path)
371
370
372 self.sa.add(lfsstore)
371 self.sa.add(lfsstore)
373
372
374 # enable hgsubversion disabled by default
373 # enable hgsubversion disabled by default
375 hgsubversion = RhodeCodeUi()
374 hgsubversion = RhodeCodeUi()
376 hgsubversion.ui_section = 'extensions'
375 hgsubversion.ui_section = 'extensions'
377 hgsubversion.ui_key = 'hgsubversion'
376 hgsubversion.ui_key = 'hgsubversion'
378 hgsubversion.ui_value = ''
377 hgsubversion.ui_value = ''
379 hgsubversion.ui_active = False
378 hgsubversion.ui_active = False
380 self.sa.add(hgsubversion)
379 self.sa.add(hgsubversion)
381
380
382 # enable hgevolve disabled by default
381 # enable hgevolve disabled by default
383 hgevolve = RhodeCodeUi()
382 hgevolve = RhodeCodeUi()
384 hgevolve.ui_section = 'extensions'
383 hgevolve.ui_section = 'extensions'
385 hgevolve.ui_key = 'evolve'
384 hgevolve.ui_key = 'evolve'
386 hgevolve.ui_value = ''
385 hgevolve.ui_value = ''
387 hgevolve.ui_active = False
386 hgevolve.ui_active = False
388 self.sa.add(hgevolve)
387 self.sa.add(hgevolve)
389
388
390 hgevolve = RhodeCodeUi()
389 hgevolve = RhodeCodeUi()
391 hgevolve.ui_section = 'experimental'
390 hgevolve.ui_section = 'experimental'
392 hgevolve.ui_key = 'evolution'
391 hgevolve.ui_key = 'evolution'
393 hgevolve.ui_value = ''
392 hgevolve.ui_value = ''
394 hgevolve.ui_active = False
393 hgevolve.ui_active = False
395 self.sa.add(hgevolve)
394 self.sa.add(hgevolve)
396
395
397 hgevolve = RhodeCodeUi()
396 hgevolve = RhodeCodeUi()
398 hgevolve.ui_section = 'experimental'
397 hgevolve.ui_section = 'experimental'
399 hgevolve.ui_key = 'evolution.exchange'
398 hgevolve.ui_key = 'evolution.exchange'
400 hgevolve.ui_value = ''
399 hgevolve.ui_value = ''
401 hgevolve.ui_active = False
400 hgevolve.ui_active = False
402 self.sa.add(hgevolve)
401 self.sa.add(hgevolve)
403
402
404 hgevolve = RhodeCodeUi()
403 hgevolve = RhodeCodeUi()
405 hgevolve.ui_section = 'extensions'
404 hgevolve.ui_section = 'extensions'
406 hgevolve.ui_key = 'topic'
405 hgevolve.ui_key = 'topic'
407 hgevolve.ui_value = ''
406 hgevolve.ui_value = ''
408 hgevolve.ui_active = False
407 hgevolve.ui_active = False
409 self.sa.add(hgevolve)
408 self.sa.add(hgevolve)
410
409
411 # enable hggit disabled by default
410 # enable hggit disabled by default
412 hggit = RhodeCodeUi()
411 hggit = RhodeCodeUi()
413 hggit.ui_section = 'extensions'
412 hggit.ui_section = 'extensions'
414 hggit.ui_key = 'hggit'
413 hggit.ui_key = 'hggit'
415 hggit.ui_value = ''
414 hggit.ui_value = ''
416 hggit.ui_active = False
415 hggit.ui_active = False
417 self.sa.add(hggit)
416 self.sa.add(hggit)
418
417
419 # set svn branch defaults
418 # set svn branch defaults
420 branches = ["/branches/*", "/trunk"]
419 branches = ["/branches/*", "/trunk"]
421 tags = ["/tags/*"]
420 tags = ["/tags/*"]
422
421
423 for branch in branches:
422 for branch in branches:
424 settings_model.create_ui_section_value(
423 settings_model.create_ui_section_value(
425 RhodeCodeUi.SVN_BRANCH_ID, branch)
424 RhodeCodeUi.SVN_BRANCH_ID, branch)
426
425
427 for tag in tags:
426 for tag in tags:
428 settings_model.create_ui_section_value(RhodeCodeUi.SVN_TAG_ID, tag)
427 settings_model.create_ui_section_value(RhodeCodeUi.SVN_TAG_ID, tag)
429
428
430 def create_auth_plugin_options(self, skip_existing=False):
429 def create_auth_plugin_options(self, skip_existing=False):
431 """
430 """
432 Create default auth plugin settings, and make it active
431 Create default auth plugin settings, and make it active
433
432
434 :param skip_existing:
433 :param skip_existing:
435 """
434 """
436 defaults = [
435 defaults = [
437 ('auth_plugins',
436 ('auth_plugins',
438 'egg:rhodecode-enterprise-ce#token,egg:rhodecode-enterprise-ce#rhodecode',
437 'egg:rhodecode-enterprise-ce#token,egg:rhodecode-enterprise-ce#rhodecode',
439 'list'),
438 'list'),
440
439
441 ('auth_authtoken_enabled',
440 ('auth_authtoken_enabled',
442 'True',
441 'True',
443 'bool'),
442 'bool'),
444
443
445 ('auth_rhodecode_enabled',
444 ('auth_rhodecode_enabled',
446 'True',
445 'True',
447 'bool'),
446 'bool'),
448 ]
447 ]
449 for k, v, t in defaults:
448 for k, v, t in defaults:
450 if (skip_existing and
449 if (skip_existing and
451 SettingsModel().get_setting_by_name(k) is not None):
450 SettingsModel().get_setting_by_name(k) is not None):
452 log.debug('Skipping option %s', k)
451 log.debug('Skipping option %s', k)
453 continue
452 continue
454 setting = RhodeCodeSetting(k, v, t)
453 setting = RhodeCodeSetting(k, v, t)
455 self.sa.add(setting)
454 self.sa.add(setting)
456
455
457 def create_default_options(self, skip_existing=False):
456 def create_default_options(self, skip_existing=False):
458 """Creates default settings"""
457 """Creates default settings"""
459
458
460 for k, v, t in [
459 for k, v, t in [
461 ('default_repo_enable_locking', False, 'bool'),
460 ('default_repo_enable_locking', False, 'bool'),
462 ('default_repo_enable_downloads', False, 'bool'),
461 ('default_repo_enable_downloads', False, 'bool'),
463 ('default_repo_enable_statistics', False, 'bool'),
462 ('default_repo_enable_statistics', False, 'bool'),
464 ('default_repo_private', False, 'bool'),
463 ('default_repo_private', False, 'bool'),
465 ('default_repo_type', 'hg', 'unicode')]:
464 ('default_repo_type', 'hg', 'unicode')]:
466
465
467 if (skip_existing and
466 if (skip_existing and
468 SettingsModel().get_setting_by_name(k) is not None):
467 SettingsModel().get_setting_by_name(k) is not None):
469 log.debug('Skipping option %s', k)
468 log.debug('Skipping option %s', k)
470 continue
469 continue
471 setting = RhodeCodeSetting(k, v, t)
470 setting = RhodeCodeSetting(k, v, t)
472 self.sa.add(setting)
471 self.sa.add(setting)
473
472
474 def fixup_groups(self):
473 def fixup_groups(self):
475 def_usr = User.get_default_user()
474 def_usr = User.get_default_user()
476 for g in RepoGroup.query().all():
475 for g in RepoGroup.query().all():
477 g.group_name = g.get_new_name(g.name)
476 g.group_name = g.get_new_name(g.name)
478 self.sa.add(g)
477 self.sa.add(g)
479 # get default perm
478 # get default perm
480 default = UserRepoGroupToPerm.query()\
479 default = UserRepoGroupToPerm.query()\
481 .filter(UserRepoGroupToPerm.group == g)\
480 .filter(UserRepoGroupToPerm.group == g)\
482 .filter(UserRepoGroupToPerm.user == def_usr)\
481 .filter(UserRepoGroupToPerm.user == def_usr)\
483 .scalar()
482 .scalar()
484
483
485 if default is None:
484 if default is None:
486 log.debug('missing default permission for group %s adding', g)
485 log.debug('missing default permission for group %s adding', g)
487 perm_obj = RepoGroupModel()._create_default_perms(g)
486 perm_obj = RepoGroupModel()._create_default_perms(g)
488 self.sa.add(perm_obj)
487 self.sa.add(perm_obj)
489
488
490 def reset_permissions(self, username):
489 def reset_permissions(self, username):
491 """
490 """
492 Resets permissions to default state, useful when old systems had
491 Resets permissions to default state, useful when old systems had
493 bad permissions, we must clean them up
492 bad permissions, we must clean them up
494
493
495 :param username:
494 :param username:
496 """
495 """
497 default_user = User.get_by_username(username)
496 default_user = User.get_by_username(username)
498 if not default_user:
497 if not default_user:
499 return
498 return
500
499
501 u2p = UserToPerm.query()\
500 u2p = UserToPerm.query()\
502 .filter(UserToPerm.user == default_user).all()
501 .filter(UserToPerm.user == default_user).all()
503 fixed = False
502 fixed = False
504 if len(u2p) != len(Permission.DEFAULT_USER_PERMISSIONS):
503 if len(u2p) != len(Permission.DEFAULT_USER_PERMISSIONS):
505 for p in u2p:
504 for p in u2p:
506 Session().delete(p)
505 Session().delete(p)
507 fixed = True
506 fixed = True
508 self.populate_default_permissions()
507 self.populate_default_permissions()
509 return fixed
508 return fixed
510
509
511 def config_prompt(self, test_repo_path='', retries=3):
510 def config_prompt(self, test_repo_path='', retries=3):
512 defaults = self.cli_args
511 defaults = self.cli_args
513 _path = defaults.get('repos_location')
512 _path = defaults.get('repos_location')
514 if retries == 3:
513 if retries == 3:
515 log.info('Setting up repositories config')
514 log.info('Setting up repositories config')
516
515
517 if _path is not None:
516 if _path is not None:
518 path = _path
517 path = _path
519 elif not self.tests and not test_repo_path:
518 elif not self.tests and not test_repo_path:
520 path = eval(input(
519 path = eval(input(
521 'Enter a valid absolute path to store repositories. '
520 'Enter a valid absolute path to store repositories. '
522 'All repositories in that path will be added automatically:'
521 'All repositories in that path will be added automatically:'
523 ))
522 ))
524 else:
523 else:
525 path = test_repo_path
524 path = test_repo_path
526 path_ok = True
525 path_ok = True
527
526
528 # check proper dir
527 # check proper dir
529 if not os.path.isdir(path):
528 if not os.path.isdir(path):
530 path_ok = False
529 path_ok = False
531 log.error('Given path %s is not a valid directory', path)
530 log.error('Given path %s is not a valid directory', path)
532
531
533 elif not os.path.isabs(path):
532 elif not os.path.isabs(path):
534 path_ok = False
533 path_ok = False
535 log.error('Given path %s is not an absolute path', path)
534 log.error('Given path %s is not an absolute path', path)
536
535
537 # check if path is at least readable.
536 # check if path is at least readable.
538 if not os.access(path, os.R_OK):
537 if not os.access(path, os.R_OK):
539 path_ok = False
538 path_ok = False
540 log.error('Given path %s is not readable', path)
539 log.error('Given path %s is not readable', path)
541
540
542 # check write access, warn user about non writeable paths
541 # check write access, warn user about non writeable paths
543 elif not os.access(path, os.W_OK) and path_ok:
542 elif not os.access(path, os.W_OK) and path_ok:
544 log.warning('No write permission to given path %s', path)
543 log.warning('No write permission to given path %s', path)
545
544
546 q = ('Given path %s is not writeable, do you want to '
545 q = ('Given path %s is not writeable, do you want to '
547 'continue with read only mode ? [y/n]' % (path,))
546 'continue with read only mode ? [y/n]' % (path,))
548 if not self.ask_ok(q):
547 if not self.ask_ok(q):
549 log.error('Canceled by user')
548 log.error('Canceled by user')
550 sys.exit(-1)
549 sys.exit(-1)
551
550
552 if retries == 0:
551 if retries == 0:
553 sys.exit('max retries reached')
552 sys.exit('max retries reached')
554 if not path_ok:
553 if not path_ok:
555 retries -= 1
554 retries -= 1
556 return self.config_prompt(test_repo_path, retries)
555 return self.config_prompt(test_repo_path, retries)
557
556
558 real_path = os.path.normpath(os.path.realpath(path))
557 real_path = os.path.normpath(os.path.realpath(path))
559
558
560 if real_path != os.path.normpath(path):
559 if real_path != os.path.normpath(path):
561 q = ('Path looks like a symlink, RhodeCode Enterprise will store '
560 q = ('Path looks like a symlink, RhodeCode Enterprise will store '
562 'given path as %s ? [y/n]') % (real_path,)
561 'given path as %s ? [y/n]') % (real_path,)
563 if not self.ask_ok(q):
562 if not self.ask_ok(q):
564 log.error('Canceled by user')
563 log.error('Canceled by user')
565 sys.exit(-1)
564 sys.exit(-1)
566
565
567 return real_path
566 return real_path
568
567
569 def create_settings(self, path):
568 def create_settings(self, path):
570
569
571 self.create_ui_settings(path)
570 self.create_ui_settings(path)
572
571
573 ui_config = [
572 ui_config = [
574 ('web', 'push_ssl', 'False'),
573 ('web', 'push_ssl', 'False'),
575 ('web', 'allow_archive', 'gz zip bz2'),
574 ('web', 'allow_archive', 'gz zip bz2'),
576 ('web', 'allow_push', '*'),
575 ('web', 'allow_push', '*'),
577 ('web', 'baseurl', '/'),
576 ('web', 'baseurl', '/'),
578 ('paths', '/', path),
577 ('paths', '/', path),
579 ('phases', 'publish', 'True')
578 ('phases', 'publish', 'True')
580 ]
579 ]
581 for section, key, value in ui_config:
580 for section, key, value in ui_config:
582 ui_conf = RhodeCodeUi()
581 ui_conf = RhodeCodeUi()
583 setattr(ui_conf, 'ui_section', section)
582 setattr(ui_conf, 'ui_section', section)
584 setattr(ui_conf, 'ui_key', key)
583 setattr(ui_conf, 'ui_key', key)
585 setattr(ui_conf, 'ui_value', value)
584 setattr(ui_conf, 'ui_value', value)
586 self.sa.add(ui_conf)
585 self.sa.add(ui_conf)
587
586
588 # rhodecode app settings
587 # rhodecode app settings
589 settings = [
588 settings = [
590 ('realm', 'RhodeCode', 'unicode'),
589 ('realm', 'RhodeCode', 'unicode'),
591 ('title', '', 'unicode'),
590 ('title', '', 'unicode'),
592 ('pre_code', '', 'unicode'),
591 ('pre_code', '', 'unicode'),
593 ('post_code', '', 'unicode'),
592 ('post_code', '', 'unicode'),
594
593
595 # Visual
594 # Visual
596 ('show_public_icon', True, 'bool'),
595 ('show_public_icon', True, 'bool'),
597 ('show_private_icon', True, 'bool'),
596 ('show_private_icon', True, 'bool'),
598 ('stylify_metatags', True, 'bool'),
597 ('stylify_metatags', True, 'bool'),
599 ('dashboard_items', 100, 'int'),
598 ('dashboard_items', 100, 'int'),
600 ('admin_grid_items', 25, 'int'),
599 ('admin_grid_items', 25, 'int'),
601
600
602 ('markup_renderer', 'markdown', 'unicode'),
601 ('markup_renderer', 'markdown', 'unicode'),
603
602
604 ('repository_fields', True, 'bool'),
603 ('repository_fields', True, 'bool'),
605 ('show_version', True, 'bool'),
604 ('show_version', True, 'bool'),
606 ('show_revision_number', True, 'bool'),
605 ('show_revision_number', True, 'bool'),
607 ('show_sha_length', 12, 'int'),
606 ('show_sha_length', 12, 'int'),
608
607
609 ('use_gravatar', False, 'bool'),
608 ('use_gravatar', False, 'bool'),
610 ('gravatar_url', User.DEFAULT_GRAVATAR_URL, 'unicode'),
609 ('gravatar_url', User.DEFAULT_GRAVATAR_URL, 'unicode'),
611
610
612 ('clone_uri_tmpl', Repository.DEFAULT_CLONE_URI, 'unicode'),
611 ('clone_uri_tmpl', Repository.DEFAULT_CLONE_URI, 'unicode'),
613 ('clone_uri_id_tmpl', Repository.DEFAULT_CLONE_URI_ID, 'unicode'),
612 ('clone_uri_id_tmpl', Repository.DEFAULT_CLONE_URI_ID, 'unicode'),
614 ('clone_uri_ssh_tmpl', Repository.DEFAULT_CLONE_URI_SSH, 'unicode'),
613 ('clone_uri_ssh_tmpl', Repository.DEFAULT_CLONE_URI_SSH, 'unicode'),
615 ('support_url', '', 'unicode'),
614 ('support_url', '', 'unicode'),
616 ('update_url', RhodeCodeSetting.DEFAULT_UPDATE_URL, 'unicode'),
615 ('update_url', RhodeCodeSetting.DEFAULT_UPDATE_URL, 'unicode'),
617
616
618 # VCS Settings
617 # VCS Settings
619 ('pr_merge_enabled', True, 'bool'),
618 ('pr_merge_enabled', True, 'bool'),
620 ('use_outdated_comments', True, 'bool'),
619 ('use_outdated_comments', True, 'bool'),
621 ('diff_cache', True, 'bool'),
620 ('diff_cache', True, 'bool'),
622 ]
621 ]
623
622
624 for key, val, type_ in settings:
623 for key, val, type_ in settings:
625 sett = RhodeCodeSetting(key, val, type_)
624 sett = RhodeCodeSetting(key, val, type_)
626 self.sa.add(sett)
625 self.sa.add(sett)
627
626
628 self.create_auth_plugin_options()
627 self.create_auth_plugin_options()
629 self.create_default_options()
628 self.create_default_options()
630
629
631 log.info('created ui config')
630 log.info('created ui config')
632
631
633 def create_user(self, username, password, email='', admin=False,
632 def create_user(self, username, password, email='', admin=False,
634 strict_creation_check=True, api_key=None):
633 strict_creation_check=True, api_key=None):
635 log.info('creating user `%s`', username)
634 log.info('creating user `%s`', username)
636 user = UserModel().create_or_update(
635 user = UserModel().create_or_update(
637 username, password, email, firstname='RhodeCode', lastname='Admin',
636 username, password, email, firstname='RhodeCode', lastname='Admin',
638 active=True, admin=admin, extern_type="rhodecode",
637 active=True, admin=admin, extern_type="rhodecode",
639 strict_creation_check=strict_creation_check)
638 strict_creation_check=strict_creation_check)
640
639
641 if api_key:
640 if api_key:
642 log.info('setting a new default auth token for user `%s`', username)
641 log.info('setting a new default auth token for user `%s`', username)
643 UserModel().add_auth_token(
642 UserModel().add_auth_token(
644 user=user, lifetime_minutes=-1,
643 user=user, lifetime_minutes=-1,
645 role=UserModel.auth_token_role.ROLE_ALL,
644 role=UserModel.auth_token_role.ROLE_ALL,
646 description='BUILTIN TOKEN')
645 description='BUILTIN TOKEN')
647
646
648 def create_default_user(self):
647 def create_default_user(self):
649 log.info('creating default user')
648 log.info('creating default user')
650 # create default user for handling default permissions.
649 # create default user for handling default permissions.
651 user = UserModel().create_or_update(username=User.DEFAULT_USER,
650 user = UserModel().create_or_update(username=User.DEFAULT_USER,
652 password=str(uuid.uuid1())[:20],
651 password=str(uuid.uuid1())[:20],
653 email=User.DEFAULT_USER_EMAIL,
652 email=User.DEFAULT_USER_EMAIL,
654 firstname='Anonymous',
653 firstname='Anonymous',
655 lastname='User',
654 lastname='User',
656 strict_creation_check=False)
655 strict_creation_check=False)
657 # based on configuration options activate/de-activate this user which
656 # based on configuration options activate/de-activate this user which
658 # controls anonymous access
657 # controls anonymous access
659 if self.cli_args.get('public_access') is False:
658 if self.cli_args.get('public_access') is False:
660 log.info('Public access disabled')
659 log.info('Public access disabled')
661 user.active = False
660 user.active = False
662 Session().add(user)
661 Session().add(user)
663 Session().commit()
662 Session().commit()
664
663
665 def create_permissions(self):
664 def create_permissions(self):
666 """
665 """
667 Creates all permissions defined in the system
666 Creates all permissions defined in the system
668 """
667 """
669 # module.(access|create|change|delete)_[name]
668 # module.(access|create|change|delete)_[name]
670 # module.(none|read|write|admin)
669 # module.(none|read|write|admin)
671 log.info('creating permissions')
670 log.info('creating permissions')
672 PermissionModel(self.sa).create_permissions()
671 PermissionModel(self.sa).create_permissions()
673
672
674 def populate_default_permissions(self):
673 def populate_default_permissions(self):
675 """
674 """
676 Populate default permissions. It will create only the default
675 Populate default permissions. It will create only the default
677 permissions that are missing, and not alter already defined ones
676 permissions that are missing, and not alter already defined ones
678 """
677 """
679 log.info('creating default user permissions')
678 log.info('creating default user permissions')
680 PermissionModel(self.sa).create_default_user_permissions(user=User.DEFAULT_USER)
679 PermissionModel(self.sa).create_default_user_permissions(user=User.DEFAULT_USER)
@@ -1,1271 +1,1271 b''
1 # -*- coding: utf-8 -*-
1
2
2
3 # Copyright (C) 2011-2020 RhodeCode GmbH
3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 Set of diffing helpers, previously part of vcs
23 Set of diffing helpers, previously part of vcs
24 """
24 """
25
25
26 import os
26 import os
27 import re
27 import re
28 import bz2
28 import bz2
29 import gzip
29 import gzip
30 import time
30 import time
31
31
32 import collections
32 import collections
33 import difflib
33 import difflib
34 import logging
34 import logging
35 import pickle
35 import pickle
36 from itertools import tee
36 from itertools import tee
37
37
38 from rhodecode.lib.vcs.exceptions import VCSError
38 from rhodecode.lib.vcs.exceptions import VCSError
39 from rhodecode.lib.vcs.nodes import FileNode, SubModuleNode
39 from rhodecode.lib.vcs.nodes import FileNode, SubModuleNode
40 from rhodecode.lib.utils2 import safe_unicode, safe_str
40 from rhodecode.lib.utils2 import safe_unicode, safe_str
41
41
42 log = logging.getLogger(__name__)
42 log = logging.getLogger(__name__)
43
43
44 # define max context, a file with more than this numbers of lines is unusable
44 # define max context, a file with more than this numbers of lines is unusable
45 # in browser anyway
45 # in browser anyway
46 MAX_CONTEXT = 20 * 1024
46 MAX_CONTEXT = 20 * 1024
47 DEFAULT_CONTEXT = 3
47 DEFAULT_CONTEXT = 3
48
48
49
49
50 def get_diff_context(request):
50 def get_diff_context(request):
51 return MAX_CONTEXT if request.GET.get('fullcontext', '') == '1' else DEFAULT_CONTEXT
51 return MAX_CONTEXT if request.GET.get('fullcontext', '') == '1' else DEFAULT_CONTEXT
52
52
53
53
54 def get_diff_whitespace_flag(request):
54 def get_diff_whitespace_flag(request):
55 return request.GET.get('ignorews', '') == '1'
55 return request.GET.get('ignorews', '') == '1'
56
56
57
57
58 class OPS(object):
58 class OPS(object):
59 ADD = 'A'
59 ADD = 'A'
60 MOD = 'M'
60 MOD = 'M'
61 DEL = 'D'
61 DEL = 'D'
62
62
63
63
64 def get_gitdiff(filenode_old, filenode_new, ignore_whitespace=True, context=3):
64 def get_gitdiff(filenode_old, filenode_new, ignore_whitespace=True, context=3):
65 """
65 """
66 Returns git style diff between given ``filenode_old`` and ``filenode_new``.
66 Returns git style diff between given ``filenode_old`` and ``filenode_new``.
67
67
68 :param ignore_whitespace: ignore whitespaces in diff
68 :param ignore_whitespace: ignore whitespaces in diff
69 """
69 """
70 # make sure we pass in default context
70 # make sure we pass in default context
71 context = context or 3
71 context = context or 3
72 # protect against IntOverflow when passing HUGE context
72 # protect against IntOverflow when passing HUGE context
73 if context > MAX_CONTEXT:
73 if context > MAX_CONTEXT:
74 context = MAX_CONTEXT
74 context = MAX_CONTEXT
75
75
76 submodules = [o for o in [filenode_new, filenode_old] if isinstance(o, SubModuleNode)]
76 submodules = [o for o in [filenode_new, filenode_old] if isinstance(o, SubModuleNode)]
77 if submodules:
77 if submodules:
78 return ''
78 return ''
79
79
80 for filenode in (filenode_old, filenode_new):
80 for filenode in (filenode_old, filenode_new):
81 if not isinstance(filenode, FileNode):
81 if not isinstance(filenode, FileNode):
82 raise VCSError(
82 raise VCSError(
83 "Given object should be FileNode object, not %s"
83 "Given object should be FileNode object, not %s"
84 % filenode.__class__)
84 % filenode.__class__)
85
85
86 repo = filenode_new.commit.repository
86 repo = filenode_new.commit.repository
87 old_commit = filenode_old.commit or repo.EMPTY_COMMIT
87 old_commit = filenode_old.commit or repo.EMPTY_COMMIT
88 new_commit = filenode_new.commit
88 new_commit = filenode_new.commit
89
89
90 vcs_gitdiff = repo.get_diff(
90 vcs_gitdiff = repo.get_diff(
91 old_commit, new_commit, filenode_new.path,
91 old_commit, new_commit, filenode_new.path,
92 ignore_whitespace, context, path1=filenode_old.path)
92 ignore_whitespace, context, path1=filenode_old.path)
93 return vcs_gitdiff
93 return vcs_gitdiff
94
94
95 NEW_FILENODE = 1
95 NEW_FILENODE = 1
96 DEL_FILENODE = 2
96 DEL_FILENODE = 2
97 MOD_FILENODE = 3
97 MOD_FILENODE = 3
98 RENAMED_FILENODE = 4
98 RENAMED_FILENODE = 4
99 COPIED_FILENODE = 5
99 COPIED_FILENODE = 5
100 CHMOD_FILENODE = 6
100 CHMOD_FILENODE = 6
101 BIN_FILENODE = 7
101 BIN_FILENODE = 7
102
102
103
103
104 class LimitedDiffContainer(object):
104 class LimitedDiffContainer(object):
105
105
106 def __init__(self, diff_limit, cur_diff_size, diff):
106 def __init__(self, diff_limit, cur_diff_size, diff):
107 self.diff = diff
107 self.diff = diff
108 self.diff_limit = diff_limit
108 self.diff_limit = diff_limit
109 self.cur_diff_size = cur_diff_size
109 self.cur_diff_size = cur_diff_size
110
110
111 def __getitem__(self, key):
111 def __getitem__(self, key):
112 return self.diff.__getitem__(key)
112 return self.diff.__getitem__(key)
113
113
114 def __iter__(self):
114 def __iter__(self):
115 for l in self.diff:
115 for l in self.diff:
116 yield l
116 yield l
117
117
118
118
119 class Action(object):
119 class Action(object):
120 """
120 """
121 Contains constants for the action value of the lines in a parsed diff.
121 Contains constants for the action value of the lines in a parsed diff.
122 """
122 """
123
123
124 ADD = 'add'
124 ADD = 'add'
125 DELETE = 'del'
125 DELETE = 'del'
126 UNMODIFIED = 'unmod'
126 UNMODIFIED = 'unmod'
127
127
128 CONTEXT = 'context'
128 CONTEXT = 'context'
129 OLD_NO_NL = 'old-no-nl'
129 OLD_NO_NL = 'old-no-nl'
130 NEW_NO_NL = 'new-no-nl'
130 NEW_NO_NL = 'new-no-nl'
131
131
132
132
133 class DiffProcessor(object):
133 class DiffProcessor(object):
134 """
134 """
135 Give it a unified or git diff and it returns a list of the files that were
135 Give it a unified or git diff and it returns a list of the files that were
136 mentioned in the diff together with a dict of meta information that
136 mentioned in the diff together with a dict of meta information that
137 can be used to render it in a HTML template.
137 can be used to render it in a HTML template.
138
138
139 .. note:: Unicode handling
139 .. note:: Unicode handling
140
140
141 The original diffs are a byte sequence and can contain filenames
141 The original diffs are a byte sequence and can contain filenames
142 in mixed encodings. This class generally returns `unicode` objects
142 in mixed encodings. This class generally returns `unicode` objects
143 since the result is intended for presentation to the user.
143 since the result is intended for presentation to the user.
144
144
145 """
145 """
146 _chunk_re = re.compile(r'^@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@(.*)')
146 _chunk_re = re.compile(r'^@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@(.*)')
147 _newline_marker = re.compile(r'^\\ No newline at end of file')
147 _newline_marker = re.compile(r'^\\ No newline at end of file')
148
148
149 # used for inline highlighter word split
149 # used for inline highlighter word split
150 _token_re = re.compile(r'()(&gt;|&lt;|&amp;|\W+?)')
150 _token_re = re.compile(r'()(&gt;|&lt;|&amp;|\W+?)')
151
151
152 # collapse ranges of commits over given number
152 # collapse ranges of commits over given number
153 _collapse_commits_over = 5
153 _collapse_commits_over = 5
154
154
155 def __init__(self, diff, format='gitdiff', diff_limit=None,
155 def __init__(self, diff, format='gitdiff', diff_limit=None,
156 file_limit=None, show_full_diff=True):
156 file_limit=None, show_full_diff=True):
157 """
157 """
158 :param diff: A `Diff` object representing a diff from a vcs backend
158 :param diff: A `Diff` object representing a diff from a vcs backend
159 :param format: format of diff passed, `udiff` or `gitdiff`
159 :param format: format of diff passed, `udiff` or `gitdiff`
160 :param diff_limit: define the size of diff that is considered "big"
160 :param diff_limit: define the size of diff that is considered "big"
161 based on that parameter cut off will be triggered, set to None
161 based on that parameter cut off will be triggered, set to None
162 to show full diff
162 to show full diff
163 """
163 """
164 self._diff = diff
164 self._diff = diff
165 self._format = format
165 self._format = format
166 self.adds = 0
166 self.adds = 0
167 self.removes = 0
167 self.removes = 0
168 # calculate diff size
168 # calculate diff size
169 self.diff_limit = diff_limit
169 self.diff_limit = diff_limit
170 self.file_limit = file_limit
170 self.file_limit = file_limit
171 self.show_full_diff = show_full_diff
171 self.show_full_diff = show_full_diff
172 self.cur_diff_size = 0
172 self.cur_diff_size = 0
173 self.parsed = False
173 self.parsed = False
174 self.parsed_diff = []
174 self.parsed_diff = []
175
175
176 log.debug('Initialized DiffProcessor with %s mode', format)
176 log.debug('Initialized DiffProcessor with %s mode', format)
177 if format == 'gitdiff':
177 if format == 'gitdiff':
178 self.differ = self._highlight_line_difflib
178 self.differ = self._highlight_line_difflib
179 self._parser = self._parse_gitdiff
179 self._parser = self._parse_gitdiff
180 else:
180 else:
181 self.differ = self._highlight_line_udiff
181 self.differ = self._highlight_line_udiff
182 self._parser = self._new_parse_gitdiff
182 self._parser = self._new_parse_gitdiff
183
183
184 def _copy_iterator(self):
184 def _copy_iterator(self):
185 """
185 """
186 make a fresh copy of generator, we should not iterate thru
186 make a fresh copy of generator, we should not iterate thru
187 an original as it's needed for repeating operations on
187 an original as it's needed for repeating operations on
188 this instance of DiffProcessor
188 this instance of DiffProcessor
189 """
189 """
190 self.__udiff, iterator_copy = tee(self.__udiff)
190 self.__udiff, iterator_copy = tee(self.__udiff)
191 return iterator_copy
191 return iterator_copy
192
192
193 def _escaper(self, string):
193 def _escaper(self, string):
194 """
194 """
195 Escaper for diff escapes special chars and checks the diff limit
195 Escaper for diff escapes special chars and checks the diff limit
196
196
197 :param string:
197 :param string:
198 """
198 """
199 self.cur_diff_size += len(string)
199 self.cur_diff_size += len(string)
200
200
201 if not self.show_full_diff and (self.cur_diff_size > self.diff_limit):
201 if not self.show_full_diff and (self.cur_diff_size > self.diff_limit):
202 raise DiffLimitExceeded('Diff Limit Exceeded')
202 raise DiffLimitExceeded('Diff Limit Exceeded')
203
203
204 return string \
204 return string \
205 .replace('&', '&amp;')\
205 .replace('&', '&amp;')\
206 .replace('<', '&lt;')\
206 .replace('<', '&lt;')\
207 .replace('>', '&gt;')
207 .replace('>', '&gt;')
208
208
209 def _line_counter(self, l):
209 def _line_counter(self, l):
210 """
210 """
211 Checks each line and bumps total adds/removes for this diff
211 Checks each line and bumps total adds/removes for this diff
212
212
213 :param l:
213 :param l:
214 """
214 """
215 if l.startswith('+') and not l.startswith('+++'):
215 if l.startswith('+') and not l.startswith('+++'):
216 self.adds += 1
216 self.adds += 1
217 elif l.startswith('-') and not l.startswith('---'):
217 elif l.startswith('-') and not l.startswith('---'):
218 self.removes += 1
218 self.removes += 1
219 return safe_unicode(l)
219 return safe_unicode(l)
220
220
221 def _highlight_line_difflib(self, line, next_):
221 def _highlight_line_difflib(self, line, next_):
222 """
222 """
223 Highlight inline changes in both lines.
223 Highlight inline changes in both lines.
224 """
224 """
225
225
226 if line['action'] == Action.DELETE:
226 if line['action'] == Action.DELETE:
227 old, new = line, next_
227 old, new = line, next_
228 else:
228 else:
229 old, new = next_, line
229 old, new = next_, line
230
230
231 oldwords = self._token_re.split(old['line'])
231 oldwords = self._token_re.split(old['line'])
232 newwords = self._token_re.split(new['line'])
232 newwords = self._token_re.split(new['line'])
233 sequence = difflib.SequenceMatcher(None, oldwords, newwords)
233 sequence = difflib.SequenceMatcher(None, oldwords, newwords)
234
234
235 oldfragments, newfragments = [], []
235 oldfragments, newfragments = [], []
236 for tag, i1, i2, j1, j2 in sequence.get_opcodes():
236 for tag, i1, i2, j1, j2 in sequence.get_opcodes():
237 oldfrag = ''.join(oldwords[i1:i2])
237 oldfrag = ''.join(oldwords[i1:i2])
238 newfrag = ''.join(newwords[j1:j2])
238 newfrag = ''.join(newwords[j1:j2])
239 if tag != 'equal':
239 if tag != 'equal':
240 if oldfrag:
240 if oldfrag:
241 oldfrag = '<del>%s</del>' % oldfrag
241 oldfrag = '<del>%s</del>' % oldfrag
242 if newfrag:
242 if newfrag:
243 newfrag = '<ins>%s</ins>' % newfrag
243 newfrag = '<ins>%s</ins>' % newfrag
244 oldfragments.append(oldfrag)
244 oldfragments.append(oldfrag)
245 newfragments.append(newfrag)
245 newfragments.append(newfrag)
246
246
247 old['line'] = "".join(oldfragments)
247 old['line'] = "".join(oldfragments)
248 new['line'] = "".join(newfragments)
248 new['line'] = "".join(newfragments)
249
249
250 def _highlight_line_udiff(self, line, next_):
250 def _highlight_line_udiff(self, line, next_):
251 """
251 """
252 Highlight inline changes in both lines.
252 Highlight inline changes in both lines.
253 """
253 """
254 start = 0
254 start = 0
255 limit = min(len(line['line']), len(next_['line']))
255 limit = min(len(line['line']), len(next_['line']))
256 while start < limit and line['line'][start] == next_['line'][start]:
256 while start < limit and line['line'][start] == next_['line'][start]:
257 start += 1
257 start += 1
258 end = -1
258 end = -1
259 limit -= start
259 limit -= start
260 while -end <= limit and line['line'][end] == next_['line'][end]:
260 while -end <= limit and line['line'][end] == next_['line'][end]:
261 end -= 1
261 end -= 1
262 end += 1
262 end += 1
263 if start or end:
263 if start or end:
264 def do(l):
264 def do(l):
265 last = end + len(l['line'])
265 last = end + len(l['line'])
266 if l['action'] == Action.ADD:
266 if l['action'] == Action.ADD:
267 tag = 'ins'
267 tag = 'ins'
268 else:
268 else:
269 tag = 'del'
269 tag = 'del'
270 l['line'] = '%s<%s>%s</%s>%s' % (
270 l['line'] = '%s<%s>%s</%s>%s' % (
271 l['line'][:start],
271 l['line'][:start],
272 tag,
272 tag,
273 l['line'][start:last],
273 l['line'][start:last],
274 tag,
274 tag,
275 l['line'][last:]
275 l['line'][last:]
276 )
276 )
277 do(line)
277 do(line)
278 do(next_)
278 do(next_)
279
279
280 def _clean_line(self, line, command):
280 def _clean_line(self, line, command):
281 if command in ['+', '-', ' ']:
281 if command in ['+', '-', ' ']:
282 # only modify the line if it's actually a diff thing
282 # only modify the line if it's actually a diff thing
283 line = line[1:]
283 line = line[1:]
284 return line
284 return line
285
285
286 def _parse_gitdiff(self, inline_diff=True):
286 def _parse_gitdiff(self, inline_diff=True):
287 _files = []
287 _files = []
288 diff_container = lambda arg: arg
288 diff_container = lambda arg: arg
289
289
290 for chunk in self._diff.chunks():
290 for chunk in self._diff.chunks():
291 head = chunk.header
291 head = chunk.header
292
292
293 diff = map(self._escaper, self.diff_splitter(chunk.diff))
293 diff = map(self._escaper, self.diff_splitter(chunk.diff))
294 raw_diff = chunk.raw
294 raw_diff = chunk.raw
295 limited_diff = False
295 limited_diff = False
296 exceeds_limit = False
296 exceeds_limit = False
297
297
298 op = None
298 op = None
299 stats = {
299 stats = {
300 'added': 0,
300 'added': 0,
301 'deleted': 0,
301 'deleted': 0,
302 'binary': False,
302 'binary': False,
303 'ops': {},
303 'ops': {},
304 }
304 }
305
305
306 if head['deleted_file_mode']:
306 if head['deleted_file_mode']:
307 op = OPS.DEL
307 op = OPS.DEL
308 stats['binary'] = True
308 stats['binary'] = True
309 stats['ops'][DEL_FILENODE] = 'deleted file'
309 stats['ops'][DEL_FILENODE] = 'deleted file'
310
310
311 elif head['new_file_mode']:
311 elif head['new_file_mode']:
312 op = OPS.ADD
312 op = OPS.ADD
313 stats['binary'] = True
313 stats['binary'] = True
314 stats['ops'][NEW_FILENODE] = 'new file %s' % head['new_file_mode']
314 stats['ops'][NEW_FILENODE] = 'new file %s' % head['new_file_mode']
315 else: # modify operation, can be copy, rename or chmod
315 else: # modify operation, can be copy, rename or chmod
316
316
317 # CHMOD
317 # CHMOD
318 if head['new_mode'] and head['old_mode']:
318 if head['new_mode'] and head['old_mode']:
319 op = OPS.MOD
319 op = OPS.MOD
320 stats['binary'] = True
320 stats['binary'] = True
321 stats['ops'][CHMOD_FILENODE] = (
321 stats['ops'][CHMOD_FILENODE] = (
322 'modified file chmod %s => %s' % (
322 'modified file chmod %s => %s' % (
323 head['old_mode'], head['new_mode']))
323 head['old_mode'], head['new_mode']))
324 # RENAME
324 # RENAME
325 if head['rename_from'] != head['rename_to']:
325 if head['rename_from'] != head['rename_to']:
326 op = OPS.MOD
326 op = OPS.MOD
327 stats['binary'] = True
327 stats['binary'] = True
328 stats['ops'][RENAMED_FILENODE] = (
328 stats['ops'][RENAMED_FILENODE] = (
329 'file renamed from %s to %s' % (
329 'file renamed from %s to %s' % (
330 head['rename_from'], head['rename_to']))
330 head['rename_from'], head['rename_to']))
331 # COPY
331 # COPY
332 if head.get('copy_from') and head.get('copy_to'):
332 if head.get('copy_from') and head.get('copy_to'):
333 op = OPS.MOD
333 op = OPS.MOD
334 stats['binary'] = True
334 stats['binary'] = True
335 stats['ops'][COPIED_FILENODE] = (
335 stats['ops'][COPIED_FILENODE] = (
336 'file copied from %s to %s' % (
336 'file copied from %s to %s' % (
337 head['copy_from'], head['copy_to']))
337 head['copy_from'], head['copy_to']))
338
338
339 # If our new parsed headers didn't match anything fallback to
339 # If our new parsed headers didn't match anything fallback to
340 # old style detection
340 # old style detection
341 if op is None:
341 if op is None:
342 if not head['a_file'] and head['b_file']:
342 if not head['a_file'] and head['b_file']:
343 op = OPS.ADD
343 op = OPS.ADD
344 stats['binary'] = True
344 stats['binary'] = True
345 stats['ops'][NEW_FILENODE] = 'new file'
345 stats['ops'][NEW_FILENODE] = 'new file'
346
346
347 elif head['a_file'] and not head['b_file']:
347 elif head['a_file'] and not head['b_file']:
348 op = OPS.DEL
348 op = OPS.DEL
349 stats['binary'] = True
349 stats['binary'] = True
350 stats['ops'][DEL_FILENODE] = 'deleted file'
350 stats['ops'][DEL_FILENODE] = 'deleted file'
351
351
352 # it's not ADD not DELETE
352 # it's not ADD not DELETE
353 if op is None:
353 if op is None:
354 op = OPS.MOD
354 op = OPS.MOD
355 stats['binary'] = True
355 stats['binary'] = True
356 stats['ops'][MOD_FILENODE] = 'modified file'
356 stats['ops'][MOD_FILENODE] = 'modified file'
357
357
358 # a real non-binary diff
358 # a real non-binary diff
359 if head['a_file'] or head['b_file']:
359 if head['a_file'] or head['b_file']:
360 try:
360 try:
361 raw_diff, chunks, _stats = self._parse_lines(diff)
361 raw_diff, chunks, _stats = self._parse_lines(diff)
362 stats['binary'] = False
362 stats['binary'] = False
363 stats['added'] = _stats[0]
363 stats['added'] = _stats[0]
364 stats['deleted'] = _stats[1]
364 stats['deleted'] = _stats[1]
365 # explicit mark that it's a modified file
365 # explicit mark that it's a modified file
366 if op == OPS.MOD:
366 if op == OPS.MOD:
367 stats['ops'][MOD_FILENODE] = 'modified file'
367 stats['ops'][MOD_FILENODE] = 'modified file'
368 exceeds_limit = len(raw_diff) > self.file_limit
368 exceeds_limit = len(raw_diff) > self.file_limit
369
369
370 # changed from _escaper function so we validate size of
370 # changed from _escaper function so we validate size of
371 # each file instead of the whole diff
371 # each file instead of the whole diff
372 # diff will hide big files but still show small ones
372 # diff will hide big files but still show small ones
373 # from my tests, big files are fairly safe to be parsed
373 # from my tests, big files are fairly safe to be parsed
374 # but the browser is the bottleneck
374 # but the browser is the bottleneck
375 if not self.show_full_diff and exceeds_limit:
375 if not self.show_full_diff and exceeds_limit:
376 raise DiffLimitExceeded('File Limit Exceeded')
376 raise DiffLimitExceeded('File Limit Exceeded')
377
377
378 except DiffLimitExceeded:
378 except DiffLimitExceeded:
379 diff_container = lambda _diff: \
379 diff_container = lambda _diff: \
380 LimitedDiffContainer(
380 LimitedDiffContainer(
381 self.diff_limit, self.cur_diff_size, _diff)
381 self.diff_limit, self.cur_diff_size, _diff)
382
382
383 exceeds_limit = len(raw_diff) > self.file_limit
383 exceeds_limit = len(raw_diff) > self.file_limit
384 limited_diff = True
384 limited_diff = True
385 chunks = []
385 chunks = []
386
386
387 else: # GIT format binary patch, or possibly empty diff
387 else: # GIT format binary patch, or possibly empty diff
388 if head['bin_patch']:
388 if head['bin_patch']:
389 # we have operation already extracted, but we mark simply
389 # we have operation already extracted, but we mark simply
390 # it's a diff we wont show for binary files
390 # it's a diff we wont show for binary files
391 stats['ops'][BIN_FILENODE] = 'binary diff hidden'
391 stats['ops'][BIN_FILENODE] = 'binary diff hidden'
392 chunks = []
392 chunks = []
393
393
394 if chunks and not self.show_full_diff and op == OPS.DEL:
394 if chunks and not self.show_full_diff and op == OPS.DEL:
395 # if not full diff mode show deleted file contents
395 # if not full diff mode show deleted file contents
396 # TODO: anderson: if the view is not too big, there is no way
396 # TODO: anderson: if the view is not too big, there is no way
397 # to see the content of the file
397 # to see the content of the file
398 chunks = []
398 chunks = []
399
399
400 chunks.insert(0, [{
400 chunks.insert(0, [{
401 'old_lineno': '',
401 'old_lineno': '',
402 'new_lineno': '',
402 'new_lineno': '',
403 'action': Action.CONTEXT,
403 'action': Action.CONTEXT,
404 'line': msg,
404 'line': msg,
405 } for _op, msg in stats['ops'].items()
405 } for _op, msg in stats['ops'].items()
406 if _op not in [MOD_FILENODE]])
406 if _op not in [MOD_FILENODE]])
407
407
408 _files.append({
408 _files.append({
409 'filename': safe_unicode(head['b_path']),
409 'filename': safe_unicode(head['b_path']),
410 'old_revision': head['a_blob_id'],
410 'old_revision': head['a_blob_id'],
411 'new_revision': head['b_blob_id'],
411 'new_revision': head['b_blob_id'],
412 'chunks': chunks,
412 'chunks': chunks,
413 'raw_diff': safe_unicode(raw_diff),
413 'raw_diff': safe_unicode(raw_diff),
414 'operation': op,
414 'operation': op,
415 'stats': stats,
415 'stats': stats,
416 'exceeds_limit': exceeds_limit,
416 'exceeds_limit': exceeds_limit,
417 'is_limited_diff': limited_diff,
417 'is_limited_diff': limited_diff,
418 })
418 })
419
419
420 sorter = lambda info: {OPS.ADD: 0, OPS.MOD: 1,
420 sorter = lambda info: {OPS.ADD: 0, OPS.MOD: 1,
421 OPS.DEL: 2}.get(info['operation'])
421 OPS.DEL: 2}.get(info['operation'])
422
422
423 if not inline_diff:
423 if not inline_diff:
424 return diff_container(sorted(_files, key=sorter))
424 return diff_container(sorted(_files, key=sorter))
425
425
426 # highlight inline changes
426 # highlight inline changes
427 for diff_data in _files:
427 for diff_data in _files:
428 for chunk in diff_data['chunks']:
428 for chunk in diff_data['chunks']:
429 lineiter = iter(chunk)
429 lineiter = iter(chunk)
430 try:
430 try:
431 while 1:
431 while 1:
432 line = next(lineiter)
432 line = next(lineiter)
433 if line['action'] not in (
433 if line['action'] not in (
434 Action.UNMODIFIED, Action.CONTEXT):
434 Action.UNMODIFIED, Action.CONTEXT):
435 nextline = next(lineiter)
435 nextline = next(lineiter)
436 if nextline['action'] in ['unmod', 'context'] or \
436 if nextline['action'] in ['unmod', 'context'] or \
437 nextline['action'] == line['action']:
437 nextline['action'] == line['action']:
438 continue
438 continue
439 self.differ(line, nextline)
439 self.differ(line, nextline)
440 except StopIteration:
440 except StopIteration:
441 pass
441 pass
442
442
443 return diff_container(sorted(_files, key=sorter))
443 return diff_container(sorted(_files, key=sorter))
444
444
445 def _check_large_diff(self):
445 def _check_large_diff(self):
446 if self.diff_limit:
446 if self.diff_limit:
447 log.debug('Checking if diff exceeds current diff_limit of %s', self.diff_limit)
447 log.debug('Checking if diff exceeds current diff_limit of %s', self.diff_limit)
448 if not self.show_full_diff and (self.cur_diff_size > self.diff_limit):
448 if not self.show_full_diff and (self.cur_diff_size > self.diff_limit):
449 raise DiffLimitExceeded('Diff Limit `%s` Exceeded', self.diff_limit)
449 raise DiffLimitExceeded('Diff Limit `%s` Exceeded', self.diff_limit)
450
450
451 # FIXME: NEWDIFFS: dan: this replaces _parse_gitdiff
451 # FIXME: NEWDIFFS: dan: this replaces _parse_gitdiff
452 def _new_parse_gitdiff(self, inline_diff=True):
452 def _new_parse_gitdiff(self, inline_diff=True):
453 _files = []
453 _files = []
454
454
455 # this can be overriden later to a LimitedDiffContainer type
455 # this can be overriden later to a LimitedDiffContainer type
456 diff_container = lambda arg: arg
456 diff_container = lambda arg: arg
457
457
458 for chunk in self._diff.chunks():
458 for chunk in self._diff.chunks():
459 head = chunk.header
459 head = chunk.header
460 log.debug('parsing diff %r', head)
460 log.debug('parsing diff %r', head)
461
461
462 raw_diff = chunk.raw
462 raw_diff = chunk.raw
463 limited_diff = False
463 limited_diff = False
464 exceeds_limit = False
464 exceeds_limit = False
465
465
466 op = None
466 op = None
467 stats = {
467 stats = {
468 'added': 0,
468 'added': 0,
469 'deleted': 0,
469 'deleted': 0,
470 'binary': False,
470 'binary': False,
471 'old_mode': None,
471 'old_mode': None,
472 'new_mode': None,
472 'new_mode': None,
473 'ops': {},
473 'ops': {},
474 }
474 }
475 if head['old_mode']:
475 if head['old_mode']:
476 stats['old_mode'] = head['old_mode']
476 stats['old_mode'] = head['old_mode']
477 if head['new_mode']:
477 if head['new_mode']:
478 stats['new_mode'] = head['new_mode']
478 stats['new_mode'] = head['new_mode']
479 if head['b_mode']:
479 if head['b_mode']:
480 stats['new_mode'] = head['b_mode']
480 stats['new_mode'] = head['b_mode']
481
481
482 # delete file
482 # delete file
483 if head['deleted_file_mode']:
483 if head['deleted_file_mode']:
484 op = OPS.DEL
484 op = OPS.DEL
485 stats['binary'] = True
485 stats['binary'] = True
486 stats['ops'][DEL_FILENODE] = 'deleted file'
486 stats['ops'][DEL_FILENODE] = 'deleted file'
487
487
488 # new file
488 # new file
489 elif head['new_file_mode']:
489 elif head['new_file_mode']:
490 op = OPS.ADD
490 op = OPS.ADD
491 stats['binary'] = True
491 stats['binary'] = True
492 stats['old_mode'] = None
492 stats['old_mode'] = None
493 stats['new_mode'] = head['new_file_mode']
493 stats['new_mode'] = head['new_file_mode']
494 stats['ops'][NEW_FILENODE] = 'new file %s' % head['new_file_mode']
494 stats['ops'][NEW_FILENODE] = 'new file %s' % head['new_file_mode']
495
495
496 # modify operation, can be copy, rename or chmod
496 # modify operation, can be copy, rename or chmod
497 else:
497 else:
498 # CHMOD
498 # CHMOD
499 if head['new_mode'] and head['old_mode']:
499 if head['new_mode'] and head['old_mode']:
500 op = OPS.MOD
500 op = OPS.MOD
501 stats['binary'] = True
501 stats['binary'] = True
502 stats['ops'][CHMOD_FILENODE] = (
502 stats['ops'][CHMOD_FILENODE] = (
503 'modified file chmod %s => %s' % (
503 'modified file chmod %s => %s' % (
504 head['old_mode'], head['new_mode']))
504 head['old_mode'], head['new_mode']))
505
505
506 # RENAME
506 # RENAME
507 if head['rename_from'] != head['rename_to']:
507 if head['rename_from'] != head['rename_to']:
508 op = OPS.MOD
508 op = OPS.MOD
509 stats['binary'] = True
509 stats['binary'] = True
510 stats['renamed'] = (head['rename_from'], head['rename_to'])
510 stats['renamed'] = (head['rename_from'], head['rename_to'])
511 stats['ops'][RENAMED_FILENODE] = (
511 stats['ops'][RENAMED_FILENODE] = (
512 'file renamed from %s to %s' % (
512 'file renamed from %s to %s' % (
513 head['rename_from'], head['rename_to']))
513 head['rename_from'], head['rename_to']))
514 # COPY
514 # COPY
515 if head.get('copy_from') and head.get('copy_to'):
515 if head.get('copy_from') and head.get('copy_to'):
516 op = OPS.MOD
516 op = OPS.MOD
517 stats['binary'] = True
517 stats['binary'] = True
518 stats['copied'] = (head['copy_from'], head['copy_to'])
518 stats['copied'] = (head['copy_from'], head['copy_to'])
519 stats['ops'][COPIED_FILENODE] = (
519 stats['ops'][COPIED_FILENODE] = (
520 'file copied from %s to %s' % (
520 'file copied from %s to %s' % (
521 head['copy_from'], head['copy_to']))
521 head['copy_from'], head['copy_to']))
522
522
523 # If our new parsed headers didn't match anything fallback to
523 # If our new parsed headers didn't match anything fallback to
524 # old style detection
524 # old style detection
525 if op is None:
525 if op is None:
526 if not head['a_file'] and head['b_file']:
526 if not head['a_file'] and head['b_file']:
527 op = OPS.ADD
527 op = OPS.ADD
528 stats['binary'] = True
528 stats['binary'] = True
529 stats['new_file'] = True
529 stats['new_file'] = True
530 stats['ops'][NEW_FILENODE] = 'new file'
530 stats['ops'][NEW_FILENODE] = 'new file'
531
531
532 elif head['a_file'] and not head['b_file']:
532 elif head['a_file'] and not head['b_file']:
533 op = OPS.DEL
533 op = OPS.DEL
534 stats['binary'] = True
534 stats['binary'] = True
535 stats['ops'][DEL_FILENODE] = 'deleted file'
535 stats['ops'][DEL_FILENODE] = 'deleted file'
536
536
537 # it's not ADD not DELETE
537 # it's not ADD not DELETE
538 if op is None:
538 if op is None:
539 op = OPS.MOD
539 op = OPS.MOD
540 stats['binary'] = True
540 stats['binary'] = True
541 stats['ops'][MOD_FILENODE] = 'modified file'
541 stats['ops'][MOD_FILENODE] = 'modified file'
542
542
543 # a real non-binary diff
543 # a real non-binary diff
544 if head['a_file'] or head['b_file']:
544 if head['a_file'] or head['b_file']:
545 # simulate splitlines, so we keep the line end part
545 # simulate splitlines, so we keep the line end part
546 diff = self.diff_splitter(chunk.diff)
546 diff = self.diff_splitter(chunk.diff)
547
547
548 # append each file to the diff size
548 # append each file to the diff size
549 raw_chunk_size = len(raw_diff)
549 raw_chunk_size = len(raw_diff)
550
550
551 exceeds_limit = raw_chunk_size > self.file_limit
551 exceeds_limit = raw_chunk_size > self.file_limit
552 self.cur_diff_size += raw_chunk_size
552 self.cur_diff_size += raw_chunk_size
553
553
554 try:
554 try:
555 # Check each file instead of the whole diff.
555 # Check each file instead of the whole diff.
556 # Diff will hide big files but still show small ones.
556 # Diff will hide big files but still show small ones.
557 # From the tests big files are fairly safe to be parsed
557 # From the tests big files are fairly safe to be parsed
558 # but the browser is the bottleneck.
558 # but the browser is the bottleneck.
559 if not self.show_full_diff and exceeds_limit:
559 if not self.show_full_diff and exceeds_limit:
560 log.debug('File `%s` exceeds current file_limit of %s',
560 log.debug('File `%s` exceeds current file_limit of %s',
561 safe_unicode(head['b_path']), self.file_limit)
561 safe_unicode(head['b_path']), self.file_limit)
562 raise DiffLimitExceeded(
562 raise DiffLimitExceeded(
563 'File Limit %s Exceeded', self.file_limit)
563 'File Limit %s Exceeded', self.file_limit)
564
564
565 self._check_large_diff()
565 self._check_large_diff()
566
566
567 raw_diff, chunks, _stats = self._new_parse_lines(diff)
567 raw_diff, chunks, _stats = self._new_parse_lines(diff)
568 stats['binary'] = False
568 stats['binary'] = False
569 stats['added'] = _stats[0]
569 stats['added'] = _stats[0]
570 stats['deleted'] = _stats[1]
570 stats['deleted'] = _stats[1]
571 # explicit mark that it's a modified file
571 # explicit mark that it's a modified file
572 if op == OPS.MOD:
572 if op == OPS.MOD:
573 stats['ops'][MOD_FILENODE] = 'modified file'
573 stats['ops'][MOD_FILENODE] = 'modified file'
574
574
575 except DiffLimitExceeded:
575 except DiffLimitExceeded:
576 diff_container = lambda _diff: \
576 diff_container = lambda _diff: \
577 LimitedDiffContainer(
577 LimitedDiffContainer(
578 self.diff_limit, self.cur_diff_size, _diff)
578 self.diff_limit, self.cur_diff_size, _diff)
579
579
580 limited_diff = True
580 limited_diff = True
581 chunks = []
581 chunks = []
582
582
583 else: # GIT format binary patch, or possibly empty diff
583 else: # GIT format binary patch, or possibly empty diff
584 if head['bin_patch']:
584 if head['bin_patch']:
585 # we have operation already extracted, but we mark simply
585 # we have operation already extracted, but we mark simply
586 # it's a diff we wont show for binary files
586 # it's a diff we wont show for binary files
587 stats['ops'][BIN_FILENODE] = 'binary diff hidden'
587 stats['ops'][BIN_FILENODE] = 'binary diff hidden'
588 chunks = []
588 chunks = []
589
589
590 # Hide content of deleted node by setting empty chunks
590 # Hide content of deleted node by setting empty chunks
591 if chunks and not self.show_full_diff and op == OPS.DEL:
591 if chunks and not self.show_full_diff and op == OPS.DEL:
592 # if not full diff mode show deleted file contents
592 # if not full diff mode show deleted file contents
593 # TODO: anderson: if the view is not too big, there is no way
593 # TODO: anderson: if the view is not too big, there is no way
594 # to see the content of the file
594 # to see the content of the file
595 chunks = []
595 chunks = []
596
596
597 chunks.insert(
597 chunks.insert(
598 0, [{'old_lineno': '',
598 0, [{'old_lineno': '',
599 'new_lineno': '',
599 'new_lineno': '',
600 'action': Action.CONTEXT,
600 'action': Action.CONTEXT,
601 'line': msg,
601 'line': msg,
602 } for _op, msg in stats['ops'].items()
602 } for _op, msg in stats['ops'].items()
603 if _op not in [MOD_FILENODE]])
603 if _op not in [MOD_FILENODE]])
604
604
605 original_filename = safe_unicode(head['a_path'])
605 original_filename = safe_unicode(head['a_path'])
606 _files.append({
606 _files.append({
607 'original_filename': original_filename,
607 'original_filename': original_filename,
608 'filename': safe_unicode(head['b_path']),
608 'filename': safe_unicode(head['b_path']),
609 'old_revision': head['a_blob_id'],
609 'old_revision': head['a_blob_id'],
610 'new_revision': head['b_blob_id'],
610 'new_revision': head['b_blob_id'],
611 'chunks': chunks,
611 'chunks': chunks,
612 'raw_diff': safe_unicode(raw_diff),
612 'raw_diff': safe_unicode(raw_diff),
613 'operation': op,
613 'operation': op,
614 'stats': stats,
614 'stats': stats,
615 'exceeds_limit': exceeds_limit,
615 'exceeds_limit': exceeds_limit,
616 'is_limited_diff': limited_diff,
616 'is_limited_diff': limited_diff,
617 })
617 })
618
618
619 sorter = lambda info: {OPS.ADD: 0, OPS.MOD: 1,
619 sorter = lambda info: {OPS.ADD: 0, OPS.MOD: 1,
620 OPS.DEL: 2}.get(info['operation'])
620 OPS.DEL: 2}.get(info['operation'])
621
621
622 return diff_container(sorted(_files, key=sorter))
622 return diff_container(sorted(_files, key=sorter))
623
623
624 # FIXME: NEWDIFFS: dan: this gets replaced by _new_parse_lines
624 # FIXME: NEWDIFFS: dan: this gets replaced by _new_parse_lines
def _parse_lines(self, diff_iter):
    """
    Parse a unified-diff line stream and return data for the template.

    :param diff_iter: iterator yielding diff lines (line endings preserved)
    :return: tuple of (raw diff string, list of chunks where each chunk is a
        list of rendered line dicts, and [added, deleted] counters)
    """

    stats = [0, 0]  # [lines added, lines deleted]
    chunks = []
    raw_diff = []

    try:
        line = next(diff_iter)

        while line:
            raw_diff.append(line)
            lines = []
            chunks.append(lines)

            # match a hunk header, e.g. '@@ -0,0 +1 @@'
            match = self._chunk_re.match(line)

            if not match:
                break

            gr = match.groups()
            # missing range parts default to 1 (e.g. '@@ -1 +1 @@')
            (old_line, old_end,
             new_line, new_end) = [int(x or 1) for x in gr[:-1]]
            old_line -= 1
            new_line -= 1

            context = len(gr) == 5
            # convert lengths into exclusive end line numbers
            old_end += old_line
            new_end += new_line

            if context:
                # skip context only if it's first line
                if int(gr[0]) > 1:
                    lines.append({
                        'old_lineno': '...',
                        'new_lineno': '...',
                        'action': Action.CONTEXT,
                        'line': line,
                    })

            line = next(diff_iter)

            # consume body lines until both sides of the hunk are exhausted
            while old_line < old_end or new_line < new_end:
                command = ' '
                if line:
                    command = line[0]

                affects_old = affects_new = False

                # ignore those if we don't expect them
                # NOTE(review): this `continue` does not advance `line`;
                # assumes '#'/'@' never start a line inside a hunk body,
                # otherwise the loop would spin -- confirm upstream.
                if command in '#@':
                    continue
                elif command == '+':
                    affects_new = True
                    action = Action.ADD
                    stats[0] += 1
                elif command == '-':
                    affects_old = True
                    action = Action.DELETE
                    stats[1] += 1
                else:
                    affects_old = affects_new = True
                    action = Action.UNMODIFIED

                # '\ No newline at end of file' markers are not counted
                # in the hunk ranges, so only real lines advance counters
                if not self._newline_marker.match(line):
                    old_line += affects_old
                    new_line += affects_new
                    lines.append({
                        'old_lineno': affects_old and old_line or '',
                        'new_lineno': affects_new and new_line or '',
                        'action': action,
                        'line': self._clean_line(line, command)
                    })
                    raw_diff.append(line)

                line = next(diff_iter)

                if self._newline_marker.match(line):
                    # we need to append to lines, since this is not
                    # counted in the line specs of diff
                    lines.append({
                        'old_lineno': '...',
                        'new_lineno': '...',
                        'action': Action.CONTEXT,
                        'line': self._clean_line(line, command)
                    })

    except StopIteration:
        # end of diff input reached; whatever was collected is the result
        pass
    return ''.join(raw_diff), chunks, stats
717
717
718 # FIXME: NEWDIFFS: dan: this replaces _parse_lines
718 # FIXME: NEWDIFFS: dan: this replaces _parse_lines
def _new_parse_lines(self, diff_iter):
    """
    Parse a unified-diff line stream and return data for the template.

    Unlike ``_parse_lines`` this emits structured hunk dicts (with section
    header and source/target ranges) and distinguishes the old/new
    "no newline at end of file" markers.

    :param diff_iter: iterator yielding diff lines (line endings preserved)
    :return: tuple of (raw diff string, list of hunk dicts,
        [added, deleted] counters)
    """

    stats = [0, 0]  # [lines added, lines deleted]
    chunks = []
    raw_diff = []

    try:
        line = next(diff_iter)

        while line:
            raw_diff.append(line)
            # match a hunk header, e.g. '@@ -0,0 +1 @@'
            match = self._chunk_re.match(line)

            if not match:
                break

            gr = match.groups()
            # missing range parts default to 1 (e.g. '@@ -1 +1 @@')
            (old_line, old_end,
             new_line, new_end) = [int(x or 1) for x in gr[:-1]]

            lines = []
            hunk = {
                'section_header': gr[-1],
                'source_start': old_line,
                'source_length': old_end,
                'target_start': new_line,
                'target_length': new_end,
                'lines': lines,
            }
            chunks.append(hunk)

            old_line -= 1
            new_line -= 1

            context = len(gr) == 5
            # convert lengths into exclusive end line numbers
            old_end += old_line
            new_end += new_line

            line = next(diff_iter)

            # consume body lines until both sides of the hunk are exhausted
            while old_line < old_end or new_line < new_end:
                command = ' '
                if line:
                    command = line[0]

                affects_old = affects_new = False

                # ignore those if we don't expect them
                # NOTE(review): this `continue` does not advance `line`;
                # assumes '#'/'@' never start a line inside a hunk body,
                # otherwise the loop would spin -- confirm upstream.
                if command in '#@':
                    continue
                elif command == '+':
                    affects_new = True
                    action = Action.ADD
                    stats[0] += 1
                elif command == '-':
                    affects_old = True
                    action = Action.DELETE
                    stats[1] += 1
                else:
                    affects_old = affects_new = True
                    action = Action.UNMODIFIED

                # '\ No newline at end of file' markers are not counted
                # in the hunk ranges, so only real lines advance counters
                if not self._newline_marker.match(line):
                    old_line += affects_old
                    new_line += affects_new
                    lines.append({
                        'old_lineno': affects_old and old_line or '',
                        'new_lineno': affects_new and new_line or '',
                        'action': action,
                        'line': self._clean_line(line, command)
                    })
                    raw_diff.append(line)

                line = next(diff_iter)

                if self._newline_marker.match(line):
                    # we need to append to lines, since this is not
                    # counted in the line specs of diff
                    if affects_old:
                        action = Action.OLD_NO_NL
                    elif affects_new:
                        action = Action.NEW_NO_NL
                    else:
                        raise Exception('invalid context for no newline')

                    lines.append({
                        'old_lineno': None,
                        'new_lineno': None,
                        'action': action,
                        'line': self._clean_line(line, command)
                    })

    except StopIteration:
        # end of diff input reached; whatever was collected is the result
        pass

    return ''.join(raw_diff), chunks, stats
819
819
820 def _safe_id(self, idstring):
820 def _safe_id(self, idstring):
821 """Make a string safe for including in an id attribute.
821 """Make a string safe for including in an id attribute.
822
822
823 The HTML spec says that id attributes 'must begin with
823 The HTML spec says that id attributes 'must begin with
824 a letter ([A-Za-z]) and may be followed by any number
824 a letter ([A-Za-z]) and may be followed by any number
825 of letters, digits ([0-9]), hyphens ("-"), underscores
825 of letters, digits ([0-9]), hyphens ("-"), underscores
826 ("_"), colons (":"), and periods (".")'. These regexps
826 ("_"), colons (":"), and periods (".")'. These regexps
827 are slightly over-zealous, in that they remove colons
827 are slightly over-zealous, in that they remove colons
828 and periods unnecessarily.
828 and periods unnecessarily.
829
829
830 Whitespace is transformed into underscores, and then
830 Whitespace is transformed into underscores, and then
831 anything which is not a hyphen or a character that
831 anything which is not a hyphen or a character that
832 matches \w (alphanumerics and underscore) is removed.
832 matches \w (alphanumerics and underscore) is removed.
833
833
834 """
834 """
835 # Transform all whitespace to underscore
835 # Transform all whitespace to underscore
836 idstring = re.sub(r'\s', "_", '%s' % idstring)
836 idstring = re.sub(r'\s', "_", '%s' % idstring)
837 # Remove everything that is not a hyphen or a member of \w
837 # Remove everything that is not a hyphen or a member of \w
838 idstring = re.sub(r'(?!-)\W', "", idstring).lower()
838 idstring = re.sub(r'(?!-)\W', "", idstring).lower()
839 return idstring
839 return idstring
840
840
@classmethod
def diff_splitter(cls, string):
    """
    Split a diff string on newline characters only, emulating
    ``str.splitlines()`` while keeping the trailing newline on every
    line that had one. Yields nothing for empty input.
    """
    if not string:
        return
    if string == '\n':
        yield '\n'
        return

    ends_with_newline = string.endswith('\n')
    parts = string.split('\n')
    if ends_with_newline:
        # split() produces a trailing empty element when the input
        # ends with a newline; drop it
        parts = parts[:-1]

    total = len(parts)
    for position, part in enumerate(parts, start=1):
        is_last = position == total
        if is_last and not ends_with_newline:
            yield safe_unicode(part)
        else:
            yield safe_unicode(part) + '\n'
866
866
def prepare(self, inline_diff=True):
    """
    Run the parser over the wrapped diff and cache the parsed result
    on this instance for subsequent rendering.

    :param inline_diff: forwarded to the configured parser
    :return: A list of dicts with diff information.
    """
    result = self._parser(inline_diff=inline_diff)
    self.parsed = True
    self.parsed_diff = result
    return result
877
877
def as_raw(self, diff_lines=None):
    """
    Return the raw diff this processor was created with, as a byte string.

    :param diff_lines: unused; kept for interface compatibility
    """
    raw = self._diff.raw
    return raw
883
883
def as_html(self, table_class='code-difftable', line_class='line',
            old_lineno_class='lineno old', new_lineno_class='lineno new',
            code_class='code', enable_comments=False, parsed_lines=None):
    """
    Return given diff as html table with customized css classes.

    :param table_class: css class of the outer <table>
    :param line_class: css class applied to every <tr>
    :param old_lineno_class: css class of the old line-number cell
    :param new_lineno_class: css class of the new line-number cell
    :param code_class: css class of the code cell
    :param enable_comments: when True, render add-comment icons on
        non-context lines
    :param parsed_lines: optional pre-parsed diff structure; defaults to
        this instance's cached ``parsed_diff``
    :return: HTML string, or None when the diff rendered no rows
    """
    # TODO(marcink): not sure how to pass in translator
    # here in an efficient way, leave the _ for proper gettext extraction
    _ = lambda s: s

    def _link_to_if(condition, label, url):
        """
        Generates a link if condition is meet or just the label if not.
        """

        if condition:
            return '''<a href="%(url)s" class="tooltip"
                title="%(title)s">%(label)s</a>''' % {
                'title': _('Click to select line'),
                'url': url,
                'label': label
            }
        else:
            return label
    # lazily parse on first use
    if not self.parsed:
        self.prepare()

    diff_lines = self.parsed_diff
    if parsed_lines:
        diff_lines = parsed_lines

    _html_empty = True
    _html = []
    _html.append('''<table class="%(table_class)s">\n''' % {
        'table_class': table_class
    })

    for diff in diff_lines:
        for line in diff['chunks']:
            _html_empty = False
            for change in line:
                _html.append('''<tr class="%(lc)s %(action)s">\n''' % {
                    'lc': line_class,
                    'action': change['action']
                })
                anchor_old_id = ''
                anchor_new_id = ''
                # anchors allow deep-linking to a specific old/new line
                anchor_old = "%(filename)s_o%(oldline_no)s" % {
                    'filename': self._safe_id(diff['filename']),
                    'oldline_no': change['old_lineno']
                }
                anchor_new = "%(filename)s_n%(oldline_no)s" % {
                    'filename': self._safe_id(diff['filename']),
                    'oldline_no': change['new_lineno']
                }
                # '...' placeholder rows get no id/link
                cond_old = (change['old_lineno'] != '...' and
                            change['old_lineno'])
                cond_new = (change['new_lineno'] != '...' and
                            change['new_lineno'])
                if cond_old:
                    anchor_old_id = 'id="%s"' % anchor_old
                if cond_new:
                    anchor_new_id = 'id="%s"' % anchor_new

                if change['action'] != Action.CONTEXT:
                    anchor_link = True
                else:
                    anchor_link = False

                ###########################################################
                # COMMENT ICONS
                ###########################################################
                _html.append('''\t<td class="add-comment-line"><span class="add-comment-content">''')

                if enable_comments and change['action'] != Action.CONTEXT:
                    _html.append('''<a href="#"><span class="icon-comment-add"></span></a>''')

                _html.append('''</span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td>\n''')

                ###########################################################
                # OLD LINE NUMBER
                ###########################################################
                _html.append('''\t<td %(a_id)s class="%(olc)s">''' % {
                    'a_id': anchor_old_id,
                    'olc': old_lineno_class
                })

                _html.append('''%(link)s''' % {
                    'link': _link_to_if(anchor_link, change['old_lineno'],
                                        '#%s' % anchor_old)
                })
                _html.append('''</td>\n''')
                ###########################################################
                # NEW LINE NUMBER
                ###########################################################

                _html.append('''\t<td %(a_id)s class="%(nlc)s">''' % {
                    'a_id': anchor_new_id,
                    'nlc': new_lineno_class
                })

                _html.append('''%(link)s''' % {
                    'link': _link_to_if(anchor_link, change['new_lineno'],
                                        '#%s' % anchor_new)
                })
                _html.append('''</td>\n''')
                ###########################################################
                # CODE
                ###########################################################
                code_classes = [code_class]
                # context rows and comment-disabled renders are not
                # commentable
                if (not enable_comments or
                        change['action'] == Action.CONTEXT):
                    code_classes.append('no-comment')
                _html.append('\t<td class="%s">' % ' '.join(code_classes))
                _html.append('''\n\t\t<pre>%(code)s</pre>\n''' % {
                    'code': change['line']
                })

                _html.append('''\t</td>''')
                _html.append('''\n</tr>\n''')
    _html.append('''</table>''')
    if _html_empty:
        return None
    return ''.join(_html)
1008
1008
def stat(self):
    """
    Return a tuple of (added, removed) line counts for this instance.
    """
    added, removed = self.adds, self.removes
    return added, removed
1014
1014
def get_context_of_line(
        self, path, diff_line=None, context_before=3, context_after=3):
    """
    Return the context lines around the specified diff line.

    :param path: filename of the file inside this diff
    :param diff_line: a :class:`DiffLineNumber`-like (old, new) pair;
        at least one of the two members must be None
    :param context_before: lines of context before the matched line
    :param context_after: lines of context after the matched line
    :return: list of (action, line) tuples
    """
    assert self.parsed, "DiffProcessor is not initialized."

    if None not in diff_line:
        raise ValueError(
            "Cannot specify both line numbers: {}".format(diff_line))

    file_diff = self._get_file_diff(path)
    chunk, idx = self._find_chunk_line_index(file_diff, diff_line)

    # clamp the window at the chunk start; slicing handles the end
    window_start = max(idx - context_before, 0)
    window_end = idx + context_after + 1
    selected = chunk[window_start:window_end]

    line_contents = [
        _context_line(entry) for entry in selected
        if _is_diff_content(entry)]
    # TODO: johbo: Interim fixup, the diff chunks drop the final newline.
    # Once they are fixed, we can drop this line here.
    if line_contents:
        last_action, last_text = line_contents[-1]
        line_contents[-1] = (last_action, last_text.rstrip('\n') + '\n')
    return line_contents
1044
1044
def find_context(self, path, context, offset=0):
    """
    Finds the given `context` inside of the diff.

    Use the parameter `offset` to specify which offset the target line has
    inside of the given `context`. This way the correct diff line will be
    returned.

    :param path: filename of the file inside this diff
    :param context: sequence of (action, line) tuples to search for
    :param offset: Shall be used to specify the offset of the main line
        within the given `context`.
    :return: list of :class:`DiffLineNumber` positions of the target line
        for every occurrence of `context`
    """
    if offset < 0 or offset >= len(context):
        raise ValueError(
            "Only positive values up to the length of the context "
            "minus one are allowed.")

    matches = []
    file_diff = self._get_file_diff(path)

    for chunk in file_diff['chunks']:
        context_iter = iter(context)
        for line_idx, line in enumerate(chunk):
            try:
                # advance through the expected context while lines match
                if _context_line(line) == next(context_iter):
                    continue
            except StopIteration:
                # the whole context was consumed -> match ending here
                matches.append((line_idx, chunk))
                # restart the search for further occurrences
                # NOTE(review): the current non-matching line is not
                # re-tested against the fresh iterator -- confirm that
                # overlapping matches are intentionally not supported.
                context_iter = iter(context)

        # Increment position and trigger StopIteration
        # if we had a match at the end
        # NOTE(review): assumes every chunk is non-empty, otherwise
        # line_idx would be unbound here -- confirm.
        line_idx += 1
        try:
            next(context_iter)
        except StopIteration:
            matches.append((line_idx, chunk))

    # translate match end positions back to the requested offset line
    effective_offset = len(context) - offset
    found_at_diff_lines = [
        _line_to_diff_line_number(chunk[idx - effective_offset])
        for idx, chunk in matches]

    return found_at_diff_lines
1088
1088
1089 def _get_file_diff(self, path):
1089 def _get_file_diff(self, path):
1090 for file_diff in self.parsed_diff:
1090 for file_diff in self.parsed_diff:
1091 if file_diff['filename'] == path:
1091 if file_diff['filename'] == path:
1092 break
1092 break
1093 else:
1093 else:
1094 raise FileNotInDiffException("File {} not in diff".format(path))
1094 raise FileNotInDiffException("File {} not in diff".format(path))
1095 return file_diff
1095 return file_diff
1096
1096
1097 def _find_chunk_line_index(self, file_diff, diff_line):
1097 def _find_chunk_line_index(self, file_diff, diff_line):
1098 for chunk in file_diff['chunks']:
1098 for chunk in file_diff['chunks']:
1099 for idx, line in enumerate(chunk):
1099 for idx, line in enumerate(chunk):
1100 if line['old_lineno'] == diff_line.old:
1100 if line['old_lineno'] == diff_line.old:
1101 return chunk, idx
1101 return chunk, idx
1102 if line['new_lineno'] == diff_line.new:
1102 if line['new_lineno'] == diff_line.new:
1103 return chunk, idx
1103 return chunk, idx
1104 raise LineNotInDiffException(
1104 raise LineNotInDiffException(
1105 "The line {} is not part of the diff.".format(diff_line))
1105 "The line {} is not part of the diff.".format(diff_line))
1106
1106
1107
1107
1108 def _is_diff_content(line):
1108 def _is_diff_content(line):
1109 return line['action'] in (
1109 return line['action'] in (
1110 Action.UNMODIFIED, Action.ADD, Action.DELETE)
1110 Action.UNMODIFIED, Action.ADD, Action.DELETE)
1111
1111
1112
1112
1113 def _context_line(line):
1113 def _context_line(line):
1114 return (line['action'], line['line'])
1114 return (line['action'], line['line'])
1115
1115
1116
1116
1117 DiffLineNumber = collections.namedtuple('DiffLineNumber', ['old', 'new'])
1117 DiffLineNumber = collections.namedtuple('DiffLineNumber', ['old', 'new'])
1118
1118
1119
1119
1120 def _line_to_diff_line_number(line):
1120 def _line_to_diff_line_number(line):
1121 new_line_no = line['new_lineno'] or None
1121 new_line_no = line['new_lineno'] or None
1122 old_line_no = line['old_lineno'] or None
1122 old_line_no = line['old_lineno'] or None
1123 return DiffLineNumber(old=old_line_no, new=new_line_no)
1123 return DiffLineNumber(old=old_line_no, new=new_line_no)
1124
1124
1125
1125
1126 class FileNotInDiffException(Exception):
1126 class FileNotInDiffException(Exception):
1127 """
1127 """
1128 Raised when the context for a missing file is requested.
1128 Raised when the context for a missing file is requested.
1129
1129
1130 If you request the context for a line in a file which is not part of the
1130 If you request the context for a line in a file which is not part of the
1131 given diff, then this exception is raised.
1131 given diff, then this exception is raised.
1132 """
1132 """
1133
1133
1134
1134
1135 class LineNotInDiffException(Exception):
1135 class LineNotInDiffException(Exception):
1136 """
1136 """
1137 Raised when the context for a missing line is requested.
1137 Raised when the context for a missing line is requested.
1138
1138
1139 If you request the context for a line in a file and this line is not
1139 If you request the context for a line in a file and this line is not
1140 part of the given diff, then this exception is raised.
1140 part of the given diff, then this exception is raised.
1141 """
1141 """
1142
1142
1143
1143
1144 class DiffLimitExceeded(Exception):
1144 class DiffLimitExceeded(Exception):
1145 pass
1145 pass
1146
1146
1147
1147
1148 # NOTE(marcink): if diffs.mako change, probably this
1148 # NOTE(marcink): if diffs.mako change, probably this
1149 # needs a bump to next version
1149 # needs a bump to next version
1150 CURRENT_DIFF_VERSION = 'v5'
1150 CURRENT_DIFF_VERSION = 'v5'
1151
1151
1152
1152
1153 def _cleanup_cache_file(cached_diff_file):
1153 def _cleanup_cache_file(cached_diff_file):
1154 # cleanup file to not store it "damaged"
1154 # cleanup file to not store it "damaged"
1155 try:
1155 try:
1156 os.remove(cached_diff_file)
1156 os.remove(cached_diff_file)
1157 except Exception:
1157 except Exception:
1158 log.exception('Failed to cleanup path %s', cached_diff_file)
1158 log.exception('Failed to cleanup path %s', cached_diff_file)
1159
1159
1160
1160
1161 def _get_compression_mode(cached_diff_file):
1161 def _get_compression_mode(cached_diff_file):
1162 mode = 'bz2'
1162 mode = 'bz2'
1163 if 'mode:plain' in cached_diff_file:
1163 if 'mode:plain' in cached_diff_file:
1164 mode = 'plain'
1164 mode = 'plain'
1165 elif 'mode:gzip' in cached_diff_file:
1165 elif 'mode:gzip' in cached_diff_file:
1166 mode = 'gzip'
1166 mode = 'gzip'
1167 return mode
1167 return mode
1168
1168
1169
1169
1170 def cache_diff(cached_diff_file, diff, commits):
1170 def cache_diff(cached_diff_file, diff, commits):
1171 compression_mode = _get_compression_mode(cached_diff_file)
1171 compression_mode = _get_compression_mode(cached_diff_file)
1172
1172
1173 struct = {
1173 struct = {
1174 'version': CURRENT_DIFF_VERSION,
1174 'version': CURRENT_DIFF_VERSION,
1175 'diff': diff,
1175 'diff': diff,
1176 'commits': commits
1176 'commits': commits
1177 }
1177 }
1178
1178
1179 start = time.time()
1179 start = time.time()
1180 try:
1180 try:
1181 if compression_mode == 'plain':
1181 if compression_mode == 'plain':
1182 with open(cached_diff_file, 'wb') as f:
1182 with open(cached_diff_file, 'wb') as f:
1183 pickle.dump(struct, f)
1183 pickle.dump(struct, f)
1184 elif compression_mode == 'gzip':
1184 elif compression_mode == 'gzip':
1185 with gzip.GzipFile(cached_diff_file, 'wb') as f:
1185 with gzip.GzipFile(cached_diff_file, 'wb') as f:
1186 pickle.dump(struct, f)
1186 pickle.dump(struct, f)
1187 else:
1187 else:
1188 with bz2.BZ2File(cached_diff_file, 'wb') as f:
1188 with bz2.BZ2File(cached_diff_file, 'wb') as f:
1189 pickle.dump(struct, f)
1189 pickle.dump(struct, f)
1190 except Exception:
1190 except Exception:
1191 log.warn('Failed to save cache', exc_info=True)
1191 log.warn('Failed to save cache', exc_info=True)
1192 _cleanup_cache_file(cached_diff_file)
1192 _cleanup_cache_file(cached_diff_file)
1193
1193
1194 log.debug('Saved diff cache under %s in %.4fs', cached_diff_file, time.time() - start)
1194 log.debug('Saved diff cache under %s in %.4fs', cached_diff_file, time.time() - start)
1195
1195
1196
1196
1197 def load_cached_diff(cached_diff_file):
1197 def load_cached_diff(cached_diff_file):
1198 compression_mode = _get_compression_mode(cached_diff_file)
1198 compression_mode = _get_compression_mode(cached_diff_file)
1199
1199
1200 default_struct = {
1200 default_struct = {
1201 'version': CURRENT_DIFF_VERSION,
1201 'version': CURRENT_DIFF_VERSION,
1202 'diff': None,
1202 'diff': None,
1203 'commits': None
1203 'commits': None
1204 }
1204 }
1205
1205
1206 has_cache = os.path.isfile(cached_diff_file)
1206 has_cache = os.path.isfile(cached_diff_file)
1207 if not has_cache:
1207 if not has_cache:
1208 log.debug('Reading diff cache file failed %s', cached_diff_file)
1208 log.debug('Reading diff cache file failed %s', cached_diff_file)
1209 return default_struct
1209 return default_struct
1210
1210
1211 data = None
1211 data = None
1212
1212
1213 start = time.time()
1213 start = time.time()
1214 try:
1214 try:
1215 if compression_mode == 'plain':
1215 if compression_mode == 'plain':
1216 with open(cached_diff_file, 'rb') as f:
1216 with open(cached_diff_file, 'rb') as f:
1217 data = pickle.load(f)
1217 data = pickle.load(f)
1218 elif compression_mode == 'gzip':
1218 elif compression_mode == 'gzip':
1219 with gzip.GzipFile(cached_diff_file, 'rb') as f:
1219 with gzip.GzipFile(cached_diff_file, 'rb') as f:
1220 data = pickle.load(f)
1220 data = pickle.load(f)
1221 else:
1221 else:
1222 with bz2.BZ2File(cached_diff_file, 'rb') as f:
1222 with bz2.BZ2File(cached_diff_file, 'rb') as f:
1223 data = pickle.load(f)
1223 data = pickle.load(f)
1224 except Exception:
1224 except Exception:
1225 log.warn('Failed to read diff cache file', exc_info=True)
1225 log.warn('Failed to read diff cache file', exc_info=True)
1226
1226
1227 if not data:
1227 if not data:
1228 data = default_struct
1228 data = default_struct
1229
1229
1230 if not isinstance(data, dict):
1230 if not isinstance(data, dict):
1231 # old version of data ?
1231 # old version of data ?
1232 data = default_struct
1232 data = default_struct
1233
1233
1234 # check version
1234 # check version
1235 if data.get('version') != CURRENT_DIFF_VERSION:
1235 if data.get('version') != CURRENT_DIFF_VERSION:
1236 # purge cache
1236 # purge cache
1237 _cleanup_cache_file(cached_diff_file)
1237 _cleanup_cache_file(cached_diff_file)
1238 return default_struct
1238 return default_struct
1239
1239
1240 log.debug('Loaded diff cache from %s in %.4fs', cached_diff_file, time.time() - start)
1240 log.debug('Loaded diff cache from %s in %.4fs', cached_diff_file, time.time() - start)
1241
1241
1242 return data
1242 return data
1243
1243
1244
1244
1245 def generate_diff_cache_key(*args):
1245 def generate_diff_cache_key(*args):
1246 """
1246 """
1247 Helper to generate a cache key using arguments
1247 Helper to generate a cache key using arguments
1248 """
1248 """
1249 def arg_mapper(input_param):
1249 def arg_mapper(input_param):
1250 input_param = safe_str(input_param)
1250 input_param = safe_str(input_param)
1251 # we cannot allow '/' in arguments since it would allow
1251 # we cannot allow '/' in arguments since it would allow
1252 # subdirectory usage
1252 # subdirectory usage
1253 input_param.replace('/', '_')
1253 input_param.replace('/', '_')
1254 return input_param or None # prevent empty string arguments
1254 return input_param or None # prevent empty string arguments
1255
1255
1256 return '_'.join([
1256 return '_'.join([
1257 '{}' for i in range(len(args))]).format(*map(arg_mapper, args))
1257 '{}' for i in range(len(args))]).format(*map(arg_mapper, args))
1258
1258
1259
1259
1260 def diff_cache_exist(cache_storage, *args):
1260 def diff_cache_exist(cache_storage, *args):
1261 """
1261 """
1262 Based on all generated arguments check and return a cache path
1262 Based on all generated arguments check and return a cache path
1263 """
1263 """
1264 args = list(args) + ['mode:gzip']
1264 args = list(args) + ['mode:gzip']
1265 cache_key = generate_diff_cache_key(*args)
1265 cache_key = generate_diff_cache_key(*args)
1266 cache_file_path = os.path.join(cache_storage, cache_key)
1266 cache_file_path = os.path.join(cache_storage, cache_key)
1267 # prevent path traversal attacks using some param that have e.g '../../'
1267 # prevent path traversal attacks using some param that have e.g '../../'
1268 if not os.path.abspath(cache_file_path).startswith(cache_storage):
1268 if not os.path.abspath(cache_file_path).startswith(cache_storage):
1269 raise ValueError('Final path must be within {}'.format(cache_storage))
1269 raise ValueError('Final path must be within {}'.format(cache_storage))
1270
1270
1271 return cache_file_path
1271 return cache_file_path
@@ -1,48 +1,49 b''
1 from rhodecode.lib.str_utils import safe_bytes
1 from rhodecode.lib.str_utils import safe_bytes
2 from rhodecode.lib.encrypt import encrypt_data, validate_and_decrypt_data
2 from rhodecode.lib.encrypt import encrypt_data, validate_and_decrypt_data
3 from rhodecode.lib.encrypt2 import Encryptor
3 from rhodecode.lib.encrypt2 import Encryptor
4
4
5 ALLOWED_ALGOS = ['aes', 'fernet']
5 ALLOWED_ALGOS = ['aes', 'fernet']
6
6
7
7
8 def get_default_algo():
8 def get_default_algo():
9 import rhodecode
9 import rhodecode
10 return rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
10 return rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
11
11
12
12
13 def encrypt_value(value: bytes, enc_key: bytes, algo: str = ''):
13 def encrypt_value(value: bytes, enc_key: bytes, algo: str = ''):
14 if not algo:
14 if not algo:
15 # not explicit algo, just use what's set by config
15 # not explicit algo, just use what's set by config
16 algo = get_default_algo()
16 algo = get_default_algo()
17
17 if algo not in ALLOWED_ALGOS:
18 if algo not in ALLOWED_ALGOS:
18 ValueError(f'Bad encryption algorithm, should be {ALLOWED_ALGOS}, got: {algo}')
19 ValueError(f'Bad encryption algorithm, should be {ALLOWED_ALGOS}, got: {algo}')
19
20
20 enc_key = safe_bytes(enc_key)
21 enc_key = safe_bytes(enc_key)
21 value = safe_bytes(value)
22 value = safe_bytes(value)
22
23
23 if algo == 'aes':
24 if algo == 'aes':
24 return encrypt_data(value, enc_key=enc_key)
25 return encrypt_data(value, enc_key=enc_key)
25 if algo == 'fernet':
26 if algo == 'fernet':
26 return Encryptor(enc_key).encrypt(value)
27 return Encryptor(enc_key).encrypt(value)
27
28
28 return value
29 return value
29
30
30
31
31 def decrypt_value(value: bytes, enc_key: bytes, algo: str = '', strict_mode: bool = False):
32 def decrypt_value(value: bytes, enc_key: bytes, algo: str = '', strict_mode: bool = False):
32
33
33 if not algo:
34 if not algo:
34 # not explicit algo, just use what's set by config
35 # not explicit algo, just use what's set by config
35 algo = get_default_algo()
36 algo = get_default_algo()
36 if algo not in ALLOWED_ALGOS:
37 if algo not in ALLOWED_ALGOS:
37 ValueError(f'Bad encryption algorithm, should be {ALLOWED_ALGOS}, got: {algo}')
38 ValueError(f'Bad encryption algorithm, should be {ALLOWED_ALGOS}, got: {algo}')
38
39
39 enc_key = safe_bytes(enc_key)
40 enc_key = safe_bytes(enc_key)
40 value = safe_bytes(value)
41 value = safe_bytes(value)
41 safe = not strict_mode
42 safe = not strict_mode
42
43
43 if algo == 'aes':
44 if algo == 'aes':
44 return validate_and_decrypt_data(value, enc_key, safe=safe)
45 return validate_and_decrypt_data(value, enc_key, safe=safe)
45 if algo == 'fernet':
46 if algo == 'fernet':
46 return Encryptor(enc_key).decrypt(value, safe=safe)
47 return Encryptor(enc_key).decrypt(value, safe=safe)
47
48
48 return value
49 return value
@@ -1,152 +1,152 b''
1 # -*- coding: utf-8 -*-
1
2
2
3 # Copyright (C) 2014-2020 RhodeCode GmbH
3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 Generic encryption library for RhodeCode
23 Generic encryption library for RhodeCode
24 """
24 """
25
25
26 import base64
26 import base64
27 import logging
27 import logging
28
28
29 from Crypto.Cipher import AES
29 from Crypto.Cipher import AES
30 from Crypto import Random
30 from Crypto import Random
31 from Crypto.Hash import HMAC, SHA256
31 from Crypto.Hash import HMAC, SHA256
32
32
33 from rhodecode.lib.str_utils import safe_bytes, safe_str
33 from rhodecode.lib.str_utils import safe_bytes, safe_str
34 from rhodecode.lib.exceptions import signature_verification_error
34 from rhodecode.lib.exceptions import signature_verification_error
35
35
36
36
37 class InvalidDecryptedValue(str):
37 class InvalidDecryptedValue(str):
38
38
39 def __new__(cls, content):
39 def __new__(cls, content):
40 """
40 """
41 This will generate something like this::
41 This will generate something like this::
42 <InvalidDecryptedValue(QkWusFgLJXR6m42v...)>
42 <InvalidDecryptedValue(QkWusFgLJXR6m42v...)>
43 And represent a safe indicator that encryption key is broken
43 And represent a safe indicator that encryption key is broken
44 """
44 """
45 content = '<{}({}...)>'.format(cls.__name__, content[:16])
45 content = '<{}({}...)>'.format(cls.__name__, content[:16])
46 return str.__new__(cls, content)
46 return str.__new__(cls, content)
47
47
48 KEY_FORMAT = b'enc$aes_hmac${1}'
48 KEY_FORMAT = b'enc$aes_hmac${1}'
49
49
50
50
51 class AESCipher(object):
51 class AESCipher(object):
52
52
53 def __init__(self, key: bytes, hmac=False, strict_verification=True):
53 def __init__(self, key: bytes, hmac=False, strict_verification=True):
54
54
55 if not key:
55 if not key:
56 raise ValueError('passed key variable is empty')
56 raise ValueError('passed key variable is empty')
57 self.strict_verification = strict_verification
57 self.strict_verification = strict_verification
58 self.block_size = 32
58 self.block_size = 32
59 self.hmac_size = 32
59 self.hmac_size = 32
60 self.hmac = hmac
60 self.hmac = hmac
61
61
62 self.key = SHA256.new(safe_bytes(key)).digest()
62 self.key = SHA256.new(safe_bytes(key)).digest()
63 self.hmac_key = SHA256.new(self.key).digest()
63 self.hmac_key = SHA256.new(self.key).digest()
64
64
65 def verify_hmac_signature(self, raw_data):
65 def verify_hmac_signature(self, raw_data):
66 org_hmac_signature = raw_data[-self.hmac_size:]
66 org_hmac_signature = raw_data[-self.hmac_size:]
67 data_without_sig = raw_data[:-self.hmac_size]
67 data_without_sig = raw_data[:-self.hmac_size]
68 recomputed_hmac = HMAC.new(
68 recomputed_hmac = HMAC.new(
69 self.hmac_key, data_without_sig, digestmod=SHA256).digest()
69 self.hmac_key, data_without_sig, digestmod=SHA256).digest()
70 return org_hmac_signature == recomputed_hmac
70 return org_hmac_signature == recomputed_hmac
71
71
72 def encrypt(self, raw: bytes):
72 def encrypt(self, raw: bytes):
73 raw = self._pad(raw)
73 raw = self._pad(raw)
74 iv = Random.new().read(AES.block_size)
74 iv = Random.new().read(AES.block_size)
75 cipher = AES.new(self.key, AES.MODE_CBC, iv)
75 cipher = AES.new(self.key, AES.MODE_CBC, iv)
76 enc_value = cipher.encrypt(raw)
76 enc_value = cipher.encrypt(raw)
77
77
78 hmac_signature = b''
78 hmac_signature = b''
79 if self.hmac:
79 if self.hmac:
80 # compute hmac+sha256 on iv + enc text, we use
80 # compute hmac+sha256 on iv + enc text, we use
81 # encrypt then mac method to create the signature
81 # encrypt then mac method to create the signature
82 hmac_signature = HMAC.new(
82 hmac_signature = HMAC.new(
83 self.hmac_key, iv + enc_value, digestmod=SHA256).digest()
83 self.hmac_key, iv + enc_value, digestmod=SHA256).digest()
84
84
85 return base64.b64encode(iv + enc_value + hmac_signature)
85 return base64.b64encode(iv + enc_value + hmac_signature)
86
86
87 def decrypt(self, enc, safe=True) -> bytes | InvalidDecryptedValue:
87 def decrypt(self, enc, safe=True) -> bytes | InvalidDecryptedValue:
88 enc_org = enc
88 enc_org = enc
89 try:
89 try:
90 enc = base64.b64decode(enc)
90 enc = base64.b64decode(enc)
91 except Exception:
91 except Exception:
92 logging.exception('Failed Base64 decode')
92 logging.exception('Failed Base64 decode')
93 raise signature_verification_error('Failed Base64 decode')
93 raise signature_verification_error('Failed Base64 decode')
94
94
95 if self.hmac and len(enc) > self.hmac_size:
95 if self.hmac and len(enc) > self.hmac_size:
96 if self.verify_hmac_signature(enc):
96 if self.verify_hmac_signature(enc):
97 # cut off the HMAC verification digest
97 # cut off the HMAC verification digest
98 enc = enc[:-self.hmac_size]
98 enc = enc[:-self.hmac_size]
99 else:
99 else:
100
100
101 decrypt_fail = InvalidDecryptedValue(safe_str(enc_org))
101 decrypt_fail = InvalidDecryptedValue(safe_str(enc_org))
102 if safe:
102 if safe:
103 return decrypt_fail
103 return decrypt_fail
104 raise signature_verification_error(decrypt_fail)
104 raise signature_verification_error(decrypt_fail)
105
105
106 iv = enc[:AES.block_size]
106 iv = enc[:AES.block_size]
107 cipher = AES.new(self.key, AES.MODE_CBC, iv)
107 cipher = AES.new(self.key, AES.MODE_CBC, iv)
108 return self._unpad(cipher.decrypt(enc[AES.block_size:]))
108 return self._unpad(cipher.decrypt(enc[AES.block_size:]))
109
109
110 def _pad(self, s):
110 def _pad(self, s):
111 block_pad = (self.block_size - len(s) % self.block_size)
111 block_pad = (self.block_size - len(s) % self.block_size)
112 return s + block_pad * safe_bytes(chr(block_pad))
112 return s + block_pad * safe_bytes(chr(block_pad))
113
113
114 @staticmethod
114 @staticmethod
115 def _unpad(s):
115 def _unpad(s):
116 return s[:-ord(s[len(s)-1:])]
116 return s[:-ord(s[len(s)-1:])]
117
117
118
118
119 def validate_and_decrypt_data(enc_data, enc_key, enc_strict_mode=False, safe=True):
119 def validate_and_decrypt_data(enc_data, enc_key, enc_strict_mode=False, safe=True):
120 enc_data = safe_str(enc_data)
120 enc_data = safe_str(enc_data)
121
121
122 parts = enc_data.split('$', 3)
122 parts = enc_data.split('$', 3)
123 if len(parts) != 3:
123 if len(parts) != 3:
124 raise ValueError(f'Encrypted Data has invalid format, expected {KEY_FORMAT}, got {parts}')
124 raise ValueError(f'Encrypted Data has invalid format, expected {KEY_FORMAT}, got {parts}')
125
125
126 enc_type = parts[1]
126 enc_type = parts[1]
127 enc_data_part = parts[2]
127 enc_data_part = parts[2]
128
128
129 if parts[0] != 'enc':
129 if parts[0] != 'enc':
130 # parts ok but without our header ?
130 # parts ok but without our header ?
131 return enc_data
131 return enc_data
132
132
133 # at that stage we know it's our encryption
133 # at that stage we know it's our encryption
134 if enc_type == 'aes':
134 if enc_type == 'aes':
135 decrypted_data = AESCipher(enc_key).decrypt(enc_data_part, safe=safe)
135 decrypted_data = AESCipher(enc_key).decrypt(enc_data_part, safe=safe)
136 elif enc_type == 'aes_hmac':
136 elif enc_type == 'aes_hmac':
137 decrypted_data = AESCipher(
137 decrypted_data = AESCipher(
138 enc_key, hmac=True,
138 enc_key, hmac=True,
139 strict_verification=enc_strict_mode).decrypt(enc_data_part, safe=safe)
139 strict_verification=enc_strict_mode).decrypt(enc_data_part, safe=safe)
140
140
141 else:
141 else:
142 raise ValueError(
142 raise ValueError(
143 f'Encryption type part is wrong, must be `aes` '
143 f'Encryption type part is wrong, must be `aes` '
144 f'or `aes_hmac`, got `{enc_type}` instead')
144 f'or `aes_hmac`, got `{enc_type}` instead')
145
145
146 return decrypted_data
146 return decrypted_data
147
147
148
148
149 def encrypt_data(data, enc_key: bytes):
149 def encrypt_data(data, enc_key: bytes):
150 enc_key = safe_bytes(enc_key)
150 enc_key = safe_bytes(enc_key)
151 enc_value = AESCipher(enc_key, hmac=True).encrypt(safe_bytes(data))
151 enc_value = AESCipher(enc_key, hmac=True).encrypt(safe_bytes(data))
152 return KEY_FORMAT.replace(b'{1}', enc_value)
152 return KEY_FORMAT.replace(b'{1}', enc_value)
@@ -1,232 +1,231 b''
1 # -*- coding: utf-8 -*-
2
1
3 # Copyright (C) 2010-2020 RhodeCode GmbH
2 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
3 #
5 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
8 #
7 #
9 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
11 # GNU General Public License for more details.
13 #
12 #
14 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
15 #
17 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
19
21 import os
20 import os
22 import time
21 import time
23 import sys
22 import sys
24 import datetime
23 import datetime
25 import msgpack
24 import msgpack
26 import logging
25 import logging
27 import traceback
26 import traceback
28 import tempfile
27 import tempfile
29 import glob
28 import glob
30
29
31 log = logging.getLogger(__name__)
30 log = logging.getLogger(__name__)
32
31
33 # NOTE: Any changes should be synced with exc_tracking at vcsserver.lib.exc_tracking
32 # NOTE: Any changes should be synced with exc_tracking at vcsserver.lib.exc_tracking
34 global_prefix = 'rhodecode'
33 global_prefix = 'rhodecode'
35 exc_store_dir_name = 'rc_exception_store_v1'
34 exc_store_dir_name = 'rc_exception_store_v1'
36
35
37
36
38 def exc_serialize(exc_id, tb, exc_type, extra_data=None):
37 def exc_serialize(exc_id, tb, exc_type, extra_data=None):
39
38
40 data = {
39 data = {
41 'version': 'v1',
40 'version': 'v1',
42 'exc_id': exc_id,
41 'exc_id': exc_id,
43 'exc_utc_date': datetime.datetime.utcnow().isoformat(),
42 'exc_utc_date': datetime.datetime.utcnow().isoformat(),
44 'exc_timestamp': repr(time.time()),
43 'exc_timestamp': repr(time.time()),
45 'exc_message': tb,
44 'exc_message': tb,
46 'exc_type': exc_type,
45 'exc_type': exc_type,
47 }
46 }
48 if extra_data:
47 if extra_data:
49 data.update(extra_data)
48 data.update(extra_data)
50 return msgpack.packb(data), data
49 return msgpack.packb(data), data
51
50
52
51
53 def exc_unserialize(tb):
52 def exc_unserialize(tb):
54 return msgpack.unpackb(tb)
53 return msgpack.unpackb(tb)
55
54
56 _exc_store = None
55 _exc_store = None
57
56
58
57
59 def get_exc_store():
58 def get_exc_store():
60 """
59 """
61 Get and create exception store if it's not existing
60 Get and create exception store if it's not existing
62 """
61 """
63 global _exc_store
62 global _exc_store
64 import rhodecode as app
63 import rhodecode as app
65
64
66 if _exc_store is not None:
65 if _exc_store is not None:
67 # quick global cache
66 # quick global cache
68 return _exc_store
67 return _exc_store
69
68
70 exc_store_dir = app.CONFIG.get('exception_tracker.store_path', '') or tempfile.gettempdir()
69 exc_store_dir = app.CONFIG.get('exception_tracker.store_path', '') or tempfile.gettempdir()
71 _exc_store_path = os.path.join(exc_store_dir, exc_store_dir_name)
70 _exc_store_path = os.path.join(exc_store_dir, exc_store_dir_name)
72
71
73 _exc_store_path = os.path.abspath(_exc_store_path)
72 _exc_store_path = os.path.abspath(_exc_store_path)
74 if not os.path.isdir(_exc_store_path):
73 if not os.path.isdir(_exc_store_path):
75 os.makedirs(_exc_store_path)
74 os.makedirs(_exc_store_path)
76 log.debug('Initializing exceptions store at %s', _exc_store_path)
75 log.debug('Initializing exceptions store at %s', _exc_store_path)
77 _exc_store = _exc_store_path
76 _exc_store = _exc_store_path
78
77
79 return _exc_store_path
78 return _exc_store_path
80
79
81
80
82 def _store_exception(exc_id, exc_type_name, exc_traceback, prefix, send_email=None):
81 def _store_exception(exc_id, exc_type_name, exc_traceback, prefix, send_email=None):
83 """
82 """
84 Low level function to store exception in the exception tracker
83 Low level function to store exception in the exception tracker
85 """
84 """
86 from pyramid.threadlocal import get_current_request
85 from pyramid.threadlocal import get_current_request
87 import rhodecode as app
86 import rhodecode as app
88 request = get_current_request()
87 request = get_current_request()
89 extra_data = {}
88 extra_data = {}
90 # NOTE(marcink): store request information into exc_data
89 # NOTE(marcink): store request information into exc_data
91 if request:
90 if request:
92 extra_data['client_address'] = getattr(request, 'client_addr', '')
91 extra_data['client_address'] = getattr(request, 'client_addr', '')
93 extra_data['user_agent'] = getattr(request, 'user_agent', '')
92 extra_data['user_agent'] = getattr(request, 'user_agent', '')
94 extra_data['method'] = getattr(request, 'method', '')
93 extra_data['method'] = getattr(request, 'method', '')
95 extra_data['url'] = getattr(request, 'url', '')
94 extra_data['url'] = getattr(request, 'url', '')
96
95
97 exc_store_path = get_exc_store()
96 exc_store_path = get_exc_store()
98 exc_data, org_data = exc_serialize(exc_id, exc_traceback, exc_type_name, extra_data=extra_data)
97 exc_data, org_data = exc_serialize(exc_id, exc_traceback, exc_type_name, extra_data=extra_data)
99
98
100 exc_pref_id = '{}_{}_{}'.format(exc_id, prefix, org_data['exc_timestamp'])
99 exc_pref_id = '{}_{}_{}'.format(exc_id, prefix, org_data['exc_timestamp'])
101 if not os.path.isdir(exc_store_path):
100 if not os.path.isdir(exc_store_path):
102 os.makedirs(exc_store_path)
101 os.makedirs(exc_store_path)
103 stored_exc_path = os.path.join(exc_store_path, exc_pref_id)
102 stored_exc_path = os.path.join(exc_store_path, exc_pref_id)
104 with open(stored_exc_path, 'wb') as f:
103 with open(stored_exc_path, 'wb') as f:
105 f.write(exc_data)
104 f.write(exc_data)
106 log.debug('Stored generated exception %s as: %s', exc_id, stored_exc_path)
105 log.debug('Stored generated exception %s as: %s', exc_id, stored_exc_path)
107
106
108 if send_email is None:
107 if send_email is None:
109 # NOTE(marcink): read app config unless we specify explicitly
108 # NOTE(marcink): read app config unless we specify explicitly
110 send_email = app.CONFIG.get('exception_tracker.send_email', False)
109 send_email = app.CONFIG.get('exception_tracker.send_email', False)
111
110
112 mail_server = app.CONFIG.get('smtp_server') or None
111 mail_server = app.CONFIG.get('smtp_server') or None
113 send_email = send_email and mail_server
112 send_email = send_email and mail_server
114 if send_email and request:
113 if send_email and request:
115 try:
114 try:
116 send_exc_email(request, exc_id, exc_type_name)
115 send_exc_email(request, exc_id, exc_type_name)
117 except Exception:
116 except Exception:
118 log.exception('Failed to send exception email')
117 log.exception('Failed to send exception email')
119 exc_info = sys.exc_info()
118 exc_info = sys.exc_info()
120 store_exception(id(exc_info), exc_info, send_email=False)
119 store_exception(id(exc_info), exc_info, send_email=False)
121
120
122
121
123 def send_exc_email(request, exc_id, exc_type_name):
122 def send_exc_email(request, exc_id, exc_type_name):
124 import rhodecode as app
123 import rhodecode as app
125 from rhodecode.apps._base import TemplateArgs
124 from rhodecode.apps._base import TemplateArgs
126 from rhodecode.lib.utils2 import aslist
125 from rhodecode.lib.utils2 import aslist
127 from rhodecode.lib.celerylib import run_task, tasks
126 from rhodecode.lib.celerylib import run_task, tasks
128 from rhodecode.lib.base import attach_context_attributes
127 from rhodecode.lib.base import attach_context_attributes
129 from rhodecode.model.notification import EmailNotificationModel
128 from rhodecode.model.notification import EmailNotificationModel
130
129
131 recipients = aslist(app.CONFIG.get('exception_tracker.send_email_recipients', ''))
130 recipients = aslist(app.CONFIG.get('exception_tracker.send_email_recipients', ''))
132 log.debug('Sending Email exception to: `%s`', recipients or 'all super admins')
131 log.debug('Sending Email exception to: `%s`', recipients or 'all super admins')
133
132
134 # NOTE(marcink): needed for email template rendering
133 # NOTE(marcink): needed for email template rendering
135 user_id = None
134 user_id = None
136 if hasattr(request, 'user'):
135 if hasattr(request, 'user'):
137 user_id = request.user.user_id
136 user_id = request.user.user_id
138 attach_context_attributes(TemplateArgs(), request, user_id=user_id, is_api=True)
137 attach_context_attributes(TemplateArgs(), request, user_id=user_id, is_api=True)
139
138
140 email_kwargs = {
139 email_kwargs = {
141 'email_prefix': app.CONFIG.get('exception_tracker.email_prefix', '') or '[RHODECODE ERROR]',
140 'email_prefix': app.CONFIG.get('exception_tracker.email_prefix', '') or '[RHODECODE ERROR]',
142 'exc_url': request.route_url('admin_settings_exception_tracker_show', exception_id=exc_id),
141 'exc_url': request.route_url('admin_settings_exception_tracker_show', exception_id=exc_id),
143 'exc_id': exc_id,
142 'exc_id': exc_id,
144 'exc_type_name': exc_type_name,
143 'exc_type_name': exc_type_name,
145 'exc_traceback': read_exception(exc_id, prefix=None),
144 'exc_traceback': read_exception(exc_id, prefix=None),
146 }
145 }
147
146
148 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
147 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
149 EmailNotificationModel.TYPE_EMAIL_EXCEPTION, **email_kwargs)
148 EmailNotificationModel.TYPE_EMAIL_EXCEPTION, **email_kwargs)
150
149
151 run_task(tasks.send_email, recipients, subject,
150 run_task(tasks.send_email, recipients, subject,
152 email_body_plaintext, email_body)
151 email_body_plaintext, email_body)
153
152
154
153
155 def _prepare_exception(exc_info):
154 def _prepare_exception(exc_info):
156 exc_type, exc_value, exc_traceback = exc_info
155 exc_type, exc_value, exc_traceback = exc_info
157 exc_type_name = exc_type.__name__
156 exc_type_name = exc_type.__name__
158
157
159 tb = ''.join(traceback.format_exception(
158 tb = ''.join(traceback.format_exception(
160 exc_type, exc_value, exc_traceback, None))
159 exc_type, exc_value, exc_traceback, None))
161
160
162 return exc_type_name, tb
161 return exc_type_name, tb
163
162
164
163
165 def store_exception(exc_id, exc_info, prefix=global_prefix, send_email=None):
164 def store_exception(exc_id, exc_info, prefix=global_prefix, send_email=None):
166 """
165 """
167 Example usage::
166 Example usage::
168
167
169 exc_info = sys.exc_info()
168 exc_info = sys.exc_info()
170 store_exception(id(exc_info), exc_info)
169 store_exception(id(exc_info), exc_info)
171 """
170 """
172
171
173 try:
172 try:
174 exc_type_name, exc_traceback = _prepare_exception(exc_info)
173 exc_type_name, exc_traceback = _prepare_exception(exc_info)
175 _store_exception(exc_id=exc_id, exc_type_name=exc_type_name,
174 _store_exception(exc_id=exc_id, exc_type_name=exc_type_name,
176 exc_traceback=exc_traceback, prefix=prefix, send_email=send_email)
175 exc_traceback=exc_traceback, prefix=prefix, send_email=send_email)
177 return exc_id, exc_type_name
176 return exc_id, exc_type_name
178 except Exception:
177 except Exception:
179 log.exception('Failed to store exception `%s` information', exc_id)
178 log.exception('Failed to store exception `%s` information', exc_id)
180 # there's no way this can fail, it will crash server badly if it does.
179 # there's no way this can fail, it will crash server badly if it does.
181 pass
180 pass
182
181
183
182
184 def _find_exc_file(exc_id, prefix=global_prefix):
183 def _find_exc_file(exc_id, prefix=global_prefix):
185 exc_store_path = get_exc_store()
184 exc_store_path = get_exc_store()
186 if prefix:
185 if prefix:
187 exc_id = '{}_{}'.format(exc_id, prefix)
186 exc_id = '{}_{}'.format(exc_id, prefix)
188 else:
187 else:
189 # search without a prefix
188 # search without a prefix
190 exc_id = '{}'.format(exc_id)
189 exc_id = '{}'.format(exc_id)
191
190
192 found_exc_id = None
191 found_exc_id = None
193 matches = glob.glob(os.path.join(exc_store_path, exc_id) + '*')
192 matches = glob.glob(os.path.join(exc_store_path, exc_id) + '*')
194 if matches:
193 if matches:
195 found_exc_id = matches[0]
194 found_exc_id = matches[0]
196
195
197 return found_exc_id
196 return found_exc_id
198
197
199
198
200 def _read_exception(exc_id, prefix):
199 def _read_exception(exc_id, prefix):
201 exc_id_file_path = _find_exc_file(exc_id=exc_id, prefix=prefix)
200 exc_id_file_path = _find_exc_file(exc_id=exc_id, prefix=prefix)
202 if exc_id_file_path:
201 if exc_id_file_path:
203 with open(exc_id_file_path, 'rb') as f:
202 with open(exc_id_file_path, 'rb') as f:
204 return exc_unserialize(f.read())
203 return exc_unserialize(f.read())
205 else:
204 else:
206 log.debug('Exception File `%s` not found', exc_id_file_path)
205 log.debug('Exception File `%s` not found', exc_id_file_path)
207 return None
206 return None
208
207
209
208
210 def read_exception(exc_id, prefix=global_prefix):
209 def read_exception(exc_id, prefix=global_prefix):
211 try:
210 try:
212 return _read_exception(exc_id=exc_id, prefix=prefix)
211 return _read_exception(exc_id=exc_id, prefix=prefix)
213 except Exception:
212 except Exception:
214 log.exception('Failed to read exception `%s` information', exc_id)
213 log.exception('Failed to read exception `%s` information', exc_id)
215 # there's no way this can fail, it will crash server badly if it does.
214 # there's no way this can fail, it will crash server badly if it does.
216 return None
215 return None
217
216
218
217
219 def delete_exception(exc_id, prefix=global_prefix):
218 def delete_exception(exc_id, prefix=global_prefix):
220 try:
219 try:
221 exc_id_file_path = _find_exc_file(exc_id, prefix=prefix)
220 exc_id_file_path = _find_exc_file(exc_id, prefix=prefix)
222 if exc_id_file_path:
221 if exc_id_file_path:
223 os.remove(exc_id_file_path)
222 os.remove(exc_id_file_path)
224
223
225 except Exception:
224 except Exception:
226 log.exception('Failed to remove exception `%s` information', exc_id)
225 log.exception('Failed to remove exception `%s` information', exc_id)
227 # there's no way this can fail, it will crash server badly if it does.
226 # there's no way this can fail, it will crash server badly if it does.
228 pass
227 pass
229
228
230
229
231 def generate_id():
230 def generate_id():
232 return id(object())
231 return id(object())
@@ -1,183 +1,182 b''
1 # -*- coding: utf-8 -*-
2
1
3 # Copyright (C) 2010-2020 RhodeCode GmbH
2 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
3 #
5 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
8 #
7 #
9 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
11 # GNU General Public License for more details.
13 #
12 #
14 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
15 #
17 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
19
21 """
20 """
22 Set of custom exceptions used in RhodeCode
21 Set of custom exceptions used in RhodeCode
23 """
22 """
24
23
25 from webob.exc import HTTPClientError
24 from webob.exc import HTTPClientError
26 from pyramid.httpexceptions import HTTPBadGateway
25 from pyramid.httpexceptions import HTTPBadGateway
27
26
28
27
29 class LdapUsernameError(Exception):
28 class LdapUsernameError(Exception):
30 pass
29 pass
31
30
32
31
33 class LdapPasswordError(Exception):
32 class LdapPasswordError(Exception):
34 pass
33 pass
35
34
36
35
37 class LdapConnectionError(Exception):
36 class LdapConnectionError(Exception):
38 pass
37 pass
39
38
40
39
41 class LdapImportError(Exception):
40 class LdapImportError(Exception):
42 pass
41 pass
43
42
44
43
45 class DefaultUserException(Exception):
44 class DefaultUserException(Exception):
46 pass
45 pass
47
46
48
47
49 class UserOwnsReposException(Exception):
48 class UserOwnsReposException(Exception):
50 pass
49 pass
51
50
52
51
53 class UserOwnsRepoGroupsException(Exception):
52 class UserOwnsRepoGroupsException(Exception):
54 pass
53 pass
55
54
56
55
57 class UserOwnsUserGroupsException(Exception):
56 class UserOwnsUserGroupsException(Exception):
58 pass
57 pass
59
58
60
59
61 class UserOwnsPullRequestsException(Exception):
60 class UserOwnsPullRequestsException(Exception):
62 pass
61 pass
63
62
64
63
65 class UserOwnsArtifactsException(Exception):
64 class UserOwnsArtifactsException(Exception):
66 pass
65 pass
67
66
68
67
69 class UserGroupAssignedException(Exception):
68 class UserGroupAssignedException(Exception):
70 pass
69 pass
71
70
72
71
73 class StatusChangeOnClosedPullRequestError(Exception):
72 class StatusChangeOnClosedPullRequestError(Exception):
74 pass
73 pass
75
74
76
75
77 class AttachedForksError(Exception):
76 class AttachedForksError(Exception):
78 pass
77 pass
79
78
80
79
81 class AttachedPullRequestsError(Exception):
80 class AttachedPullRequestsError(Exception):
82 pass
81 pass
83
82
84
83
85 class RepoGroupAssignmentError(Exception):
84 class RepoGroupAssignmentError(Exception):
86 pass
85 pass
87
86
88
87
89 class NonRelativePathError(Exception):
88 class NonRelativePathError(Exception):
90 pass
89 pass
91
90
92
91
93 class HTTPRequirementError(HTTPClientError):
92 class HTTPRequirementError(HTTPClientError):
94 title = explanation = 'Repository Requirement Missing'
93 title = explanation = 'Repository Requirement Missing'
95 reason = None
94 reason = None
96
95
97 def __init__(self, message, *args, **kwargs):
96 def __init__(self, message, *args, **kwargs):
98 self.title = self.explanation = message
97 self.title = self.explanation = message
99 super(HTTPRequirementError, self).__init__(*args, **kwargs)
98 super(HTTPRequirementError, self).__init__(*args, **kwargs)
100 self.args = (message, )
99 self.args = (message, )
101
100
102
101
103 class HTTPLockedRC(HTTPClientError):
102 class HTTPLockedRC(HTTPClientError):
104 """
103 """
105 Special Exception For locked Repos in RhodeCode, the return code can
104 Special Exception For locked Repos in RhodeCode, the return code can
106 be overwritten by _code keyword argument passed into constructors
105 be overwritten by _code keyword argument passed into constructors
107 """
106 """
108 code = 423
107 code = 423
109 title = explanation = 'Repository Locked'
108 title = explanation = 'Repository Locked'
110 reason = None
109 reason = None
111
110
112 def __init__(self, message, *args, **kwargs):
111 def __init__(self, message, *args, **kwargs):
113 from rhodecode import CONFIG
112 from rhodecode import CONFIG
114 from rhodecode.lib.utils2 import safe_int
113 from rhodecode.lib.utils2 import safe_int
115 _code = CONFIG.get('lock_ret_code')
114 _code = CONFIG.get('lock_ret_code')
116 self.code = safe_int(_code, self.code)
115 self.code = safe_int(_code, self.code)
117 self.title = self.explanation = message
116 self.title = self.explanation = message
118 super(HTTPLockedRC, self).__init__(*args, **kwargs)
117 super(HTTPLockedRC, self).__init__(*args, **kwargs)
119 self.args = (message, )
118 self.args = (message, )
120
119
121
120
122 class HTTPBranchProtected(HTTPClientError):
121 class HTTPBranchProtected(HTTPClientError):
123 """
122 """
124 Special Exception For Indicating that branch is protected in RhodeCode, the
123 Special Exception For Indicating that branch is protected in RhodeCode, the
125 return code can be overwritten by _code keyword argument passed into constructors
124 return code can be overwritten by _code keyword argument passed into constructors
126 """
125 """
127 code = 403
126 code = 403
128 title = explanation = 'Branch Protected'
127 title = explanation = 'Branch Protected'
129 reason = None
128 reason = None
130
129
131 def __init__(self, message, *args, **kwargs):
130 def __init__(self, message, *args, **kwargs):
132 self.title = self.explanation = message
131 self.title = self.explanation = message
133 super(HTTPBranchProtected, self).__init__(*args, **kwargs)
132 super(HTTPBranchProtected, self).__init__(*args, **kwargs)
134 self.args = (message, )
133 self.args = (message, )
135
134
136
135
137 class IMCCommitError(Exception):
136 class IMCCommitError(Exception):
138 pass
137 pass
139
138
140
139
141 class UserCreationError(Exception):
140 class UserCreationError(Exception):
142 pass
141 pass
143
142
144
143
145 class NotAllowedToCreateUserError(Exception):
144 class NotAllowedToCreateUserError(Exception):
146 pass
145 pass
147
146
148
147
149 class RepositoryCreationError(Exception):
148 class RepositoryCreationError(Exception):
150 pass
149 pass
151
150
152
151
153 class VCSServerUnavailable(HTTPBadGateway):
152 class VCSServerUnavailable(HTTPBadGateway):
154 """ HTTP Exception class for VCS Server errors """
153 """ HTTP Exception class for VCS Server errors """
155 code = 502
154 code = 502
156 title = 'VCS Server Error'
155 title = 'VCS Server Error'
157 causes = [
156 causes = [
158 'VCS Server is not running',
157 'VCS Server is not running',
159 'Incorrect vcs.server=host:port',
158 'Incorrect vcs.server=host:port',
160 'Incorrect vcs.server.protocol',
159 'Incorrect vcs.server.protocol',
161 ]
160 ]
162
161
163 def __init__(self, message=''):
162 def __init__(self, message=''):
164 self.explanation = 'Could not connect to VCS Server'
163 self.explanation = 'Could not connect to VCS Server'
165 if message:
164 if message:
166 self.explanation += ': ' + message
165 self.explanation += ': ' + message
167 super(VCSServerUnavailable, self).__init__()
166 super(VCSServerUnavailable, self).__init__()
168
167
169
168
170 class ArtifactMetadataDuplicate(ValueError):
169 class ArtifactMetadataDuplicate(ValueError):
171
170
172 def __init__(self, *args, **kwargs):
171 def __init__(self, *args, **kwargs):
173 self.err_section = kwargs.pop('err_section', None)
172 self.err_section = kwargs.pop('err_section', None)
174 self.err_key = kwargs.pop('err_key', None)
173 self.err_key = kwargs.pop('err_key', None)
175 super(ArtifactMetadataDuplicate, self).__init__(*args, **kwargs)
174 super(ArtifactMetadataDuplicate, self).__init__(*args, **kwargs)
176
175
177
176
178 class ArtifactMetadataBadValueType(ValueError):
177 class ArtifactMetadataBadValueType(ValueError):
179 pass
178 pass
180
179
181
180
182 class CommentVersionMismatch(ValueError):
181 class CommentVersionMismatch(ValueError):
183 pass
182 pass
@@ -1,41 +1,40 b''
1 # -*- coding: utf-8 -*-
2
1
3 # Copyright (C) 2010-2020 RhodeCode GmbH
2 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
3 #
5 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
8 #
7 #
9 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
11 # GNU General Public License for more details.
13 #
12 #
14 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
15 #
17 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
19
21 from rhodecode.lib import ext_json
20 from rhodecode.lib import ext_json
22
21
23
22
24 def pyramid_ext_json(info):
23 def pyramid_ext_json(info):
25 """
24 """
26 Custom json renderer for pyramid to use our ext_json lib
25 Custom json renderer for pyramid to use our ext_json lib
27 """
26 """
28 def _render(value, system):
27 def _render(value, system):
29 request = system.get('request')
28 request = system.get('request')
30 indent = None
29 indent = None
31 if request is not None:
30 if request is not None:
32 response = request.response
31 response = request.response
33 ct = response.content_type
32 ct = response.content_type
34 if ct == response.default_content_type:
33 if ct == response.default_content_type:
35 response.content_type = 'application/json'
34 response.content_type = 'application/json'
36 indent = getattr(request, 'ext_json_indent', None)
35 indent = getattr(request, 'ext_json_indent', None)
37 if indent:
36 if indent:
38 return ext_json.formatted_json(value)
37 return ext_json.formatted_json(value)
39 return ext_json.json.dumps(value)
38 return ext_json.json.dumps(value)
40
39
41 return _render
40 return _render
@@ -1,21 +1,21 b''
1 # -*- coding: utf-8 -*-
1
2
2
3 # Copyright (C) 2016-2020 RhodeCode GmbH
3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 from .feedgenerator import Rss201rev2Feed, Atom1Feed No newline at end of file
21 from .feedgenerator import Rss201rev2Feed, Atom1Feed
@@ -1,142 +1,141 b''
1 # -*- coding: utf-8 -*-
2
1
3 # Copyright (C) 2010-2020 RhodeCode GmbH
2 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
3 #
5 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
8 #
7 #
9 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
11 # GNU General Public License for more details.
13 #
12 #
14 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
15 #
17 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
19
21 """
20 """
22 Modified mercurial DAG graph functions that re-uses VCS structure
21 Modified mercurial DAG graph functions that re-uses VCS structure
23
22
24 It allows to have a shared codebase for DAG generation for hg, git, svn repos
23 It allows to have a shared codebase for DAG generation for hg, git, svn repos
25 """
24 """
26
25
27 nullrev = -1
26 nullrev = -1
28
27
29
28
30 def grandparent(parent_idx_func, lowest_idx, roots, head):
29 def grandparent(parent_idx_func, lowest_idx, roots, head):
31 """
30 """
32 Return all ancestors of head in roots which commit is
31 Return all ancestors of head in roots which commit is
33 greater or equal to lowest_idx.
32 greater or equal to lowest_idx.
34 """
33 """
35 pending = set([head])
34 pending = set([head])
36 seen = set()
35 seen = set()
37 kept = set()
36 kept = set()
38 llowestrev = max(nullrev, lowest_idx)
37 llowestrev = max(nullrev, lowest_idx)
39 while pending:
38 while pending:
40 r = pending.pop()
39 r = pending.pop()
41 if r >= llowestrev and r not in seen:
40 if r >= llowestrev and r not in seen:
42 if r in roots:
41 if r in roots:
43 kept.add(r)
42 kept.add(r)
44 else:
43 else:
45 pending.update(parent_idx_func(r))
44 pending.update(parent_idx_func(r))
46 seen.add(r)
45 seen.add(r)
47 return sorted(kept)
46 return sorted(kept)
48
47
49
48
50 def _dagwalker(repo, commits):
49 def _dagwalker(repo, commits):
51 if not commits:
50 if not commits:
52 return
51 return
53
52
54 def get_parent_indexes(idx):
53 def get_parent_indexes(idx):
55 return [commit.idx for commit in repo[idx].parents]
54 return [commit.idx for commit in repo[idx].parents]
56
55
57 indexes = [commit['idx'] for commit in commits]
56 indexes = [commit['idx'] for commit in commits]
58 lowest_idx = min(indexes)
57 lowest_idx = min(indexes)
59 known_indexes = set(indexes)
58 known_indexes = set(indexes)
60
59
61 grandparnet_cache = {}
60 grandparnet_cache = {}
62 for commit in commits:
61 for commit in commits:
63 parents = sorted(set([p['idx'] for p in commit['parents']
62 parents = sorted(set([p['idx'] for p in commit['parents']
64 if p['idx'] in known_indexes]))
63 if p['idx'] in known_indexes]))
65 mpars = [p['idx'] for p in commit['parents'] if
64 mpars = [p['idx'] for p in commit['parents'] if
66 p['idx'] != nullrev and p['idx'] not in parents]
65 p['idx'] != nullrev and p['idx'] not in parents]
67 for mpar in mpars:
66 for mpar in mpars:
68 gp = grandparnet_cache.get(mpar)
67 gp = grandparnet_cache.get(mpar)
69 if gp is None:
68 if gp is None:
70 gp = grandparnet_cache[mpar] = grandparent(
69 gp = grandparnet_cache[mpar] = grandparent(
71 get_parent_indexes, lowest_idx, indexes, mpar)
70 get_parent_indexes, lowest_idx, indexes, mpar)
72 if not gp:
71 if not gp:
73 parents.append(mpar)
72 parents.append(mpar)
74 else:
73 else:
75 parents.extend(g for g in gp if g not in parents)
74 parents.extend(g for g in gp if g not in parents)
76
75
77 yield (commit['raw_id'], commit['idx'], parents, commit['branch'])
76 yield (commit['raw_id'], commit['idx'], parents, commit['branch'])
78
77
79
78
80 def _colored(dag):
79 def _colored(dag):
81 """annotates a DAG with colored edge information
80 """annotates a DAG with colored edge information
82
81
83 For each DAG node this function emits tuples::
82 For each DAG node this function emits tuples::
84
83
85 ((col, color), [(col, nextcol, color)])
84 ((col, color), [(col, nextcol, color)])
86
85
87 with the following new elements:
86 with the following new elements:
88
87
89 - Tuple (col, color) with column and color index for the current node
88 - Tuple (col, color) with column and color index for the current node
90 - A list of tuples indicating the edges between the current node and its
89 - A list of tuples indicating the edges between the current node and its
91 parents.
90 parents.
92 """
91 """
93 seen = []
92 seen = []
94 colors = {}
93 colors = {}
95 newcolor = 1
94 newcolor = 1
96
95
97 for commit_id, commit_idx, parents, branch in dag:
96 for commit_id, commit_idx, parents, branch in dag:
98
97
99 # Compute seen and next_
98 # Compute seen and next_
100 if commit_idx not in seen:
99 if commit_idx not in seen:
101 seen.append(commit_idx) # new head
100 seen.append(commit_idx) # new head
102 colors[commit_idx] = newcolor
101 colors[commit_idx] = newcolor
103 newcolor += 1
102 newcolor += 1
104
103
105 col = seen.index(commit_idx)
104 col = seen.index(commit_idx)
106 color = colors.pop(commit_idx)
105 color = colors.pop(commit_idx)
107 next_ = seen[:]
106 next_ = seen[:]
108
107
109 # Add parents to next_
108 # Add parents to next_
110 addparents = [p for p in parents if p not in next_]
109 addparents = [p for p in parents if p not in next_]
111 next_[col:col + 1] = addparents
110 next_[col:col + 1] = addparents
112
111
113 # Set colors for the parents
112 # Set colors for the parents
114 for i, p in enumerate(addparents):
113 for i, p in enumerate(addparents):
115 if i == 0:
114 if i == 0:
116 colors[p] = color
115 colors[p] = color
117 else:
116 else:
118 colors[p] = newcolor
117 colors[p] = newcolor
119 newcolor += 1
118 newcolor += 1
120
119
121 # Add edges to the graph
120 # Add edges to the graph
122 edges = []
121 edges = []
123 for ecol, eid in enumerate(seen):
122 for ecol, eid in enumerate(seen):
124 if eid in next_:
123 if eid in next_:
125 edges.append((ecol, next_.index(eid), colors[eid]))
124 edges.append((ecol, next_.index(eid), colors[eid]))
126 elif eid == commit_idx:
125 elif eid == commit_idx:
127 total_parents = len(parents)
126 total_parents = len(parents)
128 edges.extend([
127 edges.extend([
129 (ecol, next_.index(p),
128 (ecol, next_.index(p),
130 _get_edge_color(p, total_parents, color, colors))
129 _get_edge_color(p, total_parents, color, colors))
131 for p in parents])
130 for p in parents])
132
131
133 # Yield and move on
132 # Yield and move on
134 yield (commit_id, (col, color), edges, branch)
133 yield (commit_id, (col, color), edges, branch)
135 seen = next_
134 seen = next_
136
135
137
136
138 def _get_edge_color(parent, total_parents, color, colors):
137 def _get_edge_color(parent, total_parents, color, colors):
139 if total_parents <= 1:
138 if total_parents <= 1:
140 return color
139 return color
141
140
142 return colors.get(parent, color)
141 return colors.get(parent, color)
@@ -1,56 +1,56 b''
1 # -*- coding: utf-8 -*-
1
2
2
3 # Copyright (C) 2011-2020 RhodeCode GmbH
3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import hashlib
21 import hashlib
22 from rhodecode.lib.str_utils import safe_bytes, safe_str
22 from rhodecode.lib.str_utils import safe_bytes, safe_str
23
23
24
24
25 def md5(s):
25 def md5(s):
26 return hashlib.md5(s).hexdigest()
26 return hashlib.md5(s).hexdigest()
27
27
28
28
29 def md5_safe(s, return_type=''):
29 def md5_safe(s, return_type=''):
30
30
31 val = md5(safe_bytes(s))
31 val = md5(safe_bytes(s))
32 if return_type == 'str':
32 if return_type == 'str':
33 val = safe_str(val)
33 val = safe_str(val)
34 return val
34 return val
35
35
36
36
37 def sha1(s):
37 def sha1(s):
38 return hashlib.sha1(s).hexdigest()
38 return hashlib.sha1(s).hexdigest()
39
39
40
40
41 def sha1_safe(s, return_type=''):
41 def sha1_safe(s, return_type=''):
42 val = sha1(safe_bytes(s))
42 val = sha1(safe_bytes(s))
43 if return_type == 'str':
43 if return_type == 'str':
44 val = safe_str(val)
44 val = safe_str(val)
45 return val
45 return val
46
46
47
47
48 def sha256(s):
48 def sha256(s):
49 return hashlib.sha256(s).hexdigest()
49 return hashlib.sha256(s).hexdigest()
50
50
51
51
52 def sha256_safe(s, return_type=''):
52 def sha256_safe(s, return_type=''):
53 val = sha256(safe_bytes(s))
53 val = sha256(safe_bytes(s))
54 if return_type == 'str':
54 if return_type == 'str':
55 val = safe_str(val)
55 val = safe_str(val)
56 return val
56 return val
@@ -1,2165 +1,2164 b''
1 # -*- coding: utf-8 -*-
2
1
3 # Copyright (C) 2010-2020 RhodeCode GmbH
2 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
3 #
5 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
8 #
7 #
9 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
11 # GNU General Public License for more details.
13 #
12 #
14 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
15 #
17 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
19
21 """
20 """
22 Helper functions
21 Helper functions
23
22
24 Consists of functions to typically be used within templates, but also
23 Consists of functions to typically be used within templates, but also
25 available to Controllers. This module is available to both as 'h'.
24 available to Controllers. This module is available to both as 'h'.
26 """
25 """
27 import base64
26 import base64
28 import collections
27 import collections
29
28
30 import os
29 import os
31 import random
30 import random
32 import hashlib
31 import hashlib
33 from io import StringIO
32 from io import StringIO
34 import textwrap
33 import textwrap
35 import urllib.request, urllib.parse, urllib.error
34 import urllib.request, urllib.parse, urllib.error
36 import math
35 import math
37 import logging
36 import logging
38 import re
37 import re
39 import time
38 import time
40 import string
39 import string
41 import hashlib
40 import hashlib
42 import regex
41 import regex
43 from collections import OrderedDict
42 from collections import OrderedDict
44
43
45 import pygments
44 import pygments
46 import itertools
45 import itertools
47 import fnmatch
46 import fnmatch
48 import bleach
47 import bleach
49
48
50 from datetime import datetime
49 from datetime import datetime
51 from functools import partial
50 from functools import partial
52 from pygments.formatters.html import HtmlFormatter
51 from pygments.formatters.html import HtmlFormatter
53 from pygments.lexers import (
52 from pygments.lexers import (
54 get_lexer_by_name, get_lexer_for_filename, get_lexer_for_mimetype)
53 get_lexer_by_name, get_lexer_for_filename, get_lexer_for_mimetype)
55
54
56 from pyramid.threadlocal import get_current_request
55 from pyramid.threadlocal import get_current_request
57 from tempita import looper
56 from tempita import looper
58 from webhelpers2.html import literal, HTML, escape
57 from webhelpers2.html import literal, HTML, escape
59 from webhelpers2.html._autolink import _auto_link_urls
58 from webhelpers2.html._autolink import _auto_link_urls
60 from webhelpers2.html.tools import (
59 from webhelpers2.html.tools import (
61 button_to, highlight, js_obfuscate, strip_links, strip_tags)
60 button_to, highlight, js_obfuscate, strip_links, strip_tags)
62
61
63 from webhelpers2.text import (
62 from webhelpers2.text import (
64 chop_at, collapse, convert_accented_entities,
63 chop_at, collapse, convert_accented_entities,
65 convert_misc_entities, lchop, plural, rchop, remove_formatting,
64 convert_misc_entities, lchop, plural, rchop, remove_formatting,
66 replace_whitespace, urlify, truncate, wrap_paragraphs)
65 replace_whitespace, urlify, truncate, wrap_paragraphs)
67 from webhelpers2.date import time_ago_in_words
66 from webhelpers2.date import time_ago_in_words
68
67
69 from webhelpers2.html.tags import (
68 from webhelpers2.html.tags import (
70 _input, NotGiven, _make_safe_id_component as safeid,
69 _input, NotGiven, _make_safe_id_component as safeid,
71 form as insecure_form,
70 form as insecure_form,
72 auto_discovery_link, checkbox, end_form, file,
71 auto_discovery_link, checkbox, end_form, file,
73 hidden, image, javascript_link, link_to, link_to_if, link_to_unless, ol,
72 hidden, image, javascript_link, link_to, link_to_if, link_to_unless, ol,
74 select as raw_select, stylesheet_link, submit, text, password, textarea,
73 select as raw_select, stylesheet_link, submit, text, password, textarea,
75 ul, radio, Options)
74 ul, radio, Options)
76
75
77 from webhelpers2.number import format_byte_size
76 from webhelpers2.number import format_byte_size
78
77
79 from rhodecode.lib.action_parser import action_parser
78 from rhodecode.lib.action_parser import action_parser
80 from rhodecode.lib.pagination import Page, RepoPage, SqlPage
79 from rhodecode.lib.pagination import Page, RepoPage, SqlPage
81 from rhodecode.lib import ext_json
80 from rhodecode.lib import ext_json
82 from rhodecode.lib.ext_json import json
81 from rhodecode.lib.ext_json import json
83 from rhodecode.lib.str_utils import safe_bytes
82 from rhodecode.lib.str_utils import safe_bytes
84 from rhodecode.lib.utils import repo_name_slug, get_custom_lexer
83 from rhodecode.lib.utils import repo_name_slug, get_custom_lexer
85 from rhodecode.lib.utils2 import (
84 from rhodecode.lib.utils2 import (
86 str2bool, safe_unicode, safe_str,
85 str2bool, safe_unicode, safe_str,
87 get_commit_safe, datetime_to_time, time_to_datetime, time_to_utcdatetime,
86 get_commit_safe, datetime_to_time, time_to_datetime, time_to_utcdatetime,
88 AttributeDict, safe_int, md5, md5_safe, get_host_info)
87 AttributeDict, safe_int, md5, md5_safe, get_host_info)
89 from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links
88 from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links
90 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
89 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
91 from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyCommit
90 from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyCommit
92 from rhodecode.lib.vcs.conf.settings import ARCHIVE_SPECS
91 from rhodecode.lib.vcs.conf.settings import ARCHIVE_SPECS
93 from rhodecode.lib.index.search_utils import get_matching_line_offsets
92 from rhodecode.lib.index.search_utils import get_matching_line_offsets
94 from rhodecode.config.conf import DATE_FORMAT, DATETIME_FORMAT
93 from rhodecode.config.conf import DATE_FORMAT, DATETIME_FORMAT
95 from rhodecode.model.changeset_status import ChangesetStatusModel
94 from rhodecode.model.changeset_status import ChangesetStatusModel
96 from rhodecode.model.db import Permission, User, Repository, UserApiKeys, FileStore
95 from rhodecode.model.db import Permission, User, Repository, UserApiKeys, FileStore
97 from rhodecode.model.repo_group import RepoGroupModel
96 from rhodecode.model.repo_group import RepoGroupModel
98 from rhodecode.model.settings import IssueTrackerSettingsModel
97 from rhodecode.model.settings import IssueTrackerSettingsModel
99
98
100
99
101 log = logging.getLogger(__name__)
100 log = logging.getLogger(__name__)
102
101
103
102
104 DEFAULT_USER = User.DEFAULT_USER
103 DEFAULT_USER = User.DEFAULT_USER
105 DEFAULT_USER_EMAIL = User.DEFAULT_USER_EMAIL
104 DEFAULT_USER_EMAIL = User.DEFAULT_USER_EMAIL
106
105
107
106
def asset(path, ver=None, **kwargs):
    """
    Helper to generate a static asset file path for rhodecode assets

    eg. h.asset('images/image.png', ver='3923')

    :param path: path of asset
    :param ver: optional version query param to append as ?ver=
    :param kwargs: extra query parameters to append to the asset url
    """
    request = get_current_request()
    query = {}
    query.update(kwargs)
    if ver:
        # merge the version into the query instead of replacing the whole
        # dict — the old `query = {'ver': ver}` silently dropped any extra
        # kwargs whenever a version was given
        query['ver'] = ver
    return request.static_path(
        'rhodecode:public/{}'.format(path), _query=query)
124
123
125
124
# translation table mapping HTML-significant characters to their entities
default_html_escape_table = {
    ord('&'): u'&amp;',
    ord('<'): u'&lt;',
    ord('>'): u'&gt;',
    ord('"'): u'&quot;',
    ord("'"): u'&#39;',
}


def html_escape(text, html_escape_table=default_html_escape_table):
    """Produce entities within text."""
    # str.translate does the whole substitution in one C-level pass
    return text.translate(html_escape_table)
138
137
139
138
def str_json(*args, **kwargs):
    """Forward to :func:`rhodecode.lib.ext_json.str_json` (JSON as str)."""
    return ext_json.str_json(*args, **kwargs)
142
141
143
142
def formatted_str_json(*args, **kwargs):
    """Forward to :func:`rhodecode.lib.ext_json.formatted_str_json` (pretty JSON as str)."""
    return ext_json.formatted_str_json(*args, **kwargs)
146
145
147
146
def chop_at_smart(s, sub, inclusive=False, suffix_if_chopped=None):
    """
    Truncate string ``s`` at the first occurrence of ``sub``.

    If ``inclusive`` is true, truncate just after ``sub`` rather than at it.
    A non-empty ``suffix_if_chopped`` is appended only when non-whitespace
    text was actually removed.
    """
    marker = s.find(sub)
    if marker == -1:
        # substring not present -> nothing to chop
        return s

    cut = marker + len(sub) if inclusive else marker
    head = s[:cut]
    remainder = s[cut:].strip()

    if remainder and suffix_if_chopped:
        head += suffix_if_chopped

    return head
169
168
170
169
def shorter(text, size=20, prefix=False):
    """
    Trim *text* to at most *size* characters, marking the removed part
    with ``...``. With ``prefix`` set, the front is dropped instead of
    the tail.
    """
    marker = '...'
    if len(text) <= size:
        return text

    keep = size - len(marker)
    if prefix:
        # shorten in front, keep the tail
        return marker + text[-keep:]
    return text[:keep] + marker
180
179
181
180
def reset(name, value=None, id=NotGiven, type="reset", **attrs):
    """
    Render an HTML ``<input type="reset">`` button.

    :param name: input ``name`` attribute
    :param value: optional button label/value
    :param id: element id (``NotGiven`` lets webhelpers2 derive one)
    :param type: input type; parameterized for symmetry with the other
        webhelpers2-style input helpers
    :param attrs: extra HTML attributes passed through to the tag
    """
    return _input(type, name, value, id, attrs)
187
186
188
187
def select(name, selected_values, options, id=NotGiven, **attrs):
    """
    Render a ``<select>`` tag via webhelpers2's ``select``.

    Accepts either a ready ``Options`` object or the legacy list/tuple
    form of ``(value, label)`` pairs and bare strings, where a value that
    is itself a list/tuple denotes an option group.
    """
    def _unpack(item):
        # normalize a single legacy entry into a (value, label) pair
        if isinstance(item, tuple) and len(item) == 2:
            return item
        if isinstance(item, str):
            return item, item
        raise ValueError('invalid select option type %r' % type(item))

    if isinstance(options, (list, tuple)):
        normalized = Options()
        for entry in options:
            value, label = _unpack(entry)
            if isinstance(value, (list, tuple)):
                # nested list/tuple value -> an <optgroup> of sub-options
                group = normalized.add_optgroup(label)
                for sub_entry in value:
                    sub_value, sub_label = _unpack(sub_entry)
                    group.add_option(sub_label, sub_value)
            else:
                normalized.add_option(label, value)
        options = normalized

    return raw_select(name, selected_values, options, id=id, **attrs)
218
217
219
218
def branding(name, length=40):
    """Return *name* truncated to *length* characters with no ellipsis indicator."""
    return truncate(name, length, indicator="")
222
221
223
222
def FID(raw_id, path):
    """
    Create a unique, url-safe ID for a filenode, combining the shortened
    commit id with a truncated MD5 of the file path.

    :param raw_id: full commit id
    :param path: file path within the commit
    """
    path_hash = md5_safe(path)[:12]
    return 'c-{}-{}'.format(short_id(raw_id), path_hash)
234
233
235
234
class _GetError(object):
    """Get error from form_errors, and represent it as span wrapped error
    message

    :param field_name: field to fetch errors for
    :param form_errors: form errors dict
    """

    def __call__(self, field_name, form_errors):
        tmpl = """<span class="error_msg">%s</span>"""
        # guard clauses: no errors dict, or no entry for this field -> None
        if not form_errors:
            return None
        if field_name not in form_errors:
            return None
        return literal(tmpl % form_errors.get(field_name))


get_error = _GetError()
251
250
252
251
class _ToolTip(object):
    """Callable that escapes text for safe embedding as a tooltip title."""

    def __call__(self, tooltip_title, trim_at=50):
        """
        Special function just to wrap our text into nice formatted
        autowrapped text

        :param tooltip_title: raw tooltip text
        """
        escaped = escape(tooltip_title)
        # additionally entity-encode any angle brackets left in the result
        return escaped.replace('<', '&lt;').replace('>', '&gt;')


tooltip = _ToolTip()
268
267
# clipboard-copy icon appended after the breadcrumb trail; {} is the full path
files_icon = u'<i class="file-breadcrumb-copy tooltip icon-clipboard clipboard-action" data-clipboard-text="{}" title="Copy file path"></i>'


def files_breadcrumbs(repo_name, repo_type, commit_id, file_path, landing_ref_name=None, at_ref=None,
                      limit_items=False, linkify_last_item=False, hide_last_item=False,
                      copy_path_icon=True):
    """
    Build an HTML breadcrumb trail (' / '-joined links) for a file path
    inside a repository, starting with a HOME icon linking to the repo
    files root.

    :param repo_name: repository name used to build each segment url
    :param repo_type: vcs backend alias; SVN gets special ref handling
    :param commit_id: commit the path is viewed at
    :param file_path: path being rendered, split on '/'
    :param landing_ref_name: fallback ref when ``at_ref`` is given but falsy
    :param at_ref: ref to carry through as the ``?at=`` query param
    :param limit_items: collapse long trails to first + '...' + last 5
    :param linkify_last_item: when False the final segment is plain text
    :param hide_last_item: drop the final segment entirely
    :param copy_path_icon: append a copy-to-clipboard icon for the full path
    :return: ``literal`` HTML markup (or just the HOME icon for empty paths)
    """
    if isinstance(file_path, str):
        file_path = safe_unicode(file_path)

    if at_ref:
        route_qry = {'at': at_ref}
        default_landing_ref = at_ref or landing_ref_name or commit_id
    else:
        route_qry = None
        default_landing_ref = commit_id

    # first segment is a `HOME` link to repo files root location
    root_name = literal(u'<i class="icon-home"></i>')

    url_segments = [
        link_to(
            root_name,
            repo_files_by_ref_url(
                repo_name,
                repo_type,
                f_path=None,  # None here is a special case for SVN repos,
                # that won't prefix with a ref
                ref_name=default_landing_ref,
                commit_id=commit_id,
                query=route_qry
            )
        )]

    path_segments = file_path.split('/')
    last_cnt = len(path_segments) - 1
    for cnt, segment in enumerate(path_segments):
        if not segment:
            continue
        segment_html = escape(segment)

        last_item = cnt == last_cnt

        if last_item and hide_last_item:
            # iterate over and hide last element
            continue

        if last_item and linkify_last_item is False:
            # plain version
            url_segments.append(segment_html)
        else:
            # each intermediate segment links to its own subtree
            url_segments.append(
                link_to(
                    segment_html,
                    repo_files_by_ref_url(
                        repo_name,
                        repo_type,
                        f_path='/'.join(path_segments[:cnt + 1]),
                        ref_name=default_landing_ref,
                        commit_id=commit_id,
                        query=route_qry
                    ),
                ))

    # collapse only when it actually shortens the trail
    limited_url_segments = url_segments[:1] + ['...'] + url_segments[-5:]
    if limit_items and len(limited_url_segments) < len(url_segments):
        url_segments = limited_url_segments

    full_path = file_path
    if copy_path_icon:
        icon = files_icon.format(escape(full_path))
    else:
        icon = ''

    if file_path == '':
        return root_name
    else:
        return literal(' / '.join(url_segments) + icon)
346
345
347
346
def files_url_data(request):
    """
    Serialize the request's matchdict to a JSON string, applying defaults:
    ``f_path`` is url-quoted (or '' if absent) and ``commit_id`` falls
    back to 'tip'.
    """
    import urllib.request, urllib.parse, urllib.error

    params = request.matchdict

    if 'f_path' in params:
        # url-quote the path so it is safe to embed in generated urls
        params['f_path'] = urllib.parse.quote(safe_str(params['f_path']))
    else:
        params['f_path'] = ''

    params.setdefault('commit_id', 'tip')

    return ext_json.str_json(params)
360
359
361
360
def repo_files_by_ref_url(db_repo_name, db_repo_type, f_path, ref_name, commit_id, query=None, ):
    """
    Build a 'repo_files' route url for the given repo/ref/path combination,
    with backend-specific handling of how the ref is encoded in the url.

    :param db_repo_name: repository name for the route
    :param db_repo_type: vcs backend alias; SVN takes a different branch below
    :param f_path: file path inside the repo; ``None`` means explicit no-prefix
    :param ref_name: named ref (branch/tag); for SVN this is a stored path
    :param commit_id: concrete commit id fallback
    :param query: optional query params dict passed through to the route
    """
    _is_svn = is_svn(db_repo_type)
    final_f_path = f_path

    if _is_svn:
        """
        For SVN the ref_name cannot be used as a commit_id, it needs to be prefixed with
        actually commit_id followed by the ref_name. This should be done only in case
        This is a initial landing url, without additional paths.

        like: /1000/tags/1.0.0/?at=tags/1.0.0
        """

        if ref_name and ref_name != 'tip':
            # NOTE(marcink): for svn the ref_name is actually the stored path, so we prefix it
            # for SVN we only do this magic prefix if it's root, .eg landing revision
            # of files link. If we are in the tree we don't need this since we traverse the url
            # that has everything stored
            if f_path in ['', '/']:
                final_f_path = '/'.join([ref_name, f_path])

        # SVN always needs a commit_id explicitly, without a named REF
        default_commit_id = commit_id
    else:
        """
        For git and mercurial we construct a new URL using the names instead of commit_id
        like: /master/some_path?at=master
        """
        # We currently do not support branches with slashes
        if '/' in ref_name:
            default_commit_id = commit_id
        else:
            default_commit_id = ref_name

    # sometimes we pass f_path as None, to indicate explicit no prefix,
    # we translate it to string to not have None
    final_f_path = final_f_path or ''

    files_url = route_path(
        'repo_files',
        repo_name=db_repo_name,
        commit_id=default_commit_id,
        f_path=final_f_path,
        _query=query
    )
    return files_url
408
407
409
408
def code_highlight(code, lexer, formatter, use_hl_filter=False):
    """
    Lex ``code`` with ``lexer`` and format it with the formatter ``formatter``.

    If ``outfile`` is given and a valid file object (an object
    with a ``write`` method), the result will be written to it, otherwise
    it is returned as a string.
    """
    if use_hl_filter:
        # attach the ElasticSearch highlight filter so search-term matches
        # get marked up in the output
        from rhodecode.lib.index import search_utils
        lexer.add_filter(search_utils.ElasticSearchHLFilter())

    token_stream = pygments.lex(code, lexer)
    return pygments.format(token_stream, formatter)
423
422
424
423
class CodeHtmlFormatter(HtmlFormatter):
    """
    My code Html Formatter for source codes

    Wraps each rendered source line in a ``<div id="L<n>">`` so individual
    lines can be anchored/linked.
    """

    def wrap(self, source, outfile):
        return self._wrap_div(self._wrap_pre(self._wrap_code(source)))

    def _wrap_code(self, source):
        # attach a 1-based line anchor to each emitted line
        for cnt, it in enumerate(source):
            i, t = it
            t = '<div id="L%s">%s</div>' % (cnt + 1, t)
            yield i, t

    def _wrap_tablelinenos(self, inner):
        # fix: this module does `from io import StringIO`, so the previous
        # `StringIO.StringIO()` call raised AttributeError at runtime
        dummyoutfile = StringIO()
        lncount = 0
        for t, line in inner:
            if t:
                lncount += 1
            dummyoutfile.write(line)

        fl = self.linenostart
        mw = len(str(lncount + fl - 1))  # width of the widest line number
        sp = self.linenospecial
        st = self.linenostep
        la = self.lineanchors
        aln = self.anchorlinenos
        nocls = self.noclasses
        if sp:
            lines = []

            for i in range(fl, fl + lncount):
                if i % st == 0:
                    if i % sp == 0:
                        if aln:
                            lines.append('<a href="#%s%d" class="special">%*d</a>' %
                                         (la, i, mw, i))
                        else:
                            lines.append('<span class="special">%*d</span>' % (mw, i))
                    else:
                        if aln:
                            lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
                        else:
                            lines.append('%*d' % (mw, i))
                else:
                    lines.append('')
            ls = '\n'.join(lines)
        else:
            lines = []
            for i in range(fl, fl + lncount):
                if i % st == 0:
                    if aln:
                        lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
                    else:
                        lines.append('%*d' % (mw, i))
                else:
                    lines.append('')
            ls = '\n'.join(lines)

        # in case you wonder about the seemingly redundant <div> here: since the
        # content in the other cell also is wrapped in a div, some browsers in
        # some configurations seem to mess up the formatting...
        if nocls:
            yield 0, ('<table class="%stable">' % self.cssclass +
                      '<tr><td><div class="linenodiv" '
                      'style="background-color: #f0f0f0; padding-right: 10px">'
                      '<pre style="line-height: 125%">' +
                      ls + '</pre></div></td><td id="hlcode" class="code">')
        else:
            yield 0, ('<table class="%stable">' % self.cssclass +
                      '<tr><td class="linenos"><div class="linenodiv"><pre>' +
                      ls + '</pre></div></td><td id="hlcode" class="code">')
        yield 0, dummyoutfile.getvalue()
        yield 0, '</td></tr></table>'
500
499
501
500
class SearchContentCodeHtmlFormatter(CodeHtmlFormatter):
    """
    Formatter for search-result snippets: renders only the line numbers
    listed in ``only_line_numbers`` (as table rows, with '...' gap rows)
    and wraps code lines in plain ``<pre>`` tags.
    """

    def __init__(self, **kw):
        # only show these line numbers if set
        self.only_lines = kw.pop('only_line_numbers', [])
        self.query_terms = kw.pop('query_terms', [])
        self.max_lines = kw.pop('max_lines', 5)
        self.line_context = kw.pop('line_context', 3)
        self.url = kw.pop('url', None)

        # NOTE(review): super(CodeHtmlFormatter, self) deliberately(?) skips
        # CodeHtmlFormatter in the MRO; since CodeHtmlFormatter defines no
        # __init__ this resolves to HtmlFormatter either way — confirm intent.
        super(CodeHtmlFormatter, self).__init__(**kw)

    def _wrap_code(self, source):
        # unlike the parent, emit plain <pre> wrappers without line anchors
        for cnt, it in enumerate(source):
            i, t = it
            t = '<pre>%s</pre>' % t
            yield i, t

    def _wrap_tablelinenos(self, inner):
        yield 0, '<table class="code-highlight %stable">' % self.cssclass

        last_shown_line_number = 0
        current_line_number = 1

        for t, line in inner:
            if not t:
                # pass through non-source tokens unchanged
                yield t, line
                continue

            if current_line_number in self.only_lines:
                if last_shown_line_number + 1 != current_line_number:
                    # non-contiguous jump -> emit a '...' separator row
                    yield 0, '<tr>'
                    yield 0, '<td class="line">...</td>'
                    yield 0, '<td id="hlcode" class="code"></td>'
                    yield 0, '</tr>'

                yield 0, '<tr>'
                if self.url:
                    # link the line number back to the source file location
                    yield 0, '<td class="line"><a href="%s#L%i">%i</a></td>' % (
                        self.url, current_line_number, current_line_number)
                else:
                    yield 0, '<td class="line"><a href="">%i</a></td>' % (
                        current_line_number)
                yield 0, '<td id="hlcode" class="code">' + line + '</td>'
                yield 0, '</tr>'

                last_shown_line_number = current_line_number

            current_line_number += 1

        yield 0, '</table>'
552
551
553
552
def hsv_to_rgb(h, s, v):
    """Convert an HSV color triple to an RGB triple.

    :param h: hue (fractional, wraps around every 1.0)
    :param s: saturation in [0, 1]
    :param v: value/brightness in [0, 1]
    :returns: (r, g, b) tuple with components in [0, 1]
    """
    if s == 0.0:
        # achromatic (grey): all channels equal the value
        return v, v, v

    sector = int(h * 6.0)  # relies on int() truncating
    frac = (h * 6.0) - sector
    p = v * (1.0 - s)
    q = v * (1.0 - s * frac)
    t = v * (1.0 - s * (1.0 - frac))

    # one channel ordering per 60-degree sector of the hue circle
    channel_order = {
        0: (v, t, p),
        1: (q, v, p),
        2: (p, v, t),
        3: (p, q, v),
        4: (t, p, v),
        5: (v, p, q),
    }
    return channel_order[sector % 6]
577
576
578
577
def unique_color_generator(n=10000, saturation=0.10, lightness=0.95):
    """
    Generator for getting n of evenly distributed colors using
    hsv color and golden ratio. It always returns the same order of colors.

    :param n: number of colors to generate
    :param saturation: saturation of returned colors
    :param lightness: lightness of returned colors
    :returns: yields a list of three RGB component strings,
        e.g. ['240', '230', '243']
    """

    golden_ratio = 0.618033988749895
    h = 0.22717784590367374  # fixed starting hue -> deterministic sequence

    for _ in range(n):
        h += golden_ratio
        h %= 1
        hsv = (h, saturation, lightness)
        rgb = hsv_to_rgb(*hsv)
        # Yield a concrete list rather than a one-shot `map` iterator:
        # callers such as color_hasher() cache the yielded value and join
        # it more than once, and joining an exhausted map silently gives ''.
        # NOTE(review): int(channel * 256) reaches 256 when a channel is
        # exactly 1.0 — confirm inputs stay strictly below 1.0.
        yield [str(int(channel * 256)) for channel in rgb]
599
598
600
599
def color_hasher(n=10000, saturation=0.10, lightness=0.95):
    """
    Returns a function which when called with an argument returns a unique
    color for that argument, eg.

    :param n: number of colors to generate
    :param saturation: saturation of returned colors
    :param lightness: lightness of returned colors
    :returns: css RGB string

    >>> color_hash = color_hasher()
    >>> color_hash('hello')
    'rgb(34, 12, 59)'
    >>> color_hash('hello')
    'rgb(34, 12, 59)'
    >>> color_hash('other')
    'rgb(90, 224, 159)'
    """

    color_dict = {}
    cgenerator = unique_color_generator(
        saturation=saturation, lightness=lightness)

    def get_color_string(thing):
        # Memoize per key so a given value always maps to the same color.
        if thing in color_dict:
            col = color_dict[thing]
        else:
            # Materialize as a tuple: the generator may yield a one-shot
            # iterator (e.g. `map`), and joining it a second time for a
            # repeated key would silently produce 'rgb()'.
            col = color_dict[thing] = tuple(next(cgenerator))
        return "rgb(%s)" % (', '.join(col))

    return get_color_string
632
631
633
632
def get_lexer_safe(mimetype=None, filepath=None):
    """
    Tries to return a relevant pygments lexer using mimetype/filepath name,
    defaulting to plain text if none could be found.

    :param mimetype: optional mimetype to look a lexer up by
    :param filepath: optional file name to look a lexer up by
    :returns: a pygments lexer instance (never None)
    """
    lexer = None

    # Try the mimetype first. Keep this lookup in its own try block: in the
    # original single-try version a ClassNotFound here skipped the
    # filename-based fallback entirely.
    if mimetype:
        try:
            lexer = get_lexer_for_mimetype(mimetype)
        except pygments.util.ClassNotFound:
            lexer = None

    # Fall back to the file name; guard against filepath being None.
    if not lexer and filepath:
        try:
            lexer = get_lexer_for_filename(filepath)
        except pygments.util.ClassNotFound:
            lexer = None

    if not lexer:
        # last resort: plain text lexer
        lexer = get_lexer_by_name('text')

    return lexer
652
651
653
652
def get_lexer_for_filenode(filenode):
    """Return the lexer for *filenode*: a custom lexer registered for its
    extension when one exists, otherwise the node's own lexer."""
    custom = get_custom_lexer(filenode.extension)
    return custom if custom else filenode.lexer
657
656
658
657
def pygmentize(filenode, **kwargs):
    """
    Render *filenode*'s content as highlighted HTML markup via pygments.

    :param filenode: file node whose content is highlighted
    :returns: literal (markup-safe) highlighted HTML
    """
    formatter = CodeHtmlFormatter(**kwargs)
    lexer = get_lexer_for_filenode(filenode)
    highlighted = code_highlight(filenode.content, lexer, formatter)
    return literal(highlighted)
668
667
669
668
def is_following_repo(repo_name, user_id):
    """Return whether the given user follows the named repository."""
    from rhodecode.model.scm import ScmModel
    scm = ScmModel()
    return scm.is_following_repo(repo_name, user_id)
673
672
674
673
class _Message(object):
    """A message returned by ``Flash.pop_messages()``.

    Converting the message to a string returns the message text. Instances
    also have the following attributes:

    * ``message``: the message text.
    * ``category``: the category specified when the message was created.
    """

    def __init__(self, category, message, sub_data=None):
        self.category = category
        self.message = message
        # optional structured payload attached to the flash message
        self.sub_data = sub_data or {}

    def __str__(self):
        return self.message

    # python2-style alias kept for compatibility with existing callers
    __unicode__ = __str__

    def __html__(self):
        # escape the text so it is safe to embed directly into markup
        return escape(safe_unicode(self.message))
697
696
698
697
class Flash(object):
    """Session-backed flash-message store.

    Bridges two storage styles: the legacy pylons-style list kept under
    ``session_key`` and the pyramid ``session.flash``/``pop_flash`` queues
    (one queue per category).
    """

    # List of allowed categories. If None, allow any category.
    categories = ["warning", "notice", "error", "success"]

    # Default category if none is specified.
    default_category = "notice"

    def __init__(self, session_key="flash", categories=None,
                 default_category=None):
        """
        Instantiate a ``Flash`` object.

        ``session_key`` is the key to save the messages under in the user's
        session.

        ``categories`` is an optional list which overrides the default list
        of categories.

        ``default_category`` overrides the default category used for messages
        when none is specified.
        """
        self.session_key = session_key
        if categories is not None:
            self.categories = categories
        if default_category is not None:
            self.default_category = default_category
        if self.categories and self.default_category not in self.categories:
            raise ValueError(
                "unrecognized default category %r" % (self.default_category,))

    def pop_messages(self, session=None, request=None):
        """
        Return all accumulated messages and delete them from the session.

        The return value is a list of ``Message`` objects.
        """
        messages = []

        # fall back to the current request's session when none is given
        if not session:
            if not request:
                request = get_current_request()
            session = request.session

        # Pop the 'old' pylons flash messages. They are tuples of the form
        # (category, message)
        for cat, msg in session.pop(self.session_key, []):
            messages.append(_Message(cat, msg))

        # Pop the 'new' pyramid flash messages for each category as list
        # of strings.
        for cat in self.categories:
            for msg in session.pop_flash(queue=cat):
                sub_data = {}
                if hasattr(msg, 'rsplit'):
                    # string messages may carry a JSON payload appended
                    # after a |DELIM| marker
                    flash_data = msg.rsplit('|DELIM|', 1)
                    org_message = flash_data[0]
                    if len(flash_data) > 1:
                        sub_data = json.loads(flash_data[1])
                else:
                    # non-string message objects are passed through as-is
                    org_message = msg

                messages.append(_Message(cat, org_message, sub_data=sub_data))

        # Map messages from the default queue to the 'notice' category.
        for msg in session.pop_flash():
            messages.append(_Message('notice', msg))

        session.save()
        return messages

    def json_alerts(self, session=None, request=None):
        # Serialize all pending flash messages into a JSON list consumed by
        # the client-side alert renderer.
        # NOTE(review): calls the module-level `flash` singleton rather than
        # `self` — identical for the singleton, surprising on any other
        # Flash instance; confirm intended.
        payloads = []
        messages = flash.pop_messages(session=session, request=request) or []
        for message in messages:
            payloads.append({
                'message': {
                    'message': u'{}'.format(message.message),
                    'level': message.category,
                    'force': True,
                    'subdata': message.sub_data
                }
            })
        return safe_str(json.dumps(payloads))

    def __call__(self, message, category=None, ignore_duplicate=True,
                 session=None, request=None):
        # Queue a new flash message on the (given or current) session.

        if not session:
            if not request:
                request = get_current_request()
            session = request.session

        session.flash(
            message, queue=category, allow_duplicate=not ignore_duplicate)
793
792
794
793
# module-level singleton used throughout the helpers (and by Flash.json_alerts)
flash = Flash()

#==============================================================================
# SCM FILTERS available via h.
#==============================================================================
from rhodecode.lib.vcs.utils import author_name, author_email
from rhodecode.lib.utils2 import age, age_from_seconds
from rhodecode.model.db import User, ChangesetStatus


# convenience alias: extracts the email part of a commit author string
email = author_email
806
805
807
806
def capitalize(raw_text):
    """Upper-case the first character of *raw_text*, lower-case the rest
    (plain ``str.capitalize`` semantics)."""
    capitalized = raw_text.capitalize()
    return capitalized
810
809
811
810
def short_id(long_id):
    """Return the abbreviated (first 12 characters) form of a commit id."""
    abbreviated_length = 12
    return long_id[:abbreviated_length]
814
813
815
814
def hide_credentials(url):
    """Return *url* with any credentials embedded in it masked out."""
    from rhodecode.lib.utils2 import credentials_filter
    masked = credentials_filter(url)
    return masked
819
818
820
819
import pytz
import tzlocal

# timezone of the host machine, resolved once at import time
local_timezone = tzlocal.get_localzone()
824
823
825
824
def get_timezone(datetime_iso, time_is_local=False):
    """Return the UTC-offset suffix (e.g. '+02:00') for *datetime_iso*.

    Naive datetimes flagged as local are resolved against the RC_TIMEZONE
    environment variable (when set) or the host timezone; anything else is
    reported as UTC ('+00:00').
    """
    tzinfo = '+00:00'

    # only naive local datetime objects need their offset computed
    is_naive_local = (
        time_is_local
        and isinstance(datetime_iso, datetime)
        and not datetime_iso.tzinfo)
    if is_naive_local:
        forced = os.environ.get('RC_TIMEZONE', '')
        if forced:
            forced = pytz.timezone(forced)
        tz = forced or local_timezone
        offset = tz.localize(datetime_iso).strftime('%z')
        tzinfo = '{}:{}'.format(offset[:-2], offset[-2:])
    return tzinfo
838
837
839
838
def age_component(datetime_iso, value=None, time_is_local=False, tooltip=True):
    """Render a ``<time class="timeago">`` element for *datetime_iso*.

    :param value: pre-formatted label; defaults to format_date() output
    :param time_is_local: treat naive datetimes as local time
    :param tooltip: add the tooltip css class and a title attribute
    """
    title = value or format_date(datetime_iso)
    tzinfo = get_timezone(datetime_iso, time_is_local=time_is_local)

    css_class = 'tooltip' if tooltip else ''
    if tooltip:
        tooltip_title = '{title}{tzinfo}'.format(title=title, tzinfo=tzinfo)
    else:
        tooltip_title = ''

    markup = (
        '<time class="timeago {cls}" title="{tt_title}" '
        'datetime="{dt}{tzinfo}">{title}</time>').format(
            cls=css_class, tt_title=tooltip_title,
            title=title, dt=datetime_iso, tzinfo=tzinfo)
    return literal(markup)
850
849
851
850
def _shorten_commit_id(commit_id, commit_len=None):
    """Trim *commit_id* to the configured display length; when
    *commit_len* is not given, use the per-request visual setting."""
    if commit_len is None:
        # look up the instance-configured sha display length
        current = get_current_request()
        commit_len = current.call_context.visual.show_sha_length
    return commit_id[:commit_len]
857
856
858
857
def show_id(commit, show_idx=None, commit_len=None):
    """
    Configurable function that shows ID
    by default it's r123:fffeeefffeee

    :param commit: commit instance
    """
    if show_idx is None:
        # fall back to the per-request visual setting
        current = get_current_request()
        show_idx = current.call_context.visual.show_revision_number

    short_sha = _shorten_commit_id(commit.raw_id, commit_len=commit_len)
    if not show_idx:
        return '%s' % (short_sha, )
    return 'r%s:%s' % (commit.idx, short_sha)
875
874
876
875
def format_date(date):
    """
    use a standardized formatting for dates used in RhodeCode

    :param date: date/datetime object
    :return: formatted date string, or empty string for a falsy input
    """
    if not date:
        return u""
    return safe_unicode(date.strftime("%a, %d %b %Y %H:%M:%S"))
890
889
891
890
class _RepoChecker(object):
    """Callable predicate testing whether a repository (instance, db row,
    or raw alias string) belongs to a particular backend."""

    def __init__(self, backend_alias):
        self._backend_alias = backend_alias

    def __call__(self, repository):
        # accept repo instances (.alias), db objects (.repo_type),
        # or plain alias strings
        for attr in ('alias', 'repo_type'):
            if hasattr(repository, attr):
                return getattr(repository, attr) == self._backend_alias
        return repository == self._backend_alias
905
904
906
905
# ready-made backend predicates, usable as h.is_git(repo) etc.
is_git = _RepoChecker('git')
is_hg = _RepoChecker('hg')
is_svn = _RepoChecker('svn')
910
909
911
910
def get_repo_type_by_name(repo_name):
    """Return the backend type of the named repository, or None when no
    such repository exists."""
    repo = Repository.get_by_repo_name(repo_name)
    return repo.repo_type if repo else None
916
915
917
916
def is_svn_without_proxy(repository):
    """True when *repository* is a subversion repo and the svn http proxy
    is disabled in the vcs settings; False otherwise."""
    if not is_svn(repository):
        return False
    from rhodecode.model.settings import VcsSettingsModel
    conf = VcsSettingsModel().get_ui_settings_as_config_obj()
    proxy_enabled = str2bool(conf.get('vcs_svn_proxy', 'http_requests_enabled'))
    return not proxy_enabled
924
923
925
924
def discover_user(author):
    """
    Tries to discover RhodeCode User based on the author string. Author string
    is typically `FirstName LastName <email@address.com>`
    """

    # already a User instance -> nothing to discover
    if isinstance(author, User):
        return author

    # the email part is the strongest identifier, try it first
    candidate_email = author_email(author)
    if candidate_email != '':
        by_email = User.get_by_email(
            candidate_email, case_insensitive=True, cache=True)
        if by_email is not None:
            return by_email

    # otherwise, treat the name part as a username and look it up
    candidate_name = author_name(author)
    by_username = User.get_by_username(
        candidate_name, case_insensitive=True, cache=True)
    if by_username is not None:
        return by_username

    return None
950
949
951
950
def email_or_none(author):
    """Best-effort email extraction from a commit author string: the
    embedded email if any, else the email of the user whose username
    matches the name part; None when neither resolves."""
    embedded = author_email(author)
    if embedded != '':
        return embedded

    user = User.get_by_username(
        author_name(author), case_insensitive=True, cache=True)
    if user is not None:
        return user.email

    # No valid email, not a valid user in the system, none!
    return None
969
968
970
969
def link_to_user(author, length=0, **kwargs):
    """Render *author* as a profile link when a matching non-default user
    exists, otherwise as escaped plain text.

    :param length: optional maximum display length for the name
    """
    user = discover_user(author)
    # when resolved, pass the user into person() and save one
    # intensive lookup there
    if user:
        author = user

    display = person(author, 'username_or_name_or_email')
    if length:
        display = shorter(display, length)

    if not user or user.username == user.DEFAULT_USER:
        return escape(display)
    return link_to(
        escape(display),
        route_path('user_profile', username=user.username),
        **kwargs)
989
988
990
989
def link_to_group(users_group_name, **kwargs):
    """Render a link to the given user group's profile page."""
    profile_url = route_path(
        'user_group_profile', user_group_name=users_group_name)
    return link_to(escape(users_group_name), profile_url, **kwargs)
996
995
997
996
def person(author, show_attr="username_and_name"):
    """Resolve *author* to a display string: the requested attribute of a
    matched RhodeCode user, else the raw name or email from the string."""
    user = discover_user(author)
    if not user:
        # no matching user: fall back to the parsed name, then the email
        parsed_name = author_name(author)
        parsed_email = email(author)
        return parsed_name or parsed_email
    return getattr(user, show_attr)
1006
1005
1007
1006
def author_string(email):
    """Given an email, return 'First Last &lt;email&gt;' when a matching
    user with a name exists, the plain email when not, or None for a
    falsy input."""
    if not email:
        return None

    user = User.get_by_email(email, case_insensitive=True, cache=True)
    if user and (user.first_name or user.last_name):
        # html-escaped angle brackets around the address
        return '%s %s &lt;%s&gt;' % (user.first_name, user.last_name, email)
    return email
1021
1020
1022
1021
def person_by_id(id_, show_attr="username_and_name"):
    """Resolve a numeric user id to the requested display attribute;
    non-numeric or unknown ids are returned as the (int-coerced) input."""

    def person_getter(usr):
        # attr to return from fetched user
        return getattr(usr, show_attr)

    # maybe it's an ID ?
    if str(id_).isdigit() or isinstance(id_, int):
        id_ = int(id_)
        user = User.get(id_)
        if user is not None:
            return person_getter(user)
    return id_
1034
1033
1035
1034
def gravatar_with_user(request, author, show_disabled=False, tooltip=False):
    """Render the shared 'gravatar_with_user' template def for *author*."""
    renderer = request.get_partial_renderer(
        'rhodecode:templates/base/base.mako')
    return renderer(
        'gravatar_with_user', author,
        show_disabled=show_disabled, tooltip=tooltip)
1039
1038
1040
1039
# Supported metatag patterns: tag kind -> (compiled regex, html replacement).
# Patterns match against already html-escaped text (note the &gt;/&amp;
# entities). Order matters: specific patterns must run before the generic
# catch-all at the end.
tags_paterns = OrderedDict((
    ('lang', (re.compile(r'\[(lang|language)\ \=\&gt;\ *([a-zA-Z\-\/\#\+\.]*)\]'),
              '<div class="metatag" tag="lang">\\2</div>')),

    ('see', (re.compile(r'\[see\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]'),
             '<div class="metatag" tag="see">see: \\1 </div>')),

    ('url', (re.compile(r'\[url\ \=\&gt;\ \[([a-zA-Z0-9\ \.\-\_]+)\]\((http://|https://|/)(.*?)\)\]'),
             '<div class="metatag" tag="url"> <a href="\\2\\3">\\1</a> </div>')),

    ('license', (re.compile(r'\[license\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]'),
                 '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/\\1">\\1</a></div>')),

    ('ref', (re.compile(r'\[(requires|recommends|conflicts|base)\ \=\&gt;\ *([a-zA-Z0-9\-\/]*)\]'),
             '<div class="metatag" tag="ref \\1">\\1: <a href="/\\2">\\2</a></div>')),

    ('state', (re.compile(r'\[(stable|featured|stale|dead|dev|deprecated)\]'),
               '<div class="metatag" tag="state \\1">\\1</div>')),

    # label in grey
    ('label', (re.compile(r'\[([a-z]+)\]'),
               '<div class="metatag" tag="label">\\1</div>')),

    # generic catch all in grey
    ('generic', (re.compile(r'\[([a-zA-Z0-9\.\-\_]+)\]'),
                 '<div class="metatag" tag="generic">\\1</div>')),
))
1068
1067
1069
1068
def extract_metatags(value):
    """
    Extract supported meta-tags from given text value
    """
    found = []
    if not value:
        return found, ''

    # strip each recognized tag out of the text while collecting it
    for tag_name, (pattern, _replacement) in tags_paterns.items():
        found.extend(
            (tag_name, match.group()) for match in pattern.finditer(value))
        value = pattern.sub('', value)

    return found, value
1084
1083
1085
1084
def style_metatag(tag_type, value):
    """
    Convert a meta-tag text value into its HTML representation.

    Looks up the regex/replacement pair registered for ``tag_type`` in
    ``tags_paterns``; unknown tag types are passed through unchanged and
    falsy values render as ``''``.
    """
    if not value:
        return ''

    tag_data = tags_paterns.get(tag_type)
    if not tag_data:
        return value

    pattern, replacement = tag_data
    # the replacement is kept as a plain string (not a markup object) so it
    # can be fed straight into the regex substitution
    return pattern.sub(replacement, value)
1102
1101
1103
1102
def bool2icon(value, show_at_false=True):
    """
    Render the truthiness of ``value`` as an ``<i>`` icon element.

    :param value: any value; interpreted via bool conversion
    :param show_at_false: when ``False``, a falsy value renders a bare,
        class-less ``<i>`` tag instead of the "false" icon
    """
    if value:
        return HTML.tag('i', class_="icon-true", title='True')
    if show_at_false:
        return HTML.tag('i', class_="icon-false", title='False')
    return HTML.tag('i')
1118
1117
1119
1118
def b64(inp):
    """
    Base64-encode ``inp`` and return the encoded value as ``bytes``.

    ``bytes`` input is passed straight through to ``base64.b64encode`` as
    before; ``str`` input is additionally accepted and UTF-8 encoded first,
    since ``base64.b64encode`` only operates on bytes under Python 3.
    """
    if isinstance(inp, str):
        # backward-compatible convenience for callers passing text
        inp = inp.encode('utf-8')
    return base64.b64encode(inp)
1122
1121
1123 #==============================================================================
1122 #==============================================================================
1124 # PERMS
1123 # PERMS
1125 #==============================================================================
1124 #==============================================================================
1126 from rhodecode.lib.auth import (
1125 from rhodecode.lib.auth import (
1127 HasPermissionAny, HasPermissionAll,
1126 HasPermissionAny, HasPermissionAll,
1128 HasRepoPermissionAny, HasRepoPermissionAll, HasRepoGroupPermissionAll,
1127 HasRepoPermissionAny, HasRepoPermissionAll, HasRepoGroupPermissionAll,
1129 HasRepoGroupPermissionAny, HasRepoPermissionAnyApi, get_csrf_token,
1128 HasRepoGroupPermissionAny, HasRepoPermissionAnyApi, get_csrf_token,
1130 csrf_token_key, AuthUser)
1129 csrf_token_key, AuthUser)
1131
1130
1132
1131
1133 #==============================================================================
1132 #==============================================================================
1134 # GRAVATAR URL
1133 # GRAVATAR URL
1135 #==============================================================================
1134 #==============================================================================
class InitialsGravatar(object):
    """
    Generates an "initials" SVG avatar for a user as a fallback when no
    external gravatar service is used. The background color is derived
    deterministically from the email address, so a given user always gets
    the same color.
    """

    def __init__(self, email_address, first_name, last_name, size=30,
                 background=None, text_color='#fff'):
        self.size = size
        self.first_name = first_name
        self.last_name = last_name
        self.email_address = email_address
        # stable per-email color unless an explicit background is given
        self.background = background or self.str2color(email_address)
        self.text_color = text_color

    def get_color_bank(self):
        """
        returns a predefined list of colors that gravatars can use.
        Those are randomized distinct colors that guarantee readability and
        uniqueness.

        generated with: http://phrogz.net/css/distinct-colors.html
        """
        return [
            '#bf3030', '#a67f53', '#00ff00', '#5989b3', '#392040', '#d90000',
            '#402910', '#204020', '#79baf2', '#a700b3', '#bf6060', '#7f5320',
            '#008000', '#003059', '#ee00ff', '#ff0000', '#8c4b00', '#007300',
            '#005fb3', '#de73e6', '#ff4040', '#ffaa00', '#3df255', '#203140',
            '#47004d', '#591616', '#664400', '#59b365', '#0d2133', '#83008c',
            '#592d2d', '#bf9f60', '#73e682', '#1d3f73', '#73006b', '#402020',
            '#b2862d', '#397341', '#597db3', '#e600d6', '#a60000', '#736039',
            '#00b318', '#79aaf2', '#330d30', '#ff8080', '#403010', '#16591f',
            '#002459', '#8c4688', '#e50000', '#ffbf40', '#00732e', '#102340',
            '#bf60ac', '#8c4646', '#cc8800', '#00a642', '#1d3473', '#b32d98',
            '#660e00', '#ffd580', '#80ffb2', '#7391e6', '#733967', '#d97b6c',
            '#8c5e00', '#59b389', '#3967e6', '#590047', '#73281d', '#665200',
            '#00e67a', '#2d50b3', '#8c2377', '#734139', '#b2982d', '#16593a',
            '#001859', '#ff00aa', '#a65e53', '#ffcc00', '#0d3321', '#2d3959',
            '#731d56', '#401610', '#4c3d00', '#468c6c', '#002ca6', '#d936a3',
            '#d94c36', '#403920', '#36d9a3', '#0d1733', '#592d4a', '#993626',
            '#cca300', '#00734d', '#46598c', '#8c005e', '#7f1100', '#8c7000',
            '#00a66f', '#7382e6', '#b32d74', '#d9896c', '#ffe680', '#1d7362',
            '#364cd9', '#73003d', '#d93a00', '#998a4d', '#59b3a1', '#5965b3',
            '#e5007a', '#73341d', '#665f00', '#00b38f', '#0018b3', '#59163a',
            '#b2502d', '#bfb960', '#00ffcc', '#23318c', '#a6537f', '#734939',
            '#b2a700', '#104036', '#3d3df2', '#402031', '#e56739', '#736f39',
            '#79f2ea', '#000059', '#401029', '#4c1400', '#ffee00', '#005953',
            '#101040', '#990052', '#402820', '#403d10', '#00ffee', '#0000d9',
            '#ff80c4', '#a66953', '#eeff00', '#00ccbe', '#8080ff', '#e673a1',
            '#a62c00', '#474d00', '#1a3331', '#46468c', '#733950', '#662900',
            '#858c23', '#238c85', '#0f0073', '#b20047', '#d9986c', '#becc00',
            '#396f73', '#281d73', '#ff0066', '#ff6600', '#dee673', '#59adb3',
            '#6559b3', '#590024', '#b2622d', '#98b32d', '#36ced9', '#332d59',
            '#40001a', '#733f1d', '#526600', '#005359', '#242040', '#bf6079',
            '#735039', '#cef23d', '#007780', '#5630bf', '#66001b', '#b24700',
            '#acbf60', '#1d6273', '#25008c', '#731d34', '#a67453', '#50592d',
            '#00ccff', '#6600ff', '#ff0044', '#4c1f00', '#8a994d', '#79daf2',
            '#a173e6', '#d93662', '#402310', '#aaff00', '#2d98b3', '#8c40ff',
            '#592d39', '#ff8c40', '#354020', '#103640', '#1a0040', '#331a20',
            '#331400', '#334d00', '#1d5673', '#583973', '#7f0022', '#4c3626',
            '#88cc00', '#36a3d9', '#3d0073', '#d9364c', '#33241a', '#698c23',
            '#5995b3', '#300059', '#e57382', '#7f3300', '#366600', '#00aaff',
            '#3a1659', '#733941', '#663600', '#74b32d', '#003c59', '#7f53a6',
            '#73000f', '#ff8800', '#baf279', '#79caf2', '#291040', '#a6293a',
            '#b2742d', '#587339', '#0077b3', '#632699', '#400009', '#d9a66c',
            '#294010', '#2d4a59', '#aa00ff', '#4c131b', '#b25f00', '#5ce600',
            '#267399', '#a336d9', '#990014', '#664e33', '#86bf60', '#0088ff',
            '#7700b3', '#593a16', '#073300', '#1d4b73', '#ac60bf', '#e59539',
            '#4f8c46', '#368dd9', '#5c0073'
        ]

    def rgb_to_hex_color(self, rgb_tuple):
        """
        Converts an rgb_tuple passed to an hex color.

        :param rgb_tuple: tuple with 3 ints represents rgb color space
        """
        # bytes(...).hex() replaces the Python2-only str.encode('hex'),
        # which does not exist on Python 3
        return '#' + bytes(rgb_tuple).hex()

    def email_to_int_list(self, email_str):
        """
        Get every byte of the hex digest value of email and turn it to integer.
        It's going to be always between 0-255
        """
        digest = md5_safe(email_str.lower())
        return [int(digest[i * 2:i * 2 + 2], 16) for i in range(16)]

    def pick_color_bank_index(self, email_str, color_bank):
        # first digest byte modulo bank size -> stable index
        return self.email_to_int_list(email_str)[0] % len(color_bank)

    def str2color(self, email_str):
        """
        Tries to map in a stable algorithm an email to color

        :param email_str:
        """
        color_bank = self.get_color_bank()
        # pick position (module it's length so we always find it in the
        # bank even if it's smaller than 256 values
        pos = self.pick_color_bank_index(email_str, color_bank)
        return color_bank[pos]

    def normalize_email(self, email_address):
        """
        Normalize an email to plain ASCII ``str``, filling in a fake
        ``user@localhost`` style address when parts are missing.
        """
        import unicodedata
        # default host used to fill in the fake/missing email
        default_host = 'localhost'

        if not email_address:
            email_address = '%s@%s' % (User.DEFAULT_USER, default_host)

        email_address = safe_unicode(email_address)

        if u'@' not in email_address:
            email_address = u'%s@%s' % (email_address, default_host)

        if email_address.endswith(u'@'):
            email_address = u'%s%s' % (email_address, default_host)

        # NFKD + ascii-ignore drops accents; decode back so callers get a
        # `str` (py3 .encode() yields bytes, which broke later splitting)
        email_address = unicodedata.normalize('NFKD', email_address)\
            .encode('ascii', 'ignore').decode('ascii')
        return email_address

    def get_initials(self):
        """
        Returns 2 letter initials calculated based on the input.
        The algorithm picks first given email address, and takes first letter
        of part before @, and then the first letter of server name. In case
        the part before @ is in a format of `somestring.somestring2` it replaces
        the server letter with first letter of somestring2

        In case function was initialized with both first and lastname, this
        overrides the extraction from email by first letter of the first and
        last name. We add special logic to that functionality, In case Full name
        is compound, like Guido Von Rossum, we use last part of the last name
        (Von Rossum) picking `R`.

        Function also normalizes the non-ascii characters to they ascii
        representation, eg A => A
        """
        import unicodedata
        # replace non-ascii to ascii; decode back to `str` since py3
        # .encode() returns bytes and indexing bytes yields ints
        first_name = unicodedata.normalize(
            'NFKD', safe_unicode(self.first_name))\
            .encode('ascii', 'ignore').decode('ascii')
        last_name = unicodedata.normalize(
            'NFKD', safe_unicode(self.last_name))\
            .encode('ascii', 'ignore').decode('ascii')

        # do NFKD encoding, and also make sure email has proper format
        email_address = self.normalize_email(self.email_address)

        # first push the email initials
        prefix, server = email_address.split('@', 1)

        # check if prefix is maybe a 'first_name.last_name' syntax
        _dot_split = prefix.rsplit('.', 1)
        if len(_dot_split) == 2 and _dot_split[1]:
            initials = [_dot_split[0][0], _dot_split[1][0]]
        else:
            initials = [prefix[0], server[0]]

        # then try to replace either first_name or last_name
        fn_letter = (first_name or " ")[0].strip()
        ln_letter = (last_name.split(' ', 1)[-1] or " ")[0].strip()

        if fn_letter:
            initials[0] = fn_letter

        if ln_letter:
            initials[1] = ln_letter

        return ''.join(initials).upper()

    def get_img_data_by_type(self, font_family, img_type):
        """
        Return a canned SVG for special avatar types (currently only the
        generic 'default_user' silhouette).
        """
        default_user = """
        <svg xmlns="http://www.w3.org/2000/svg"
        version="1.1" x="0px" y="0px" width="{size}" height="{size}"
        viewBox="-15 -10 439.165 429.164"

        xml:space="preserve"
        style="background:{background};" >

        <path d="M204.583,216.671c50.664,0,91.74-48.075,
        91.74-107.378c0-82.237-41.074-107.377-91.74-107.377
        c-50.668,0-91.74,25.14-91.74,107.377C112.844,
        168.596,153.916,216.671,
        204.583,216.671z" fill="{text_color}"/>
        <path d="M407.164,374.717L360.88,
        270.454c-2.117-4.771-5.836-8.728-10.465-11.138l-71.83-37.392
        c-1.584-0.823-3.502-0.663-4.926,0.415c-20.316,
        15.366-44.203,23.488-69.076,23.488c-24.877,
        0-48.762-8.122-69.078-23.488
        c-1.428-1.078-3.346-1.238-4.93-0.415L58.75,
        259.316c-4.631,2.41-8.346,6.365-10.465,11.138L2.001,374.717
        c-3.191,7.188-2.537,15.412,1.75,22.005c4.285,
        6.592,11.537,10.526,19.4,10.526h362.861c7.863,0,15.117-3.936,
        19.402-10.527 C409.699,390.129,
        410.355,381.902,407.164,374.717z" fill="{text_color}"/>
        </svg>""".format(
            size=self.size,
            background='#979797',  # @grey4
            text_color=self.text_color,
            font_family=font_family)

        return {
            "default_user": default_user
        }[img_type]

    def get_img_data(self, svg_type=None):
        """
        generates the svg metadata for image
        """
        fonts = [
            '-apple-system',
            'BlinkMacSystemFont',
            'Segoe UI',
            'Roboto',
            'Oxygen-Sans',
            'Ubuntu',
            'Cantarell',
            'Helvetica Neue',
            'sans-serif'
        ]
        font_family = ','.join(fonts)
        if svg_type:
            return self.get_img_data_by_type(font_family, svg_type)

        initials = self.get_initials()
        img_data = """
        <svg xmlns="http://www.w3.org/2000/svg" pointer-events="none"
             width="{size}" height="{size}"
             style="width: 100%; height: 100%; background-color: {background}"
             viewBox="0 0 {size} {size}">
            <text text-anchor="middle" y="50%" x="50%" dy="0.35em"
                  pointer-events="auto" fill="{text_color}"
                  font-family="{font_family}"
                  style="font-weight: 400; font-size: {f_size}px;">{text}
            </text>
        </svg>""".format(
            size=self.size,
            f_size=self.size/2.05,  # scale the text inside the box nicely
            background=self.background,
            text_color=self.text_color,
            text=initials.upper(),
            font_family=font_family)

        return img_data

    def generate_svg(self, svg_type=None):
        """
        Return the avatar as a ``data:image/svg+xml;base64,...`` URI string.
        """
        img_data = self.get_img_data(svg_type)
        # b64encode needs bytes and returns bytes on py3; encode/decode so
        # the resulting data-uri is a proper `str` (the old code %-formatted
        # a bytes object, producing "b'...'" inside the uri)
        b64_data = base64.b64encode(img_data.encode('utf-8')).decode('ascii')
        return "data:image/svg+xml;base64,%s" % b64_data
1380
1379
1381
1380
def initials_gravatar(request, email_address, first_name, last_name, size=30, store_on_disk=False):
    """
    Produce an initials-based SVG gravatar for the given user data.

    :param request: pyramid request, used for routing and file-store settings
    :param email_address: user email; the default user email selects the
        generic "default user" silhouette
    :param first_name: feeds the initials calculation
    :param last_name: feeds the initials calculation
    :param size: square pixel size of the generated SVG
    :param store_on_disk: when True the SVG is persisted via the file store
        (deduplicated by an md5 key of email+names) and a `download_file`
        route URL is returned; otherwise an inline base64 data-uri is
        returned
    """
    svg_type = None
    if email_address == User.DEFAULT_USER_EMAIL:
        svg_type = 'default_user'

    klass = InitialsGravatar(email_address, first_name, last_name, size)

    if not store_on_disk:
        # inline data-uri, nothing persisted
        return klass.generate_svg(svg_type=svg_type)

    # local imports: file-store/db machinery is only needed on this path
    from rhodecode.apps.file_store import utils as store_utils
    from rhodecode.model.db import Session

    # stable key so identical user data always maps to the same stored file
    image_key = md5_safe(email_address.lower()
                         + first_name.lower() + last_name.lower())

    storage = store_utils.get_file_storage(request.registry.settings)
    filename = '{}.svg'.format(image_key)
    subdir = 'gravatars'
    # since final name has a counter, we apply the 0
    uid = storage.apply_counter(0, store_utils.uid_filename(filename, randomized=False))
    store_uid = os.path.join(subdir, uid)

    # already generated earlier: reuse the stored file
    db_entry = FileStore.get_by_store_uid(store_uid)
    if db_entry:
        return request.route_path('download_file', fid=store_uid)

    img_data = klass.get_img_data(svg_type=svg_type)
    img_file = store_utils.bytes_to_file_obj(img_data)

    # save_file may raise FileNotAllowedException/FileOverSizeException;
    # they propagate to the caller (the removed try/except only re-raised)
    store_uid, metadata = storage.save_file(
        img_file, filename, directory=subdir,
        extensions=['.svg'], randomized_name=False)

    entry = FileStore.create(
        file_uid=store_uid, filename=metadata["filename"],
        file_hash=metadata["sha256"], file_size=metadata["size"],
        file_display_name=filename,
        file_description=u'user gravatar `{}`'.format(safe_unicode(filename)),
        hidden=True, check_acl=False, user_id=1
    )
    Session().add(entry)
    Session().commit()
    log.debug('Stored upload in DB as %s', entry)

    return request.route_path('download_file', fid=store_uid)
1438
1437
1439
1438
def gravatar_external(request, gravatar_url_tmpl, email_address, size=30):
    """
    Expand an external gravatar URL template, substituting the supported
    placeholders: {email}, {md5email}, {netloc}, {scheme} and {size}.
    """
    substitutions = (
        ('{email}', email_address),
        ('{md5email}', md5_safe(email_address.lower())),
        ('{netloc}', request.host),
        ('{scheme}', request.scheme),
        ('{size}', safe_str(size)),
    )
    url = safe_str(gravatar_url_tmpl)
    for placeholder, replacement in substitutions:
        url = url.replace(placeholder, replacement)
    return url
1447
1446
1448
1447
def gravatar_url(email_address, size=30, request=None):
    """
    Return an avatar URL for ``email_address``: the configured external
    gravatar service when enabled, otherwise a generated initials SVG
    data-uri/file. The default user always gets the generic silhouette.
    """
    request = request or get_current_request()
    use_gravatar = request.call_context.visual.use_gravatar

    email_address = email_address or User.DEFAULT_USER_EMAIL
    if isinstance(email_address, str):
        # hashlib crashes on unicode items
        email_address = safe_str(email_address)

    # empty email or default user -> generic avatar
    if not email_address or email_address == User.DEFAULT_USER_EMAIL:
        return initials_gravatar(request, User.DEFAULT_USER_EMAIL, '', '', size=size)

    if not use_gravatar:
        return initials_gravatar(request, email_address, '', '', size=size)

    tmpl = request.call_context.visual.gravatar_url or User.DEFAULT_GRAVATAR_URL
    return gravatar_external(request, tmpl, email_address, size=size)
1469
1468
1470
1469
def breadcrumb_repo_link(repo):
    """
    Makes a breadcrumbs path link to repo

    ex::
        group >> subgroup >> repo

    :param repo: a Repository instance
    """
    crumbs = []
    # one link per parent group, in order from the top-most group down
    for group in repo.groups_with_parents:
        crumbs.append(link_to(
            group.name,
            route_path('repo_group_home', repo_group_name=group.group_name),
            title='last change:{}'.format(format_date(group.last_commit_change))))
    # finally the repository itself
    crumbs.append(link_to(
        repo.just_name,
        route_path('repo_summary', repo_name=repo.repo_name),
        title='last change:{}'.format(format_date(repo.last_commit_change))))

    return literal(' &raquo; '.join(crumbs))
1491
1490
1492
1491
def breadcrumb_repo_group_link(repo_group):
    """
    Build a breadcrumb trail of links leading to a repository group.

    ex::
        group >> subgroup

    :param repo_group: a Repository Group instance
    """

    crumbs = []
    for parent in repo_group.parents:
        crumbs.append(
            link_to(parent.name,
                    route_path('repo_group_home', repo_group_name=parent.group_name),
                    title='last change:{}'.format(format_date(parent.last_commit_change))))
    crumbs.append(
        link_to(repo_group.name,
                route_path('repo_group_home', repo_group_name=repo_group.group_name),
                title='last change:{}'.format(format_date(repo_group.last_commit_change))))

    return literal(' &raquo; '.join(crumbs))
1515
1514
1516
1515
def format_byte_size_binary(file_size):
    """
    Render a file/folder size using binary (1024-based) units.

    ``None`` is treated as a size of 0.
    """
    size = 0 if file_size is None else file_size
    return format_byte_size(size, binary=True)
1526
1525
1527
1526
def urlify_text(text_, safe=True, **href_attrs):
    """
    Extract http/https urls from text and make html links out of them.

    :param text_: text to scan for urls
    :param safe: when True, wrap the result in ``literal()`` so the template
        engine does not escape the generated markup again
    :param href_attrs: extra attributes set on every generated ``<a>`` tag
    """

    # NOTE: both fragments must be raw strings; previously the second fragment
    # was a plain string containing ``\(`` / ``\)``, which are invalid escape
    # sequences and raise DeprecationWarning/SyntaxWarning on modern Python.
    url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]'''
                         r'''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''')

    def url_func(match_obj):
        url_full = match_obj.groups()[0]
        a_options = dict(href_attrs)
        a_options['href'] = url_full
        # link text is the url itself
        return HTML.tag("a", url_full, **a_options)

    _new_text = url_pat.sub(url_func, text_)

    if safe:
        return literal(_new_text)
    return _new_text
1548
1547
1549
1548
def urlify_commits(text_, repo_name):
    """
    Turn bare commit hashes (12-40 hex chars) found in text into commit links.

    :param text_: text to scan for commit ids
    :param repo_name: repo name to build the URL with
    """

    commit_pat = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)')

    def make_link(match_obj):
        leading, commit_id, trailing = match_obj.group(1, 2, 3)

        tmpl = (
            '%(pref)s<a class="tooltip-hovercard %(cls)s" href="%(url)s" data-hovercard-alt="%(hovercard_alt)s" data-hovercard-url="%(hovercard_url)s">'
            '%(commit_id)s</a>%(suf)s'
        )
        return tmpl % {
            'pref': leading,
            'cls': 'revision-link',
            'url': route_url(
                'repo_commit', repo_name=repo_name, commit_id=commit_id),
            'commit_id': commit_id,
            'suf': trailing,
            'hovercard_alt': 'Commit: {}'.format(commit_id),
            'hovercard_url': route_url(
                'hovercard_repo_commit', repo_name=repo_name, commit_id=commit_id)
        }

    return commit_pat.sub(make_link, text_)
1584
1583
1585
1584
def _process_url_func(match_obj, repo_name, uid, entry,
                      return_raw_data=False, link_format='html'):
    """
    Turn a single issue-tracker pattern match into a rendered link, or into
    raw ``{'id': ..., 'url': ...}`` data when ``return_raw_data`` is set.

    :param match_obj: regex match produced by the tracker pattern
    :param repo_name: full repository name, used for variable substitution
    :param uid: uid of the issue-tracker entry (carried along by partials)
    :param entry: tracker settings dict with at least 'url', 'desc', 'pref'
    :param return_raw_data: skip markup and return only id/url
    :param link_format: one of html/rst/markdown (+ '+hovercard' variants)
    """
    # preserve a single leading space of the matched text, if present
    pref = ''
    if match_obj.group().startswith(' '):
        pref = ' '

    issue_id = ''.join(match_obj.groups())

    # pick the output template for the requested link format
    if link_format == 'html':
        tmpl = (
            '%(pref)s<a class="tooltip %(cls)s" href="%(url)s" title="%(title)s">'
            '%(issue-prefix)s%(id-repr)s'
            '</a>')
    elif link_format == 'html+hovercard':
        tmpl = (
            '%(pref)s<a class="tooltip-hovercard %(cls)s" href="%(url)s" data-hovercard-url="%(hovercard_url)s">'
            '%(issue-prefix)s%(id-repr)s'
            '</a>')
    elif link_format in ['rst', 'rst+hovercard']:
        tmpl = '`%(issue-prefix)s%(id-repr)s <%(url)s>`_'
    elif link_format in ['markdown', 'markdown+hovercard']:
        tmpl = '[%(pref)s%(issue-prefix)s%(id-repr)s](%(url)s)'
    else:
        raise ValueError('Bad link_format:{}'.format(link_format))

    (repo_name_cleaned,
     parent_group_name) = RepoGroupModel()._get_group_name_and_parent(repo_name)

    # variables replacement
    named_vars = {
        'id': issue_id,
        'repo': repo_name,
        'repo_name': repo_name_cleaned,
        'group_name': parent_group_name,
        # set dummy keys so we always have them
        'hostname': '',
        'netloc': '',
        'scheme': ''
    }

    request = get_current_request()
    if request:
        # exposes, hostname, netloc, scheme
        host_data = get_host_info(request)
        named_vars.update(host_data)

    # named regex variables
    named_vars.update(match_obj.groupdict())
    # safe_substitute leaves unknown ${vars} untouched instead of raising
    _url = string.Template(entry['url']).safe_substitute(**named_vars)
    desc = string.Template(escape(entry['desc'])).safe_substitute(**named_vars)
    hovercard_url = string.Template(entry.get('hovercard_url', '')).safe_substitute(**named_vars)

    def quote_cleaner(input_str):
        """Remove quotes as it's HTML"""
        return input_str.replace('"', '')

    data = {
        'pref': pref,
        'cls': quote_cleaner('issue-tracker-link'),
        'url': quote_cleaner(_url),
        'id-repr': issue_id,
        'issue-prefix': entry['pref'],
        'serv': entry['url'],
        # strip any residual HTML from the description used as tooltip title
        'title': bleach.clean(desc, strip=True),
        'hovercard_url': hovercard_url
    }

    if return_raw_data:
        return {
            'id': issue_id,
            'url': _url
        }
    return tmpl % data
1659
1658
1660
1659
def get_active_pattern_entries(repo_name):
    """Return the active issue-tracker pattern settings for a repository."""
    # Resolve the repo object first so an invalid repo_name does not explode
    # inside IssueTrackerSettingsModel, while still passing the raw name on.
    repo = Repository.get_by_repo_name(repo_name, cache=True) if repo_name else None
    settings_model = IssueTrackerSettingsModel(repo=repo)
    return settings_model.get_settings(cache=True)
1671
1670
1672
1671
# matches pull-request references like `!123` at string start or after a space
pr_pattern_re = regex.compile(r'(?:(?:^!)|(?: !))(\d+)')

# link formats understood by process_patterns()
allowed_link_formats = [
    'html', 'rst', 'markdown', 'html+hovercard', 'rst+hovercard', 'markdown+hovercard']

# process-wide cache of compiled issue-tracker patterns, keyed by the raw
# pattern string; grows for the lifetime of the process
compile_cache = {

}
1681
1680
1682
1681
def process_patterns(text_string, repo_name, link_format='html', active_entries=None):
    """
    Apply the configured issue-tracker patterns (plus the global `!NN`
    pull-request pattern) to ``text_string``, replacing matches with links.

    :param text_string: text to process
    :param repo_name: repo the text belongs to; used for URL substitution
    :param link_format: one of ``allowed_link_formats``
    :param active_entries: pre-fetched pattern entries; fetched per-repo
        when None
    :returns: tuple of (new_text, issues_data, errors)
    """

    if link_format not in allowed_link_formats:
        raise ValueError('Link format can be only one of:{} got {}'.format(
            allowed_link_formats, link_format))
    issues_data = []
    errors = []
    new_text = text_string

    if active_entries is None:
        log.debug('Fetch active issue tracker patterns for repo: %s', repo_name)
        active_entries = get_active_pattern_entries(repo_name)

    log.debug('Got %s pattern entries to process', len(active_entries))

    for uid, entry in active_entries.items():

        if not (entry['pat'] and entry['url']):
            log.debug('skipping due to missing data')
            continue

        log.debug('issue tracker entry: uid: `%s` PAT:%s URL:%s PREFIX:%s',
                  uid, entry['pat'], entry['url'], entry['pref'])

        # prefer a pre-compiled pattern, then the process-wide cache,
        # and compile (and cache) only as a last resort
        if entry.get('pat_compiled'):
            pattern = entry['pat_compiled']
        elif entry['pat'] in compile_cache:
            pattern = compile_cache[entry['pat']]
        else:
            try:
                pattern = regex.compile(r'%s' % entry['pat'])
            except regex.error as e:
                # a broken user-defined pattern is reported, not fatal
                regex_err = ValueError('{}:{}'.format(entry['pat'], e))
                log.exception('issue tracker pattern: `%s` failed to compile', regex_err)
                errors.append(regex_err)
                continue
            compile_cache[entry['pat']] = pattern

        # first pass: collect raw issue data from the ORIGINAL text
        data_func = partial(
            _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
            return_raw_data=True)

        for match_obj in pattern.finditer(text_string):
            issues_data.append(data_func(match_obj))

        # second pass: substitute links into the progressively rewritten text
        url_func = partial(
            _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
            link_format=link_format)

        new_text = pattern.sub(url_func, new_text)
        log.debug('processed prefix:uid `%s`', uid)

    # finally use global replace, eg !123 -> pr-link, those will not catch
    # if already similar pattern exists
    server_url = '${scheme}://${netloc}'
    pr_entry = {
        'pref': '!',
        'url': server_url + '/_admin/pull-requests/${id}',
        'desc': 'Pull Request !${id}',
        'hovercard_url': server_url + '/_hovercard/pull_request/${id}'
    }
    pr_url_func = partial(
        _process_url_func, repo_name=repo_name, entry=pr_entry, uid=None,
        link_format=link_format+'+hovercard')
    new_text = pr_pattern_re.sub(pr_url_func, new_text)
    log.debug('processed !pr pattern')

    return new_text, issues_data, errors
1751
1750
1752
1751
def urlify_commit_message(commit_text, repository=None, active_pattern_entries=None,
                          issues_container_callback=None, error_container=None):
    """
    Parses given text message and makes proper links.
    issues are linked to given issue-server, and rest is a commit link
    """

    # escape angle brackets first so raw markup in the message stays inert
    processed = commit_text.replace('<', '&lt;').replace('>', '&gt;')

    # turn plain http/https urls into anchors (kept unescaped; literal() below)
    processed = urlify_text(processed, safe=False)

    # link bare commit hashes when we know which repository to point at
    if repository:
        processed = urlify_commits(processed, repository)

    # apply configured issue tracker patterns
    processed, found_issues, pattern_errors = process_patterns(
        processed, repository or '', active_entries=active_pattern_entries)

    if issues_container_callback is not None:
        for issue in found_issues:
            issues_container_callback(issue)

    if error_container is not None:
        error_container.extend(pattern_errors)

    return literal(processed)
1785
1784
1786
1785
def render_binary(repo_name, file_obj):
    """
    Choose how to render a binary file.

    Only image files get a rendering (an ``<img>`` tag pointing at the raw
    file); anything else implicitly returns None.
    """

    filename = file_obj.name

    image_patterns = ('*.png', '*.jpeg', '*.jpg', '*.ico', '*.gif')
    if any(fnmatch.fnmatch(filename, pat=p) for p in image_patterns):
        src = route_path(
            'repo_file_raw', repo_name=repo_name,
            commit_id=file_obj.commit.raw_id,
            f_path=file_obj.path)

        return literal(
            '<img class="rendered-binary" alt="rendered-image" src="{}">'.format(src))
1805
1804
1806
1805
def renderer_from_filename(filename, exclude=None):
    """
    Pick a renderer name based on filename; works for text based files only.

    Returns 'jupyter' for notebooks, a markup renderer name when one is
    registered for the extension, otherwise None (show raw file source).
    """

    # jupyter/ipython notebooks get their dedicated renderer
    if fnmatch.fnmatch(filename, pat='*.ipynb'):
        return 'jupyter'

    markup_renderer = MarkupRenderer.renderer_from_filename(filename, exclude=exclude)
    return markup_renderer or None
1821
1820
1822
1821
def render(source, renderer='rst', mentions=False, relative_urls=None,
           repo_name=None, active_pattern_entries=None, issues_container_callback=None):
    """
    Render ``source`` using the named renderer.

    :param renderer: 'plain', 'rst', 'markdown' or 'jupyter'; any other value
        returns None, meaning callers should show the raw file source
    :param mentions: enable @mention processing in rst/markdown
    :param relative_urls: when set, rewrite links in the output relative to it
    :param repo_name: when given, issue-tracker patterns are applied first
    :param active_pattern_entries: pre-fetched tracker patterns (optional)
    :param issues_container_callback: called once per issue found
    """

    def maybe_convert_relative_links(html_source):
        if relative_urls:
            return relative_links(html_source, relative_urls)
        return html_source

    if renderer == 'plain':
        return literal(
            MarkupRenderer.plain(source, leading_newline=False))

    elif renderer == 'rst':
        if repo_name:
            # process patterns on comments if we pass in repo name
            source, issues, errors = process_patterns(
                source, repo_name, link_format='rst',
                active_entries=active_pattern_entries)
            if issues_container_callback is not None:
                for issue in issues:
                    issues_container_callback(issue)

        return literal(
            '<div class="rst-block">%s</div>' %
            maybe_convert_relative_links(
                MarkupRenderer.rst(source, mentions=mentions)))

    elif renderer == 'markdown':
        if repo_name:
            # process patterns on comments if we pass in repo name
            source, issues, errors = process_patterns(
                source, repo_name, link_format='markdown',
                active_entries=active_pattern_entries)
            if issues_container_callback is not None:
                for issue in issues:
                    issues_container_callback(issue)

        return literal(
            '<div class="markdown-block">%s</div>' %
            maybe_convert_relative_links(
                MarkupRenderer.markdown(source, flavored=True,
                                        mentions=mentions)))

    elif renderer == 'jupyter':
        return literal(
            '<div class="ipynb">%s</div>' %
            maybe_convert_relative_links(
                MarkupRenderer.jupyter(source)))

    # None means just show the file-source
    return None
1875
1874
1876
1875
def commit_status(repo, commit_id):
    """Return the changeset status recorded for ``commit_id`` of ``repo``."""
    model = ChangesetStatusModel()
    return model.get_status(repo, commit_id)
1879
1878
1880
1879
def commit_status_lbl(commit_status):
    """Translate a changeset status code into its human-readable label."""
    status_map = dict(ChangesetStatus.STATUSES)
    return status_map.get(commit_status)
1883
1882
1884
1883
def commit_time(repo_name, commit_id):
    """Return the date of ``commit_id`` within the repository ``repo_name``."""
    repo = Repository.get_by_repo_name(repo_name)
    return repo.get_commit(commit_id=commit_id).date
1889
1888
1890
1889
def get_permission_name(key):
    """Return the human-readable name for a permission key, or None."""
    perms_map = dict(Permission.PERMS)
    return perms_map.get(key)
1893
1892
1894
1893
def journal_filter_help(request):
    """
    Build the translated help text shown next to the journal filter box,
    including the list of all known audit-log action names wrapped to 80 cols.
    """
    _ = request.translate
    # local import to avoid a module-level import cycle with audit_logger
    from rhodecode.lib.audit_logger import ACTIONS
    actions = '\n'.join(textwrap.wrap(', '.join(sorted(ACTIONS.keys())), 80))

    return _(
        'Example filter terms:\n' +
        ' repository:vcs\n' +
        ' username:marcin\n' +
        ' username:(NOT marcin)\n' +
        ' action:*push*\n' +
        ' ip:127.0.0.1\n' +
        ' date:20120101\n' +
        ' date:[20120101100000 TO 20120102]\n' +
        '\n' +
        'Actions: {actions}\n' +
        '\n' +
        'Generate wildcards using \'*\' character:\n' +
        ' "repository:vcs*" - search everything starting with \'vcs\'\n' +
        ' "repository:*vcs*" - search for repository containing \'vcs\'\n' +
        '\n' +
        'Optional AND / OR operators in queries\n' +
        ' "repository:vcs OR repository:test"\n' +
        ' "username:test AND repository:test*"\n'
    ).format(actions=actions)
1920
1919
1921
1920
def not_mapped_error(repo_name):
    """Flash an error telling the user ``repo_name`` is not mapped in the db."""
    # local import to avoid a module-level import cycle
    from rhodecode.translation import _
    flash(_('%s repository is not mapped to db perhaps'
            ' it was created or renamed from the filesystem'
            ' please run the application again'
            ' in order to rescan repositories') % repo_name, category='error')
1928
1927
1929
1928
def ip_range(ip_addr):
    """Render the start/end addresses of the IP range described by ``ip_addr``."""
    # local import to avoid a module-level import cycle with model.db
    from rhodecode.model.db import UserIpMap
    range_start, range_end = UserIpMap._get_ip_range(ip_addr)
    return '%s - %s' % (range_start, range_end)
1934
1933
1935
1934
def form(url, method='post', needs_csrf_token=True, **attrs):
    """Wrapper around webhelpers.tags.form to prevent CSRF attacks."""
    is_get = method.lower() == 'get'
    if needs_csrf_token and not is_get:
        raise Exception(
            'Forms to POST/PUT/DELETE endpoints should have (in general) a '
            'CSRF token. If the endpoint does not require such token you can '
            'explicitly set the parameter needs_csrf_token to false.')

    return insecure_form(url, method=method, **attrs)
1945
1944
1946
1945
def secure_form(form_url, method="POST", multipart=False, **attrs):
    """Start a form tag pointing its action at an url, including the hidden
    field that carries the CSRF auth token.

    The url options should be given either as a string, or as a
    ``url()`` function. The method for the form defaults to POST.

    Options:

    ``multipart``
        If set to True, the enctype is set to "multipart/form-data".
    ``method``
        The method to use when submitting the form, usually either
        "GET" or "POST". If "PUT", "DELETE", or another verb is used, a
        hidden input with name _method is added to simulate the verb
        over POST.

    """

    if 'request' not in attrs:
        raise ValueError(
            'Calling this form requires request= to be passed as argument')
    session = attrs.pop('request').session

    _form = insecure_form(form_url, method, multipart, **attrs)
    csrf_input = literal(
        '<input type="hidden" name="{}" value="{}">'.format(
            csrf_token_key, get_csrf_token(session)))

    return literal("%s\n%s" % (_form, csrf_input))
1980
1979
1981
1980
def dropdownmenu(name, selected, options, enable_filter=False, **attrs):
    """
    Render a ``<select>`` plus the inline select2 initialisation script.

    :param name: input name (also the default element id)
    :param selected: currently selected option value
    :param options: options passed through to ``select()``
    :param enable_filter: when False, select2's search box is disabled via
        ``minimumResultsForSearch: -1``
    :param attrs: extra tag attributes; ``extra_classes`` (list) is popped and
        joined into the select2 container css classes
    """
    select_html = select(name, selected, options, **attrs)

    # %s placeholders: element id, extra container classes, filter option
    select2 = """
    <script>
        $(document).ready(function() {
            $('#%s').select2({
                containerCssClass: 'drop-menu %s',
                dropdownCssClass: 'drop-menu-dropdown',
                dropdownAutoWidth: true%s
            });
        });
    </script>
    """

    filter_option = """,
        minimumResultsForSearch: -1
    """
    input_id = attrs.get('id') or name
    extra_classes = ' '.join(attrs.pop('extra_classes', []))
    filter_enabled = "" if enable_filter else filter_option
    select_script = literal(select2 % (input_id, extra_classes, filter_enabled))

    return literal(select_html+select_script)
2006
2005
2007
2006
2008 def get_visual_attr(tmpl_context_var, attr_name):
2007 def get_visual_attr(tmpl_context_var, attr_name):
2009 """
2008 """
2010 A safe way to get a variable from visual variable of template context
2009 A safe way to get a variable from visual variable of template context
2011
2010
2012 :param tmpl_context_var: instance of tmpl_context, usually present as `c`
2011 :param tmpl_context_var: instance of tmpl_context, usually present as `c`
2013 :param attr_name: name of the attribute we fetch from the c.visual
2012 :param attr_name: name of the attribute we fetch from the c.visual
2014 """
2013 """
2015 visual = getattr(tmpl_context_var, 'visual', None)
2014 visual = getattr(tmpl_context_var, 'visual', None)
2016 if not visual:
2015 if not visual:
2017 return
2016 return
2018 else:
2017 else:
2019 return getattr(visual, attr_name, None)
2018 return getattr(visual, attr_name, None)
2020
2019
2021
2020
2022 def get_last_path_part(file_node):
2021 def get_last_path_part(file_node):
2023 if not file_node.path:
2022 if not file_node.path:
2024 return u'/'
2023 return u'/'
2025
2024
2026 path = safe_unicode(file_node.path.split('/')[-1])
2025 path = safe_unicode(file_node.path.split('/')[-1])
2027 return u'../' + path
2026 return u'../' + path
2028
2027
2029
2028
2030 def route_url(*args, **kwargs):
2029 def route_url(*args, **kwargs):
2031 """
2030 """
2032 Wrapper around pyramids `route_url` (fully qualified url) function.
2031 Wrapper around pyramids `route_url` (fully qualified url) function.
2033 """
2032 """
2034 req = get_current_request()
2033 req = get_current_request()
2035 return req.route_url(*args, **kwargs)
2034 return req.route_url(*args, **kwargs)
2036
2035
2037
2036
2038 def route_path(*args, **kwargs):
2037 def route_path(*args, **kwargs):
2039 """
2038 """
2040 Wrapper around pyramids `route_path` function.
2039 Wrapper around pyramids `route_path` function.
2041 """
2040 """
2042 req = get_current_request()
2041 req = get_current_request()
2043 return req.route_path(*args, **kwargs)
2042 return req.route_path(*args, **kwargs)
2044
2043
2045
2044
2046 def route_path_or_none(*args, **kwargs):
2045 def route_path_or_none(*args, **kwargs):
2047 try:
2046 try:
2048 return route_path(*args, **kwargs)
2047 return route_path(*args, **kwargs)
2049 except KeyError:
2048 except KeyError:
2050 return None
2049 return None
2051
2050
2052
2051
2053 def current_route_path(request, **kw):
2052 def current_route_path(request, **kw):
2054 new_args = request.GET.mixed()
2053 new_args = request.GET.mixed()
2055 new_args.update(kw)
2054 new_args.update(kw)
2056 return request.current_route_path(_query=new_args)
2055 return request.current_route_path(_query=new_args)
2057
2056
2058
2057
2059 def curl_api_example(method, args):
2058 def curl_api_example(method, args):
2060 args_json = json.dumps(OrderedDict([
2059 args_json = json.dumps(OrderedDict([
2061 ('id', 1),
2060 ('id', 1),
2062 ('auth_token', 'SECRET'),
2061 ('auth_token', 'SECRET'),
2063 ('method', method),
2062 ('method', method),
2064 ('args', args)
2063 ('args', args)
2065 ]))
2064 ]))
2066
2065
2067 return "curl {api_url} -X POST -H 'content-type:text/plain' --data-binary '{args_json}'".format(
2066 return "curl {api_url} -X POST -H 'content-type:text/plain' --data-binary '{args_json}'".format(
2068 api_url=route_url('apiv2'),
2067 api_url=route_url('apiv2'),
2069 args_json=args_json
2068 args_json=args_json
2070 )
2069 )
2071
2070
2072
2071
2073 def api_call_example(method, args):
2072 def api_call_example(method, args):
2074 """
2073 """
2075 Generates an API call example via CURL
2074 Generates an API call example via CURL
2076 """
2075 """
2077 curl_call = curl_api_example(method, args)
2076 curl_call = curl_api_example(method, args)
2078
2077
2079 return literal(
2078 return literal(
2080 curl_call +
2079 curl_call +
2081 "<br/><br/>SECRET can be found in <a href=\"{token_url}\">auth-tokens</a> page, "
2080 "<br/><br/>SECRET can be found in <a href=\"{token_url}\">auth-tokens</a> page, "
2082 "and needs to be of `api calls` role."
2081 "and needs to be of `api calls` role."
2083 .format(token_url=route_url('my_account_auth_tokens')))
2082 .format(token_url=route_url('my_account_auth_tokens')))
2084
2083
2085
2084
2086 def notification_description(notification, request):
2085 def notification_description(notification, request):
2087 """
2086 """
2088 Generate notification human readable description based on notification type
2087 Generate notification human readable description based on notification type
2089 """
2088 """
2090 from rhodecode.model.notification import NotificationModel
2089 from rhodecode.model.notification import NotificationModel
2091 return NotificationModel().make_description(
2090 return NotificationModel().make_description(
2092 notification, translate=request.translate)
2091 notification, translate=request.translate)
2093
2092
2094
2093
2095 def go_import_header(request, db_repo=None):
2094 def go_import_header(request, db_repo=None):
2096 """
2095 """
2097 Creates a header for go-import functionality in Go Lang
2096 Creates a header for go-import functionality in Go Lang
2098 """
2097 """
2099
2098
2100 if not db_repo:
2099 if not db_repo:
2101 return
2100 return
2102 if 'go-get' not in request.GET:
2101 if 'go-get' not in request.GET:
2103 return
2102 return
2104
2103
2105 clone_url = db_repo.clone_url()
2104 clone_url = db_repo.clone_url()
2106 prefix = re.split(r'^https?:\/\/', clone_url)[-1]
2105 prefix = re.split(r'^https?:\/\/', clone_url)[-1]
2107 # we have a repo and go-get flag,
2106 # we have a repo and go-get flag,
2108 return literal('<meta name="go-import" content="{} {} {}">'.format(
2107 return literal('<meta name="go-import" content="{} {} {}">'.format(
2109 prefix, db_repo.repo_type, clone_url))
2108 prefix, db_repo.repo_type, clone_url))
2110
2109
2111
2110
2112 def reviewer_as_json(*args, **kwargs):
2111 def reviewer_as_json(*args, **kwargs):
2113 from rhodecode.apps.repository.utils import reviewer_as_json as _reviewer_as_json
2112 from rhodecode.apps.repository.utils import reviewer_as_json as _reviewer_as_json
2114 return _reviewer_as_json(*args, **kwargs)
2113 return _reviewer_as_json(*args, **kwargs)
2115
2114
2116
2115
2117 def get_repo_view_type(request):
2116 def get_repo_view_type(request):
2118 route_name = request.matched_route.name
2117 route_name = request.matched_route.name
2119 route_to_view_type = {
2118 route_to_view_type = {
2120 'repo_changelog': 'commits',
2119 'repo_changelog': 'commits',
2121 'repo_commits': 'commits',
2120 'repo_commits': 'commits',
2122 'repo_files': 'files',
2121 'repo_files': 'files',
2123 'repo_summary': 'summary',
2122 'repo_summary': 'summary',
2124 'repo_commit': 'commit'
2123 'repo_commit': 'commit'
2125 }
2124 }
2126
2125
2127 return route_to_view_type.get(route_name)
2126 return route_to_view_type.get(route_name)
2128
2127
2129
2128
2130 def is_active(menu_entry, selected):
2129 def is_active(menu_entry, selected):
2131 """
2130 """
2132 Returns active class for selecting menus in templates
2131 Returns active class for selecting menus in templates
2133 <li class=${h.is_active('settings', current_active)}></li>
2132 <li class=${h.is_active('settings', current_active)}></li>
2134 """
2133 """
2135 if not isinstance(menu_entry, list):
2134 if not isinstance(menu_entry, list):
2136 menu_entry = [menu_entry]
2135 menu_entry = [menu_entry]
2137
2136
2138 if selected in menu_entry:
2137 if selected in menu_entry:
2139 return "active"
2138 return "active"
2140
2139
2141
2140
2142 class IssuesRegistry(object):
2141 class IssuesRegistry(object):
2143 """
2142 """
2144 issue_registry = IssuesRegistry()
2143 issue_registry = IssuesRegistry()
2145 some_func(issues_callback=issues_registry(...))
2144 some_func(issues_callback=issues_registry(...))
2146 """
2145 """
2147
2146
2148 def __init__(self):
2147 def __init__(self):
2149 self.issues = []
2148 self.issues = []
2150 self.unique_issues = collections.defaultdict(lambda: [])
2149 self.unique_issues = collections.defaultdict(lambda: [])
2151
2150
2152 def __call__(self, commit_dict=None):
2151 def __call__(self, commit_dict=None):
2153 def callback(issue):
2152 def callback(issue):
2154 if commit_dict and issue:
2153 if commit_dict and issue:
2155 issue['commit'] = commit_dict
2154 issue['commit'] = commit_dict
2156 self.issues.append(issue)
2155 self.issues.append(issue)
2157 self.unique_issues[issue['id']].append(issue)
2156 self.unique_issues[issue['id']].append(issue)
2158 return callback
2157 return callback
2159
2158
2160 def get_issues(self):
2159 def get_issues(self):
2161 return self.issues
2160 return self.issues
2162
2161
2163 @property
2162 @property
2164 def issues_unique_count(self):
2163 def issues_unique_count(self):
2165 return len(set(i['id'] for i in self.issues))
2164 return len(set(i['id'] for i in self.issues))
@@ -1,538 +1,538 b''
1 # -*- coding: utf-8 -*-
1
2
2
3 # Copyright (C) 2013-2020 RhodeCode GmbH
3 # Copyright (C) 2013-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 Set of hooks run by RhodeCode Enterprise
23 Set of hooks run by RhodeCode Enterprise
24 """
24 """
25
25
26 import os
26 import os
27 import logging
27 import logging
28
28
29 import rhodecode
29 import rhodecode
30 from rhodecode import events
30 from rhodecode import events
31 from rhodecode.lib import helpers as h
31 from rhodecode.lib import helpers as h
32 from rhodecode.lib import audit_logger
32 from rhodecode.lib import audit_logger
33 from rhodecode.lib.utils2 import safe_str, user_agent_normalizer
33 from rhodecode.lib.utils2 import safe_str, user_agent_normalizer
34 from rhodecode.lib.exceptions import (
34 from rhodecode.lib.exceptions import (
35 HTTPLockedRC, HTTPBranchProtected, UserCreationError)
35 HTTPLockedRC, HTTPBranchProtected, UserCreationError)
36 from rhodecode.model.db import Repository, User
36 from rhodecode.model.db import Repository, User
37 from rhodecode.lib.statsd_client import StatsdClient
37 from rhodecode.lib.statsd_client import StatsdClient
38
38
39 log = logging.getLogger(__name__)
39 log = logging.getLogger(__name__)
40
40
41
41
42 class HookResponse(object):
42 class HookResponse(object):
43 def __init__(self, status, output):
43 def __init__(self, status, output):
44 self.status = status
44 self.status = status
45 self.output = output
45 self.output = output
46
46
47 def __add__(self, other):
47 def __add__(self, other):
48 other_status = getattr(other, 'status', 0)
48 other_status = getattr(other, 'status', 0)
49 new_status = max(self.status, other_status)
49 new_status = max(self.status, other_status)
50 other_output = getattr(other, 'output', '')
50 other_output = getattr(other, 'output', '')
51 new_output = self.output + other_output
51 new_output = self.output + other_output
52
52
53 return HookResponse(new_status, new_output)
53 return HookResponse(new_status, new_output)
54
54
55 def __bool__(self):
55 def __bool__(self):
56 return self.status == 0
56 return self.status == 0
57
57
58
58
59 def is_shadow_repo(extras):
59 def is_shadow_repo(extras):
60 """
60 """
61 Returns ``True`` if this is an action executed against a shadow repository.
61 Returns ``True`` if this is an action executed against a shadow repository.
62 """
62 """
63 return extras['is_shadow_repo']
63 return extras['is_shadow_repo']
64
64
65
65
66 def _get_scm_size(alias, root_path):
66 def _get_scm_size(alias, root_path):
67
67
68 if not alias.startswith('.'):
68 if not alias.startswith('.'):
69 alias += '.'
69 alias += '.'
70
70
71 size_scm, size_root = 0, 0
71 size_scm, size_root = 0, 0
72 for path, unused_dirs, files in os.walk(safe_str(root_path)):
72 for path, unused_dirs, files in os.walk(safe_str(root_path)):
73 if path.find(alias) != -1:
73 if path.find(alias) != -1:
74 for f in files:
74 for f in files:
75 try:
75 try:
76 size_scm += os.path.getsize(os.path.join(path, f))
76 size_scm += os.path.getsize(os.path.join(path, f))
77 except OSError:
77 except OSError:
78 pass
78 pass
79 else:
79 else:
80 for f in files:
80 for f in files:
81 try:
81 try:
82 size_root += os.path.getsize(os.path.join(path, f))
82 size_root += os.path.getsize(os.path.join(path, f))
83 except OSError:
83 except OSError:
84 pass
84 pass
85
85
86 size_scm_f = h.format_byte_size_binary(size_scm)
86 size_scm_f = h.format_byte_size_binary(size_scm)
87 size_root_f = h.format_byte_size_binary(size_root)
87 size_root_f = h.format_byte_size_binary(size_root)
88 size_total_f = h.format_byte_size_binary(size_root + size_scm)
88 size_total_f = h.format_byte_size_binary(size_root + size_scm)
89
89
90 return size_scm_f, size_root_f, size_total_f
90 return size_scm_f, size_root_f, size_total_f
91
91
92
92
93 # actual hooks called by Mercurial internally, and GIT by our Python Hooks
93 # actual hooks called by Mercurial internally, and GIT by our Python Hooks
94 def repo_size(extras):
94 def repo_size(extras):
95 """Present size of repository after push."""
95 """Present size of repository after push."""
96 repo = Repository.get_by_repo_name(extras.repository)
96 repo = Repository.get_by_repo_name(extras.repository)
97 vcs_part = safe_str('.%s' % repo.repo_type)
97 vcs_part = safe_str('.%s' % repo.repo_type)
98 size_vcs, size_root, size_total = _get_scm_size(vcs_part,
98 size_vcs, size_root, size_total = _get_scm_size(vcs_part,
99 repo.repo_full_path)
99 repo.repo_full_path)
100 msg = ('Repository `%s` size summary %s:%s repo:%s total:%s\n'
100 msg = ('Repository `%s` size summary %s:%s repo:%s total:%s\n'
101 % (repo.repo_name, vcs_part, size_vcs, size_root, size_total))
101 % (repo.repo_name, vcs_part, size_vcs, size_root, size_total))
102 return HookResponse(0, msg)
102 return HookResponse(0, msg)
103
103
104
104
105 def pre_push(extras):
105 def pre_push(extras):
106 """
106 """
107 Hook executed before pushing code.
107 Hook executed before pushing code.
108
108
109 It bans pushing when the repository is locked.
109 It bans pushing when the repository is locked.
110 """
110 """
111
111
112 user = User.get_by_username(extras.username)
112 user = User.get_by_username(extras.username)
113 output = ''
113 output = ''
114 if extras.locked_by[0] and user.user_id != int(extras.locked_by[0]):
114 if extras.locked_by[0] and user.user_id != int(extras.locked_by[0]):
115 locked_by = User.get(extras.locked_by[0]).username
115 locked_by = User.get(extras.locked_by[0]).username
116 reason = extras.locked_by[2]
116 reason = extras.locked_by[2]
117 # this exception is interpreted in git/hg middlewares and based
117 # this exception is interpreted in git/hg middlewares and based
118 # on that proper return code is server to client
118 # on that proper return code is server to client
119 _http_ret = HTTPLockedRC(
119 _http_ret = HTTPLockedRC(
120 _locked_by_explanation(extras.repository, locked_by, reason))
120 _locked_by_explanation(extras.repository, locked_by, reason))
121 if str(_http_ret.code).startswith('2'):
121 if str(_http_ret.code).startswith('2'):
122 # 2xx Codes don't raise exceptions
122 # 2xx Codes don't raise exceptions
123 output = _http_ret.title
123 output = _http_ret.title
124 else:
124 else:
125 raise _http_ret
125 raise _http_ret
126
126
127 hook_response = ''
127 hook_response = ''
128 if not is_shadow_repo(extras):
128 if not is_shadow_repo(extras):
129 if extras.commit_ids and extras.check_branch_perms:
129 if extras.commit_ids and extras.check_branch_perms:
130
130
131 auth_user = user.AuthUser()
131 auth_user = user.AuthUser()
132 repo = Repository.get_by_repo_name(extras.repository)
132 repo = Repository.get_by_repo_name(extras.repository)
133 affected_branches = []
133 affected_branches = []
134 if repo.repo_type == 'hg':
134 if repo.repo_type == 'hg':
135 for entry in extras.commit_ids:
135 for entry in extras.commit_ids:
136 if entry['type'] == 'branch':
136 if entry['type'] == 'branch':
137 is_forced = bool(entry['multiple_heads'])
137 is_forced = bool(entry['multiple_heads'])
138 affected_branches.append([entry['name'], is_forced])
138 affected_branches.append([entry['name'], is_forced])
139 elif repo.repo_type == 'git':
139 elif repo.repo_type == 'git':
140 for entry in extras.commit_ids:
140 for entry in extras.commit_ids:
141 if entry['type'] == 'heads':
141 if entry['type'] == 'heads':
142 is_forced = bool(entry['pruned_sha'])
142 is_forced = bool(entry['pruned_sha'])
143 affected_branches.append([entry['name'], is_forced])
143 affected_branches.append([entry['name'], is_forced])
144
144
145 for branch_name, is_forced in affected_branches:
145 for branch_name, is_forced in affected_branches:
146
146
147 rule, branch_perm = auth_user.get_rule_and_branch_permission(
147 rule, branch_perm = auth_user.get_rule_and_branch_permission(
148 extras.repository, branch_name)
148 extras.repository, branch_name)
149 if not branch_perm:
149 if not branch_perm:
150 # no branch permission found for this branch, just keep checking
150 # no branch permission found for this branch, just keep checking
151 continue
151 continue
152
152
153 if branch_perm == 'branch.push_force':
153 if branch_perm == 'branch.push_force':
154 continue
154 continue
155 elif branch_perm == 'branch.push' and is_forced is False:
155 elif branch_perm == 'branch.push' and is_forced is False:
156 continue
156 continue
157 elif branch_perm == 'branch.push' and is_forced is True:
157 elif branch_perm == 'branch.push' and is_forced is True:
158 halt_message = 'Branch `{}` changes rejected by rule {}. ' \
158 halt_message = 'Branch `{}` changes rejected by rule {}. ' \
159 'FORCE PUSH FORBIDDEN.'.format(branch_name, rule)
159 'FORCE PUSH FORBIDDEN.'.format(branch_name, rule)
160 else:
160 else:
161 halt_message = 'Branch `{}` changes rejected by rule {}.'.format(
161 halt_message = 'Branch `{}` changes rejected by rule {}.'.format(
162 branch_name, rule)
162 branch_name, rule)
163
163
164 if halt_message:
164 if halt_message:
165 _http_ret = HTTPBranchProtected(halt_message)
165 _http_ret = HTTPBranchProtected(halt_message)
166 raise _http_ret
166 raise _http_ret
167
167
168 # Propagate to external components. This is done after checking the
168 # Propagate to external components. This is done after checking the
169 # lock, for consistent behavior.
169 # lock, for consistent behavior.
170 hook_response = pre_push_extension(
170 hook_response = pre_push_extension(
171 repo_store_path=Repository.base_path(), **extras)
171 repo_store_path=Repository.base_path(), **extras)
172 events.trigger(events.RepoPrePushEvent(
172 events.trigger(events.RepoPrePushEvent(
173 repo_name=extras.repository, extras=extras))
173 repo_name=extras.repository, extras=extras))
174
174
175 return HookResponse(0, output) + hook_response
175 return HookResponse(0, output) + hook_response
176
176
177
177
178 def pre_pull(extras):
178 def pre_pull(extras):
179 """
179 """
180 Hook executed before pulling the code.
180 Hook executed before pulling the code.
181
181
182 It bans pulling when the repository is locked.
182 It bans pulling when the repository is locked.
183 """
183 """
184
184
185 output = ''
185 output = ''
186 if extras.locked_by[0]:
186 if extras.locked_by[0]:
187 locked_by = User.get(extras.locked_by[0]).username
187 locked_by = User.get(extras.locked_by[0]).username
188 reason = extras.locked_by[2]
188 reason = extras.locked_by[2]
189 # this exception is interpreted in git/hg middlewares and based
189 # this exception is interpreted in git/hg middlewares and based
190 # on that proper return code is server to client
190 # on that proper return code is server to client
191 _http_ret = HTTPLockedRC(
191 _http_ret = HTTPLockedRC(
192 _locked_by_explanation(extras.repository, locked_by, reason))
192 _locked_by_explanation(extras.repository, locked_by, reason))
193 if str(_http_ret.code).startswith('2'):
193 if str(_http_ret.code).startswith('2'):
194 # 2xx Codes don't raise exceptions
194 # 2xx Codes don't raise exceptions
195 output = _http_ret.title
195 output = _http_ret.title
196 else:
196 else:
197 raise _http_ret
197 raise _http_ret
198
198
199 # Propagate to external components. This is done after checking the
199 # Propagate to external components. This is done after checking the
200 # lock, for consistent behavior.
200 # lock, for consistent behavior.
201 hook_response = ''
201 hook_response = ''
202 if not is_shadow_repo(extras):
202 if not is_shadow_repo(extras):
203 extras.hook_type = extras.hook_type or 'pre_pull'
203 extras.hook_type = extras.hook_type or 'pre_pull'
204 hook_response = pre_pull_extension(
204 hook_response = pre_pull_extension(
205 repo_store_path=Repository.base_path(), **extras)
205 repo_store_path=Repository.base_path(), **extras)
206 events.trigger(events.RepoPrePullEvent(
206 events.trigger(events.RepoPrePullEvent(
207 repo_name=extras.repository, extras=extras))
207 repo_name=extras.repository, extras=extras))
208
208
209 return HookResponse(0, output) + hook_response
209 return HookResponse(0, output) + hook_response
210
210
211
211
212 def post_pull(extras):
212 def post_pull(extras):
213 """Hook executed after client pulls the code."""
213 """Hook executed after client pulls the code."""
214
214
215 audit_user = audit_logger.UserWrap(
215 audit_user = audit_logger.UserWrap(
216 username=extras.username,
216 username=extras.username,
217 ip_addr=extras.ip)
217 ip_addr=extras.ip)
218 repo = audit_logger.RepoWrap(repo_name=extras.repository)
218 repo = audit_logger.RepoWrap(repo_name=extras.repository)
219 audit_logger.store(
219 audit_logger.store(
220 'user.pull', action_data={'user_agent': extras.user_agent},
220 'user.pull', action_data={'user_agent': extras.user_agent},
221 user=audit_user, repo=repo, commit=True)
221 user=audit_user, repo=repo, commit=True)
222
222
223 statsd = StatsdClient.statsd
223 statsd = StatsdClient.statsd
224 if statsd:
224 if statsd:
225 statsd.incr('rhodecode_pull_total', tags=[
225 statsd.incr('rhodecode_pull_total', tags=[
226 'user-agent:{}'.format(user_agent_normalizer(extras.user_agent)),
226 'user-agent:{}'.format(user_agent_normalizer(extras.user_agent)),
227 ])
227 ])
228 output = ''
228 output = ''
229 # make lock is a tri state False, True, None. We only make lock on True
229 # make lock is a tri state False, True, None. We only make lock on True
230 if extras.make_lock is True and not is_shadow_repo(extras):
230 if extras.make_lock is True and not is_shadow_repo(extras):
231 user = User.get_by_username(extras.username)
231 user = User.get_by_username(extras.username)
232 Repository.lock(Repository.get_by_repo_name(extras.repository),
232 Repository.lock(Repository.get_by_repo_name(extras.repository),
233 user.user_id,
233 user.user_id,
234 lock_reason=Repository.LOCK_PULL)
234 lock_reason=Repository.LOCK_PULL)
235 msg = 'Made lock on repo `%s`' % (extras.repository,)
235 msg = 'Made lock on repo `%s`' % (extras.repository,)
236 output += msg
236 output += msg
237
237
238 if extras.locked_by[0]:
238 if extras.locked_by[0]:
239 locked_by = User.get(extras.locked_by[0]).username
239 locked_by = User.get(extras.locked_by[0]).username
240 reason = extras.locked_by[2]
240 reason = extras.locked_by[2]
241 _http_ret = HTTPLockedRC(
241 _http_ret = HTTPLockedRC(
242 _locked_by_explanation(extras.repository, locked_by, reason))
242 _locked_by_explanation(extras.repository, locked_by, reason))
243 if str(_http_ret.code).startswith('2'):
243 if str(_http_ret.code).startswith('2'):
244 # 2xx Codes don't raise exceptions
244 # 2xx Codes don't raise exceptions
245 output += _http_ret.title
245 output += _http_ret.title
246
246
247 # Propagate to external components.
247 # Propagate to external components.
248 hook_response = ''
248 hook_response = ''
249 if not is_shadow_repo(extras):
249 if not is_shadow_repo(extras):
250 extras.hook_type = extras.hook_type or 'post_pull'
250 extras.hook_type = extras.hook_type or 'post_pull'
251 hook_response = post_pull_extension(
251 hook_response = post_pull_extension(
252 repo_store_path=Repository.base_path(), **extras)
252 repo_store_path=Repository.base_path(), **extras)
253 events.trigger(events.RepoPullEvent(
253 events.trigger(events.RepoPullEvent(
254 repo_name=extras.repository, extras=extras))
254 repo_name=extras.repository, extras=extras))
255
255
256 return HookResponse(0, output) + hook_response
256 return HookResponse(0, output) + hook_response
257
257
258
258
259 def post_push(extras):
259 def post_push(extras):
260 """Hook executed after user pushes to the repository."""
260 """Hook executed after user pushes to the repository."""
261 commit_ids = extras.commit_ids
261 commit_ids = extras.commit_ids
262
262
263 # log the push call
263 # log the push call
264 audit_user = audit_logger.UserWrap(
264 audit_user = audit_logger.UserWrap(
265 username=extras.username, ip_addr=extras.ip)
265 username=extras.username, ip_addr=extras.ip)
266 repo = audit_logger.RepoWrap(repo_name=extras.repository)
266 repo = audit_logger.RepoWrap(repo_name=extras.repository)
267 audit_logger.store(
267 audit_logger.store(
268 'user.push', action_data={
268 'user.push', action_data={
269 'user_agent': extras.user_agent,
269 'user_agent': extras.user_agent,
270 'commit_ids': commit_ids[:400]},
270 'commit_ids': commit_ids[:400]},
271 user=audit_user, repo=repo, commit=True)
271 user=audit_user, repo=repo, commit=True)
272
272
273 statsd = StatsdClient.statsd
273 statsd = StatsdClient.statsd
274 if statsd:
274 if statsd:
275 statsd.incr('rhodecode_push_total', tags=[
275 statsd.incr('rhodecode_push_total', tags=[
276 'user-agent:{}'.format(user_agent_normalizer(extras.user_agent)),
276 'user-agent:{}'.format(user_agent_normalizer(extras.user_agent)),
277 ])
277 ])
278
278
279 # Propagate to external components.
279 # Propagate to external components.
280 output = ''
280 output = ''
281 # make lock is a tri state False, True, None. We only release lock on False
281 # make lock is a tri state False, True, None. We only release lock on False
282 if extras.make_lock is False and not is_shadow_repo(extras):
282 if extras.make_lock is False and not is_shadow_repo(extras):
283 Repository.unlock(Repository.get_by_repo_name(extras.repository))
283 Repository.unlock(Repository.get_by_repo_name(extras.repository))
284 msg = 'Released lock on repo `{}`\n'.format(safe_str(extras.repository))
284 msg = 'Released lock on repo `{}`\n'.format(safe_str(extras.repository))
285 output += msg
285 output += msg
286
286
287 if extras.locked_by[0]:
287 if extras.locked_by[0]:
288 locked_by = User.get(extras.locked_by[0]).username
288 locked_by = User.get(extras.locked_by[0]).username
289 reason = extras.locked_by[2]
289 reason = extras.locked_by[2]
290 _http_ret = HTTPLockedRC(
290 _http_ret = HTTPLockedRC(
291 _locked_by_explanation(extras.repository, locked_by, reason))
291 _locked_by_explanation(extras.repository, locked_by, reason))
292 # TODO: johbo: if not?
292 # TODO: johbo: if not?
293 if str(_http_ret.code).startswith('2'):
293 if str(_http_ret.code).startswith('2'):
294 # 2xx Codes don't raise exceptions
294 # 2xx Codes don't raise exceptions
295 output += _http_ret.title
295 output += _http_ret.title
296
296
297 if extras.new_refs:
297 if extras.new_refs:
298 tmpl = '{}/{}/pull-request/new?{{ref_type}}={{ref_name}}'.format(
298 tmpl = '{}/{}/pull-request/new?{{ref_type}}={{ref_name}}'.format(
299 safe_str(extras.server_url), safe_str(extras.repository))
299 safe_str(extras.server_url), safe_str(extras.repository))
300
300
301 for branch_name in extras.new_refs['branches']:
301 for branch_name in extras.new_refs['branches']:
302 output += 'RhodeCode: open pull request link: {}\n'.format(
302 output += 'RhodeCode: open pull request link: {}\n'.format(
303 tmpl.format(ref_type='branch', ref_name=safe_str(branch_name)))
303 tmpl.format(ref_type='branch', ref_name=safe_str(branch_name)))
304
304
305 for book_name in extras.new_refs['bookmarks']:
305 for book_name in extras.new_refs['bookmarks']:
306 output += 'RhodeCode: open pull request link: {}\n'.format(
306 output += 'RhodeCode: open pull request link: {}\n'.format(
307 tmpl.format(ref_type='bookmark', ref_name=safe_str(book_name)))
307 tmpl.format(ref_type='bookmark', ref_name=safe_str(book_name)))
308
308
309 hook_response = ''
309 hook_response = ''
310 if not is_shadow_repo(extras):
310 if not is_shadow_repo(extras):
311 hook_response = post_push_extension(
311 hook_response = post_push_extension(
312 repo_store_path=Repository.base_path(),
312 repo_store_path=Repository.base_path(),
313 **extras)
313 **extras)
314 events.trigger(events.RepoPushEvent(
314 events.trigger(events.RepoPushEvent(
315 repo_name=extras.repository, pushed_commit_ids=commit_ids, extras=extras))
315 repo_name=extras.repository, pushed_commit_ids=commit_ids, extras=extras))
316
316
317 output += 'RhodeCode: push completed\n'
317 output += 'RhodeCode: push completed\n'
318 return HookResponse(0, output) + hook_response
318 return HookResponse(0, output) + hook_response
319
319
320
320
321 def _locked_by_explanation(repo_name, user_name, reason):
321 def _locked_by_explanation(repo_name, user_name, reason):
322 message = (
322 message = (
323 'Repository `%s` locked by user `%s`. Reason:`%s`'
323 'Repository `%s` locked by user `%s`. Reason:`%s`'
324 % (repo_name, user_name, reason))
324 % (repo_name, user_name, reason))
325 return message
325 return message
326
326
327
327
328 def check_allowed_create_user(user_dict, created_by, **kwargs):
328 def check_allowed_create_user(user_dict, created_by, **kwargs):
329 # pre create hooks
329 # pre create hooks
330 if pre_create_user.is_active():
330 if pre_create_user.is_active():
331 hook_result = pre_create_user(created_by=created_by, **user_dict)
331 hook_result = pre_create_user(created_by=created_by, **user_dict)
332 allowed = hook_result.status == 0
332 allowed = hook_result.status == 0
333 if not allowed:
333 if not allowed:
334 reason = hook_result.output
334 reason = hook_result.output
335 raise UserCreationError(reason)
335 raise UserCreationError(reason)
336
336
337
337
338 class ExtensionCallback(object):
338 class ExtensionCallback(object):
339 """
339 """
340 Forwards a given call to rcextensions, sanitizes keyword arguments.
340 Forwards a given call to rcextensions, sanitizes keyword arguments.
341
341
342 Does check if there is an extension active for that hook. If it is
342 Does check if there is an extension active for that hook. If it is
343 there, it will forward all `kwargs_keys` keyword arguments to the
343 there, it will forward all `kwargs_keys` keyword arguments to the
344 extension callback.
344 extension callback.
345 """
345 """
346
346
347 def __init__(self, hook_name, kwargs_keys):
347 def __init__(self, hook_name, kwargs_keys):
348 self._hook_name = hook_name
348 self._hook_name = hook_name
349 self._kwargs_keys = set(kwargs_keys)
349 self._kwargs_keys = set(kwargs_keys)
350
350
351 def __call__(self, *args, **kwargs):
351 def __call__(self, *args, **kwargs):
352 log.debug('Calling extension callback for `%s`', self._hook_name)
352 log.debug('Calling extension callback for `%s`', self._hook_name)
353 callback = self._get_callback()
353 callback = self._get_callback()
354 if not callback:
354 if not callback:
355 log.debug('extension callback `%s` not found, skipping...', self._hook_name)
355 log.debug('extension callback `%s` not found, skipping...', self._hook_name)
356 return
356 return
357
357
358 kwargs_to_pass = {}
358 kwargs_to_pass = {}
359 for key in self._kwargs_keys:
359 for key in self._kwargs_keys:
360 try:
360 try:
361 kwargs_to_pass[key] = kwargs[key]
361 kwargs_to_pass[key] = kwargs[key]
362 except KeyError:
362 except KeyError:
363 log.error('Failed to fetch %s key from given kwargs. '
363 log.error('Failed to fetch %s key from given kwargs. '
364 'Expected keys: %s', key, self._kwargs_keys)
364 'Expected keys: %s', key, self._kwargs_keys)
365 raise
365 raise
366
366
367 # backward compat for removed api_key for old hooks. This was it works
367 # backward compat for removed api_key for old hooks. This was it works
368 # with older rcextensions that require api_key present
368 # with older rcextensions that require api_key present
369 if self._hook_name in ['CREATE_USER_HOOK', 'DELETE_USER_HOOK']:
369 if self._hook_name in ['CREATE_USER_HOOK', 'DELETE_USER_HOOK']:
370 kwargs_to_pass['api_key'] = '_DEPRECATED_'
370 kwargs_to_pass['api_key'] = '_DEPRECATED_'
371 return callback(**kwargs_to_pass)
371 return callback(**kwargs_to_pass)
372
372
373 def is_active(self):
373 def is_active(self):
374 return hasattr(rhodecode.EXTENSIONS, self._hook_name)
374 return hasattr(rhodecode.EXTENSIONS, self._hook_name)
375
375
376 def _get_callback(self):
376 def _get_callback(self):
377 return getattr(rhodecode.EXTENSIONS, self._hook_name, None)
377 return getattr(rhodecode.EXTENSIONS, self._hook_name, None)
378
378
379
379
380 pre_pull_extension = ExtensionCallback(
380 pre_pull_extension = ExtensionCallback(
381 hook_name='PRE_PULL_HOOK',
381 hook_name='PRE_PULL_HOOK',
382 kwargs_keys=(
382 kwargs_keys=(
383 'server_url', 'config', 'scm', 'username', 'ip', 'action',
383 'server_url', 'config', 'scm', 'username', 'ip', 'action',
384 'repository', 'hook_type', 'user_agent', 'repo_store_path',))
384 'repository', 'hook_type', 'user_agent', 'repo_store_path',))
385
385
386
386
387 post_pull_extension = ExtensionCallback(
387 post_pull_extension = ExtensionCallback(
388 hook_name='PULL_HOOK',
388 hook_name='PULL_HOOK',
389 kwargs_keys=(
389 kwargs_keys=(
390 'server_url', 'config', 'scm', 'username', 'ip', 'action',
390 'server_url', 'config', 'scm', 'username', 'ip', 'action',
391 'repository', 'hook_type', 'user_agent', 'repo_store_path',))
391 'repository', 'hook_type', 'user_agent', 'repo_store_path',))
392
392
393
393
394 pre_push_extension = ExtensionCallback(
394 pre_push_extension = ExtensionCallback(
395 hook_name='PRE_PUSH_HOOK',
395 hook_name='PRE_PUSH_HOOK',
396 kwargs_keys=(
396 kwargs_keys=(
397 'server_url', 'config', 'scm', 'username', 'ip', 'action',
397 'server_url', 'config', 'scm', 'username', 'ip', 'action',
398 'repository', 'repo_store_path', 'commit_ids', 'hook_type', 'user_agent',))
398 'repository', 'repo_store_path', 'commit_ids', 'hook_type', 'user_agent',))
399
399
400
400
401 post_push_extension = ExtensionCallback(
401 post_push_extension = ExtensionCallback(
402 hook_name='PUSH_HOOK',
402 hook_name='PUSH_HOOK',
403 kwargs_keys=(
403 kwargs_keys=(
404 'server_url', 'config', 'scm', 'username', 'ip', 'action',
404 'server_url', 'config', 'scm', 'username', 'ip', 'action',
405 'repository', 'repo_store_path', 'commit_ids', 'hook_type', 'user_agent',))
405 'repository', 'repo_store_path', 'commit_ids', 'hook_type', 'user_agent',))
406
406
407
407
408 pre_create_user = ExtensionCallback(
408 pre_create_user = ExtensionCallback(
409 hook_name='PRE_CREATE_USER_HOOK',
409 hook_name='PRE_CREATE_USER_HOOK',
410 kwargs_keys=(
410 kwargs_keys=(
411 'username', 'password', 'email', 'firstname', 'lastname', 'active',
411 'username', 'password', 'email', 'firstname', 'lastname', 'active',
412 'admin', 'created_by'))
412 'admin', 'created_by'))
413
413
414
414
415 create_pull_request = ExtensionCallback(
415 create_pull_request = ExtensionCallback(
416 hook_name='CREATE_PULL_REQUEST',
416 hook_name='CREATE_PULL_REQUEST',
417 kwargs_keys=(
417 kwargs_keys=(
418 'server_url', 'config', 'scm', 'username', 'ip', 'action',
418 'server_url', 'config', 'scm', 'username', 'ip', 'action',
419 'repository', 'pull_request_id', 'url', 'title', 'description',
419 'repository', 'pull_request_id', 'url', 'title', 'description',
420 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
420 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
421 'mergeable', 'source', 'target', 'author', 'reviewers'))
421 'mergeable', 'source', 'target', 'author', 'reviewers'))
422
422
423
423
424 merge_pull_request = ExtensionCallback(
424 merge_pull_request = ExtensionCallback(
425 hook_name='MERGE_PULL_REQUEST',
425 hook_name='MERGE_PULL_REQUEST',
426 kwargs_keys=(
426 kwargs_keys=(
427 'server_url', 'config', 'scm', 'username', 'ip', 'action',
427 'server_url', 'config', 'scm', 'username', 'ip', 'action',
428 'repository', 'pull_request_id', 'url', 'title', 'description',
428 'repository', 'pull_request_id', 'url', 'title', 'description',
429 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
429 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
430 'mergeable', 'source', 'target', 'author', 'reviewers'))
430 'mergeable', 'source', 'target', 'author', 'reviewers'))
431
431
432
432
433 close_pull_request = ExtensionCallback(
433 close_pull_request = ExtensionCallback(
434 hook_name='CLOSE_PULL_REQUEST',
434 hook_name='CLOSE_PULL_REQUEST',
435 kwargs_keys=(
435 kwargs_keys=(
436 'server_url', 'config', 'scm', 'username', 'ip', 'action',
436 'server_url', 'config', 'scm', 'username', 'ip', 'action',
437 'repository', 'pull_request_id', 'url', 'title', 'description',
437 'repository', 'pull_request_id', 'url', 'title', 'description',
438 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
438 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
439 'mergeable', 'source', 'target', 'author', 'reviewers'))
439 'mergeable', 'source', 'target', 'author', 'reviewers'))
440
440
441
441
442 review_pull_request = ExtensionCallback(
442 review_pull_request = ExtensionCallback(
443 hook_name='REVIEW_PULL_REQUEST',
443 hook_name='REVIEW_PULL_REQUEST',
444 kwargs_keys=(
444 kwargs_keys=(
445 'server_url', 'config', 'scm', 'username', 'ip', 'action',
445 'server_url', 'config', 'scm', 'username', 'ip', 'action',
446 'repository', 'pull_request_id', 'url', 'title', 'description',
446 'repository', 'pull_request_id', 'url', 'title', 'description',
447 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
447 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
448 'mergeable', 'source', 'target', 'author', 'reviewers'))
448 'mergeable', 'source', 'target', 'author', 'reviewers'))
449
449
450
450
451 comment_pull_request = ExtensionCallback(
451 comment_pull_request = ExtensionCallback(
452 hook_name='COMMENT_PULL_REQUEST',
452 hook_name='COMMENT_PULL_REQUEST',
453 kwargs_keys=(
453 kwargs_keys=(
454 'server_url', 'config', 'scm', 'username', 'ip', 'action',
454 'server_url', 'config', 'scm', 'username', 'ip', 'action',
455 'repository', 'pull_request_id', 'url', 'title', 'description',
455 'repository', 'pull_request_id', 'url', 'title', 'description',
456 'status', 'comment', 'created_on', 'updated_on', 'commit_ids', 'review_status',
456 'status', 'comment', 'created_on', 'updated_on', 'commit_ids', 'review_status',
457 'mergeable', 'source', 'target', 'author', 'reviewers'))
457 'mergeable', 'source', 'target', 'author', 'reviewers'))
458
458
459
459
460 comment_edit_pull_request = ExtensionCallback(
460 comment_edit_pull_request = ExtensionCallback(
461 hook_name='COMMENT_EDIT_PULL_REQUEST',
461 hook_name='COMMENT_EDIT_PULL_REQUEST',
462 kwargs_keys=(
462 kwargs_keys=(
463 'server_url', 'config', 'scm', 'username', 'ip', 'action',
463 'server_url', 'config', 'scm', 'username', 'ip', 'action',
464 'repository', 'pull_request_id', 'url', 'title', 'description',
464 'repository', 'pull_request_id', 'url', 'title', 'description',
465 'status', 'comment', 'created_on', 'updated_on', 'commit_ids', 'review_status',
465 'status', 'comment', 'created_on', 'updated_on', 'commit_ids', 'review_status',
466 'mergeable', 'source', 'target', 'author', 'reviewers'))
466 'mergeable', 'source', 'target', 'author', 'reviewers'))
467
467
468
468
469 update_pull_request = ExtensionCallback(
469 update_pull_request = ExtensionCallback(
470 hook_name='UPDATE_PULL_REQUEST',
470 hook_name='UPDATE_PULL_REQUEST',
471 kwargs_keys=(
471 kwargs_keys=(
472 'server_url', 'config', 'scm', 'username', 'ip', 'action',
472 'server_url', 'config', 'scm', 'username', 'ip', 'action',
473 'repository', 'pull_request_id', 'url', 'title', 'description',
473 'repository', 'pull_request_id', 'url', 'title', 'description',
474 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
474 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
475 'mergeable', 'source', 'target', 'author', 'reviewers'))
475 'mergeable', 'source', 'target', 'author', 'reviewers'))
476
476
477
477
478 create_user = ExtensionCallback(
478 create_user = ExtensionCallback(
479 hook_name='CREATE_USER_HOOK',
479 hook_name='CREATE_USER_HOOK',
480 kwargs_keys=(
480 kwargs_keys=(
481 'username', 'full_name_or_username', 'full_contact', 'user_id',
481 'username', 'full_name_or_username', 'full_contact', 'user_id',
482 'name', 'firstname', 'short_contact', 'admin', 'lastname',
482 'name', 'firstname', 'short_contact', 'admin', 'lastname',
483 'ip_addresses', 'extern_type', 'extern_name',
483 'ip_addresses', 'extern_type', 'extern_name',
484 'email', 'api_keys', 'last_login',
484 'email', 'api_keys', 'last_login',
485 'full_name', 'active', 'password', 'emails',
485 'full_name', 'active', 'password', 'emails',
486 'inherit_default_permissions', 'created_by', 'created_on'))
486 'inherit_default_permissions', 'created_by', 'created_on'))
487
487
488
488
489 delete_user = ExtensionCallback(
489 delete_user = ExtensionCallback(
490 hook_name='DELETE_USER_HOOK',
490 hook_name='DELETE_USER_HOOK',
491 kwargs_keys=(
491 kwargs_keys=(
492 'username', 'full_name_or_username', 'full_contact', 'user_id',
492 'username', 'full_name_or_username', 'full_contact', 'user_id',
493 'name', 'firstname', 'short_contact', 'admin', 'lastname',
493 'name', 'firstname', 'short_contact', 'admin', 'lastname',
494 'ip_addresses',
494 'ip_addresses',
495 'email', 'last_login',
495 'email', 'last_login',
496 'full_name', 'active', 'password', 'emails',
496 'full_name', 'active', 'password', 'emails',
497 'inherit_default_permissions', 'deleted_by'))
497 'inherit_default_permissions', 'deleted_by'))
498
498
499
499
500 create_repository = ExtensionCallback(
500 create_repository = ExtensionCallback(
501 hook_name='CREATE_REPO_HOOK',
501 hook_name='CREATE_REPO_HOOK',
502 kwargs_keys=(
502 kwargs_keys=(
503 'repo_name', 'repo_type', 'description', 'private', 'created_on',
503 'repo_name', 'repo_type', 'description', 'private', 'created_on',
504 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics',
504 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics',
505 'clone_uri', 'fork_id', 'group_id', 'created_by'))
505 'clone_uri', 'fork_id', 'group_id', 'created_by'))
506
506
507
507
508 delete_repository = ExtensionCallback(
508 delete_repository = ExtensionCallback(
509 hook_name='DELETE_REPO_HOOK',
509 hook_name='DELETE_REPO_HOOK',
510 kwargs_keys=(
510 kwargs_keys=(
511 'repo_name', 'repo_type', 'description', 'private', 'created_on',
511 'repo_name', 'repo_type', 'description', 'private', 'created_on',
512 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics',
512 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics',
513 'clone_uri', 'fork_id', 'group_id', 'deleted_by', 'deleted_on'))
513 'clone_uri', 'fork_id', 'group_id', 'deleted_by', 'deleted_on'))
514
514
515
515
516 comment_commit_repository = ExtensionCallback(
516 comment_commit_repository = ExtensionCallback(
517 hook_name='COMMENT_COMMIT_REPO_HOOK',
517 hook_name='COMMENT_COMMIT_REPO_HOOK',
518 kwargs_keys=(
518 kwargs_keys=(
519 'repo_name', 'repo_type', 'description', 'private', 'created_on',
519 'repo_name', 'repo_type', 'description', 'private', 'created_on',
520 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics',
520 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics',
521 'clone_uri', 'fork_id', 'group_id',
521 'clone_uri', 'fork_id', 'group_id',
522 'repository', 'created_by', 'comment', 'commit'))
522 'repository', 'created_by', 'comment', 'commit'))
523
523
524 comment_edit_commit_repository = ExtensionCallback(
524 comment_edit_commit_repository = ExtensionCallback(
525 hook_name='COMMENT_EDIT_COMMIT_REPO_HOOK',
525 hook_name='COMMENT_EDIT_COMMIT_REPO_HOOK',
526 kwargs_keys=(
526 kwargs_keys=(
527 'repo_name', 'repo_type', 'description', 'private', 'created_on',
527 'repo_name', 'repo_type', 'description', 'private', 'created_on',
528 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics',
528 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics',
529 'clone_uri', 'fork_id', 'group_id',
529 'clone_uri', 'fork_id', 'group_id',
530 'repository', 'created_by', 'comment', 'commit'))
530 'repository', 'created_by', 'comment', 'commit'))
531
531
532
532
533 create_repository_group = ExtensionCallback(
533 create_repository_group = ExtensionCallback(
534 hook_name='CREATE_REPO_GROUP_HOOK',
534 hook_name='CREATE_REPO_GROUP_HOOK',
535 kwargs_keys=(
535 kwargs_keys=(
536 'group_name', 'group_parent_id', 'group_description',
536 'group_name', 'group_parent_id', 'group_description',
537 'group_id', 'user_id', 'created_by', 'created_on',
537 'group_id', 'user_id', 'created_by', 'created_on',
538 'enable_locking'))
538 'enable_locking'))
@@ -1,365 +1,364 b''
1 # -*- coding: utf-8 -*-
2
1
3 # Copyright (C) 2010-2020 RhodeCode GmbH
2 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
3 #
5 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
8 #
7 #
9 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
11 # GNU General Public License for more details.
13 #
12 #
14 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
15 #
17 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
19
21 import os
20 import os
22 import time
21 import time
23 import logging
22 import logging
24 import tempfile
23 import tempfile
25 import traceback
24 import traceback
26 import threading
25 import threading
27 import socket
26 import socket
28 import msgpack
27 import msgpack
29
28
30 from http.server import BaseHTTPRequestHandler
29 from http.server import BaseHTTPRequestHandler
31 from socketserver import TCPServer
30 from socketserver import TCPServer
32
31
33 import rhodecode
32 import rhodecode
34 from rhodecode.lib.exceptions import HTTPLockedRC, HTTPBranchProtected
33 from rhodecode.lib.exceptions import HTTPLockedRC, HTTPBranchProtected
35 from rhodecode.model import meta
34 from rhodecode.model import meta
36 from rhodecode.lib.base import bootstrap_request, bootstrap_config
35 from rhodecode.lib.base import bootstrap_request, bootstrap_config
37 from rhodecode.lib import hooks_base
36 from rhodecode.lib import hooks_base
38 from rhodecode.lib.utils2 import AttributeDict
37 from rhodecode.lib.utils2 import AttributeDict
39 from rhodecode.lib.ext_json import json
38 from rhodecode.lib.ext_json import json
40 from rhodecode.lib import rc_cache
39 from rhodecode.lib import rc_cache
41
40
42 log = logging.getLogger(__name__)
41 log = logging.getLogger(__name__)
43
42
44
43
45 class HooksHttpHandler(BaseHTTPRequestHandler):
44 class HooksHttpHandler(BaseHTTPRequestHandler):
46
45
47 def do_POST(self):
46 def do_POST(self):
48 hooks_proto, method, extras = self._read_request()
47 hooks_proto, method, extras = self._read_request()
49 log.debug('Handling HooksHttpHandler %s with %s proto', method, hooks_proto)
48 log.debug('Handling HooksHttpHandler %s with %s proto', method, hooks_proto)
50
49
51 txn_id = getattr(self.server, 'txn_id', None)
50 txn_id = getattr(self.server, 'txn_id', None)
52 if txn_id:
51 if txn_id:
53 log.debug('Computing TXN_ID based on `%s`:`%s`',
52 log.debug('Computing TXN_ID based on `%s`:`%s`',
54 extras['repository'], extras['txn_id'])
53 extras['repository'], extras['txn_id'])
55 computed_txn_id = rc_cache.utils.compute_key_from_params(
54 computed_txn_id = rc_cache.utils.compute_key_from_params(
56 extras['repository'], extras['txn_id'])
55 extras['repository'], extras['txn_id'])
57 if txn_id != computed_txn_id:
56 if txn_id != computed_txn_id:
58 raise Exception(
57 raise Exception(
59 'TXN ID fail: expected {} got {} instead'.format(
58 'TXN ID fail: expected {} got {} instead'.format(
60 txn_id, computed_txn_id))
59 txn_id, computed_txn_id))
61
60
62 request = getattr(self.server, 'request', None)
61 request = getattr(self.server, 'request', None)
63 try:
62 try:
64 hooks = Hooks(request=request, log_prefix='HOOKS: {} '.format(self.server.server_address))
63 hooks = Hooks(request=request, log_prefix='HOOKS: {} '.format(self.server.server_address))
65 result = self._call_hook_method(hooks, method, extras)
64 result = self._call_hook_method(hooks, method, extras)
66 except Exception as e:
65 except Exception as e:
67 exc_tb = traceback.format_exc()
66 exc_tb = traceback.format_exc()
68 result = {
67 result = {
69 'exception': e.__class__.__name__,
68 'exception': e.__class__.__name__,
70 'exception_traceback': exc_tb,
69 'exception_traceback': exc_tb,
71 'exception_args': e.args
70 'exception_args': e.args
72 }
71 }
73 self._write_response(hooks_proto, result)
72 self._write_response(hooks_proto, result)
74
73
75 def _read_request(self):
74 def _read_request(self):
76 length = int(self.headers['Content-Length'])
75 length = int(self.headers['Content-Length'])
77 hooks_proto = self.headers.get('rc-hooks-protocol') or 'json.v1'
76 hooks_proto = self.headers.get('rc-hooks-protocol') or 'json.v1'
78 if hooks_proto == 'msgpack.v1':
77 if hooks_proto == 'msgpack.v1':
79 # support for new vcsserver msgpack based protocol hooks
78 # support for new vcsserver msgpack based protocol hooks
80 data = msgpack.unpackb(self.rfile.read(length), raw=False)
79 data = msgpack.unpackb(self.rfile.read(length), raw=False)
81 else:
80 else:
82 body = self.rfile.read(length)
81 body = self.rfile.read(length)
83 data = json.loads(body)
82 data = json.loads(body)
84
83
85 return hooks_proto, data['method'], data['extras']
84 return hooks_proto, data['method'], data['extras']
86
85
87 def _write_response(self, hooks_proto, result):
86 def _write_response(self, hooks_proto, result):
88 self.send_response(200)
87 self.send_response(200)
89 if hooks_proto == 'msgpack.v1':
88 if hooks_proto == 'msgpack.v1':
90 self.send_header("Content-type", "application/msgpack")
89 self.send_header("Content-type", "application/msgpack")
91 self.end_headers()
90 self.end_headers()
92 self.wfile.write(msgpack.packb(result))
91 self.wfile.write(msgpack.packb(result))
93 else:
92 else:
94 self.send_header("Content-type", "text/json")
93 self.send_header("Content-type", "text/json")
95 self.end_headers()
94 self.end_headers()
96 self.wfile.write(json.dumps(result))
95 self.wfile.write(json.dumps(result))
97
96
98 def _call_hook_method(self, hooks, method, extras):
97 def _call_hook_method(self, hooks, method, extras):
99 try:
98 try:
100 result = getattr(hooks, method)(extras)
99 result = getattr(hooks, method)(extras)
101 finally:
100 finally:
102 meta.Session.remove()
101 meta.Session.remove()
103 return result
102 return result
104
103
105 def log_message(self, format, *args):
104 def log_message(self, format, *args):
106 """
105 """
107 This is an overridden method of BaseHTTPRequestHandler which logs using
106 This is an overridden method of BaseHTTPRequestHandler which logs using
108 logging library instead of writing directly to stderr.
107 logging library instead of writing directly to stderr.
109 """
108 """
110
109
111 message = format % args
110 message = format % args
112
111
113 log.debug(
112 log.debug(
114 "HOOKS: %s - - [%s] %s", self.client_address,
113 "HOOKS: %s - - [%s] %s", self.client_address,
115 self.log_date_time_string(), message)
114 self.log_date_time_string(), message)
116
115
117
116
118 class DummyHooksCallbackDaemon(object):
117 class DummyHooksCallbackDaemon(object):
119 hooks_uri = ''
118 hooks_uri = ''
120
119
121 def __init__(self):
120 def __init__(self):
122 self.hooks_module = Hooks.__module__
121 self.hooks_module = Hooks.__module__
123
122
124 def __enter__(self):
123 def __enter__(self):
125 log.debug('Running `%s` callback daemon', self.__class__.__name__)
124 log.debug('Running `%s` callback daemon', self.__class__.__name__)
126 return self
125 return self
127
126
128 def __exit__(self, exc_type, exc_val, exc_tb):
127 def __exit__(self, exc_type, exc_val, exc_tb):
129 log.debug('Exiting `%s` callback daemon', self.__class__.__name__)
128 log.debug('Exiting `%s` callback daemon', self.__class__.__name__)
130
129
131
130
132 class ThreadedHookCallbackDaemon(object):
131 class ThreadedHookCallbackDaemon(object):
133
132
134 _callback_thread = None
133 _callback_thread = None
135 _daemon = None
134 _daemon = None
136 _done = False
135 _done = False
137
136
138 def __init__(self, txn_id=None, host=None, port=None):
137 def __init__(self, txn_id=None, host=None, port=None):
139 self._prepare(txn_id=txn_id, host=host, port=port)
138 self._prepare(txn_id=txn_id, host=host, port=port)
140
139
141 def __enter__(self):
140 def __enter__(self):
142 log.debug('Running `%s` callback daemon', self.__class__.__name__)
141 log.debug('Running `%s` callback daemon', self.__class__.__name__)
143 self._run()
142 self._run()
144 return self
143 return self
145
144
146 def __exit__(self, exc_type, exc_val, exc_tb):
145 def __exit__(self, exc_type, exc_val, exc_tb):
147 log.debug('Exiting `%s` callback daemon', self.__class__.__name__)
146 log.debug('Exiting `%s` callback daemon', self.__class__.__name__)
148 self._stop()
147 self._stop()
149
148
150 def _prepare(self, txn_id=None, host=None, port=None):
149 def _prepare(self, txn_id=None, host=None, port=None):
151 raise NotImplementedError()
150 raise NotImplementedError()
152
151
153 def _run(self):
152 def _run(self):
154 raise NotImplementedError()
153 raise NotImplementedError()
155
154
156 def _stop(self):
155 def _stop(self):
157 raise NotImplementedError()
156 raise NotImplementedError()
158
157
159
158
160 class HttpHooksCallbackDaemon(ThreadedHookCallbackDaemon):
159 class HttpHooksCallbackDaemon(ThreadedHookCallbackDaemon):
161 """
160 """
162 Context manager which will run a callback daemon in a background thread.
161 Context manager which will run a callback daemon in a background thread.
163 """
162 """
164
163
165 hooks_uri = None
164 hooks_uri = None
166
165
167 # From Python docs: Polling reduces our responsiveness to a shutdown
166 # From Python docs: Polling reduces our responsiveness to a shutdown
168 # request and wastes cpu at all other times.
167 # request and wastes cpu at all other times.
169 POLL_INTERVAL = 0.01
168 POLL_INTERVAL = 0.01
170
169
171 @property
170 @property
172 def _hook_prefix(self):
171 def _hook_prefix(self):
173 return 'HOOKS: {} '.format(self.hooks_uri)
172 return 'HOOKS: {} '.format(self.hooks_uri)
174
173
175 def get_hostname(self):
174 def get_hostname(self):
176 return socket.gethostname() or '127.0.0.1'
175 return socket.gethostname() or '127.0.0.1'
177
176
178 def get_available_port(self, min_port=20000, max_port=65535):
177 def get_available_port(self, min_port=20000, max_port=65535):
179 from rhodecode.lib.utils2 import get_available_port as _get_port
178 from rhodecode.lib.utils2 import get_available_port as _get_port
180 return _get_port(min_port, max_port)
179 return _get_port(min_port, max_port)
181
180
182 def _prepare(self, txn_id=None, host=None, port=None):
181 def _prepare(self, txn_id=None, host=None, port=None):
183 from pyramid.threadlocal import get_current_request
182 from pyramid.threadlocal import get_current_request
184
183
185 if not host or host == "*":
184 if not host or host == "*":
186 host = self.get_hostname()
185 host = self.get_hostname()
187 if not port:
186 if not port:
188 port = self.get_available_port()
187 port = self.get_available_port()
189
188
190 server_address = (host, port)
189 server_address = (host, port)
191 self.hooks_uri = '{}:{}'.format(host, port)
190 self.hooks_uri = '{}:{}'.format(host, port)
192 self.txn_id = txn_id
191 self.txn_id = txn_id
193 self._done = False
192 self._done = False
194
193
195 log.debug(
194 log.debug(
196 "%s Preparing HTTP callback daemon registering hook object: %s",
195 "%s Preparing HTTP callback daemon registering hook object: %s",
197 self._hook_prefix, HooksHttpHandler)
196 self._hook_prefix, HooksHttpHandler)
198
197
199 self._daemon = TCPServer(server_address, HooksHttpHandler)
198 self._daemon = TCPServer(server_address, HooksHttpHandler)
200 # inject transaction_id for later verification
199 # inject transaction_id for later verification
201 self._daemon.txn_id = self.txn_id
200 self._daemon.txn_id = self.txn_id
202
201
203 # pass the WEB app request into daemon
202 # pass the WEB app request into daemon
204 self._daemon.request = get_current_request()
203 self._daemon.request = get_current_request()
205
204
206 def _run(self):
205 def _run(self):
207 log.debug("Running event loop of callback daemon in background thread")
206 log.debug("Running event loop of callback daemon in background thread")
208 callback_thread = threading.Thread(
207 callback_thread = threading.Thread(
209 target=self._daemon.serve_forever,
208 target=self._daemon.serve_forever,
210 kwargs={'poll_interval': self.POLL_INTERVAL})
209 kwargs={'poll_interval': self.POLL_INTERVAL})
211 callback_thread.daemon = True
210 callback_thread.daemon = True
212 callback_thread.start()
211 callback_thread.start()
213 self._callback_thread = callback_thread
212 self._callback_thread = callback_thread
214
213
215 def _stop(self):
214 def _stop(self):
216 log.debug("Waiting for background thread to finish.")
215 log.debug("Waiting for background thread to finish.")
217 self._daemon.shutdown()
216 self._daemon.shutdown()
218 self._callback_thread.join()
217 self._callback_thread.join()
219 self._daemon = None
218 self._daemon = None
220 self._callback_thread = None
219 self._callback_thread = None
221 if self.txn_id:
220 if self.txn_id:
222 txn_id_file = get_txn_id_data_path(self.txn_id)
221 txn_id_file = get_txn_id_data_path(self.txn_id)
223 log.debug('Cleaning up TXN ID %s', txn_id_file)
222 log.debug('Cleaning up TXN ID %s', txn_id_file)
224 if os.path.isfile(txn_id_file):
223 if os.path.isfile(txn_id_file):
225 os.remove(txn_id_file)
224 os.remove(txn_id_file)
226
225
227 log.debug("Background thread done.")
226 log.debug("Background thread done.")
228
227
229
228
230 def get_txn_id_data_path(txn_id):
229 def get_txn_id_data_path(txn_id):
231 import rhodecode
230 import rhodecode
232
231
233 root = rhodecode.CONFIG.get('cache_dir') or tempfile.gettempdir()
232 root = rhodecode.CONFIG.get('cache_dir') or tempfile.gettempdir()
234 final_dir = os.path.join(root, 'svn_txn_id')
233 final_dir = os.path.join(root, 'svn_txn_id')
235
234
236 if not os.path.isdir(final_dir):
235 if not os.path.isdir(final_dir):
237 os.makedirs(final_dir)
236 os.makedirs(final_dir)
238 return os.path.join(final_dir, 'rc_txn_id_{}'.format(txn_id))
237 return os.path.join(final_dir, 'rc_txn_id_{}'.format(txn_id))
239
238
240
239
241 def store_txn_id_data(txn_id, data_dict):
240 def store_txn_id_data(txn_id, data_dict):
242 if not txn_id:
241 if not txn_id:
243 log.warning('Cannot store txn_id because it is empty')
242 log.warning('Cannot store txn_id because it is empty')
244 return
243 return
245
244
246 path = get_txn_id_data_path(txn_id)
245 path = get_txn_id_data_path(txn_id)
247 try:
246 try:
248 with open(path, 'wb') as f:
247 with open(path, 'wb') as f:
249 f.write(json.dumps(data_dict))
248 f.write(json.dumps(data_dict))
250 except Exception:
249 except Exception:
251 log.exception('Failed to write txn_id metadata')
250 log.exception('Failed to write txn_id metadata')
252
251
253
252
254 def get_txn_id_from_store(txn_id):
253 def get_txn_id_from_store(txn_id):
255 """
254 """
256 Reads txn_id from store and if present returns the data for callback manager
255 Reads txn_id from store and if present returns the data for callback manager
257 """
256 """
258 path = get_txn_id_data_path(txn_id)
257 path = get_txn_id_data_path(txn_id)
259 try:
258 try:
260 with open(path, 'rb') as f:
259 with open(path, 'rb') as f:
261 return json.loads(f.read())
260 return json.loads(f.read())
262 except Exception:
261 except Exception:
263 return {}
262 return {}
264
263
265
264
266 def prepare_callback_daemon(extras, protocol, host, use_direct_calls, txn_id=None):
265 def prepare_callback_daemon(extras, protocol, host, use_direct_calls, txn_id=None):
267 txn_details = get_txn_id_from_store(txn_id)
266 txn_details = get_txn_id_from_store(txn_id)
268 port = txn_details.get('port', 0)
267 port = txn_details.get('port', 0)
269 if use_direct_calls:
268 if use_direct_calls:
270 callback_daemon = DummyHooksCallbackDaemon()
269 callback_daemon = DummyHooksCallbackDaemon()
271 extras['hooks_module'] = callback_daemon.hooks_module
270 extras['hooks_module'] = callback_daemon.hooks_module
272 else:
271 else:
273 if protocol == 'http':
272 if protocol == 'http':
274 callback_daemon = HttpHooksCallbackDaemon(
273 callback_daemon = HttpHooksCallbackDaemon(
275 txn_id=txn_id, host=host, port=port)
274 txn_id=txn_id, host=host, port=port)
276 else:
275 else:
277 log.error('Unsupported callback daemon protocol "%s"', protocol)
276 log.error('Unsupported callback daemon protocol "%s"', protocol)
278 raise Exception('Unsupported callback daemon protocol.')
277 raise Exception('Unsupported callback daemon protocol.')
279
278
280 extras['hooks_uri'] = callback_daemon.hooks_uri
279 extras['hooks_uri'] = callback_daemon.hooks_uri
281 extras['hooks_protocol'] = protocol
280 extras['hooks_protocol'] = protocol
282 extras['time'] = time.time()
281 extras['time'] = time.time()
283
282
284 # register txn_id
283 # register txn_id
285 extras['txn_id'] = txn_id
284 extras['txn_id'] = txn_id
286 log.debug('Prepared a callback daemon: %s at url `%s`',
285 log.debug('Prepared a callback daemon: %s at url `%s`',
287 callback_daemon.__class__.__name__, callback_daemon.hooks_uri)
286 callback_daemon.__class__.__name__, callback_daemon.hooks_uri)
288 return callback_daemon, extras
287 return callback_daemon, extras
289
288
290
289
291 class Hooks(object):
290 class Hooks(object):
292 """
291 """
293 Exposes the hooks for remote call backs
292 Exposes the hooks for remote call backs
294 """
293 """
295 def __init__(self, request=None, log_prefix=''):
294 def __init__(self, request=None, log_prefix=''):
296 self.log_prefix = log_prefix
295 self.log_prefix = log_prefix
297 self.request = request
296 self.request = request
298
297
299 def repo_size(self, extras):
298 def repo_size(self, extras):
300 log.debug("%sCalled repo_size of %s object", self.log_prefix, self)
299 log.debug("%sCalled repo_size of %s object", self.log_prefix, self)
301 return self._call_hook(hooks_base.repo_size, extras)
300 return self._call_hook(hooks_base.repo_size, extras)
302
301
303 def pre_pull(self, extras):
302 def pre_pull(self, extras):
304 log.debug("%sCalled pre_pull of %s object", self.log_prefix, self)
303 log.debug("%sCalled pre_pull of %s object", self.log_prefix, self)
305 return self._call_hook(hooks_base.pre_pull, extras)
304 return self._call_hook(hooks_base.pre_pull, extras)
306
305
307 def post_pull(self, extras):
306 def post_pull(self, extras):
308 log.debug("%sCalled post_pull of %s object", self.log_prefix, self)
307 log.debug("%sCalled post_pull of %s object", self.log_prefix, self)
309 return self._call_hook(hooks_base.post_pull, extras)
308 return self._call_hook(hooks_base.post_pull, extras)
310
309
311 def pre_push(self, extras):
310 def pre_push(self, extras):
312 log.debug("%sCalled pre_push of %s object", self.log_prefix, self)
311 log.debug("%sCalled pre_push of %s object", self.log_prefix, self)
313 return self._call_hook(hooks_base.pre_push, extras)
312 return self._call_hook(hooks_base.pre_push, extras)
314
313
315 def post_push(self, extras):
314 def post_push(self, extras):
316 log.debug("%sCalled post_push of %s object", self.log_prefix, self)
315 log.debug("%sCalled post_push of %s object", self.log_prefix, self)
317 return self._call_hook(hooks_base.post_push, extras)
316 return self._call_hook(hooks_base.post_push, extras)
318
317
319 def _call_hook(self, hook, extras):
318 def _call_hook(self, hook, extras):
320 extras = AttributeDict(extras)
319 extras = AttributeDict(extras)
321 server_url = extras['server_url']
320 server_url = extras['server_url']
322
321
323 extras.request = self.request
322 extras.request = self.request
324
323
325 try:
324 try:
326 result = hook(extras)
325 result = hook(extras)
327 if result is None:
326 if result is None:
328 raise Exception(
327 raise Exception(
329 'Failed to obtain hook result from func: {}'.format(hook))
328 'Failed to obtain hook result from func: {}'.format(hook))
330 except HTTPBranchProtected as handled_error:
329 except HTTPBranchProtected as handled_error:
331 # Those special cases doesn't need error reporting. It's a case of
330 # Those special cases doesn't need error reporting. It's a case of
332 # locked repo or protected branch
331 # locked repo or protected branch
333 result = AttributeDict({
332 result = AttributeDict({
334 'status': handled_error.code,
333 'status': handled_error.code,
335 'output': handled_error.explanation
334 'output': handled_error.explanation
336 })
335 })
337 except (HTTPLockedRC, Exception) as error:
336 except (HTTPLockedRC, Exception) as error:
338 # locked needs different handling since we need to also
337 # locked needs different handling since we need to also
339 # handle PULL operations
338 # handle PULL operations
340 exc_tb = ''
339 exc_tb = ''
341 if not isinstance(error, HTTPLockedRC):
340 if not isinstance(error, HTTPLockedRC):
342 exc_tb = traceback.format_exc()
341 exc_tb = traceback.format_exc()
343 log.exception('%sException when handling hook %s', self.log_prefix, hook)
342 log.exception('%sException when handling hook %s', self.log_prefix, hook)
344 error_args = error.args
343 error_args = error.args
345 return {
344 return {
346 'status': 128,
345 'status': 128,
347 'output': '',
346 'output': '',
348 'exception': type(error).__name__,
347 'exception': type(error).__name__,
349 'exception_traceback': exc_tb,
348 'exception_traceback': exc_tb,
350 'exception_args': error_args,
349 'exception_args': error_args,
351 }
350 }
352 finally:
351 finally:
353 meta.Session.remove()
352 meta.Session.remove()
354
353
355 log.debug('%sGot hook call response %s', self.log_prefix, result)
354 log.debug('%sGot hook call response %s', self.log_prefix, result)
356 return {
355 return {
357 'status': result.status,
356 'status': result.status,
358 'output': result.output,
357 'output': result.output,
359 }
358 }
360
359
361 def __enter__(self):
360 def __enter__(self):
362 return self
361 return self
363
362
364 def __exit__(self, exc_type, exc_val, exc_tb):
363 def __exit__(self, exc_type, exc_val, exc_tb):
365 pass
364 pass
@@ -1,264 +1,263 b''
1 # -*- coding: utf-8 -*-
2
1
3 # Copyright (C) 2010-2020 RhodeCode GmbH
2 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
3 #
5 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
8 #
7 #
9 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
11 # GNU General Public License for more details.
13 #
12 #
14 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
15 #
17 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
19
21 import webob
20 import webob
22 from pyramid.threadlocal import get_current_request
21 from pyramid.threadlocal import get_current_request
23
22
24 from rhodecode import events
23 from rhodecode import events
25 from rhodecode.lib import hooks_base
24 from rhodecode.lib import hooks_base
26 from rhodecode.lib import utils2
25 from rhodecode.lib import utils2
27
26
28
27
29 def _supports_repo_type(repo_type):
28 def _supports_repo_type(repo_type):
30 if repo_type in ('hg', 'git'):
29 if repo_type in ('hg', 'git'):
31 return True
30 return True
32 return False
31 return False
33
32
34
33
35 def _get_vcs_operation_context(username, repo_name, repo_type, action):
34 def _get_vcs_operation_context(username, repo_name, repo_type, action):
36 # NOTE(dan): import loop
35 # NOTE(dan): import loop
37 from rhodecode.lib.base import vcs_operation_context
36 from rhodecode.lib.base import vcs_operation_context
38
37
39 check_locking = action in ('pull', 'push')
38 check_locking = action in ('pull', 'push')
40
39
41 request = get_current_request()
40 request = get_current_request()
42
41
43 try:
42 try:
44 environ = request.environ
43 environ = request.environ
45 except TypeError:
44 except TypeError:
46 # we might use this outside of request context
45 # we might use this outside of request context
47 environ = {}
46 environ = {}
48
47
49 if not environ:
48 if not environ:
50 environ = webob.Request.blank('').environ
49 environ = webob.Request.blank('').environ
51
50
52 extras = vcs_operation_context(environ, repo_name, username, action, repo_type, check_locking)
51 extras = vcs_operation_context(environ, repo_name, username, action, repo_type, check_locking)
53 return utils2.AttributeDict(extras)
52 return utils2.AttributeDict(extras)
54
53
55
54
56 def trigger_post_push_hook(username, action, hook_type, repo_name, repo_type, commit_ids):
55 def trigger_post_push_hook(username, action, hook_type, repo_name, repo_type, commit_ids):
57 """
56 """
58 Triggers push action hooks
57 Triggers push action hooks
59
58
60 :param username: username who pushes
59 :param username: username who pushes
61 :param action: push/push_local/push_remote
60 :param action: push/push_local/push_remote
62 :param hook_type: type of hook executed
61 :param hook_type: type of hook executed
63 :param repo_name: name of repo
62 :param repo_name: name of repo
64 :param repo_type: the type of SCM repo
63 :param repo_type: the type of SCM repo
65 :param commit_ids: list of commit ids that we pushed
64 :param commit_ids: list of commit ids that we pushed
66 """
65 """
67 extras = _get_vcs_operation_context(username, repo_name, repo_type, action)
66 extras = _get_vcs_operation_context(username, repo_name, repo_type, action)
68 extras.commit_ids = commit_ids
67 extras.commit_ids = commit_ids
69 extras.hook_type = hook_type
68 extras.hook_type = hook_type
70 hooks_base.post_push(extras)
69 hooks_base.post_push(extras)
71
70
72
71
73 def trigger_comment_commit_hooks(username, repo_name, repo_type, repo, data=None):
72 def trigger_comment_commit_hooks(username, repo_name, repo_type, repo, data=None):
74 """
73 """
75 Triggers when a comment is made on a commit
74 Triggers when a comment is made on a commit
76
75
77 :param username: username who creates the comment
76 :param username: username who creates the comment
78 :param repo_name: name of target repo
77 :param repo_name: name of target repo
79 :param repo_type: the type of SCM target repo
78 :param repo_type: the type of SCM target repo
80 :param repo: the repo object we trigger the event for
79 :param repo: the repo object we trigger the event for
81 :param data: extra data for specific events e.g {'comment': comment_obj, 'commit': commit_obj}
80 :param data: extra data for specific events e.g {'comment': comment_obj, 'commit': commit_obj}
82 """
81 """
83 if not _supports_repo_type(repo_type):
82 if not _supports_repo_type(repo_type):
84 return
83 return
85
84
86 extras = _get_vcs_operation_context(username, repo_name, repo_type, 'comment_commit')
85 extras = _get_vcs_operation_context(username, repo_name, repo_type, 'comment_commit')
87
86
88 comment = data['comment']
87 comment = data['comment']
89 commit = data['commit']
88 commit = data['commit']
90
89
91 events.trigger(events.RepoCommitCommentEvent(repo, commit, comment))
90 events.trigger(events.RepoCommitCommentEvent(repo, commit, comment))
92 extras.update(repo.get_dict())
91 extras.update(repo.get_dict())
93
92
94 extras.commit = commit.serialize()
93 extras.commit = commit.serialize()
95 extras.comment = comment.get_api_data()
94 extras.comment = comment.get_api_data()
96 extras.created_by = username
95 extras.created_by = username
97 hooks_base.comment_commit_repository(**extras)
96 hooks_base.comment_commit_repository(**extras)
98
97
99
98
100 def trigger_comment_commit_edit_hooks(username, repo_name, repo_type, repo, data=None):
99 def trigger_comment_commit_edit_hooks(username, repo_name, repo_type, repo, data=None):
101 """
100 """
102 Triggers when a comment is edited on a commit
101 Triggers when a comment is edited on a commit
103
102
104 :param username: username who edits the comment
103 :param username: username who edits the comment
105 :param repo_name: name of target repo
104 :param repo_name: name of target repo
106 :param repo_type: the type of SCM target repo
105 :param repo_type: the type of SCM target repo
107 :param repo: the repo object we trigger the event for
106 :param repo: the repo object we trigger the event for
108 :param data: extra data for specific events e.g {'comment': comment_obj, 'commit': commit_obj}
107 :param data: extra data for specific events e.g {'comment': comment_obj, 'commit': commit_obj}
109 """
108 """
110 if not _supports_repo_type(repo_type):
109 if not _supports_repo_type(repo_type):
111 return
110 return
112
111
113 extras = _get_vcs_operation_context(username, repo_name, repo_type, 'comment_commit')
112 extras = _get_vcs_operation_context(username, repo_name, repo_type, 'comment_commit')
114
113
115 comment = data['comment']
114 comment = data['comment']
116 commit = data['commit']
115 commit = data['commit']
117
116
118 events.trigger(events.RepoCommitCommentEditEvent(repo, commit, comment))
117 events.trigger(events.RepoCommitCommentEditEvent(repo, commit, comment))
119 extras.update(repo.get_dict())
118 extras.update(repo.get_dict())
120
119
121 extras.commit = commit.serialize()
120 extras.commit = commit.serialize()
122 extras.comment = comment.get_api_data()
121 extras.comment = comment.get_api_data()
123 extras.created_by = username
122 extras.created_by = username
124 hooks_base.comment_edit_commit_repository(**extras)
123 hooks_base.comment_edit_commit_repository(**extras)
125
124
126
125
127 def trigger_create_pull_request_hook(username, repo_name, repo_type, pull_request, data=None):
126 def trigger_create_pull_request_hook(username, repo_name, repo_type, pull_request, data=None):
128 """
127 """
129 Triggers create pull request action hooks
128 Triggers create pull request action hooks
130
129
131 :param username: username who creates the pull request
130 :param username: username who creates the pull request
132 :param repo_name: name of target repo
131 :param repo_name: name of target repo
133 :param repo_type: the type of SCM target repo
132 :param repo_type: the type of SCM target repo
134 :param pull_request: the pull request that was created
133 :param pull_request: the pull request that was created
135 :param data: extra data for specific events e.g {'comment': comment_obj}
134 :param data: extra data for specific events e.g {'comment': comment_obj}
136 """
135 """
137 if not _supports_repo_type(repo_type):
136 if not _supports_repo_type(repo_type):
138 return
137 return
139
138
140 extras = _get_vcs_operation_context(username, repo_name, repo_type, 'create_pull_request')
139 extras = _get_vcs_operation_context(username, repo_name, repo_type, 'create_pull_request')
141 events.trigger(events.PullRequestCreateEvent(pull_request))
140 events.trigger(events.PullRequestCreateEvent(pull_request))
142 extras.update(pull_request.get_api_data(with_merge_state=False))
141 extras.update(pull_request.get_api_data(with_merge_state=False))
143 hooks_base.create_pull_request(**extras)
142 hooks_base.create_pull_request(**extras)
144
143
145
144
146 def trigger_merge_pull_request_hook(username, repo_name, repo_type, pull_request, data=None):
145 def trigger_merge_pull_request_hook(username, repo_name, repo_type, pull_request, data=None):
147 """
146 """
148 Triggers merge pull request action hooks
147 Triggers merge pull request action hooks
149
148
150 :param username: username who creates the pull request
149 :param username: username who creates the pull request
151 :param repo_name: name of target repo
150 :param repo_name: name of target repo
152 :param repo_type: the type of SCM target repo
151 :param repo_type: the type of SCM target repo
153 :param pull_request: the pull request that was merged
152 :param pull_request: the pull request that was merged
154 :param data: extra data for specific events e.g {'comment': comment_obj}
153 :param data: extra data for specific events e.g {'comment': comment_obj}
155 """
154 """
156 if not _supports_repo_type(repo_type):
155 if not _supports_repo_type(repo_type):
157 return
156 return
158
157
159 extras = _get_vcs_operation_context(username, repo_name, repo_type, 'merge_pull_request')
158 extras = _get_vcs_operation_context(username, repo_name, repo_type, 'merge_pull_request')
160 events.trigger(events.PullRequestMergeEvent(pull_request))
159 events.trigger(events.PullRequestMergeEvent(pull_request))
161 extras.update(pull_request.get_api_data())
160 extras.update(pull_request.get_api_data())
162 hooks_base.merge_pull_request(**extras)
161 hooks_base.merge_pull_request(**extras)
163
162
164
163
165 def trigger_close_pull_request_hook(username, repo_name, repo_type, pull_request, data=None):
164 def trigger_close_pull_request_hook(username, repo_name, repo_type, pull_request, data=None):
166 """
165 """
167 Triggers close pull request action hooks
166 Triggers close pull request action hooks
168
167
169 :param username: username who creates the pull request
168 :param username: username who creates the pull request
170 :param repo_name: name of target repo
169 :param repo_name: name of target repo
171 :param repo_type: the type of SCM target repo
170 :param repo_type: the type of SCM target repo
172 :param pull_request: the pull request that was closed
171 :param pull_request: the pull request that was closed
173 :param data: extra data for specific events e.g {'comment': comment_obj}
172 :param data: extra data for specific events e.g {'comment': comment_obj}
174 """
173 """
175 if not _supports_repo_type(repo_type):
174 if not _supports_repo_type(repo_type):
176 return
175 return
177
176
178 extras = _get_vcs_operation_context(username, repo_name, repo_type, 'close_pull_request')
177 extras = _get_vcs_operation_context(username, repo_name, repo_type, 'close_pull_request')
179 events.trigger(events.PullRequestCloseEvent(pull_request))
178 events.trigger(events.PullRequestCloseEvent(pull_request))
180 extras.update(pull_request.get_api_data())
179 extras.update(pull_request.get_api_data())
181 hooks_base.close_pull_request(**extras)
180 hooks_base.close_pull_request(**extras)
182
181
183
182
184 def trigger_review_pull_request_hook(username, repo_name, repo_type, pull_request, data=None):
183 def trigger_review_pull_request_hook(username, repo_name, repo_type, pull_request, data=None):
185 """
184 """
186 Triggers review status change pull request action hooks
185 Triggers review status change pull request action hooks
187
186
188 :param username: username who creates the pull request
187 :param username: username who creates the pull request
189 :param repo_name: name of target repo
188 :param repo_name: name of target repo
190 :param repo_type: the type of SCM target repo
189 :param repo_type: the type of SCM target repo
191 :param pull_request: the pull request that review status changed
190 :param pull_request: the pull request that review status changed
192 :param data: extra data for specific events e.g {'comment': comment_obj}
191 :param data: extra data for specific events e.g {'comment': comment_obj}
193 """
192 """
194 if not _supports_repo_type(repo_type):
193 if not _supports_repo_type(repo_type):
195 return
194 return
196
195
197 extras = _get_vcs_operation_context(username, repo_name, repo_type, 'review_pull_request')
196 extras = _get_vcs_operation_context(username, repo_name, repo_type, 'review_pull_request')
198 status = data.get('status')
197 status = data.get('status')
199 events.trigger(events.PullRequestReviewEvent(pull_request, status))
198 events.trigger(events.PullRequestReviewEvent(pull_request, status))
200 extras.update(pull_request.get_api_data())
199 extras.update(pull_request.get_api_data())
201 hooks_base.review_pull_request(**extras)
200 hooks_base.review_pull_request(**extras)
202
201
203
202
204 def trigger_comment_pull_request_hook(username, repo_name, repo_type, pull_request, data=None):
203 def trigger_comment_pull_request_hook(username, repo_name, repo_type, pull_request, data=None):
205 """
204 """
206 Triggers when a comment is made on a pull request
205 Triggers when a comment is made on a pull request
207
206
208 :param username: username who creates the pull request
207 :param username: username who creates the pull request
209 :param repo_name: name of target repo
208 :param repo_name: name of target repo
210 :param repo_type: the type of SCM target repo
209 :param repo_type: the type of SCM target repo
211 :param pull_request: the pull request that comment was made on
210 :param pull_request: the pull request that comment was made on
212 :param data: extra data for specific events e.g {'comment': comment_obj}
211 :param data: extra data for specific events e.g {'comment': comment_obj}
213 """
212 """
214 if not _supports_repo_type(repo_type):
213 if not _supports_repo_type(repo_type):
215 return
214 return
216
215
217 extras = _get_vcs_operation_context(username, repo_name, repo_type, 'comment_pull_request')
216 extras = _get_vcs_operation_context(username, repo_name, repo_type, 'comment_pull_request')
218
217
219 comment = data['comment']
218 comment = data['comment']
220 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
219 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
221 extras.update(pull_request.get_api_data())
220 extras.update(pull_request.get_api_data())
222 extras.comment = comment.get_api_data()
221 extras.comment = comment.get_api_data()
223 hooks_base.comment_pull_request(**extras)
222 hooks_base.comment_pull_request(**extras)
224
223
225
224
226 def trigger_comment_pull_request_edit_hook(username, repo_name, repo_type, pull_request, data=None):
225 def trigger_comment_pull_request_edit_hook(username, repo_name, repo_type, pull_request, data=None):
227 """
226 """
228 Triggers when a comment was edited on a pull request
227 Triggers when a comment was edited on a pull request
229
228
230 :param username: username who made the edit
229 :param username: username who made the edit
231 :param repo_name: name of target repo
230 :param repo_name: name of target repo
232 :param repo_type: the type of SCM target repo
231 :param repo_type: the type of SCM target repo
233 :param pull_request: the pull request that comment was made on
232 :param pull_request: the pull request that comment was made on
234 :param data: extra data for specific events e.g {'comment': comment_obj}
233 :param data: extra data for specific events e.g {'comment': comment_obj}
235 """
234 """
236 if not _supports_repo_type(repo_type):
235 if not _supports_repo_type(repo_type):
237 return
236 return
238
237
239 extras = _get_vcs_operation_context(username, repo_name, repo_type, 'comment_pull_request')
238 extras = _get_vcs_operation_context(username, repo_name, repo_type, 'comment_pull_request')
240
239
241 comment = data['comment']
240 comment = data['comment']
242 events.trigger(events.PullRequestCommentEditEvent(pull_request, comment))
241 events.trigger(events.PullRequestCommentEditEvent(pull_request, comment))
243 extras.update(pull_request.get_api_data())
242 extras.update(pull_request.get_api_data())
244 extras.comment = comment.get_api_data()
243 extras.comment = comment.get_api_data()
245 hooks_base.comment_edit_pull_request(**extras)
244 hooks_base.comment_edit_pull_request(**extras)
246
245
247
246
248 def trigger_update_pull_request_hook(username, repo_name, repo_type, pull_request, data=None):
247 def trigger_update_pull_request_hook(username, repo_name, repo_type, pull_request, data=None):
249 """
248 """
250 Triggers update pull request action hooks
249 Triggers update pull request action hooks
251
250
252 :param username: username who creates the pull request
251 :param username: username who creates the pull request
253 :param repo_name: name of target repo
252 :param repo_name: name of target repo
254 :param repo_type: the type of SCM target repo
253 :param repo_type: the type of SCM target repo
255 :param pull_request: the pull request that was updated
254 :param pull_request: the pull request that was updated
256 :param data: extra data for specific events e.g {'comment': comment_obj}
255 :param data: extra data for specific events e.g {'comment': comment_obj}
257 """
256 """
258 if not _supports_repo_type(repo_type):
257 if not _supports_repo_type(repo_type):
259 return
258 return
260
259
261 extras = _get_vcs_operation_context(username, repo_name, repo_type, 'update_pull_request')
260 extras = _get_vcs_operation_context(username, repo_name, repo_type, 'update_pull_request')
262 events.trigger(events.PullRequestUpdateEvent(pull_request))
261 events.trigger(events.PullRequestUpdateEvent(pull_request))
263 extras.update(pull_request.get_api_data())
262 extras.update(pull_request.get_api_data())
264 hooks_base.update_pull_request(**extras)
263 hooks_base.update_pull_request(**extras)
@@ -1,25 +1,25 b''
1 # -*- coding: utf-8 -*-
1
2
2
3 # Copyright (C) 2020-2020 RhodeCode GmbH
3 # Copyright (C) 2020-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 # base64 filter e.g ${ example | base64,n }
21 # base64 filter e.g ${ example | base64,n }
22 def base64(text):
22 def base64(text):
23 import base64
23 import base64
24 from rhodecode.lib.helpers import safe_str, safe_bytes
24 from rhodecode.lib.helpers import safe_str, safe_bytes
25 return safe_str(base64.encodebytes(safe_bytes(text)))
25 return safe_str(base64.encodebytes(safe_bytes(text)))
@@ -1,155 +1,155 b''
1 # -*- coding: utf-8 -*-
1
2
2
3 # Copyright (C) 2012-2020 RhodeCode GmbH
3 # Copyright (C) 2012-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Index schema for RhodeCode
22 Index schema for RhodeCode
23 """
23 """
24
24
25 import importlib
25 import importlib
26 import logging
26 import logging
27
27
28 from rhodecode.lib.index.search_utils import normalize_text_for_matching
28 from rhodecode.lib.index.search_utils import normalize_text_for_matching
29
29
30 log = logging.getLogger(__name__)
30 log = logging.getLogger(__name__)
31
31
32 # leave defaults for backward compat
32 # leave defaults for backward compat
33 default_searcher = 'rhodecode.lib.index.whoosh'
33 default_searcher = 'rhodecode.lib.index.whoosh'
34 default_location = '%(here)s/data/index'
34 default_location = '%(here)s/data/index'
35
35
36 ES_VERSION_2 = '2'
36 ES_VERSION_2 = '2'
37 ES_VERSION_6 = '6'
37 ES_VERSION_6 = '6'
38 # for legacy reasons we keep 2 compat as default
38 # for legacy reasons we keep 2 compat as default
39 DEFAULT_ES_VERSION = ES_VERSION_2
39 DEFAULT_ES_VERSION = ES_VERSION_2
40
40
41 try:
41 try:
42 from rhodecode_tools.lib.fts_index.elasticsearch_engine_6 import ES_CONFIG # pragma: no cover
42 from rhodecode_tools.lib.fts_index.elasticsearch_engine_6 import ES_CONFIG # pragma: no cover
43 except ImportError:
43 except ImportError:
44 log.warning('rhodecode_tools not available, use of full text search is limited')
44 log.warning('rhodecode_tools not available, use of full text search is limited')
45 pass
45 pass
46
46
47
47
class BaseSearcher(object):
    """
    Abstract base class for full-text-search backends (whoosh, elasticsearch).

    Concrete searchers must override :meth:`search` and :meth:`sort_def`;
    the static helpers implement behaviour shared by all backends.
    """
    # URL of the backend's query-language documentation (shown in the UI)
    query_lang_doc = ''
    es_version = None
    name = None
    DIRECTION_ASC = 'asc'
    DIRECTION_DESC = 'desc'

    def __init__(self):
        pass

    def cleanup(self):
        """Release any resources held by the searcher (no-op by default)."""
        pass

    def search(self, query, document_type, search_user,
               repo_name=None, repo_group_name=None,
               raise_on_exc=True):
        """
        Run a search; must be implemented by concrete backends.

        :raises NotImplementedError: always, in this base class.
        """
        # NOTE: was ``raise Exception('NotImplemented')``; use the dedicated
        # built-in for consistency with sort_def() below. Callers catching
        # Exception still catch NotImplementedError.
        raise NotImplementedError()

    @staticmethod
    def query_to_mark(query, default_field=None):
        """
        Formats the query to mark token for jquery.mark.js highlighting. ES could
        have a different format optionally.

        :param default_field:
        :param query:
        """
        return ' '.join(normalize_text_for_matching(query).split())

    @property
    def is_es_6(self):
        # True when this searcher talks to an elasticsearch 6.x backend
        return self.es_version == ES_VERSION_6

    def get_handlers(self):
        """Return extra query handlers; backends may override."""
        return {}

    @staticmethod
    def extract_search_tags(query):
        """Return search tags found in the query; backends may override."""
        return []

    @staticmethod
    def escape_specials(val):
        """
        Handle and escape reserved chars for search
        """
        return val

    def sort_def(self, search_type, direction, sort_field):
        """
        Defines sorting for search. This function should decide if for given
        search_type, sorting can be done with sort_field.

        It also should translate common sort fields into backend specific. e.g elasticsearch
        """
        raise NotImplementedError()

    @staticmethod
    def get_sort(search_type, search_val):
        """
        Method used to parse the GET search sort value to a field and direction.
        e.g asc:lines == asc, lines

        There's also a legacy support for newfirst/oldfirst which defines commit
        sorting only

        :return: tuple of (direction, sort_field); sort_field is None when
            no (valid) sorting was requested.
        """
        direction = BaseSearcher.DIRECTION_ASC
        sort_field = None

        if not search_val:
            return direction, sort_field

        if search_val.startswith('asc:'):
            sort_field = search_val[4:]
            direction = BaseSearcher.DIRECTION_ASC
        elif search_val.startswith('desc:'):
            sort_field = search_val[5:]
            direction = BaseSearcher.DIRECTION_DESC
        elif search_val == 'newfirst' and search_type == 'commit':
            # legacy: newest commits first
            sort_field = 'date'
            direction = BaseSearcher.DIRECTION_DESC
        elif search_val == 'oldfirst' and search_type == 'commit':
            # legacy: oldest commits first
            sort_field = 'date'
            direction = BaseSearcher.DIRECTION_ASC

        return direction, sort_field
134
134
135
135
def search_config(config, prefix='search.'):
    """
    Extract search-related settings from a config mapping.

    Returns a new dict holding every entry of *config* whose key starts
    with *prefix*, with the prefix stripped from the key.

    :param config: mapping of setting name -> value (e.g. an ini settings dict)
    :param prefix: key prefix selecting the search settings
    """
    prefix_len = len(prefix)
    # single pass over items() instead of keys() followed by re-indexing
    return {key[prefix_len:]: value
            for key, value in config.items()
            if key.startswith(prefix)}
142
142
143
143
def searcher_from_config(config, prefix='search.'):
    """
    Instantiate the configured full-text-search backend.

    Reads the ``search.*`` settings from *config*, fills in defaults for
    ``location`` and ``es_version``, imports the configured backend module
    and returns its ``Searcher`` built from those settings.
    """
    _config = search_config(config, prefix)

    # guarantee the mandatory settings exist (backward compat defaults)
    _config.setdefault('location', default_location)
    # use old legacy ES version set to 2
    _config.setdefault('es_version', '2')

    module_name = _config.get('module', default_searcher)
    backend = importlib.import_module(module_name)
    return backend.Searcher(config=_config)
@@ -1,197 +1,197 b''
1 # -*- coding: utf-8 -*-
1
2
2
3 # Copyright (C) 2012-2020 RhodeCode GmbH
3 # Copyright (C) 2012-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 import re
20 import re
21
21
22 import pygments.filter
22 import pygments.filter
23 import pygments.filters
23 import pygments.filters
24 from pygments.token import Comment
24 from pygments.token import Comment
25
25
26 HL_BEG_MARKER = '__RCSearchHLMarkBEG__'
26 HL_BEG_MARKER = '__RCSearchHLMarkBEG__'
27 HL_END_MARKER = '__RCSearchHLMarkEND__'
27 HL_END_MARKER = '__RCSearchHLMarkEND__'
28 HL_MARKER_RE = '{}(.*?){}'.format(HL_BEG_MARKER, HL_END_MARKER)
28 HL_MARKER_RE = '{}(.*?){}'.format(HL_BEG_MARKER, HL_END_MARKER)
29
29
30
30
class ElasticSearchHLFilter(pygments.filters.Filter):
    """
    Pygments filter translating ElasticSearch highlight markers embedded in
    the token stream into ``Comment.ElasticMatch`` tokens.

    Text between ``HL_BEG_MARKER`` and ``HL_END_MARKER`` is re-emitted with
    the ``Comment.ElasticMatch`` token type; the marker strings themselves
    are dropped from the output.
    """
    _names = [HL_BEG_MARKER, HL_END_MARKER]

    def __init__(self, **options):
        pygments.filters.Filter.__init__(self, **options)

    def filter(self, lexer, stream):
        begin_mark, end_mark = self._names
        split_pattern = '({}|{})'.format(begin_mark, end_mark)

        inside_highlight = False
        for ttype, value in stream:
            if begin_mark in value or end_mark in value:
                # markers may sit mid-token: split the value apart and
                # toggle the highlight state as each marker goes by
                for piece in re.split(split_pattern, value):
                    if not piece:
                        continue
                    if piece == begin_mark:
                        inside_highlight = True
                    elif piece == end_mark:
                        inside_highlight = False
                    elif inside_highlight:
                        yield Comment.ElasticMatch, piece
                    else:
                        yield ttype, piece
            else:
                if inside_highlight:
                    yield Comment.ElasticMatch, value
                else:
                    yield ttype, value
66
66
67
67
def extract_phrases(text_query):
    """
    Extracts phrases from search term string making sure phrases
    contained in double quotes are kept together - and discarding empty values
    or fully whitespace values eg.

    'some text "a phrase" more' => ['some', 'text', 'a phrase', 'more']

    """
    phrases = []
    current = []  # characters of the phrase being accumulated
    quoted = False

    for char in text_query:
        if char == '"':
            # a quote both flushes the current buffer and toggles phrase mode
            phrases.append(''.join(current))
            current = []
            quoted = not quoted
        elif char == ' ' and not quoted:
            # unquoted whitespace terminates a phrase
            phrases.append(''.join(current))
            current = []
        else:
            current.append(char)

    phrases.append(''.join(current))
    # discard empty / whitespace-only entries
    return [phrase.strip() for phrase in phrases if phrase.strip()]
107
107
108
108
def get_matching_phrase_offsets(text, phrases):
    """
    Returns a list of string offsets in `text` that the list of `phrases` match

    >>> get_matching_phrase_offsets('some text here', ['some', 'here'])
    [(0, 4), (10, 14)]

    """
    phrases = phrases or []
    offsets = []

    for phrase in phrases:
        # phrases are plain text, not regex patterns: escape them so that
        # characters like '+' or '.' match literally instead of being
        # interpreted (or crashing re.finditer). For callers passing
        # normalized text (non-alphanumerics already stripped) this is a no-op.
        for match in re.finditer(re.escape(phrase), text):
            offsets.append((match.start(), match.end()))

    return offsets
125
125
126
126
def get_matching_markers_offsets(text, markers=None):
    """
    Returns a list of string offsets in `text` that the are between matching markers

    >>> get_matching_markers_offsets('$1some$2 text $1here$2 marked', ['\$1(.*?)\$2'])
    [(0, 8), (14, 22)]

    """
    # fall back to the default highlight-marker pattern; after this the
    # list is always non-empty, so no extra guard is needed
    patterns = markers or [HL_MARKER_RE]

    return [(match.start(), match.end())
            for pattern in patterns
            for match in re.finditer(pattern, text)]
144
144
145
145
def normalize_text_for_matching(x):
    """
    Replaces all non alfanum characters to spaces and lower cases the string,
    useful for comparing two text strings without punctuation
    """
    # \W is exactly the complement of \w, i.e. [^a-zA-Z0-9_] plus unicode
    return re.sub(r'\W', ' ', x.lower())
152
152
153
153
def get_matching_line_offsets(lines, terms=None, markers=None):
    """ Return a set of `lines` indices (starting from 1) matching a
    text search query, along with `context` lines above/below matching lines

    :param lines: string containing newline-separated lines to scan
    :param terms: search term string to match in lines eg. 'some text'
    :param markers: instead of terms, use highlight markers instead that
       mark beginning and end for matched item. eg. ['START(.*?)END']

    eg.

    text = '''
    words words words
    words words words
    some text some
    words words words
    words words words
    text here what
    '''
    get_matching_line_offsets(text, 'text', context=1)
    6, {3: [(5, 9)], 6: [(0, 4)]]

    """
    # build a per-line offset finder depending on the search mode
    if terms:
        phrases = [normalize_text_for_matching(phrase)
                   for phrase in extract_phrases(terms)]

        def _line_offsets(line):
            return get_matching_phrase_offsets(
                normalize_text_for_matching(line), phrases)
    else:
        hl_markers = markers or [HL_MARKER_RE]

        def _line_offsets(line):
            return get_matching_markers_offsets(line, markers=hl_markers)

    matching_lines = {}
    line_index = 0
    for line_index, line in enumerate(lines.splitlines(), start=1):
        found = _line_offsets(line)
        if found:
            matching_lines[line_index] = found

    # line_index ends up holding the total number of lines scanned
    return line_index, matching_lines
@@ -1,311 +1,311 b''
1 # -*- coding: utf-8 -*-
1
2
2
3 # Copyright (C) 2012-2020 RhodeCode GmbH
3 # Copyright (C) 2012-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Index schema for RhodeCode
22 Index schema for RhodeCode
23 """
23 """
24
24
25
25
26 import os
26 import os
27 import re
27 import re
28 import logging
28 import logging
29
29
30 from whoosh import query as query_lib
30 from whoosh import query as query_lib
31 from whoosh.highlight import HtmlFormatter, ContextFragmenter
31 from whoosh.highlight import HtmlFormatter, ContextFragmenter
32 from whoosh.index import create_in, open_dir, exists_in, EmptyIndexError
32 from whoosh.index import create_in, open_dir, exists_in, EmptyIndexError
33 from whoosh.qparser import QueryParser, QueryParserError
33 from whoosh.qparser import QueryParser, QueryParserError
34
34
35 import rhodecode.lib.helpers as h
35 import rhodecode.lib.helpers as h
36 from rhodecode.lib.index import BaseSearcher
36 from rhodecode.lib.index import BaseSearcher
37 from rhodecode.lib.utils2 import safe_unicode
37 from rhodecode.lib.utils2 import safe_unicode
38
38
39 log = logging.getLogger(__name__)
39 log = logging.getLogger(__name__)
40
40
41
41
42 try:
42 try:
43 # we first try to import from rhodecode tools, fallback to copies if
43 # we first try to import from rhodecode tools, fallback to copies if
44 # we're unable to
44 # we're unable to
45 from rhodecode_tools.lib.fts_index.whoosh_schema import (
45 from rhodecode_tools.lib.fts_index.whoosh_schema import (
46 ANALYZER, FILE_INDEX_NAME, FILE_SCHEMA, COMMIT_INDEX_NAME,
46 ANALYZER, FILE_INDEX_NAME, FILE_SCHEMA, COMMIT_INDEX_NAME,
47 COMMIT_SCHEMA)
47 COMMIT_SCHEMA)
48 except ImportError:
48 except ImportError:
49 log.warning('rhodecode_tools schema not available, doing a fallback '
49 log.warning('rhodecode_tools schema not available, doing a fallback '
50 'import from `rhodecode.lib.index.whoosh_fallback_schema`')
50 'import from `rhodecode.lib.index.whoosh_fallback_schema`')
51 from rhodecode.lib.index.whoosh_fallback_schema import (
51 from rhodecode.lib.index.whoosh_fallback_schema import (
52 ANALYZER, FILE_INDEX_NAME, FILE_SCHEMA, COMMIT_INDEX_NAME,
52 ANALYZER, FILE_INDEX_NAME, FILE_SCHEMA, COMMIT_INDEX_NAME,
53 COMMIT_SCHEMA)
53 COMMIT_SCHEMA)
54
54
55
55
56 FORMATTER = HtmlFormatter('span', between='\n<span class="break">...</span>\n')
56 FORMATTER = HtmlFormatter('span', between='\n<span class="break">...</span>\n')
57 FRAGMENTER = ContextFragmenter(200)
57 FRAGMENTER = ContextFragmenter(200)
58
58
59 log = logging.getLogger(__name__)
59 log = logging.getLogger(__name__)
60
60
61
61
62 class WhooshSearcher(BaseSearcher):
62 class WhooshSearcher(BaseSearcher):
63 # this also shows in UI
63 # this also shows in UI
64 query_lang_doc = 'http://whoosh.readthedocs.io/en/latest/querylang.html'
64 query_lang_doc = 'http://whoosh.readthedocs.io/en/latest/querylang.html'
65 name = 'whoosh'
65 name = 'whoosh'
66
66
    def __init__(self, config):
        """
        Open (or create) the whoosh file and commit indexes stored under
        ``config['location']`` and keep handles to them on the instance.

        :param config: dict with at least a ``location`` key pointing at
            the directory holding the whoosh index files
        """
        # NOTE(review): `Searcher` here presumably is a module-level alias
        # for WhooshSearcher defined elsewhere in this module -- TODO confirm
        super(Searcher, self).__init__()
        self.config = config
        # make sure the index directory exists before opening/creating indexes
        if not os.path.isdir(self.config['location']):
            os.makedirs(self.config['location'])

        # open the file index if it already exists on disk, otherwise create it
        opener = create_in
        if exists_in(self.config['location'], indexname=FILE_INDEX_NAME):
            opener = open_dir
        file_index = opener(self.config['location'], schema=FILE_SCHEMA,
                            indexname=FILE_INDEX_NAME)

        # same open-or-create logic for the commit (changeset) index
        opener = create_in
        if exists_in(self.config['location'], indexname=COMMIT_INDEX_NAME):
            opener = open_dir
        changeset_index = opener(self.config['location'], schema=COMMIT_SCHEMA,
                                 indexname=COMMIT_INDEX_NAME)

        self.commit_schema = COMMIT_SCHEMA
        self.commit_index = changeset_index
        self.file_schema = FILE_SCHEMA
        self.file_index = file_index
        # populated by _init_searcher() when a search runs; see search()
        self.searcher = None
90
90
91 def cleanup(self):
91 def cleanup(self):
92 if self.searcher:
92 if self.searcher:
93 self.searcher.close()
93 self.searcher.close()
94
94
95 def _extend_query(self, query):
95 def _extend_query(self, query):
96 hashes = re.compile('([0-9a-f]{5,40})').findall(query)
96 hashes = re.compile('([0-9a-f]{5,40})').findall(query)
97 if hashes:
97 if hashes:
98 hashes_or_query = ' OR '.join('commit_id:%s*' % h for h in hashes)
98 hashes_or_query = ' OR '.join('commit_id:%s*' % h for h in hashes)
99 query = u'(%s) OR %s' % (query, hashes_or_query)
99 query = u'(%s) OR %s' % (query, hashes_or_query)
100 return query
100 return query
101
101
102 def sort_def(self, search_type, direction, sort_field):
102 def sort_def(self, search_type, direction, sort_field):
103
103
104 if search_type == 'commit':
104 if search_type == 'commit':
105 field_defs = {
105 field_defs = {
106 'message': 'message',
106 'message': 'message',
107 'date': 'date',
107 'date': 'date',
108 'author_email': 'author',
108 'author_email': 'author',
109 }
109 }
110 elif search_type == 'path':
110 elif search_type == 'path':
111 field_defs = {
111 field_defs = {
112 'file': 'path',
112 'file': 'path',
113 'size': 'size',
113 'size': 'size',
114 'lines': 'lines',
114 'lines': 'lines',
115 }
115 }
116 elif search_type == 'content':
116 elif search_type == 'content':
117 # NOTE(dan): content doesn't support any sorting
117 # NOTE(dan): content doesn't support any sorting
118 field_defs = {}
118 field_defs = {}
119 else:
119 else:
120 return ''
120 return ''
121
121
122 if sort_field in field_defs:
122 if sort_field in field_defs:
123 return field_defs[sort_field]
123 return field_defs[sort_field]
124
124
    def search(self, query, document_type, search_user,
               repo_name=None, repo_group_name=None,
               requested_page=1, page_limit=10, sort=None, raise_on_exc=True):
        """
        Run a whoosh full-text search and return a result dict with keys
        ``results``, ``count``, ``error`` and ``runtime``. Errors are
        reported via the ``error`` key rather than raised.

        :param query: raw query string from the user
        :param document_type: what to search, translated by
            _prepare_for_search() into (search_type, index_name, schema)
        :param search_user: auth user used to filter searchable repositories
        :param sort: UI sort value, e.g. 'asc:lines' or legacy 'newfirst'
        """
        # NOTE(review): original_query is captured but never used below
        original_query = query
        # widen hash-like tokens into commit_id:<hash>* OR-clauses
        query = self._extend_query(query)

        log.debug(u'QUERY: %s on %s', query, document_type)
        result = {
            'results': [],
            'count': 0,
            'error': None,
            'runtime': 0
        }
        search_type, index_name, schema_defn = self._prepare_for_search(
            document_type)
        self._init_searcher(index_name)
        try:
            qp = QueryParser(search_type, schema=schema_defn)
            # restrict hits to repositories the user may actually read
            allowed_repos_filter = self._get_repo_filter(
                search_user, repo_name)
            try:
                query = qp.parse(safe_unicode(query))
                log.debug('query: %s (%s)', query, repr(query))

                reverse, sorted_by = False, None
                direction, sort_field = self.get_sort(search_type, sort)
                if sort_field:
                    sort_definition = self.sort_def(search_type, direction, sort_field)
                    if sort_definition:
                        sorted_by = sort_definition
                        # NOTE(review): `Searcher` presumably aliases this
                        # class at module level -- TODO confirm
                        if direction == Searcher.DIRECTION_DESC:
                            reverse = True
                        if direction == Searcher.DIRECTION_ASC:
                            reverse = False

                whoosh_results = self.searcher.search(
                    query, filter=allowed_repos_filter, limit=None,
                    sortedby=sorted_by, reverse=reverse)

                # fixes for 32k limit that whoosh uses for highlight
                whoosh_results.fragmenter.charlimit = None
                res_ln = whoosh_results.scored_length()
                result['runtime'] = whoosh_results.runtime
                result['count'] = res_ln
                result['results'] = WhooshResultWrapper(
                    search_type, res_ln, whoosh_results)

            except QueryParserError:
                result['error'] = 'Invalid search query. Try quoting it.'
        except (EmptyIndexError, IOError, OSError):
            # index directory missing/unreadable: report, do not raise
            msg = 'There is no index to search in. Please run whoosh indexer'
            log.exception(msg)
            result['error'] = msg
        except Exception:
            msg = 'An error occurred during this search operation'
            log.exception(msg)
            result['error'] = msg

        return result
185
185
186 def statistics(self, translator):
186 def statistics(self, translator):
187 _ = translator
187 _ = translator
188 stats = [
188 stats = [
189 {'key': _('Index Type'), 'value': 'Whoosh'},
189 {'key': _('Index Type'), 'value': 'Whoosh'},
190 {'sep': True},
190 {'sep': True},
191
191
192 {'key': _('File Index'), 'value': str(self.file_index)},
192 {'key': _('File Index'), 'value': str(self.file_index)},
193 {'key': _('Indexed documents'), 'value': self.file_index.doc_count()},
193 {'key': _('Indexed documents'), 'value': self.file_index.doc_count()},
194 {'key': _('Last update'), 'value': h.time_to_datetime(self.file_index.last_modified())},
194 {'key': _('Last update'), 'value': h.time_to_datetime(self.file_index.last_modified())},
195
195
196 {'sep': True},
196 {'sep': True},
197
197
198 {'key': _('Commit index'), 'value': str(self.commit_index)},
198 {'key': _('Commit index'), 'value': str(self.commit_index)},
199 {'key': _('Indexed documents'), 'value': str(self.commit_index.doc_count())},
199 {'key': _('Indexed documents'), 'value': str(self.commit_index.doc_count())},
200 {'key': _('Last update'), 'value': h.time_to_datetime(self.commit_index.last_modified())}
200 {'key': _('Last update'), 'value': h.time_to_datetime(self.commit_index.last_modified())}
201 ]
201 ]
202 return stats
202 return stats
203
203
204 def _get_repo_filter(self, auth_user, repo_name):
204 def _get_repo_filter(self, auth_user, repo_name):
205
205
206 allowed_to_search = [
206 allowed_to_search = [
207 repo for repo, perm in
207 repo for repo, perm in
208 auth_user.permissions['repositories'].items()
208 auth_user.permissions['repositories'].items()
209 if perm != 'repository.none']
209 if perm != 'repository.none']
210
210
211 if repo_name:
211 if repo_name:
212 repo_filter = [query_lib.Term('repository', repo_name)]
212 repo_filter = [query_lib.Term('repository', repo_name)]
213
213
214 elif 'hg.admin' in auth_user.permissions.get('global', []):
214 elif 'hg.admin' in auth_user.permissions.get('global', []):
215 return None
215 return None
216
216
217 else:
217 else:
218 repo_filter = [query_lib.Term('repository', _rn)
218 repo_filter = [query_lib.Term('repository', _rn)
219 for _rn in allowed_to_search]
219 for _rn in allowed_to_search]
220 # in case we're not allowed to search anywhere, it's a trick
220 # in case we're not allowed to search anywhere, it's a trick
221 # to tell whoosh we're filtering, on ALL results
221 # to tell whoosh we're filtering, on ALL results
222 repo_filter = repo_filter or [query_lib.Term('repository', '')]
222 repo_filter = repo_filter or [query_lib.Term('repository', '')]
223
223
224 return query_lib.Or(repo_filter)
224 return query_lib.Or(repo_filter)
225
225
226 def _prepare_for_search(self, cur_type):
226 def _prepare_for_search(self, cur_type):
227 search_type = {
227 search_type = {
228 'content': 'content',
228 'content': 'content',
229 'commit': 'message',
229 'commit': 'message',
230 'path': 'path',
230 'path': 'path',
231 'repository': 'repository'
231 'repository': 'repository'
232 }.get(cur_type, 'content')
232 }.get(cur_type, 'content')
233
233
234 index_name = {
234 index_name = {
235 'content': FILE_INDEX_NAME,
235 'content': FILE_INDEX_NAME,
236 'commit': COMMIT_INDEX_NAME,
236 'commit': COMMIT_INDEX_NAME,
237 'path': FILE_INDEX_NAME
237 'path': FILE_INDEX_NAME
238 }.get(cur_type, FILE_INDEX_NAME)
238 }.get(cur_type, FILE_INDEX_NAME)
239
239
240 schema_defn = {
240 schema_defn = {
241 'content': self.file_schema,
241 'content': self.file_schema,
242 'commit': self.commit_schema,
242 'commit': self.commit_schema,
243 'path': self.file_schema
243 'path': self.file_schema
244 }.get(cur_type, self.file_schema)
244 }.get(cur_type, self.file_schema)
245
245
246 log.debug('IDX: %s', index_name)
246 log.debug('IDX: %s', index_name)
247 log.debug('SCHEMA: %s', schema_defn)
247 log.debug('SCHEMA: %s', schema_defn)
248 return search_type, index_name, schema_defn
248 return search_type, index_name, schema_defn
249
249
250 def _init_searcher(self, index_name):
250 def _init_searcher(self, index_name):
251 idx = open_dir(self.config['location'], indexname=index_name)
251 idx = open_dir(self.config['location'], indexname=index_name)
252 self.searcher = idx.searcher()
252 self.searcher = idx.searcher()
253 return self.searcher
253 return self.searcher
254
254
255
255
256 Searcher = WhooshSearcher
256 Searcher = WhooshSearcher
257
257
258
258
259 class WhooshResultWrapper(object):
259 class WhooshResultWrapper(object):
260 def __init__(self, search_type, total_hits, results):
260 def __init__(self, search_type, total_hits, results):
261 self.search_type = search_type
261 self.search_type = search_type
262 self.results = results
262 self.results = results
263 self.total_hits = total_hits
263 self.total_hits = total_hits
264
264
265 def __str__(self):
265 def __str__(self):
266 return '<%s at %s>' % (self.__class__.__name__, len(self))
266 return '<%s at %s>' % (self.__class__.__name__, len(self))
267
267
268 def __repr__(self):
268 def __repr__(self):
269 return self.__str__()
269 return self.__str__()
270
270
271 def __len__(self):
271 def __len__(self):
272 return self.total_hits
272 return self.total_hits
273
273
274 def __iter__(self):
274 def __iter__(self):
275 """
275 """
276 Allows Iteration over results,and lazy generate content
276 Allows Iteration over results,and lazy generate content
277
277
278 *Requires* implementation of ``__getitem__`` method.
278 *Requires* implementation of ``__getitem__`` method.
279 """
279 """
280 for hit in self.results:
280 for hit in self.results:
281 yield self.get_full_content(hit)
281 yield self.get_full_content(hit)
282
282
283 def __getitem__(self, key):
283 def __getitem__(self, key):
284 """
284 """
285 Slicing of resultWrapper
285 Slicing of resultWrapper
286 """
286 """
287 i, j = key.start, key.stop
287 i, j = key.start, key.stop
288 for hit in self.results[i:j]:
288 for hit in self.results[i:j]:
289 yield self.get_full_content(hit)
289 yield self.get_full_content(hit)
290
290
291 def get_full_content(self, hit):
291 def get_full_content(self, hit):
292 # TODO: marcink: this feels like an overkill, there's a lot of data
292 # TODO: marcink: this feels like an overkill, there's a lot of data
293 # inside hit object, and we don't need all
293 # inside hit object, and we don't need all
294 res = dict(hit)
294 res = dict(hit)
295 # elastic search uses that, we set it empty so it fallbacks to regular HL logic
295 # elastic search uses that, we set it empty so it fallbacks to regular HL logic
296 res['content_highlight'] = ''
296 res['content_highlight'] = ''
297
297
298 f_path = '' # pragma: no cover
298 f_path = '' # pragma: no cover
299 if self.search_type in ['content', 'path']:
299 if self.search_type in ['content', 'path']:
300 f_path = res['path'][len(res['repository']):]
300 f_path = res['path'][len(res['repository']):]
301 f_path = f_path.lstrip(os.sep)
301 f_path = f_path.lstrip(os.sep)
302
302
303 if self.search_type == 'content':
303 if self.search_type == 'content':
304 res.update({'content_short_hl': hit.highlights('content'),
304 res.update({'content_short_hl': hit.highlights('content'),
305 'f_path': f_path})
305 'f_path': f_path})
306 elif self.search_type == 'path':
306 elif self.search_type == 'path':
307 res.update({'f_path': f_path})
307 res.update({'f_path': f_path})
308 elif self.search_type == 'message':
308 elif self.search_type == 'message':
309 res.update({'message_hl': hit.highlights('message')})
309 res.update({'message_hl': hit.highlights('message')})
310
310
311 return res
311 return res
@@ -1,75 +1,75 b''
1 # -*- coding: utf-8 -*-
1
2
2
3 # Copyright (C) 2012-2020 RhodeCode GmbH
3 # Copyright (C) 2012-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Whoosh fallback schema for RhodeCode in case rhodecode_tools defined one is
22 Whoosh fallback schema for RhodeCode in case rhodecode_tools defined one is
23 not available
23 not available
24 """
24 """
25
25
26
26
27
27
28 from whoosh.analysis import RegexTokenizer, LowercaseFilter
28 from whoosh.analysis import RegexTokenizer, LowercaseFilter
29 from whoosh.formats import Characters
29 from whoosh.formats import Characters
30 from whoosh.fields import (
30 from whoosh.fields import (
31 TEXT, ID, STORED, NUMERIC, BOOLEAN, Schema, FieldType, DATETIME)
31 TEXT, ID, STORED, NUMERIC, BOOLEAN, Schema, FieldType, DATETIME)
32
32
33 # CUSTOM ANALYZER wordsplit + lowercase filter for case insensitive search
33 # CUSTOM ANALYZER wordsplit + lowercase filter for case insensitive search
34 ANALYZER = RegexTokenizer(expression=r"\w+") | LowercaseFilter()
34 ANALYZER = RegexTokenizer(expression=r"\w+") | LowercaseFilter()
35
35
36 # FILE INDEX SCHEMA DEFINITION
36 # FILE INDEX SCHEMA DEFINITION
37 FILE_INDEX_NAME = 'FILE_INDEX'
37 FILE_INDEX_NAME = 'FILE_INDEX'
38 FILE_SCHEMA = Schema(
38 FILE_SCHEMA = Schema(
39 fileid=ID(unique=True), # Path
39 fileid=ID(unique=True), # Path
40 repository=ID(stored=True),
40 repository=ID(stored=True),
41 repository_id=NUMERIC(unique=True, stored=True), # Numeric id of repo
41 repository_id=NUMERIC(unique=True, stored=True), # Numeric id of repo
42 repo_name=TEXT(stored=True),
42 repo_name=TEXT(stored=True),
43 owner=TEXT(),
43 owner=TEXT(),
44 path=TEXT(stored=True),
44 path=TEXT(stored=True),
45 content=FieldType(format=Characters(), analyzer=ANALYZER,
45 content=FieldType(format=Characters(), analyzer=ANALYZER,
46 scorable=True, stored=True),
46 scorable=True, stored=True),
47 modtime=STORED(),
47 modtime=STORED(),
48 md5=STORED(),
48 md5=STORED(),
49 extension=ID(stored=True),
49 extension=ID(stored=True),
50 commit_id=TEXT(stored=True),
50 commit_id=TEXT(stored=True),
51
51
52 size=NUMERIC(int, 64, signed=False, stored=True),
52 size=NUMERIC(int, 64, signed=False, stored=True),
53 mimetype=TEXT(stored=True),
53 mimetype=TEXT(stored=True),
54 lines=NUMERIC(int, 64, signed=False, stored=True),
54 lines=NUMERIC(int, 64, signed=False, stored=True),
55 )
55 )
56
56
57
57
58 # COMMIT INDEX SCHEMA
58 # COMMIT INDEX SCHEMA
59 COMMIT_INDEX_NAME = 'COMMIT_INDEX'
59 COMMIT_INDEX_NAME = 'COMMIT_INDEX'
60 COMMIT_SCHEMA = Schema(
60 COMMIT_SCHEMA = Schema(
61 commit_id=ID(unique=True, stored=True),
61 commit_id=ID(unique=True, stored=True),
62 repository=ID(unique=True, stored=True),
62 repository=ID(unique=True, stored=True),
63 repository_id=NUMERIC(unique=True, stored=True),
63 repository_id=NUMERIC(unique=True, stored=True),
64 commit_idx=NUMERIC(stored=True, sortable=True),
64 commit_idx=NUMERIC(stored=True, sortable=True),
65 commit_idx_sort=ID(),
65 commit_idx_sort=ID(),
66 date=NUMERIC(int, 64, signed=False, stored=True, sortable=True),
66 date=NUMERIC(int, 64, signed=False, stored=True, sortable=True),
67 owner=TEXT(stored=True),
67 owner=TEXT(stored=True),
68 author=TEXT(stored=True),
68 author=TEXT(stored=True),
69 message=FieldType(format=Characters(), analyzer=ANALYZER,
69 message=FieldType(format=Characters(), analyzer=ANALYZER,
70 scorable=True, stored=True),
70 scorable=True, stored=True),
71 parents=TEXT(stored=True),
71 parents=TEXT(stored=True),
72 added=TEXT(stored=True), # space separated names of added files
72 added=TEXT(stored=True), # space separated names of added files
73 removed=TEXT(stored=True), # space separated names of removed files
73 removed=TEXT(stored=True), # space separated names of removed files
74 changed=TEXT(stored=True), # space separated names of changed files
74 changed=TEXT(stored=True), # space separated names of changed files
75 )
75 )
@@ -1,279 +1,278 b''
1 # -*- coding: utf-8 -*-
2
1
3 # Copyright (C) 2010-2020 RhodeCode GmbH
2 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
3 #
5 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
8 #
7 #
9 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
11 # GNU General Public License for more details.
13 #
12 #
14 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
15 #
17 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
19
21 import collections
20 import collections
22
21
23 import sqlalchemy
22 import sqlalchemy
24 from sqlalchemy import UnicodeText
23 from sqlalchemy import UnicodeText
25 from sqlalchemy.ext.mutable import Mutable
24 from sqlalchemy.ext.mutable import Mutable
26
25
27 from rhodecode.lib.ext_json import json
26 from rhodecode.lib.ext_json import json
28 from rhodecode.lib.utils2 import safe_unicode
27 from rhodecode.lib.utils2 import safe_unicode
29
28
30
29
31 class JsonRaw(str):
30 class JsonRaw(str):
32 """
31 """
33 Allows interacting with a JSON types field using a raw string.
32 Allows interacting with a JSON types field using a raw string.
34
33
35 For example::
34 For example::
36 db_instance = JsonTable()
35 db_instance = JsonTable()
37 db_instance.enabled = True
36 db_instance.enabled = True
38 db_instance.json_data = JsonRaw('{"a": 4}')
37 db_instance.json_data = JsonRaw('{"a": 4}')
39
38
40 This will bypass serialization/checks, and allow storing
39 This will bypass serialization/checks, and allow storing
41 raw values
40 raw values
42 """
41 """
43 pass
42 pass
44
43
45
44
46 # Set this to the standard dict if Order is not required
45 # Set this to the standard dict if Order is not required
47 DictClass = collections.OrderedDict
46 DictClass = collections.OrderedDict
48
47
49
48
50 class JSONEncodedObj(sqlalchemy.types.TypeDecorator):
49 class JSONEncodedObj(sqlalchemy.types.TypeDecorator):
51 """
50 """
52 Represents an immutable structure as a json-encoded string.
51 Represents an immutable structure as a json-encoded string.
53
52
54 If default is, for example, a dict, then a NULL value in the
53 If default is, for example, a dict, then a NULL value in the
55 database will be exposed as an empty dict.
54 database will be exposed as an empty dict.
56 """
55 """
57
56
58 impl = UnicodeText
57 impl = UnicodeText
59 safe = True
58 safe = True
60 enforce_unicode = True
59 enforce_unicode = True
61
60
62 def __init__(self, *args, **kwargs):
61 def __init__(self, *args, **kwargs):
63 self.default = kwargs.pop('default', None)
62 self.default = kwargs.pop('default', None)
64 self.safe = kwargs.pop('safe_json', self.safe)
63 self.safe = kwargs.pop('safe_json', self.safe)
65 self.enforce_unicode = kwargs.pop('enforce_unicode', self.enforce_unicode)
64 self.enforce_unicode = kwargs.pop('enforce_unicode', self.enforce_unicode)
66 self.dialect_map = kwargs.pop('dialect_map', {})
65 self.dialect_map = kwargs.pop('dialect_map', {})
67 super(JSONEncodedObj, self).__init__(*args, **kwargs)
66 super(JSONEncodedObj, self).__init__(*args, **kwargs)
68
67
69 def load_dialect_impl(self, dialect):
68 def load_dialect_impl(self, dialect):
70 if dialect.name in self.dialect_map:
69 if dialect.name in self.dialect_map:
71 return dialect.type_descriptor(self.dialect_map[dialect.name])
70 return dialect.type_descriptor(self.dialect_map[dialect.name])
72 return dialect.type_descriptor(self.impl)
71 return dialect.type_descriptor(self.impl)
73
72
74 def process_bind_param(self, value, dialect):
73 def process_bind_param(self, value, dialect):
75 if isinstance(value, JsonRaw):
74 if isinstance(value, JsonRaw):
76 value = value
75 value = value
77 elif value is not None:
76 elif value is not None:
78 value = json.dumps(value)
77 value = json.dumps(value)
79 if self.enforce_unicode:
78 if self.enforce_unicode:
80 value = safe_unicode(value)
79 value = safe_unicode(value)
81 return value
80 return value
82
81
83 def process_result_value(self, value, dialect):
82 def process_result_value(self, value, dialect):
84 if self.default is not None and (not value or value == '""'):
83 if self.default is not None and (not value or value == '""'):
85 return self.default()
84 return self.default()
86
85
87 if value is not None:
86 if value is not None:
88 try:
87 try:
89 value = json.loads(value, object_pairs_hook=DictClass)
88 value = json.loads(value, object_pairs_hook=DictClass)
90 except Exception as e:
89 except Exception as e:
91 if self.safe and self.default is not None:
90 if self.safe and self.default is not None:
92 return self.default()
91 return self.default()
93 else:
92 else:
94 raise
93 raise
95 return value
94 return value
96
95
97
96
98 class MutationObj(Mutable):
97 class MutationObj(Mutable):
99 @classmethod
98 @classmethod
100 def coerce(cls, key, value):
99 def coerce(cls, key, value):
101 if isinstance(value, dict) and not isinstance(value, MutationDict):
100 if isinstance(value, dict) and not isinstance(value, MutationDict):
102 return MutationDict.coerce(key, value)
101 return MutationDict.coerce(key, value)
103 if isinstance(value, list) and not isinstance(value, MutationList):
102 if isinstance(value, list) and not isinstance(value, MutationList):
104 return MutationList.coerce(key, value)
103 return MutationList.coerce(key, value)
105 return value
104 return value
106
105
107 def de_coerce(self):
106 def de_coerce(self):
108 return self
107 return self
109
108
110 @classmethod
109 @classmethod
111 def _listen_on_attribute(cls, attribute, coerce, parent_cls):
110 def _listen_on_attribute(cls, attribute, coerce, parent_cls):
112 key = attribute.key
111 key = attribute.key
113 if parent_cls is not attribute.class_:
112 if parent_cls is not attribute.class_:
114 return
113 return
115
114
116 # rely on "propagate" here
115 # rely on "propagate" here
117 parent_cls = attribute.class_
116 parent_cls = attribute.class_
118
117
119 def load(state, *args):
118 def load(state, *args):
120 val = state.dict.get(key, None)
119 val = state.dict.get(key, None)
121 if coerce:
120 if coerce:
122 val = cls.coerce(key, val)
121 val = cls.coerce(key, val)
123 state.dict[key] = val
122 state.dict[key] = val
124 if isinstance(val, cls):
123 if isinstance(val, cls):
125 val._parents[state.obj()] = key
124 val._parents[state.obj()] = key
126
125
127 def set(target, value, oldvalue, initiator):
126 def set(target, value, oldvalue, initiator):
128 if not isinstance(value, cls):
127 if not isinstance(value, cls):
129 value = cls.coerce(key, value)
128 value = cls.coerce(key, value)
130 if isinstance(value, cls):
129 if isinstance(value, cls):
131 value._parents[target.obj()] = key
130 value._parents[target.obj()] = key
132 if isinstance(oldvalue, cls):
131 if isinstance(oldvalue, cls):
133 oldvalue._parents.pop(target.obj(), None)
132 oldvalue._parents.pop(target.obj(), None)
134 return value
133 return value
135
134
136 def pickle(state, state_dict):
135 def pickle(state, state_dict):
137 val = state.dict.get(key, None)
136 val = state.dict.get(key, None)
138 if isinstance(val, cls):
137 if isinstance(val, cls):
139 if 'ext.mutable.values' not in state_dict:
138 if 'ext.mutable.values' not in state_dict:
140 state_dict['ext.mutable.values'] = []
139 state_dict['ext.mutable.values'] = []
141 state_dict['ext.mutable.values'].append(val)
140 state_dict['ext.mutable.values'].append(val)
142
141
143 def unpickle(state, state_dict):
142 def unpickle(state, state_dict):
144 if 'ext.mutable.values' in state_dict:
143 if 'ext.mutable.values' in state_dict:
145 for val in state_dict['ext.mutable.values']:
144 for val in state_dict['ext.mutable.values']:
146 val._parents[state.obj()] = key
145 val._parents[state.obj()] = key
147
146
148 sqlalchemy.event.listen(parent_cls, 'load', load, raw=True,
147 sqlalchemy.event.listen(parent_cls, 'load', load, raw=True,
149 propagate=True)
148 propagate=True)
150 sqlalchemy.event.listen(parent_cls, 'refresh', load, raw=True,
149 sqlalchemy.event.listen(parent_cls, 'refresh', load, raw=True,
151 propagate=True)
150 propagate=True)
152 sqlalchemy.event.listen(parent_cls, 'pickle', pickle, raw=True,
151 sqlalchemy.event.listen(parent_cls, 'pickle', pickle, raw=True,
153 propagate=True)
152 propagate=True)
154 sqlalchemy.event.listen(attribute, 'set', set, raw=True, retval=True,
153 sqlalchemy.event.listen(attribute, 'set', set, raw=True, retval=True,
155 propagate=True)
154 propagate=True)
156 sqlalchemy.event.listen(parent_cls, 'unpickle', unpickle, raw=True,
155 sqlalchemy.event.listen(parent_cls, 'unpickle', unpickle, raw=True,
157 propagate=True)
156 propagate=True)
158
157
159
158
160 class MutationDict(MutationObj, DictClass):
159 class MutationDict(MutationObj, DictClass):
161 @classmethod
160 @classmethod
162 def coerce(cls, key, value):
161 def coerce(cls, key, value):
163 """Convert plain dictionary to MutationDict"""
162 """Convert plain dictionary to MutationDict"""
164 self = MutationDict(
163 self = MutationDict(
165 (k, MutationObj.coerce(key, v)) for (k, v) in value.items())
164 (k, MutationObj.coerce(key, v)) for (k, v) in value.items())
166 self._key = key
165 self._key = key
167 return self
166 return self
168
167
169 def de_coerce(self):
168 def de_coerce(self):
170 return dict(self)
169 return dict(self)
171
170
172 def __setitem__(self, key, value):
171 def __setitem__(self, key, value):
173 # Due to the way OrderedDict works, this is called during __init__.
172 # Due to the way OrderedDict works, this is called during __init__.
174 # At this time we don't have a key set, but what is more, the value
173 # At this time we don't have a key set, but what is more, the value
175 # being set has already been coerced. So special case this and skip.
174 # being set has already been coerced. So special case this and skip.
176 if hasattr(self, '_key'):
175 if hasattr(self, '_key'):
177 value = MutationObj.coerce(self._key, value)
176 value = MutationObj.coerce(self._key, value)
178 DictClass.__setitem__(self, key, value)
177 DictClass.__setitem__(self, key, value)
179 self.changed()
178 self.changed()
180
179
181 def __delitem__(self, key):
180 def __delitem__(self, key):
182 DictClass.__delitem__(self, key)
181 DictClass.__delitem__(self, key)
183 self.changed()
182 self.changed()
184
183
185 def __setstate__(self, state):
184 def __setstate__(self, state):
186 self.__dict__ = state
185 self.__dict__ = state
187
186
188 def __reduce_ex__(self, proto):
187 def __reduce_ex__(self, proto):
189 # support pickling of MutationDicts
188 # support pickling of MutationDicts
190 d = dict(self)
189 d = dict(self)
191 return (self.__class__, (d,))
190 return (self.__class__, (d,))
192
191
193
192
194 class MutationList(MutationObj, list):
193 class MutationList(MutationObj, list):
195 @classmethod
194 @classmethod
196 def coerce(cls, key, value):
195 def coerce(cls, key, value):
197 """Convert plain list to MutationList"""
196 """Convert plain list to MutationList"""
198 self = MutationList((MutationObj.coerce(key, v) for v in value))
197 self = MutationList((MutationObj.coerce(key, v) for v in value))
199 self._key = key
198 self._key = key
200 return self
199 return self
201
200
202 def de_coerce(self):
201 def de_coerce(self):
203 return list(self)
202 return list(self)
204
203
205 def __setitem__(self, idx, value):
204 def __setitem__(self, idx, value):
206 list.__setitem__(self, idx, MutationObj.coerce(self._key, value))
205 list.__setitem__(self, idx, MutationObj.coerce(self._key, value))
207 self.changed()
206 self.changed()
208
207
209 def __setslice__(self, start, stop, values):
208 def __setslice__(self, start, stop, values):
210 list.__setslice__(self, start, stop,
209 list.__setslice__(self, start, stop,
211 (MutationObj.coerce(self._key, v) for v in values))
210 (MutationObj.coerce(self._key, v) for v in values))
212 self.changed()
211 self.changed()
213
212
214 def __delitem__(self, idx):
213 def __delitem__(self, idx):
215 list.__delitem__(self, idx)
214 list.__delitem__(self, idx)
216 self.changed()
215 self.changed()
217
216
218 def __delslice__(self, start, stop):
217 def __delslice__(self, start, stop):
219 list.__delslice__(self, start, stop)
218 list.__delslice__(self, start, stop)
220 self.changed()
219 self.changed()
221
220
222 def append(self, value):
221 def append(self, value):
223 list.append(self, MutationObj.coerce(self._key, value))
222 list.append(self, MutationObj.coerce(self._key, value))
224 self.changed()
223 self.changed()
225
224
226 def insert(self, idx, value):
225 def insert(self, idx, value):
227 list.insert(self, idx, MutationObj.coerce(self._key, value))
226 list.insert(self, idx, MutationObj.coerce(self._key, value))
228 self.changed()
227 self.changed()
229
228
230 def extend(self, values):
229 def extend(self, values):
231 list.extend(self, (MutationObj.coerce(self._key, v) for v in values))
230 list.extend(self, (MutationObj.coerce(self._key, v) for v in values))
232 self.changed()
231 self.changed()
233
232
234 def pop(self, *args, **kw):
233 def pop(self, *args, **kw):
235 value = list.pop(self, *args, **kw)
234 value = list.pop(self, *args, **kw)
236 self.changed()
235 self.changed()
237 return value
236 return value
238
237
239 def remove(self, value):
238 def remove(self, value):
240 list.remove(self, value)
239 list.remove(self, value)
241 self.changed()
240 self.changed()
242
241
243
242
244 def JsonType(impl=None, **kwargs):
243 def JsonType(impl=None, **kwargs):
245 """
244 """
246 Helper for using a mutation obj, it allows to use .with_variant easily.
245 Helper for using a mutation obj, it allows to use .with_variant easily.
247 example::
246 example::
248
247
249 settings = Column('settings_json',
248 settings = Column('settings_json',
250 MutationObj.as_mutable(
249 MutationObj.as_mutable(
251 JsonType(dialect_map=dict(mysql=UnicodeText(16384))))
250 JsonType(dialect_map=dict(mysql=UnicodeText(16384))))
252 """
251 """
253
252
254 if impl == 'list':
253 if impl == 'list':
255 return JSONEncodedObj(default=list, **kwargs)
254 return JSONEncodedObj(default=list, **kwargs)
256 elif impl == 'dict':
255 elif impl == 'dict':
257 return JSONEncodedObj(default=DictClass, **kwargs)
256 return JSONEncodedObj(default=DictClass, **kwargs)
258 else:
257 else:
259 return JSONEncodedObj(**kwargs)
258 return JSONEncodedObj(**kwargs)
260
259
261
260
262 JSON = MutationObj.as_mutable(JsonType())
261 JSON = MutationObj.as_mutable(JsonType())
263 """
262 """
264 A type to encode/decode JSON on the fly
263 A type to encode/decode JSON on the fly
265
264
266 sqltype is the string type for the underlying DB column::
265 sqltype is the string type for the underlying DB column::
267
266
268 Column(JSON) (defaults to UnicodeText)
267 Column(JSON) (defaults to UnicodeText)
269 """
268 """
270
269
271 JSONDict = MutationObj.as_mutable(JsonType('dict'))
270 JSONDict = MutationObj.as_mutable(JsonType('dict'))
272 """
271 """
273 A type to encode/decode JSON dictionaries on the fly
272 A type to encode/decode JSON dictionaries on the fly
274 """
273 """
275
274
276 JSONList = MutationObj.as_mutable(JsonType('list'))
275 JSONList = MutationObj.as_mutable(JsonType('list'))
277 """
276 """
278 A type to encode/decode JSON lists` on the fly
277 A type to encode/decode JSON lists` on the fly
279 """
278 """
@@ -1,187 +1,186 b''
1 # -*- coding: utf-8 -*-
2
1
3 # Copyright (C) 2010-2020 RhodeCode GmbH
2 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
3 #
5 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
8 #
7 #
9 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
11 # GNU General Public License for more details.
13 #
12 #
14 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
15 #
17 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
19
21 import sys
20 import sys
22 import logging
21 import logging
23
22
24
23
25 BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = list(range(30, 38))
24 BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = list(range(30, 38))
26
25
27 # Sequences
26 # Sequences
28 RESET_SEQ = "\033[0m"
27 RESET_SEQ = "\033[0m"
29 COLOR_SEQ = "\033[0;%dm"
28 COLOR_SEQ = "\033[0;%dm"
30 BOLD_SEQ = "\033[1m"
29 BOLD_SEQ = "\033[1m"
31
30
32 COLORS = {
31 COLORS = {
33 'CRITICAL': MAGENTA,
32 'CRITICAL': MAGENTA,
34 'ERROR': RED,
33 'ERROR': RED,
35 'WARNING': CYAN,
34 'WARNING': CYAN,
36 'INFO': GREEN,
35 'INFO': GREEN,
37 'DEBUG': BLUE,
36 'DEBUG': BLUE,
38 'SQL': YELLOW
37 'SQL': YELLOW
39 }
38 }
40
39
41
40
42 def _inject_req_id(record, with_prefix=True):
41 def _inject_req_id(record, with_prefix=True):
43 from pyramid.threadlocal import get_current_request
42 from pyramid.threadlocal import get_current_request
44 dummy = '00000000-0000-0000-0000-000000000000'
43 dummy = '00000000-0000-0000-0000-000000000000'
45 req_id = None
44 req_id = None
46
45
47 req = get_current_request()
46 req = get_current_request()
48 if req:
47 if req:
49 req_id = getattr(req, 'req_id', None)
48 req_id = getattr(req, 'req_id', None)
50 if with_prefix:
49 if with_prefix:
51 req_id = 'req_id:%-36s' % (req_id or dummy)
50 req_id = 'req_id:%-36s' % (req_id or dummy)
52 else:
51 else:
53 req_id = (req_id or dummy)
52 req_id = (req_id or dummy)
54 record.req_id = req_id
53 record.req_id = req_id
55
54
56
55
57 def _add_log_to_debug_bucket(formatted_record):
56 def _add_log_to_debug_bucket(formatted_record):
58 from pyramid.threadlocal import get_current_request
57 from pyramid.threadlocal import get_current_request
59 req = get_current_request()
58 req = get_current_request()
60 if req:
59 if req:
61 req.req_id_bucket.append(formatted_record)
60 req.req_id_bucket.append(formatted_record)
62
61
63
62
64 def one_space_trim(s):
63 def one_space_trim(s):
65 if s.find(" ") == -1:
64 if s.find(" ") == -1:
66 return s
65 return s
67 else:
66 else:
68 s = s.replace(' ', ' ')
67 s = s.replace(' ', ' ')
69 return one_space_trim(s)
68 return one_space_trim(s)
70
69
71
70
72 def format_sql(sql):
71 def format_sql(sql):
73 sql = sql.replace('\n', '')
72 sql = sql.replace('\n', '')
74 sql = one_space_trim(sql)
73 sql = one_space_trim(sql)
75 sql = sql\
74 sql = sql\
76 .replace(',', ',\n\t')\
75 .replace(',', ',\n\t')\
77 .replace('SELECT', '\n\tSELECT \n\t')\
76 .replace('SELECT', '\n\tSELECT \n\t')\
78 .replace('UPDATE', '\n\tUPDATE \n\t')\
77 .replace('UPDATE', '\n\tUPDATE \n\t')\
79 .replace('DELETE', '\n\tDELETE \n\t')\
78 .replace('DELETE', '\n\tDELETE \n\t')\
80 .replace('FROM', '\n\tFROM')\
79 .replace('FROM', '\n\tFROM')\
81 .replace('ORDER BY', '\n\tORDER BY')\
80 .replace('ORDER BY', '\n\tORDER BY')\
82 .replace('LIMIT', '\n\tLIMIT')\
81 .replace('LIMIT', '\n\tLIMIT')\
83 .replace('WHERE', '\n\tWHERE')\
82 .replace('WHERE', '\n\tWHERE')\
84 .replace('AND', '\n\tAND')\
83 .replace('AND', '\n\tAND')\
85 .replace('LEFT', '\n\tLEFT')\
84 .replace('LEFT', '\n\tLEFT')\
86 .replace('INNER', '\n\tINNER')\
85 .replace('INNER', '\n\tINNER')\
87 .replace('INSERT', '\n\tINSERT')\
86 .replace('INSERT', '\n\tINSERT')\
88 .replace('DELETE', '\n\tDELETE')
87 .replace('DELETE', '\n\tDELETE')
89 return sql
88 return sql
90
89
91
90
92 class ExceptionAwareFormatter(logging.Formatter):
91 class ExceptionAwareFormatter(logging.Formatter):
93 """
92 """
94 Extended logging formatter which prints out remote tracebacks.
93 Extended logging formatter which prints out remote tracebacks.
95 """
94 """
96
95
97 def formatException(self, ei):
96 def formatException(self, ei):
98 ex_type, ex_value, ex_tb = ei
97 ex_type, ex_value, ex_tb = ei
99
98
100 local_tb = logging.Formatter.formatException(self, ei)
99 local_tb = logging.Formatter.formatException(self, ei)
101 if hasattr(ex_value, '_vcs_server_traceback'):
100 if hasattr(ex_value, '_vcs_server_traceback'):
102
101
103 def formatRemoteTraceback(remote_tb_lines):
102 def formatRemoteTraceback(remote_tb_lines):
104 result = ["\n +--- This exception occured remotely on VCSServer - Remote traceback:\n\n"]
103 result = ["\n +--- This exception occured remotely on VCSServer - Remote traceback:\n\n"]
105 result.append(remote_tb_lines)
104 result.append(remote_tb_lines)
106 result.append("\n +--- End of remote traceback\n")
105 result.append("\n +--- End of remote traceback\n")
107 return result
106 return result
108
107
109 try:
108 try:
110 if ex_type is not None and ex_value is None and ex_tb is None:
109 if ex_type is not None and ex_value is None and ex_tb is None:
111 # possible old (3.x) call syntax where caller is only
110 # possible old (3.x) call syntax where caller is only
112 # providing exception object
111 # providing exception object
113 if type(ex_type) is not type:
112 if type(ex_type) is not type:
114 raise TypeError(
113 raise TypeError(
115 "invalid argument: ex_type should be an exception "
114 "invalid argument: ex_type should be an exception "
116 "type, or just supply no arguments at all")
115 "type, or just supply no arguments at all")
117 if ex_type is None and ex_tb is None:
116 if ex_type is None and ex_tb is None:
118 ex_type, ex_value, ex_tb = sys.exc_info()
117 ex_type, ex_value, ex_tb = sys.exc_info()
119
118
120 remote_tb = getattr(ex_value, "_vcs_server_traceback", None)
119 remote_tb = getattr(ex_value, "_vcs_server_traceback", None)
121
120
122 if remote_tb:
121 if remote_tb:
123 remote_tb = formatRemoteTraceback(remote_tb)
122 remote_tb = formatRemoteTraceback(remote_tb)
124 return local_tb + ''.join(remote_tb)
123 return local_tb + ''.join(remote_tb)
125 finally:
124 finally:
126 # clean up cycle to traceback, to allow proper GC
125 # clean up cycle to traceback, to allow proper GC
127 del ex_type, ex_value, ex_tb
126 del ex_type, ex_value, ex_tb
128
127
129 return local_tb
128 return local_tb
130
129
131
130
132 class RequestTrackingFormatter(ExceptionAwareFormatter):
131 class RequestTrackingFormatter(ExceptionAwareFormatter):
133 def format(self, record):
132 def format(self, record):
134 _inject_req_id(record)
133 _inject_req_id(record)
135 def_record = logging.Formatter.format(self, record)
134 def_record = logging.Formatter.format(self, record)
136 _add_log_to_debug_bucket(def_record)
135 _add_log_to_debug_bucket(def_record)
137 return def_record
136 return def_record
138
137
139
138
140 class ColorFormatter(ExceptionAwareFormatter):
139 class ColorFormatter(ExceptionAwareFormatter):
141
140
142 def format(self, record):
141 def format(self, record):
143 """
142 """
144 Changes record's levelname to use with COLORS enum
143 Changes record's levelname to use with COLORS enum
145 """
144 """
146 def_record = super(ColorFormatter, self).format(record)
145 def_record = super(ColorFormatter, self).format(record)
147
146
148 levelname = record.levelname
147 levelname = record.levelname
149 start = COLOR_SEQ % (COLORS[levelname])
148 start = COLOR_SEQ % (COLORS[levelname])
150 end = RESET_SEQ
149 end = RESET_SEQ
151
150
152 colored_record = ''.join([start, def_record, end])
151 colored_record = ''.join([start, def_record, end])
153 return colored_record
152 return colored_record
154
153
155
154
156 class ColorRequestTrackingFormatter(RequestTrackingFormatter):
155 class ColorRequestTrackingFormatter(RequestTrackingFormatter):
157
156
158 def format(self, record):
157 def format(self, record):
159 """
158 """
160 Changes record's levelname to use with COLORS enum
159 Changes record's levelname to use with COLORS enum
161 """
160 """
162 def_record = super(ColorRequestTrackingFormatter, self).format(record)
161 def_record = super(ColorRequestTrackingFormatter, self).format(record)
163
162
164 levelname = record.levelname
163 levelname = record.levelname
165 start = COLOR_SEQ % (COLORS[levelname])
164 start = COLOR_SEQ % (COLORS[levelname])
166 end = RESET_SEQ
165 end = RESET_SEQ
167
166
168 colored_record = ''.join([start, def_record, end])
167 colored_record = ''.join([start, def_record, end])
169 return colored_record
168 return colored_record
170
169
171
170
172 class ColorFormatterSql(logging.Formatter):
171 class ColorFormatterSql(logging.Formatter):
173
172
174 def format(self, record):
173 def format(self, record):
175 """
174 """
176 Changes record's levelname to use with COLORS enum
175 Changes record's levelname to use with COLORS enum
177 """
176 """
178
177
179 start = COLOR_SEQ % (COLORS['SQL'])
178 start = COLOR_SEQ % (COLORS['SQL'])
180 def_record = format_sql(logging.Formatter.format(self, record))
179 def_record = format_sql(logging.Formatter.format(self, record))
181 end = RESET_SEQ
180 end = RESET_SEQ
182
181
183 colored_record = ''.join([start, def_record, end])
182 colored_record = ''.join([start, def_record, end])
184 return colored_record
183 return colored_record
185
184
186 # marcink: needs to stay with this name for backward .ini compatability
185 # marcink: needs to stay with this name for backward .ini compatability
187 Pyro4AwareFormatter = ExceptionAwareFormatter
186 Pyro4AwareFormatter = ExceptionAwareFormatter
@@ -1,176 +1,175 b''
1 # -*- coding: utf-8 -*-
2
1
3 # Copyright (C) 2010-2020 RhodeCode GmbH
2 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
3 #
5 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
8 #
7 #
9 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
11 # GNU General Public License for more details.
13 #
12 #
14 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
15 #
17 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
19
21 import re
20 import re
22 import markdown
21 import markdown
23 import xml.etree.ElementTree as etree
22 import xml.etree.ElementTree as etree
24
23
25 from markdown.extensions import Extension
24 from markdown.extensions import Extension
26 from markdown.extensions.fenced_code import FencedCodeExtension
25 from markdown.extensions.fenced_code import FencedCodeExtension
27 from markdown.extensions.tables import TableExtension
26 from markdown.extensions.tables import TableExtension
28 from markdown.inlinepatterns import Pattern
27 from markdown.inlinepatterns import Pattern
29
28
30 import gfm
29 import gfm
31
30
32
31
33 class InlineProcessor(Pattern):
32 class InlineProcessor(Pattern):
34 """
33 """
35 Base class that inline patterns subclass.
34 Base class that inline patterns subclass.
36 This is the newer style inline processor that uses a more
35 This is the newer style inline processor that uses a more
37 efficient and flexible search approach.
36 efficient and flexible search approach.
38 """
37 """
39
38
40 def __init__(self, pattern, md=None):
39 def __init__(self, pattern, md=None):
41 """
40 """
42 Create an instant of an inline pattern.
41 Create an instant of an inline pattern.
43 Keyword arguments:
42 Keyword arguments:
44 * pattern: A regular expression that matches a pattern
43 * pattern: A regular expression that matches a pattern
45 """
44 """
46 self.pattern = pattern
45 self.pattern = pattern
47 self.compiled_re = re.compile(pattern, re.DOTALL | re.UNICODE)
46 self.compiled_re = re.compile(pattern, re.DOTALL | re.UNICODE)
48
47
49 # Api for Markdown to pass safe_mode into instance
48 # Api for Markdown to pass safe_mode into instance
50 self.safe_mode = False
49 self.safe_mode = False
51 self.md = md
50 self.md = md
52
51
53 def handleMatch(self, m, data):
52 def handleMatch(self, m, data):
54 """Return a ElementTree element from the given match and the
53 """Return a ElementTree element from the given match and the
55 start and end index of the matched text.
54 start and end index of the matched text.
56 If `start` and/or `end` are returned as `None`, it will be
55 If `start` and/or `end` are returned as `None`, it will be
57 assumed that the processor did not find a valid region of text.
56 assumed that the processor did not find a valid region of text.
58 Subclasses should override this method.
57 Subclasses should override this method.
59 Keyword arguments:
58 Keyword arguments:
60 * m: A re match object containing a match of the pattern.
59 * m: A re match object containing a match of the pattern.
61 * data: The buffer current under analysis
60 * data: The buffer current under analysis
62 Returns:
61 Returns:
63 * el: The ElementTree element, text or None.
62 * el: The ElementTree element, text or None.
64 * start: The start of the region that has been matched or None.
63 * start: The start of the region that has been matched or None.
65 * end: The end of the region that has been matched or None.
64 * end: The end of the region that has been matched or None.
66 """
65 """
67 pass # pragma: no cover
66 pass # pragma: no cover
68
67
69
68
70 class SimpleTagInlineProcessor(InlineProcessor):
69 class SimpleTagInlineProcessor(InlineProcessor):
71 """
70 """
72 Return element of type `tag` with a text attribute of group(2)
71 Return element of type `tag` with a text attribute of group(2)
73 of a Pattern.
72 of a Pattern.
74 """
73 """
75 def __init__(self, pattern, tag):
74 def __init__(self, pattern, tag):
76 InlineProcessor.__init__(self, pattern)
75 InlineProcessor.__init__(self, pattern)
77 self.tag = tag
76 self.tag = tag
78
77
79 def handleMatch(self, m, data): # pragma: no cover
78 def handleMatch(self, m, data): # pragma: no cover
80 el = etree.Element(self.tag)
79 el = etree.Element(self.tag)
81 el.text = m.group(2)
80 el.text = m.group(2)
82 return el, m.start(0), m.end(0)
81 return el, m.start(0), m.end(0)
83
82
84
83
85 class SubstituteTagInlineProcessor(SimpleTagInlineProcessor):
84 class SubstituteTagInlineProcessor(SimpleTagInlineProcessor):
86 """ Return an element of type `tag` with no children. """
85 """ Return an element of type `tag` with no children. """
87 def handleMatch(self, m, data):
86 def handleMatch(self, m, data):
88 return etree.Element(self.tag), m.start(0), m.end(0)
87 return etree.Element(self.tag), m.start(0), m.end(0)
89
88
90
89
91 class Nl2BrExtension(Extension):
90 class Nl2BrExtension(Extension):
92 BR_RE = r'\n'
91 BR_RE = r'\n'
93
92
94 def extendMarkdown(self, md, md_globals):
93 def extendMarkdown(self, md, md_globals):
95 br_tag = SubstituteTagInlineProcessor(self.BR_RE, 'br')
94 br_tag = SubstituteTagInlineProcessor(self.BR_RE, 'br')
96 md.inlinePatterns.add('nl', br_tag, '_end')
95 md.inlinePatterns.add('nl', br_tag, '_end')
97
96
98
97
99 class GithubFlavoredMarkdownExtension(Extension):
98 class GithubFlavoredMarkdownExtension(Extension):
100 """
99 """
101 An extension that is as compatible as possible with GitHub-flavored
100 An extension that is as compatible as possible with GitHub-flavored
102 Markdown (GFM).
101 Markdown (GFM).
103
102
104 This extension aims to be compatible with the variant of GFM that GitHub
103 This extension aims to be compatible with the variant of GFM that GitHub
105 uses for Markdown-formatted gists and files (including READMEs). This
104 uses for Markdown-formatted gists and files (including READMEs). This
106 variant seems to have all the extensions described in the `GFM
105 variant seems to have all the extensions described in the `GFM
107 documentation`_, except:
106 documentation`_, except:
108
107
109 - Newlines in paragraphs are not transformed into ``br`` tags.
108 - Newlines in paragraphs are not transformed into ``br`` tags.
110 - Intra-GitHub links to commits, repositories, and issues are not
109 - Intra-GitHub links to commits, repositories, and issues are not
111 supported.
110 supported.
112
111
113 If you need support for features specific to GitHub comments and issues,
112 If you need support for features specific to GitHub comments and issues,
114 please use :class:`mdx_gfm.GithubFlavoredMarkdownExtension`.
113 please use :class:`mdx_gfm.GithubFlavoredMarkdownExtension`.
115
114
116 .. _GFM documentation: https://guides.github.com/features/mastering-markdown/
115 .. _GFM documentation: https://guides.github.com/features/mastering-markdown/
117 """
116 """
118
117
119 def extendMarkdown(self, md, md_globals):
118 def extendMarkdown(self, md, md_globals):
120 # Built-in extensions
119 # Built-in extensions
121 Nl2BrExtension().extendMarkdown(md, md_globals)
120 Nl2BrExtension().extendMarkdown(md, md_globals)
122 FencedCodeExtension().extendMarkdown(md, md_globals)
121 FencedCodeExtension().extendMarkdown(md, md_globals)
123 TableExtension().extendMarkdown(md, md_globals)
122 TableExtension().extendMarkdown(md, md_globals)
124
123
125 # Custom extensions
124 # Custom extensions
126 gfm.AutolinkExtension().extendMarkdown(md, md_globals)
125 gfm.AutolinkExtension().extendMarkdown(md, md_globals)
127 gfm.AutomailExtension().extendMarkdown(md, md_globals)
126 gfm.AutomailExtension().extendMarkdown(md, md_globals)
128 gfm.HiddenHiliteExtension([
127 gfm.HiddenHiliteExtension([
129 ('guess_lang', 'False'),
128 ('guess_lang', 'False'),
130 ('css_class', 'highlight')
129 ('css_class', 'highlight')
131 ]).extendMarkdown(md, md_globals)
130 ]).extendMarkdown(md, md_globals)
132 gfm.SemiSaneListExtension().extendMarkdown(md, md_globals)
131 gfm.SemiSaneListExtension().extendMarkdown(md, md_globals)
133 gfm.SpacedLinkExtension().extendMarkdown(md, md_globals)
132 gfm.SpacedLinkExtension().extendMarkdown(md, md_globals)
134 gfm.StrikethroughExtension().extendMarkdown(md, md_globals)
133 gfm.StrikethroughExtension().extendMarkdown(md, md_globals)
135 gfm.TaskListExtension([
134 gfm.TaskListExtension([
136 ('list_attrs', {'class': 'checkbox'})
135 ('list_attrs', {'class': 'checkbox'})
137 ]).extendMarkdown(md, md_globals)
136 ]).extendMarkdown(md, md_globals)
138
137
139
138
140 # Global Vars
139 # Global Vars
141 URLIZE_RE = '(%s)' % '|'.join([
140 URLIZE_RE = '(%s)' % '|'.join([
142 r'<(?:f|ht)tps?://[^>]*>',
141 r'<(?:f|ht)tps?://[^>]*>',
143 r'\b(?:f|ht)tps?://[^)<>\s]+[^.,)<>\s]',
142 r'\b(?:f|ht)tps?://[^)<>\s]+[^.,)<>\s]',
144 r'\bwww\.[^)<>\s]+[^.,)<>\s]',
143 r'\bwww\.[^)<>\s]+[^.,)<>\s]',
145 r'[^(<\s]+\.(?:com|net|org)\b',
144 r'[^(<\s]+\.(?:com|net|org)\b',
146 ])
145 ])
147
146
148
147
149 class UrlizePattern(markdown.inlinepatterns.Pattern):
148 class UrlizePattern(markdown.inlinepatterns.Pattern):
150 """ Return a link Element given an autolink (`http://example/com`). """
149 """ Return a link Element given an autolink (`http://example/com`). """
151 def handleMatch(self, m):
150 def handleMatch(self, m):
152 url = m.group(2)
151 url = m.group(2)
153
152
154 if url.startswith('<'):
153 if url.startswith('<'):
155 url = url[1:-1]
154 url = url[1:-1]
156
155
157 text = url
156 text = url
158
157
159 if not url.split('://')[0] in ('http','https','ftp'):
158 if not url.split('://')[0] in ('http','https','ftp'):
160 if '@' in url and not '/' in url:
159 if '@' in url and not '/' in url:
161 url = 'mailto:' + url
160 url = 'mailto:' + url
162 else:
161 else:
163 url = 'http://' + url
162 url = 'http://' + url
164
163
165 el = markdown.util.etree.Element("a")
164 el = markdown.util.etree.Element("a")
166 el.set('href', url)
165 el.set('href', url)
167 el.text = markdown.util.AtomicString(text)
166 el.text = markdown.util.AtomicString(text)
168 return el
167 return el
169
168
170
169
171 class UrlizeExtension(markdown.Extension):
170 class UrlizeExtension(markdown.Extension):
172 """ Urlize Extension for Python-Markdown. """
171 """ Urlize Extension for Python-Markdown. """
173
172
174 def extendMarkdown(self, md, md_globals):
173 def extendMarkdown(self, md, md_globals):
175 """ Replace autolink with UrlizePattern """
174 """ Replace autolink with UrlizePattern """
176 md.inlinePatterns['autolink'] = UrlizePattern(URLIZE_RE, md)
175 md.inlinePatterns['autolink'] = UrlizePattern(URLIZE_RE, md)
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now