The requested changes are too big and the content was truncated; this diff has been collapsed as it changes many lines (557 lines changed).
@@ -0,0 +1,557 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2017 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/


import logging
import collections

from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest, HTTPFound
from pyramid.view import view_config
from pyramid.renderers import render
from pyramid.response import Response

from rhodecode.apps._base import RepoAppView

from rhodecode.lib import diffs, codeblocks
from rhodecode.lib.auth import (
    LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous, CSRFRequired)

from rhodecode.lib.compat import OrderedDict
from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError
import rhodecode.lib.helpers as h
from rhodecode.lib.utils2 import safe_unicode, safe_int
from rhodecode.lib.vcs.backends.base import EmptyCommit
from rhodecode.lib.vcs.exceptions import (
    RepositoryError, CommitDoesNotExistError, NodeDoesNotExistError)
from rhodecode.model.db import ChangesetComment, ChangesetStatus
from rhodecode.model.changeset_status import ChangesetStatusModel
from rhodecode.model.comment import CommentsModel
from rhodecode.model.meta import Session


log = logging.getLogger(__name__)


def _update_with_GET(params, request):
    for k in ['diff1', 'diff2', 'diff']:
        params[k] += request.GET.getall(k)


def get_ignore_ws(fid, request):
    ig_ws_global = request.GET.get('ignorews')
    ig_ws = filter(lambda k: k.startswith('WS'), request.GET.getall(fid))
    if ig_ws:
        try:
            return int(ig_ws[0].split(':')[-1])
        except Exception:
            pass
    return ig_ws_global


def _ignorews_url(request, fileid=None):
    _ = request.translate
    fileid = str(fileid) if fileid else None
    params = collections.defaultdict(list)
    _update_with_GET(params, request)
    label = _('Show whitespace')
    tooltiplbl = _('Show whitespace for all diffs')
    ig_ws = get_ignore_ws(fileid, request)
    ln_ctx = get_line_ctx(fileid, request)

    if ig_ws is None:
        params['ignorews'] += [1]
        label = _('Ignore whitespace')
        tooltiplbl = _('Ignore whitespace for all diffs')
    ctx_key = 'context'
    ctx_val = ln_ctx

    # if we have passed in ln_ctx pass it along to our params
    if ln_ctx:
        params[ctx_key] += [ctx_val]

    if fileid:
        params['anchor'] = 'a_' + fileid
    return h.link_to(label, request.current_route_path(_query=params),
                     title=tooltiplbl, class_='tooltip')


def get_line_ctx(fid, request):
    ln_ctx_global = request.GET.get('context')
    if fid:
        ln_ctx = filter(lambda k: k.startswith('C'), request.GET.getall(fid))
    else:
        _ln_ctx = filter(lambda k: k.startswith('C'), request.GET)
        ln_ctx = request.GET.get(_ln_ctx[0]) if _ln_ctx else ln_ctx_global
        if ln_ctx:
            ln_ctx = [ln_ctx]

    if ln_ctx:
        retval = ln_ctx[0].split(':')[-1]
    else:
        retval = ln_ctx_global

    try:
        return int(retval)
    except Exception:
        return 3


def _context_url(request, fileid=None):
    """
    Generates a url for context lines.

    :param fileid:
    """

    _ = request.translate
    fileid = str(fileid) if fileid else None
    ig_ws = get_ignore_ws(fileid, request)
    ln_ctx = (get_line_ctx(fileid, request) or 3) * 2

    params = collections.defaultdict(list)
    _update_with_GET(params, request)

    if ln_ctx > 0:
        params['context'] += [ln_ctx]

    if ig_ws:
        ig_ws_key = 'ignorews'
        ig_ws_val = 1
        params[ig_ws_key] += [ig_ws_val]

    lbl = _('Increase context')
    tooltiplbl = _('Increase context for all diffs')

    if fileid:
        params['anchor'] = 'a_' + fileid
    return h.link_to(lbl, request.current_route_path(_query=params),
                     title=tooltiplbl, class_='tooltip')


class RepoCommitsView(RepoAppView):
    def load_default_context(self):
        c = self._get_local_tmpl_context(include_app_defaults=True)

        # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead
        c.repo_info = self.db_repo
        c.rhodecode_repo = self.rhodecode_vcs_repo

        self._register_global_c(c)
        return c

    def _commit(self, commit_id_range, method):
        _ = self.request.translate
        c = self.load_default_context()
        c.ignorews_url = _ignorews_url
        c.context_url = _context_url
        c.fulldiff = self.request.GET.get('fulldiff')

        # fetch global flags of ignore ws or context lines
        context_lcl = get_line_ctx('', self.request)
        ign_whitespace_lcl = get_ignore_ws('', self.request)

        # diff_limit will cut off the whole diff if the limit is applied
        # otherwise it will just hide the big files from the front-end
        diff_limit = c.visual.cut_off_limit_diff
        file_limit = c.visual.cut_off_limit_file

        # get ranges of commit ids if present
        commit_range = commit_id_range.split('...')[:2]

        try:
            pre_load = ['affected_files', 'author', 'branch', 'date',
                        'message', 'parents']

            if len(commit_range) == 2:
                commits = self.rhodecode_vcs_repo.get_commits(
                    start_id=commit_range[0], end_id=commit_range[1],
                    pre_load=pre_load)
                commits = list(commits)
            else:
                commits = [self.rhodecode_vcs_repo.get_commit(
                    commit_id=commit_id_range, pre_load=pre_load)]

            c.commit_ranges = commits
            if not c.commit_ranges:
                raise RepositoryError(
                    'The commit range returned an empty result')
        except CommitDoesNotExistError:
            msg = _('No such commit exists for this repository')
            h.flash(msg, category='error')
            raise HTTPNotFound()
        except Exception:
            log.exception("General failure")
            raise HTTPNotFound()

        c.changes = OrderedDict()
        c.lines_added = 0
        c.lines_deleted = 0

        # auto collapse if we have more than limit
        collapse_limit = diffs.DiffProcessor._collapse_commits_over
        c.collapse_all_commits = len(c.commit_ranges) > collapse_limit

        c.commit_statuses = ChangesetStatus.STATUSES
        c.inline_comments = []
        c.files = []

        c.statuses = []
        c.comments = []
        c.unresolved_comments = []
        if len(c.commit_ranges) == 1:
            commit = c.commit_ranges[0]
            c.comments = CommentsModel().get_comments(
                self.db_repo.repo_id,
                revision=commit.raw_id)
            c.statuses.append(ChangesetStatusModel().get_status(
                self.db_repo.repo_id, commit.raw_id))
            # comments from PR
            statuses = ChangesetStatusModel().get_statuses(
                self.db_repo.repo_id, commit.raw_id,
                with_revisions=True)
            prs = set(st.pull_request for st in statuses
                      if st.pull_request is not None)
            # from associated statuses, check the pull requests, and
            # show comments from them
            for pr in prs:
                c.comments.extend(pr.comments)

            c.unresolved_comments = CommentsModel()\
                .get_commit_unresolved_todos(commit.raw_id)

        diff = None
        # Iterate over ranges (default commit view is always one commit)
        for commit in c.commit_ranges:
            c.changes[commit.raw_id] = []

            commit2 = commit
            commit1 = commit.parents[0] if commit.parents else EmptyCommit()

            _diff = self.rhodecode_vcs_repo.get_diff(
                commit1, commit2,
                ignore_whitespace=ign_whitespace_lcl, context=context_lcl)
            diff_processor = diffs.DiffProcessor(
                _diff, format='newdiff', diff_limit=diff_limit,
                file_limit=file_limit, show_full_diff=c.fulldiff)

            commit_changes = OrderedDict()
            if method == 'show':
                _parsed = diff_processor.prepare()
                c.limited_diff = isinstance(_parsed, diffs.LimitedDiffContainer)

                def _node_getter(commit):
                    def get_node(fname):
                        try:
                            return commit.get_node(fname)
                        except NodeDoesNotExistError:
                            return None
                    return get_node

                inline_comments = CommentsModel().get_inline_comments(
                    self.db_repo.repo_id, revision=commit.raw_id)
                c.inline_cnt = CommentsModel().get_inline_comments_count(
                    inline_comments)

                diffset = codeblocks.DiffSet(
                    repo_name=self.db_repo_name,
                    source_node_getter=_node_getter(commit1),
                    target_node_getter=_node_getter(commit2),
                    comments=inline_comments)
                diffset = diffset.render_patchset(
                    _parsed, commit1.raw_id, commit2.raw_id)

                c.changes[commit.raw_id] = diffset
            else:
                # downloads/raw we only need RAW diff nothing else
                diff = diff_processor.as_raw()
                c.changes[commit.raw_id] = [None, None, None, None, diff, None, None]

        # sort comments by how they were generated
        c.comments = sorted(c.comments, key=lambda x: x.comment_id)

        if len(c.commit_ranges) == 1:
            c.commit = c.commit_ranges[0]
            c.parent_tmpl = ''.join(
                '# Parent %s\n' % x.raw_id for x in c.commit.parents)

        if method == 'download':
            response = Response(diff)
            response.content_type = 'text/plain'
            response.content_disposition = (
                'attachment; filename=%s.diff' % commit_id_range[:12])
            return response
        elif method == 'patch':
            c.diff = safe_unicode(diff)
            patch = render(
                'rhodecode:templates/changeset/patch_changeset.mako',
                self._get_template_context(c), self.request)
            response = Response(patch)
            response.content_type = 'text/plain'
            return response
        elif method == 'raw':
            response = Response(diff)
            response.content_type = 'text/plain'
            return response
        elif method == 'show':
            if len(c.commit_ranges) == 1:
                html = render(
                    'rhodecode:templates/changeset/changeset.mako',
                    self._get_template_context(c), self.request)
                return Response(html)
            else:
                c.ancestor = None
                c.target_repo = self.db_repo
                html = render(
                    'rhodecode:templates/changeset/changeset_range.mako',
                    self._get_template_context(c), self.request)
                return Response(html)

        raise HTTPBadRequest()

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_commit', request_method='GET',
        renderer=None)
    def repo_commit_show(self):
        commit_id = self.request.matchdict['commit_id']
        return self._commit(commit_id, method='show')

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_commit_raw', request_method='GET',
        renderer=None)
    @view_config(
        route_name='repo_commit_raw_deprecated', request_method='GET',
        renderer=None)
    def repo_commit_raw(self):
        commit_id = self.request.matchdict['commit_id']
        return self._commit(commit_id, method='raw')

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_commit_patch', request_method='GET',
        renderer=None)
    def repo_commit_patch(self):
        commit_id = self.request.matchdict['commit_id']
        return self._commit(commit_id, method='patch')

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_commit_download', request_method='GET',
        renderer=None)
    def repo_commit_download(self):
        commit_id = self.request.matchdict['commit_id']
        return self._commit(commit_id, method='download')

    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @CSRFRequired()
    @view_config(
        route_name='repo_commit_comment_create', request_method='POST',
        renderer='json_ext')
    def repo_commit_comment_create(self):
        _ = self.request.translate
        commit_id = self.request.matchdict['commit_id']

        c = self.load_default_context()
        status = self.request.POST.get('changeset_status', None)
        text = self.request.POST.get('text')
        comment_type = self.request.POST.get('comment_type')
        resolves_comment_id = self.request.POST.get('resolves_comment_id', None)

        if status:
            text = text or (_('Status change %(transition_icon)s %(status)s')
                            % {'transition_icon': '>',
                               'status': ChangesetStatus.get_status_lbl(status)})

        multi_commit_ids = []
        for _commit_id in self.request.POST.get('commit_ids', '').split(','):
            if _commit_id not in ['', None, EmptyCommit.raw_id]:
                if _commit_id not in multi_commit_ids:
                    multi_commit_ids.append(_commit_id)

        commit_ids = multi_commit_ids or [commit_id]

        comment = None
        for current_id in filter(None, commit_ids):
            comment = CommentsModel().create(
                text=text,
                repo=self.db_repo.repo_id,
                user=self._rhodecode_db_user.user_id,
                commit_id=current_id,
                f_path=self.request.POST.get('f_path'),
                line_no=self.request.POST.get('line'),
                status_change=(ChangesetStatus.get_status_lbl(status)
                               if status else None),
                status_change_type=status,
                comment_type=comment_type,
                resolves_comment_id=resolves_comment_id
            )

            # get status if set !
            if status:
                # if latest status was from pull request and it's closed
                # disallow changing status !
                # dont_allow_on_closed_pull_request = True !

                try:
                    ChangesetStatusModel().set_status(
                        self.db_repo.repo_id,
                        status,
                        self._rhodecode_db_user.user_id,
                        comment,
                        revision=current_id,
                        dont_allow_on_closed_pull_request=True
                    )
                except StatusChangeOnClosedPullRequestError:
                    msg = _('Changing the status of a commit associated with '
                            'a closed pull request is not allowed')
                    log.exception(msg)
                    h.flash(msg, category='warning')
                    raise HTTPFound(h.route_path(
                        'repo_commit', repo_name=self.db_repo_name,
                        commit_id=current_id))

        # finalize, commit and redirect
        Session().commit()

        data = {
            'target_id': h.safeid(h.safe_unicode(
                self.request.POST.get('f_path'))),
        }
        if comment:
            c.co = comment
            rendered_comment = render(
                'rhodecode:templates/changeset/changeset_comment_block.mako',
                self._get_template_context(c), self.request)

            data.update(comment.get_dict())
            data.update({'rendered_text': rendered_comment})

        return data

    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @CSRFRequired()
    @view_config(
        route_name='repo_commit_comment_preview', request_method='POST',
        renderer='string', xhr=True)
    def repo_commit_comment_preview(self):
        # Technically a CSRF token is not needed here, as no state changes
        # with this call. However, since this is a POST, it is better to have
        # it, so automated tools don't flag it as a potential CSRF endpoint.
        # POST is required because the payload could be bigger than the
        # maximum allowed by GET.

        text = self.request.POST.get('text')
        renderer = self.request.POST.get('renderer') or 'rst'
        if text:
            return h.render(text, renderer=renderer, mentions=True)
        return ''

    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @CSRFRequired()
    @view_config(
        route_name='repo_commit_comment_delete', request_method='POST',
        renderer='json_ext')
    def repo_commit_comment_delete(self):
        commit_id = self.request.matchdict['commit_id']
        comment_id = self.request.matchdict['comment_id']

        comment = ChangesetComment.get_or_404(safe_int(comment_id))
        if not comment:
            log.debug('Comment with id:%s not found, skipping', comment_id)
            # the comment was probably already deleted in another call
            return True

        is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
        super_admin = h.HasPermissionAny('hg.admin')()
        comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id)
        is_repo_comment = comment.repo.repo_name == self.db_repo_name
        comment_repo_admin = is_repo_admin and is_repo_comment

        if super_admin or comment_owner or comment_repo_admin:
            CommentsModel().delete(comment=comment, user=self._rhodecode_db_user)
            Session().commit()
            return True
        else:
            log.warning('No permissions for user %s to delete comment_id: %s',
                        self._rhodecode_db_user, comment_id)
            raise HTTPNotFound()

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_commit_data', request_method='GET',
        renderer='json_ext', xhr=True)
    def repo_commit_data(self):
        commit_id = self.request.matchdict['commit_id']
        self.load_default_context()

        try:
            return self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
        except CommitDoesNotExistError as e:
            return EmptyCommit(message=str(e))

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_commit_children', request_method='GET',
        renderer='json_ext', xhr=True)
    def repo_commit_children(self):
        commit_id = self.request.matchdict['commit_id']
        self.load_default_context()

        commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
        result = {"results": commit.children}
        return result

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_commit_parents', request_method='GET',
        renderer='json_ext')
    def repo_commit_parents(self):
        commit_id = self.request.matchdict['commit_id']
        self.load_default_context()

        commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
        result = {"results": commit.parents}
        return result
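
The `WS:<n>` and `C:<n>` values that `get_ignore_ws` and `get_line_ctx` unpack above are repeated query-string values keyed by a per-file id, with the plain `ignorews` and `context` keys acting as global fallbacks. Below is a minimal sketch of that convention, assuming WebOb's MultiDict (the type behind Pyramid's request.GET); the file id 'a_fileid' and the helper first_flag are made up for illustration.

from webob.multidict import MultiDict

# Simulated request.GET: per-file flags are repeated values under the
# file id, while plain 'ignorews'/'context' keys act as global fallbacks.
GET = MultiDict([('a_fileid', 'WS:1'), ('a_fileid', 'C:6'), ('context', '3')])

def first_flag(values, prefix, default=None):
    # Return the integer suffix of the first value like 'WS:1' or 'C:6'.
    for value in values:
        if value.startswith(prefix):
            return int(value.split(':')[-1])
    return default

print(first_flag(GET.getall('a_fileid'), 'WS'))            # 1: ignore whitespace
print(first_flag(GET.getall('a_fileid'), 'C', default=3))  # 6: six context lines
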
@@ -1,677 +1,678 @@


################################################################################
## RHODECODE COMMUNITY EDITION CONFIGURATION ##
# The %(here)s variable will be replaced with the parent directory of this file#
################################################################################

[DEFAULT]
debug = true

################################################################################
## EMAIL CONFIGURATION ##
## Uncomment and replace with the email address which should receive ##
## any error reports after an application crash ##
## Additionally these settings will be used by the RhodeCode mailing system ##
################################################################################

## prefix all email subjects with the given prefix, helps filtering out emails
#email_prefix = [RhodeCode]

## email FROM address all mails will be sent from
#app_email_from = rhodecode-noreply@localhost

## Uncomment and replace with the address which should receive any error report
## note: using appenlight for error handling doesn't need this to be uncommented
#email_to = admin@localhost

## in case of Application errors, send an error email from this address
#error_email_from = rhodecode_error@localhost

## additional error message to be sent in case of server crash
#error_message =


#smtp_server = mail.server.com
#smtp_username =
#smtp_password =
#smtp_port =
#smtp_use_tls = false
#smtp_use_ssl = true
## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
#smtp_auth =

[server:main]
## COMMON ##
host = 127.0.0.1
port = 5000

##################################
## WAITRESS WSGI SERVER ##
## Recommended for Development ##
##################################

use = egg:waitress#main
## number of worker threads
threads = 5
## MAX BODY SIZE 100GB
max_request_body_size = 107374182400
## Use poll instead of select, fixes file descriptors limits problems.
## May not work on old windows systems.
asyncore_use_poll = true


##########################
## GUNICORN WSGI SERVER ##
##########################
## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini

#use = egg:gunicorn#main
## Sets the number of process workers. You must set `instance_id = *`
## when this option is set to more than one worker, recommended
## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
## The `instance_id = *` must be set in the [app:main] section below
#workers = 2
## number of threads for each of the workers, must be set to 1 for gevent
## generally recommended to be 1
#threads = 1
## process name
#proc_name = rhodecode
## type of worker class, one of sync, gevent
## for bigger setups, a worker class other than sync is recommended
#worker_class = sync
## The maximum number of simultaneous clients. Valid only for Gevent
#worker_connections = 10
## max number of requests that worker will handle before being gracefully
## restarted, could prevent memory leaks
#max_requests = 1000
#max_requests_jitter = 30
## amount of time a worker can spend with handling a request before it
## gets killed and restarted. Set to 6hrs
#timeout = 21600


## prefix middleware for RhodeCode.
## recommended when using proxy setup.
## allows setting RhodeCode under a prefix on the server.
## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
## And set your prefix like: `prefix = /custom_prefix`
## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
## to make your cookies only work on the prefix url
[filter:proxy-prefix]
use = egg:PasteDeploy#prefix
prefix = /

[app:main]
use = egg:rhodecode-enterprise-ce

## enable proxy prefix middleware, defined above
#filter-with = proxy-prefix

# During development we want to have the debug toolbar enabled
pyramid.includes =
    pyramid_debugtoolbar
    rhodecode.utils.debugtoolbar
    rhodecode.lib.middleware.request_wrapper

pyramid.reload_templates = true

debugtoolbar.hosts = 0.0.0.0/0
debugtoolbar.exclude_prefixes =
    /css
    /fonts
    /images
    /js

## RHODECODE PLUGINS ##
rhodecode.includes =
    rhodecode.api


# api prefix url
rhodecode.api.url = /_admin/api


## END RHODECODE PLUGINS ##

## encryption key used to encrypt social plugin tokens,
## remote_urls with credentials etc, if not set it defaults to
## `beaker.session.secret`
#rhodecode.encrypted_values.secret =

## decryption strict mode (enabled by default). It controls if decryption raises
## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
#rhodecode.encrypted_values.strict = false

## return gzipped responses from Rhodecode (static files/application)
gzip_responses = false

## autogenerate javascript routes file on startup
generate_js_files = false

## Optional Languages
## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
lang = en

## perform a full repository scan on each server start, this should be
## set to false after first startup, to allow faster server restarts.
startup.import_repos = false

## Uncomment and set this path to use archive download cache.
## Once enabled, generated archives will be cached at this location
## and served from the cache during subsequent requests for the same archive of
## the repository.
#archive_cache_dir = /tmp/tarballcache

## change this to a unique ID for security
app_instance_uuid = rc-production

## cut off limit for large diffs (size in bytes)
cut_off_limit_diff = 1024000
cut_off_limit_file = 256000

## use cached version of scm repo everywhere
vcs_full_cache = true

## force https in RhodeCode, fixes https redirects, assumes it's always https
## Normally this is controlled by proper http flags sent from http server
force_https = false

## use Strict-Transport-Security headers
use_htsts = false

## number of commits stats will parse on each iteration
commit_parse_limit = 25

## git rev filter option, --all is the default filter, if you need to
## hide all refs in changelog switch this to --branches --tags
git_rev_filter = --branches --tags

# Set to true if your repos are exposed using the dumb protocol
git_update_server_info = false

## RSS/ATOM feed options
rss_cut_off_limit = 256000
rss_items_per_page = 10
rss_include_diff = false

## gist URL alias, used to create nicer urls for gist. This should be a
## url that does rewrites to _admin/gists/{gistid}.
## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
gist_alias_url =

## List of views (using glob pattern syntax) that AUTH TOKENS could be
## used for access.
## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
## came from the logged-in user who owns this authentication token.
##
## list of all views can be found under `_admin/permissions/auth_token_access`
## The list should be "," separated and on a single line.
##
## Most common views to enable:
-# ChangesetController:changeset_patch
-# ChangesetController:changeset_raw
-# Repo
-# RepoFilesView
-# RepoFilesView
+# RepoCommitsView:repo_commit_download
+# RepoCommitsView:repo_commit_patch
+# RepoCommitsView:repo_commit_raw
+# RepoFilesView:repo_files_diff
+# RepoFilesView:repo_archivefile
+# RepoFilesView:repo_file_raw
# GistView:*
api_access_controllers_whitelist =

## default encoding used to convert from and to unicode
## can also be a comma separated list of encodings in case of mixed encodings
default_encoding = UTF-8

## instance-id prefix
## a prefix key for this instance used for cache invalidation when running
## multiple instances of rhodecode, make sure it's globally unique for
## all running rhodecode instances. Leave empty if you don't use it
instance_id =

## Fallback authentication plugin. Set this to a plugin ID to force the usage
## of an authentication plugin also if it is disabled by its settings.
## This could be useful if you are unable to log in to the system due to broken
## authentication settings. Then you can enable e.g. the internal rhodecode auth
## module to log in again and fix the settings.
##
## Available builtin plugin IDs (hash is part of the ID):
## egg:rhodecode-enterprise-ce#rhodecode
## egg:rhodecode-enterprise-ce#pam
## egg:rhodecode-enterprise-ce#ldap
## egg:rhodecode-enterprise-ce#jasig_cas
## egg:rhodecode-enterprise-ce#headers
## egg:rhodecode-enterprise-ce#crowd
#rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode

## alternative return HTTP header for failed authentication. Default HTTP
## response is 401 HTTPUnauthorized. Currently HG clients have troubles with
## handling that causing a series of failed authentication calls.
## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
## This will be served instead of default 401 on bad authentication
auth_ret_code =

## use special detection method when serving auth_ret_code, instead of serving
## ret_code directly, use 401 initially (which triggers credentials prompt)
## and then serve auth_ret_code to clients
auth_ret_code_detection = false

## locking return code. When repository is locked return this HTTP code. 2XX
## codes don't break the transactions while 4XX codes do
lock_ret_code = 423

## allows changing the repository location in settings page
allow_repo_location_change = true

## allows setting up custom hooks in settings page
allow_custom_hooks_settings = true

## generated license token, goto license page in RhodeCode settings to obtain
## a new token
license_token =

## supervisor connection uri, for managing supervisor and logs.
supervisor.uri =
## supervisord group name/id we only want this RC instance to handle
supervisor.group_id = dev

## Display extended labs settings
labs_settings_active = true

####################################
### CELERY CONFIG ####
####################################
use_celery = false
broker.host = localhost
broker.vhost = rabbitmqhost
broker.port = 5672
broker.user = rabbitmq
broker.password = qweqwe

celery.imports = rhodecode.lib.celerylib.tasks

celery.result.backend = amqp
celery.result.dburi = amqp://
celery.result.serialier = json

#celery.send.task.error.emails = true
#celery.amqp.task.result.expires = 18000

celeryd.concurrency = 2
#celeryd.log.file = celeryd.log
celeryd.log.level = debug
celeryd.max.tasks.per.child = 1

## tasks will never be sent to the queue, but executed locally instead.
celery.always.eager = false

####################################
### BEAKER CACHE ####
####################################
# default cache dir for templates. Putting this into a ramdisk
## can boost performance, eg. %(here)s/data_ramdisk
cache_dir = %(here)s/data

## locking and default file storage for Beaker. Putting this into a ramdisk
## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
beaker.cache.data_dir = %(here)s/data/cache/beaker_data
beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock

beaker.cache.regions = super_short_term, short_term, long_term, sql_cache_short, auth_plugins, repo_cache_long

beaker.cache.super_short_term.type = memory
beaker.cache.super_short_term.expire = 10
beaker.cache.super_short_term.key_length = 256

beaker.cache.short_term.type = memory
beaker.cache.short_term.expire = 60
beaker.cache.short_term.key_length = 256

beaker.cache.long_term.type = memory
beaker.cache.long_term.expire = 36000
beaker.cache.long_term.key_length = 256

beaker.cache.sql_cache_short.type = memory
beaker.cache.sql_cache_short.expire = 10
beaker.cache.sql_cache_short.key_length = 256

## default is memory cache, configure only if required
## using multi-node or multi-worker setup
#beaker.cache.auth_plugins.type = ext:database
#beaker.cache.auth_plugins.lock_dir = %(here)s/data/cache/auth_plugin_lock
#beaker.cache.auth_plugins.url = postgresql://postgres:secret@localhost/rhodecode
#beaker.cache.auth_plugins.url = mysql://root:secret@127.0.0.1/rhodecode
#beaker.cache.auth_plugins.sa.pool_recycle = 3600
#beaker.cache.auth_plugins.sa.pool_size = 10
#beaker.cache.auth_plugins.sa.max_overflow = 0

beaker.cache.repo_cache_long.type = memorylru_base
beaker.cache.repo_cache_long.max_items = 4096
beaker.cache.repo_cache_long.expire = 2592000

## default is memorylru_base cache, configure only if required
## using multi-node or multi-worker setup
#beaker.cache.repo_cache_long.type = ext:memcached
#beaker.cache.repo_cache_long.url = localhost:11211
#beaker.cache.repo_cache_long.expire = 1209600
#beaker.cache.repo_cache_long.key_length = 256

####################################
### BEAKER SESSION ####
####################################

## .session.type is the storage type for the session, currently allowed
## types are file, ext:memcached, ext:database, and memory (default).
beaker.session.type = file
beaker.session.data_dir = %(here)s/data/sessions/data

## db based session, fast, and allows easy management of logged-in users
#beaker.session.type = ext:database
#beaker.session.table_name = db_session
#beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
#beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
#beaker.session.sa.pool_recycle = 3600
#beaker.session.sa.echo = false

beaker.session.key = rhodecode
beaker.session.secret = develop-rc-uytcxaz
beaker.session.lock_dir = %(here)s/data/sessions/lock

## Secure encrypted cookie. Requires AES and AES python libraries
## you must disable beaker.session.secret to use this
#beaker.session.encrypt_key = key_for_encryption
#beaker.session.validate_key = validation_key

## sets session as invalid (also logging out the user) if it has not been
## accessed for the given amount of time in seconds
beaker.session.timeout = 2592000
beaker.session.httponly = true
## Path to use for the cookie. Set to prefix if you use prefix middleware
#beaker.session.cookie_path = /custom_prefix

## uncomment for https secure cookie
beaker.session.secure = false

## auto save the session so there is no need to call .save()
beaker.session.auto = false

## default cookie expiration time in seconds, set to `true` to set expire
## at browser close
#beaker.session.cookie_expires = 3600

###################################
## SEARCH INDEXING CONFIGURATION ##
###################################
## Full text search indexer is available in rhodecode-tools under
## `rhodecode-tools index` command

## WHOOSH Backend, doesn't require additional services to run
## it works well with a few dozen repos
search.module = rhodecode.lib.index.whoosh
search.location = %(here)s/data/index

########################################
### CHANNELSTREAM CONFIG ####
########################################
## channelstream enables persistent connections and live notification
## in the system. It's also used by the chat system
channelstream.enabled = false

## server address for channelstream server on the backend
channelstream.server = 127.0.0.1:9800

## location of the channelstream server from the outside world
## use ws:// for http or wss:// for https. This address needs to be handled
## by external HTTP server such as Nginx or Apache
## see nginx/apache configuration examples in our docs
channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
channelstream.secret = secret
channelstream.history.location = %(here)s/channelstream_history

## Internal application path that Javascript uses to connect into.
## If you use proxy-prefix the prefix should be added before /_channelstream
channelstream.proxy_path = /_channelstream


###################################
## APPENLIGHT CONFIG ##
###################################

## Appenlight is tailored to work with RhodeCode, see
## http://appenlight.com for details on how to obtain an account

## appenlight integration enabled
appenlight = false

appenlight.server_url = https://api.appenlight.com
appenlight.api_key = YOUR_API_KEY
#appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5

# used for JS client
appenlight.api_public_key = YOUR_API_PUBLIC_KEY

## TWEAK AMOUNT OF INFO SENT HERE

## enables 404 error logging (default False)
appenlight.report_404 = false

## time in seconds after which a request is considered slow (default 1)
appenlight.slow_request_time = 1

## record slow requests in application
## (needs to be enabled for slow datastore recording and time tracking)
appenlight.slow_requests = true

## enable hooking to application loggers
appenlight.logging = true

## minimum log level for log capture
appenlight.logging.level = WARNING

## send logs only from erroneous/slow requests
## (saves API quota for intensive logging)
appenlight.logging_on_error = false

## list of additional keywords that should be grabbed from environ object
## can be string with comma separated list of words in lowercase
## (by default client will always send following info:
## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
## start with HTTP*); this list can be extended with additional keywords here
appenlight.environ_keys_whitelist =

## list of keywords that should be blanked from request object
## can be string with comma separated list of words in lowercase
## (by default client will always blank keys that contain following words
## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf');
## this list can be extended with additional keywords set here
appenlight.request_keys_blacklist =

## list of namespaces that should be ignored when gathering log entries
## can be string with comma separated list of namespaces
## (by default the client ignores own entries: appenlight_client.client)
appenlight.log_namespace_blacklist =


################################################################################
## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
## execute malicious code after an exception is raised. ##
################################################################################
#set debug = false


##############
## STYLING ##
##############
debug_style = true

###########################################
### MAIN RHODECODE DATABASE CONFIG ###
###########################################
#sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
#sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
#sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode
sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30

# see sqlalchemy docs for other advanced settings

## print the sql statements to output
sqlalchemy.db1.echo = false
## recycle the connections after this amount of seconds
sqlalchemy.db1.pool_recycle = 3600
sqlalchemy.db1.convert_unicode = true

## the number of connections to keep open inside the connection pool.
## 0 indicates no limit
#sqlalchemy.db1.pool_size = 5

## the number of connections to allow in connection pool "overflow", that is
## connections that can be opened above and beyond the pool_size setting,
## which defaults to five.
#sqlalchemy.db1.max_overflow = 10


##################
### VCS CONFIG ###
##################
vcs.server.enable = true
vcs.server = localhost:9900

## Web server connectivity protocol, responsible for web based VCS operations
## Available protocols are:
## `http` - use http-rpc backend (default)
vcs.server.protocol = http

## Push/Pull operations protocol, available options are:
## `http` - use http-rpc backend (default)
##
vcs.scm_app_implementation = http

## Push/Pull operations hooks protocol, available options are:
## `http` - use http-rpc backend (default)
vcs.hooks.protocol = http

vcs.server.log_level = debug
## Start VCSServer with this instance as a subprocess, useful for development
vcs.start_server = true

## List of enabled VCS backends, available options are:
## `hg` - mercurial
## `git` - git
## `svn` - subversion
vcs.backends = hg, git, svn

vcs.connection_timeout = 3600
## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible
#vcs.svn.compatible_version = pre-1.8-compatible


############################################################
### Subversion proxy support (mod_dav_svn) ###
### Maps RhodeCode repo groups into SVN paths for Apache ###
############################################################
## Enable or disable the config file generation.
svn.proxy.generate_config = false
## Generate config file with `SVNListParentPath` set to `On`.
svn.proxy.list_parent_path = true
## Set location and file name of generated config file.
svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
## Used as a prefix to the `Location` block in the generated config file.
## In most cases it should be set to `/`.
svn.proxy.location_root = /
## Command to reload the mod dav svn configuration on change.
## Example: `/etc/init.d/apache2 reload`
#svn.proxy.reload_cmd = /etc/init.d/apache2 reload
## If the timeout expires before the reload command finishes, the command will
## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
#svn.proxy.reload_timeout = 10

## Dummy marker to add new entries after.
## Add any custom entries below. Please don't remove.
custom.conf = 1


################################
### LOGGING CONFIGURATION ####
################################
[loggers]
keys = root, routes, rhodecode, sqlalchemy, beaker, templates

[handlers]
keys = console, console_sql

[formatters]
keys = generic, color_formatter, color_formatter_sql

#############
## LOGGERS ##
#############
[logger_root]
level = NOTSET
handlers = console

[logger_routes]
level = DEBUG
handlers =
qualname = routes.middleware
## "level = DEBUG" logs the route matched and routing variables.
propagate = 1

[logger_beaker]
level = DEBUG
handlers =
qualname = beaker.container
propagate = 1

[logger_templates]
level = INFO
handlers =
qualname = pylons.templating
propagate = 1

[logger_rhodecode]
level = DEBUG
handlers =
qualname = rhodecode
propagate = 1

[logger_sqlalchemy]
level = INFO
handlers = console_sql
qualname = sqlalchemy.engine
propagate = 0

##############
## HANDLERS ##
##############

[handler_console]
class = StreamHandler
args = (sys.stderr, )
level = DEBUG
formatter = color_formatter

[handler_console_sql]
class = StreamHandler
args = (sys.stderr, )
level = DEBUG
formatter = color_formatter_sql

################
## FORMATTERS ##
################

[formatter_generic]
class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %Y-%m-%d %H:%M:%S

[formatter_color_formatter]
class = rhodecode.lib.logging_formatter.ColorFormatter
format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %Y-%m-%d %H:%M:%S

[formatter_color_formatter_sql]
class = rhodecode.lib.logging_formatter.ColorFormatterSql
format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %Y-%m-%d %H:%M:%S
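
As the whitelist comments in the hunk above describe, once a view such as RepoCommitsView:repo_commit_raw is listed in api_access_controllers_whitelist, appending ?auth_token=TOKEN_HASH to that view's URL authenticates the request as the token's owner. A hedged sketch of such a call; the host, repository, commit id, and token are placeholders, and the exact route path varies between RhodeCode versions:

import urllib2  # the ini above targets a Python 2 era deployment

# All values below are placeholders; substitute your own server, repo,
# commit id and auth token.
url = ('https://code.example.com/myrepo/changeset-raw/deadbeefcafe'
       '?auth_token=SECRET_TOKEN_HASH')

# With the view whitelisted, this returns the raw text/plain diff
# without an interactive login session.
print(urllib2.urlopen(url).read()[:200])
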
@@ -1,646 +1,647 b'' | |||
|
1 | 1 | |
|
2 | 2 | |
|
3 | 3 | ################################################################################ |
|
4 | 4 | ## RHODECODE COMMUNITY EDITION CONFIGURATION ## |
|
5 | 5 | # The %(here)s variable will be replaced with the parent directory of this file# |
|
6 | 6 | ################################################################################ |
|
7 | 7 | |
|
8 | 8 | [DEFAULT] |
|
9 | 9 | debug = true |
|
10 | 10 | |
|
11 | 11 | ################################################################################ |
|
12 | 12 | ## EMAIL CONFIGURATION ## |
|
13 | 13 | ## Uncomment and replace with the email address which should receive ## |
|
14 | 14 | ## any error reports after an application crash ## |
|
15 | 15 | ## Additionally these settings will be used by the RhodeCode mailing system ## |
|
16 | 16 | ################################################################################ |
|
17 | 17 | |
|
18 | 18 | ## prefix all email subjects with the given prefix, helps filtering out emails |
|
19 | 19 | #email_prefix = [RhodeCode] |
|
20 | 20 | |
|
21 | 21 | ## email FROM address from which all mails will be sent |
|
22 | 22 | #app_email_from = rhodecode-noreply@localhost |
|
23 | 23 | |
|
24 | 24 | ## Uncomment and replace with the address which should receive any error report |
|
25 | 25 | ## note: using appenlight for error handling doesn't need this to be uncommented |
|
26 | 26 | #email_to = admin@localhost |
|
27 | 27 | |
|
28 | 28 | ## in case of application errors, send an error email from this address |
|
29 | 29 | #error_email_from = rhodecode_error@localhost |
|
30 | 30 | |
|
31 | 31 | ## additional error message to be sent in case of server crash |
|
32 | 32 | #error_message = |
|
33 | 33 | |
|
34 | 34 | |
|
35 | 35 | #smtp_server = mail.server.com |
|
36 | 36 | #smtp_username = |
|
37 | 37 | #smtp_password = |
|
38 | 38 | #smtp_port = |
|
39 | 39 | #smtp_use_tls = false |
|
40 | 40 | #smtp_use_ssl = true |
|
41 | 41 | ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.) |
|
42 | 42 | #smtp_auth = |
|
43 | 43 | |
|
44 | 44 | [server:main] |
|
45 | 45 | ## COMMON ## |
|
46 | 46 | host = 127.0.0.1 |
|
47 | 47 | port = 5000 |
|
48 | 48 | |
|
49 | 49 | ################################## |
|
50 | 50 | ## WAITRESS WSGI SERVER ## |
|
51 | 51 | ## Recommended for Development ## |
|
52 | 52 | ################################## |
|
53 | 53 | |
|
54 | 54 | #use = egg:waitress#main |
|
55 | 55 | ## number of worker threads |
|
56 | 56 | #threads = 5 |
|
57 | 57 | ## MAX BODY SIZE 100GB |
|
58 | 58 | #max_request_body_size = 107374182400 |
|
59 | 59 | ## Use poll instead of select, fixes file descriptor limit problems. |

60 | 60 | ## May not work on old Windows systems. |
|
62 | 62 | |
|
63 | 63 | |
|
64 | 64 | ########################## |
|
65 | 65 | ## GUNICORN WSGI SERVER ## |
|
66 | 66 | ########################## |
|
67 | 67 | ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini |
|
68 | 68 | |
|
69 | 69 | use = egg:gunicorn#main |
|
70 | 70 | ## Sets the number of process workers. You must set `instance_id = *` |
|
71 | 71 | ## when this option is set to more than one worker, recommended |
|
72 | 72 | ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers |
|
73 | 73 | ## The `instance_id = *` must be set in the [app:main] section below |
|
74 | 74 | workers = 2 |
|
75 | 75 | ## number of threads for each worker, must be set to 1 for gevent |

76 | 76 | ## generally recommended to be 1 |
|
77 | 77 | #threads = 1 |
|
78 | 78 | ## process name |
|
79 | 79 | proc_name = rhodecode |
|
80 | 80 | ## type of worker class, one of sync, gevent |

81 | 81 | ## for bigger setups it is recommended to use a worker class other than sync |
|
82 | 82 | worker_class = sync |
|
83 | 83 | ## The maximum number of simultaneous clients. Valid only for Gevent |
|
84 | 84 | #worker_connections = 10 |
|
85 | 85 | ## max number of requests that worker will handle before being gracefully |
|
86 | 86 | ## restarted, could prevent memory leaks |
|
87 | 87 | max_requests = 1000 |
|
88 | 88 | max_requests_jitter = 30 |
|
89 | 89 | ## amount of time a worker can spend handling a request before it |

90 | 90 | ## gets killed and restarted. Set to 6 hours. |
|
91 | 91 | timeout = 21600 |
|
92 | 92 | |
|
93 | 93 | |
|
94 | 94 | ## prefix middleware for RhodeCode. |
|
95 | 95 | ## recommended when using a proxy setup. |

96 | 96 | ## allows serving RhodeCode under a URL prefix on the server, |

97 | 97 | ## eg https://server.com/custom_prefix. Enable the `filter-with =` option below as well, |

98 | 98 | ## and set your prefix like: `prefix = /custom_prefix`. |

99 | 99 | ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need |

100 | 100 | ## your cookies to work only on the prefix URL |
|
101 | 101 | [filter:proxy-prefix] |
|
102 | 102 | use = egg:PasteDeploy#prefix |
|
103 | 103 | prefix = / |
|
104 | 104 | |
|
105 | 105 | [app:main] |
|
106 | 106 | use = egg:rhodecode-enterprise-ce |
|
107 | 107 | |
|
108 | 108 | ## enable proxy prefix middleware, defined above |
|
109 | 109 | #filter-with = proxy-prefix |
|
110 | 110 | |
|
111 | 111 | ## encryption key used to encrypt social plugin tokens, |
|
112 | 112 | ## remote_urls with credentials etc, if not set it defaults to |
|
113 | 113 | ## `beaker.session.secret` |
|
114 | 114 | #rhodecode.encrypted_values.secret = |
|
115 | 115 | |
|
116 | 116 | ## decryption strict mode (enabled by default). It controls if decryption raises |
|
117 | 117 | ## `SignatureVerificationError` in case of wrong key, or damaged encryption data. |
|
118 | 118 | #rhodecode.encrypted_values.strict = false |
|
119 | 119 | |
|
120 | 120 | ## return gzipped responses from Rhodecode (static files/application) |
|
121 | 121 | gzip_responses = false |
|
122 | 122 | |
|
123 | 123 | ## autogenerate javascript routes file on startup |
|
124 | 124 | generate_js_files = false |
|
125 | 125 | |
|
126 | 126 | ## Optional Languages |
|
127 | 127 | ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh |
|
128 | 128 | lang = en |
|
129 | 129 | |
|
130 | 130 | ## perform a full repository scan on each server start, this should be |
|
131 | 131 | ## set to false after first startup, to allow faster server restarts. |
|
132 | 132 | startup.import_repos = false |
|
133 | 133 | |
|
134 | 134 | ## Uncomment and set this path to use archive download cache. |
|
135 | 135 | ## Once enabled, generated archives will be cached at this location |
|
136 | 136 | ## and served from the cache during subsequent requests for the same archive of |
|
137 | 137 | ## the repository. |
|
138 | 138 | #archive_cache_dir = /tmp/tarballcache |
|
139 | 139 | |
|
140 | 140 | ## change this to unique ID for security |
|
141 | 141 | app_instance_uuid = rc-production |
|
142 | 142 | |
|
143 | 143 | ## cut off limit for large diffs (size in bytes) |
|
144 | 144 | cut_off_limit_diff = 1024000 |
|
145 | 145 | cut_off_limit_file = 256000 |
|
146 | 146 | |
|
147 | 147 | ## use cache version of scm repo everywhere |
|
148 | 148 | vcs_full_cache = true |
|
149 | 149 | |
|
150 | 150 | ## force https in RhodeCode, fixes https redirects, assumes it's always https |
|
151 | 151 | ## Normally this is controlled by proper http flags sent from http server |
|
152 | 152 | force_https = false |
|
153 | 153 | |
|
154 | 154 | ## use Strict-Transport-Security headers |
|
155 | 155 | use_htsts = false |
|
156 | 156 | |
|
157 | 157 | ## number of commits the stats module will parse on each iteration |
|
158 | 158 | commit_parse_limit = 25 |
|
159 | 159 | |
|
160 | 160 | ## git rev filter option, --all is the default filter, if you need to |
|
161 | 161 | ## hide all refs in changelog switch this to --branches --tags |
|
162 | 162 | git_rev_filter = --branches --tags |
|
163 | 163 | |
|
164 | 164 | # Set to true if your repos are exposed using the dumb protocol |
|
165 | 165 | git_update_server_info = false |
|
166 | 166 | |
|
167 | 167 | ## RSS/ATOM feed options |
|
168 | 168 | rss_cut_off_limit = 256000 |
|
169 | 169 | rss_items_per_page = 10 |
|
170 | 170 | rss_include_diff = false |
|
171 | 171 | |
|
172 | 172 | ## gist URL alias, used to create nicer URLs for gists. This should be a |

173 | 173 | ## URL that rewrites to _admin/gists/{gistid}. |

174 | 174 | ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal |

175 | 175 | ## RhodeCode URL, i.e. http[s]://rhodecode.server/_admin/gists/{gistid} |
|
176 | 176 | gist_alias_url = |
|
177 | 177 | |
|
178 | 178 | ## List of views (using glob pattern syntax) that AUTH TOKENS can be |

179 | 179 | ## used to access. |
|
180 | 180 | ## Adding ?auth_token=TOKEN_HASH to the URL authenticates this request as if it |

181 | 181 | ## came from the logged-in user who owns this authentication token. |
|
182 | 182 | ## |
|
183 | 183 | ## list of all views can be found under `_admin/permissions/auth_token_access` |
|
184 | 184 | ## The list should be "," separated and on a single line. |
|
185 | 185 | ## |
|
186 | 186 | ## Most common views to enable: |
|
187 | # ChangesetController:changeset_patch | |
|
188 | # ChangesetController:changeset_raw | |
|
189 | # Repo | |

190 | # RepoFilesView | |

191 | # RepoFilesView | |
|
187 | # RepoCommitsView:repo_commit_download | |
|
188 | # RepoCommitsView:repo_commit_patch | |
|
189 | # RepoCommitsView:repo_commit_raw | |
|
190 | # RepoFilesView:repo_files_diff | |
|
191 | # RepoFilesView:repo_archivefile | |
|
192 | # RepoFilesView:repo_file_raw | |
|
192 | 193 | # GistView:* |
|
193 | 194 | api_access_controllers_whitelist = |
|
194 | 195 | |
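For illustration, a filled-in whitelist might look as follows; the view names are taken from the commented examples above, and this particular combination is only an example, not a recommended default:

    api_access_controllers_whitelist = RepoCommitsView:repo_commit_raw, RepoFilesView:repo_file_raw, GistView:*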
|
195 | 196 | ## default encoding used to convert from and to unicode |
|
196 | 197 | ## can be also a comma separated list of encoding in case of mixed encodings |
|
197 | 198 | default_encoding = UTF-8 |
|
198 | 199 | |
|
199 | 200 | ## instance-id prefix |
|
200 | 201 | ## a prefix key for this instance used for cache invalidation when running |
|
201 | 202 | ## multiple instances of rhodecode, make sure it's globally unique for |
|
202 | 203 | ## all running rhodecode instances. Leave empty if you don't use it |
|
203 | 204 | instance_id = |
|
204 | 205 | |
|
205 | 206 | ## Fallback authentication plugin. Set this to a plugin ID to force the usage |
|
206 | 207 | ## of an authentication plugin even if it is disabled by its settings. |
|
207 | 208 | ## This could be useful if you are unable to log in to the system due to broken |
|
208 | 209 | ## authentication settings. Then you can enable e.g. the internal rhodecode auth |
|
209 | 210 | ## module to log in again and fix the settings. |
|
210 | 211 | ## |
|
211 | 212 | ## Available builtin plugin IDs (hash is part of the ID): |
|
212 | 213 | ## egg:rhodecode-enterprise-ce#rhodecode |
|
213 | 214 | ## egg:rhodecode-enterprise-ce#pam |
|
214 | 215 | ## egg:rhodecode-enterprise-ce#ldap |
|
215 | 216 | ## egg:rhodecode-enterprise-ce#jasig_cas |
|
216 | 217 | ## egg:rhodecode-enterprise-ce#headers |
|
217 | 218 | ## egg:rhodecode-enterprise-ce#crowd |
|
218 | 219 | #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode |
|
219 | 220 | |
|
220 | 221 | ## alternative HTTP response code for failed authentication. The default HTTP |

221 | 222 | ## response is 401 HTTPUnauthorized. Currently HG clients have trouble |

222 | 223 | ## handling that, causing a series of failed authentication calls. |

223 | 224 | ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code. |

224 | 225 | ## This will be served instead of the default 401 on bad authentication |
|
225 | 226 | auth_ret_code = |
|
226 | 227 | |
|
227 | 228 | ## use special detection method when serving auth_ret_code, instead of serving |
|
228 | 229 | ## ret_code directly, use 401 initially (Which triggers credentials prompt) |
|
229 | 230 | ## and then serve auth_ret_code to clients |
|
230 | 231 | auth_ret_code_detection = false |
|
231 | 232 | |
|
232 | 233 | ## locking return code. When repository is locked return this HTTP code. 2XX |
|
233 | 234 | ## codes don't break the transactions while 4XX codes do |
|
234 | 235 | lock_ret_code = 423 |
|
235 | 236 | |
|
236 | 237 | ## allows changing the repository location in the settings page |
|
237 | 238 | allow_repo_location_change = true |
|
238 | 239 | |
|
239 | 240 | ## allows setting up custom hooks in the settings page |
|
240 | 241 | allow_custom_hooks_settings = true |
|
241 | 242 | |
|
242 | 243 | ## generated license token; go to the license page in RhodeCode settings to obtain |

243 | 244 | ## a new token |
|
244 | 245 | license_token = |
|
245 | 246 | |
|
246 | 247 | ## supervisor connection uri, for managing supervisor and logs. |
|
247 | 248 | supervisor.uri = |
|
248 | 249 | ## supervisord group name/id we only want this RC instance to handle |
|
249 | 250 | supervisor.group_id = prod |
|
250 | 251 | |
|
251 | 252 | ## Display extended labs settings |
|
252 | 253 | labs_settings_active = true |
|
253 | 254 | |
|
254 | 255 | #################################### |
|
255 | 256 | ### CELERY CONFIG #### |
|
256 | 257 | #################################### |
|
257 | 258 | use_celery = false |
|
258 | 259 | broker.host = localhost |
|
259 | 260 | broker.vhost = rabbitmqhost |
|
260 | 261 | broker.port = 5672 |
|
261 | 262 | broker.user = rabbitmq |
|
262 | 263 | broker.password = qweqwe |
|
263 | 264 | |
|
264 | 265 | celery.imports = rhodecode.lib.celerylib.tasks |
|
265 | 266 | |
|
266 | 267 | celery.result.backend = amqp |
|
267 | 268 | celery.result.dburi = amqp:// |
|
268 | 269 | celery.result.serialier = json |
|
269 | 270 | |
|
270 | 271 | #celery.send.task.error.emails = true |
|
271 | 272 | #celery.amqp.task.result.expires = 18000 |
|
272 | 273 | |
|
273 | 274 | celeryd.concurrency = 2 |
|
274 | 275 | #celeryd.log.file = celeryd.log |
|
275 | 276 | celeryd.log.level = debug |
|
276 | 277 | celeryd.max.tasks.per.child = 1 |
|
277 | 278 | |
|
278 | 279 | ## tasks will never be sent to the queue, but executed locally instead. |
|
279 | 280 | celery.always.eager = false |
|
280 | 281 | |
|
281 | 282 | #################################### |
|
282 | 283 | ### BEAKER CACHE #### |
|
283 | 284 | #################################### |
|
284 | 285 | ## default cache dir for templates. Putting this into a ramdisk |
|
285 | 286 | ## can boost performance, eg. %(here)s/data_ramdisk |
|
286 | 287 | cache_dir = %(here)s/data |
|
287 | 288 | |
|
288 | 289 | ## locking and default file storage for Beaker. Putting this into a ramdisk |
|
289 | 290 | ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data |
|
290 | 291 | beaker.cache.data_dir = %(here)s/data/cache/beaker_data |
|
291 | 292 | beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock |
|
292 | 293 | |
|
293 | 294 | beaker.cache.regions = super_short_term, short_term, long_term, sql_cache_short, auth_plugins, repo_cache_long |
|
294 | 295 | |
|
295 | 296 | beaker.cache.super_short_term.type = memory |
|
296 | 297 | beaker.cache.super_short_term.expire = 10 |
|
297 | 298 | beaker.cache.super_short_term.key_length = 256 |
|
298 | 299 | |
|
299 | 300 | beaker.cache.short_term.type = memory |
|
300 | 301 | beaker.cache.short_term.expire = 60 |
|
301 | 302 | beaker.cache.short_term.key_length = 256 |
|
302 | 303 | |
|
303 | 304 | beaker.cache.long_term.type = memory |
|
304 | 305 | beaker.cache.long_term.expire = 36000 |
|
305 | 306 | beaker.cache.long_term.key_length = 256 |
|
306 | 307 | |
|
307 | 308 | beaker.cache.sql_cache_short.type = memory |
|
308 | 309 | beaker.cache.sql_cache_short.expire = 10 |
|
309 | 310 | beaker.cache.sql_cache_short.key_length = 256 |
|
310 | 311 | |
|
311 | 312 | ## default is memory cache, configure only if required |
|
312 | 313 | ## using multi-node or multi-worker setup |
|
313 | 314 | #beaker.cache.auth_plugins.type = ext:database |
|
314 | 315 | #beaker.cache.auth_plugins.lock_dir = %(here)s/data/cache/auth_plugin_lock |
|
315 | 316 | #beaker.cache.auth_plugins.url = postgresql://postgres:secret@localhost/rhodecode |
|
316 | 317 | #beaker.cache.auth_plugins.url = mysql://root:secret@127.0.0.1/rhodecode |
|
317 | 318 | #beaker.cache.auth_plugins.sa.pool_recycle = 3600 |
|
318 | 319 | #beaker.cache.auth_plugins.sa.pool_size = 10 |
|
319 | 320 | #beaker.cache.auth_plugins.sa.max_overflow = 0 |
|
320 | 321 | |
|
321 | 322 | beaker.cache.repo_cache_long.type = memorylru_base |
|
322 | 323 | beaker.cache.repo_cache_long.max_items = 4096 |
|
323 | 324 | beaker.cache.repo_cache_long.expire = 2592000 |
|
324 | 325 | |
|
325 | 326 | ## default is memorylru_base cache, configure only if required |
|
326 | 327 | ## using multi-node or multi-worker setup |
|
327 | 328 | #beaker.cache.repo_cache_long.type = ext:memcached |
|
328 | 329 | #beaker.cache.repo_cache_long.url = localhost:11211 |
|
329 | 330 | #beaker.cache.repo_cache_long.expire = 1209600 |
|
330 | 331 | #beaker.cache.repo_cache_long.key_length = 256 |
|
331 | 332 | |
|
332 | 333 | #################################### |
|
333 | 334 | ### BEAKER SESSION #### |
|
334 | 335 | #################################### |
|
335 | 336 | |
|
336 | 337 | ## .session.type is the type of storage used for the session; currently allowed |
|
337 | 338 | ## types are file, ext:memcached, ext:database, and memory (default). |
|
338 | 339 | beaker.session.type = file |
|
339 | 340 | beaker.session.data_dir = %(here)s/data/sessions/data |
|
340 | 341 | |
|
341 | 342 | ## db based session, fast, and allows easy management of logged-in users |
|
342 | 343 | #beaker.session.type = ext:database |
|
343 | 344 | #beaker.session.table_name = db_session |
|
344 | 345 | #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode |
|
345 | 346 | #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode |
|
346 | 347 | #beaker.session.sa.pool_recycle = 3600 |
|
347 | 348 | #beaker.session.sa.echo = false |
|
348 | 349 | |
|
349 | 350 | beaker.session.key = rhodecode |
|
350 | 351 | beaker.session.secret = production-rc-uytcxaz |
|
351 | 352 | beaker.session.lock_dir = %(here)s/data/sessions/lock |
|
352 | 353 | |
|
353 | 354 | ## Secure encrypted cookie. Requires AES and AES python libraries |
|
354 | 355 | ## you must disable beaker.session.secret to use this |
|
355 | 356 | #beaker.session.encrypt_key = key_for_encryption |
|
356 | 357 | #beaker.session.validate_key = validation_key |
|
357 | 358 | |
|
358 | 359 | ## sets session as invalid (also logging out the user) if it has not been |

359 | 360 | ## accessed for the given amount of time in seconds |
|
360 | 361 | beaker.session.timeout = 2592000 |
|
361 | 362 | beaker.session.httponly = true |
|
362 | 363 | ## Path to use for the cookie. Set to prefix if you use prefix middleware |
|
363 | 364 | #beaker.session.cookie_path = /custom_prefix |
|
364 | 365 | |
|
365 | 366 | ## uncomment for https secure cookie |
|
366 | 367 | beaker.session.secure = false |
|
367 | 368 | |
|
368 | 369 | ## auto save the session so that you do not need to call .save() |
|
369 | 370 | beaker.session.auto = false |
|
370 | 371 | |
|
371 | 372 | ## default cookie expiration time in seconds, set to `true` to set expire |
|
372 | 373 | ## at browser close |
|
373 | 374 | #beaker.session.cookie_expires = 3600 |
|
374 | 375 | |
|
375 | 376 | ################################### |
|
376 | 377 | ## SEARCH INDEXING CONFIGURATION ## |
|
377 | 378 | ################################### |
|
378 | 379 | ## Full text search indexer is available in rhodecode-tools under |
|
379 | 380 | ## `rhodecode-tools index` command |
|
380 | 381 | |
|
381 | 382 | ## WHOOSH Backend, doesn't require additional services to run |
|
382 | 383 | ## it works well with a few dozen repos |
|
383 | 384 | search.module = rhodecode.lib.index.whoosh |
|
384 | 385 | search.location = %(here)s/data/index |
|
385 | 386 | |
|
386 | 387 | ######################################## |
|
387 | 388 | ### CHANNELSTREAM CONFIG #### |
|
388 | 389 | ######################################## |
|
389 | 390 | ## channelstream enables persistent connections and live notifications |
|
390 | 391 | ## in the system. It's also used by the chat system |
|
391 | 392 | channelstream.enabled = false |
|
392 | 393 | |
|
393 | 394 | ## server address for channelstream server on the backend |
|
394 | 395 | channelstream.server = 127.0.0.1:9800 |
|
395 | 396 | |
|
396 | 397 | ## location of the channelstream server from the outside world |
|
397 | 398 | ## use ws:// for http or wss:// for https. This address needs to be handled |
|
398 | 399 | ## by external HTTP server such as Nginx or Apache |
|
399 | 400 | ## see nginx/apache configuration examples in our docs |
|
400 | 401 | channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream |
|
401 | 402 | channelstream.secret = secret |
|
402 | 403 | channelstream.history.location = %(here)s/channelstream_history |
|
403 | 404 | |
|
404 | 405 | ## Internal application path that Javascript connects to. |
|
405 | 406 | ## If you use proxy-prefix the prefix should be added before /_channelstream |
|
406 | 407 | channelstream.proxy_path = /_channelstream |
|
407 | 408 | |
|
408 | 409 | |
|
409 | 410 | ################################### |
|
410 | 411 | ## APPENLIGHT CONFIG ## |
|
411 | 412 | ################################### |
|
412 | 413 | |
|
413 | 414 | ## Appenlight is tailored to work with RhodeCode, see |
|
414 | 415 | ## http://appenlight.com for details on how to obtain an account |
|
415 | 416 | |
|
416 | 417 | ## appenlight integration enabled |
|
417 | 418 | appenlight = false |
|
418 | 419 | |
|
419 | 420 | appenlight.server_url = https://api.appenlight.com |
|
420 | 421 | appenlight.api_key = YOUR_API_KEY |
|
421 | 422 | #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5 |
|
422 | 423 | |
|
423 | 424 | # used for JS client |
|
424 | 425 | appenlight.api_public_key = YOUR_API_PUBLIC_KEY |
|
425 | 426 | |
|
426 | 427 | ## TWEAK AMOUNT OF INFO SENT HERE |
|
427 | 428 | |
|
428 | 429 | ## enables 404 error logging (default False) |
|
429 | 430 | appenlight.report_404 = false |
|
430 | 431 | |
|
431 | 432 | ## time in seconds after request is considered being slow (default 1) |
|
432 | 433 | appenlight.slow_request_time = 1 |
|
433 | 434 | |
|
434 | 435 | ## record slow requests in application |
|
435 | 436 | ## (needs to be enabled for slow datastore recording and time tracking) |
|
436 | 437 | appenlight.slow_requests = true |
|
437 | 438 | |
|
438 | 439 | ## enable hooking to application loggers |
|
439 | 440 | appenlight.logging = true |
|
440 | 441 | |
|
441 | 442 | ## minimum log level for log capture |
|
442 | 443 | appenlight.logging.level = WARNING |
|
443 | 444 | |
|
444 | 445 | ## send logs only from erroneous/slow requests |
|
445 | 446 | ## (saves API quota for intensive logging) |
|
446 | 447 | appenlight.logging_on_error = false |
|
447 | 448 | |
|
448 | 449 | ## list of additional keywords that should be grabbed from the environ object |

449 | 450 | ## can be a string with a comma separated list of words in lowercase |

450 | 451 | ## (by default the client will always send the following info: |

451 | 452 | ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that |

452 | 453 | ## start with HTTP*); this list can be extended with additional keywords here |
|
453 | 454 | appenlight.environ_keys_whitelist = |
|
454 | 455 | |
|
455 | 456 | ## list of keywords that should be blanked from request object |
|
456 | 457 | ## can be string with comma separated list of words in lowercase |
|
457 | 458 | ## (by default client will always blank keys that contain following words |
|
458 | 459 | ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf' |
|
459 | 460 | ## this list can be extended with additional keywords set here |
|
460 | 461 | appenlight.request_keys_blacklist = |
|
461 | 462 | |
|
462 | 463 | ## list of namespaces that should be ignored when gathering log entries |
|
463 | 464 | ## can be string with comma separated list of namespaces |
|
464 | 465 | ## (by default the client ignores own entries: appenlight_client.client) |
|
465 | 466 | appenlight.log_namespace_blacklist = |
|
466 | 467 | |
|
467 | 468 | |
|
468 | 469 | ################################################################################ |
|
469 | 470 | ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ## |
|
470 | 471 | ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ## |
|
471 | 472 | ## execute malicious code after an exception is raised. ## |
|
472 | 473 | ################################################################################ |
|
473 | 474 | set debug = false |
|
474 | 475 | |
|
475 | 476 | |
|
476 | 477 | ########################################### |
|
477 | 478 | ### MAIN RHODECODE DATABASE CONFIG ### |
|
478 | 479 | ########################################### |
|
479 | 480 | #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30 |
|
480 | 481 | #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode |
|
481 | 482 | #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode |
|
482 | 483 | sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode |
|
483 | 484 | |
|
484 | 485 | # see sqlalchemy docs for other advanced settings |
|
485 | 486 | |
|
486 | 487 | ## print the sql statements to output |
|
487 | 488 | sqlalchemy.db1.echo = false |
|
488 | 489 | ## recycle the connections after this amount of seconds |
|
489 | 490 | sqlalchemy.db1.pool_recycle = 3600 |
|
490 | 491 | sqlalchemy.db1.convert_unicode = true |
|
491 | 492 | |
|
492 | 493 | ## the number of connections to keep open inside the connection pool. |
|
493 | 494 | ## 0 indicates no limit |
|
494 | 495 | #sqlalchemy.db1.pool_size = 5 |
|
495 | 496 | |
|
496 | 497 | ## the number of connections to allow in connection pool "overflow", that is |
|
497 | 498 | ## connections that can be opened above and beyond the pool_size setting, |
|
498 | 499 | ## which defaults to five. |
|
499 | 500 | #sqlalchemy.db1.max_overflow = 10 |
|
500 | 501 | |
|
501 | 502 | |
|
502 | 503 | ################## |
|
503 | 504 | ### VCS CONFIG ### |
|
504 | 505 | ################## |
|
505 | 506 | vcs.server.enable = true |
|
506 | 507 | vcs.server = localhost:9900 |
|
507 | 508 | |
|
508 | 509 | ## Web server connectivity protocol, responsible for web based VCS operations |
|
509 | 510 | ## Available protocols are: |
|
510 | 511 | ## `http` - use http-rpc backend (default) |
|
511 | 512 | vcs.server.protocol = http |
|
512 | 513 | |
|
513 | 514 | ## Push/Pull operations protocol, available options are: |
|
514 | 515 | ## `http` - use http-rpc backend (default) |
|
515 | 516 | ## |
|
516 | 517 | vcs.scm_app_implementation = http |
|
517 | 518 | |
|
518 | 519 | ## Push/Pull operations hooks protocol, available options are: |
|
519 | 520 | ## `http` - use http-rpc backend (default) |
|
520 | 521 | vcs.hooks.protocol = http |
|
521 | 522 | |
|
522 | 523 | vcs.server.log_level = info |
|
523 | 524 | ## Start VCSServer with this instance as a subprocess, useful for development |
|
524 | 525 | vcs.start_server = false |
|
525 | 526 | |
|
526 | 527 | ## List of enabled VCS backends, available options are: |
|
527 | 528 | ## `hg` - mercurial |
|
528 | 529 | ## `git` - git |
|
529 | 530 | ## `svn` - subversion |
|
530 | 531 | vcs.backends = hg, git, svn |
|
531 | 532 | |
|
532 | 533 | vcs.connection_timeout = 3600 |
|
533 | 534 | ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out. |
|
534 | 535 | ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible |
|
535 | 536 | #vcs.svn.compatible_version = pre-1.8-compatible |
|
536 | 537 | |
|
537 | 538 | |
|
538 | 539 | ############################################################ |
|
539 | 540 | ### Subversion proxy support (mod_dav_svn) ### |
|
540 | 541 | ### Maps RhodeCode repo groups into SVN paths for Apache ### |
|
541 | 542 | ############################################################ |
|
542 | 543 | ## Enable or disable the config file generation. |
|
543 | 544 | svn.proxy.generate_config = false |
|
544 | 545 | ## Generate config file with `SVNListParentPath` set to `On`. |
|
545 | 546 | svn.proxy.list_parent_path = true |
|
546 | 547 | ## Set location and file name of generated config file. |
|
547 | 548 | svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf |
|
548 | 549 | ## Used as a prefix to the `Location` block in the generated config file. |
|
549 | 550 | ## In most cases it should be set to `/`. |
|
550 | 551 | svn.proxy.location_root = / |
|
551 | 552 | ## Command to reload the mod dav svn configuration on change. |
|
552 | 553 | ## Example: `/etc/init.d/apache2 reload` |
|
553 | 554 | #svn.proxy.reload_cmd = /etc/init.d/apache2 reload |
|
554 | 555 | ## If the timeout expires before the reload command finishes, the command will |
|
555 | 556 | ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds. |
|
556 | 557 | #svn.proxy.reload_timeout = 10 |
|
557 | 558 | |
|
558 | 559 | ## Dummy marker to add new entries after. |
|
559 | 560 | ## Add any custom entries below. Please don't remove. |
|
560 | 561 | custom.conf = 1 |
|
561 | 562 | |
|
562 | 563 | |
|
563 | 564 | ################################ |
|
564 | 565 | ### LOGGING CONFIGURATION #### |
|
565 | 566 | ################################ |
|
566 | 567 | [loggers] |
|
567 | 568 | keys = root, routes, rhodecode, sqlalchemy, beaker, templates |
|
568 | 569 | |
|
569 | 570 | [handlers] |
|
570 | 571 | keys = console, console_sql |
|
571 | 572 | |
|
572 | 573 | [formatters] |
|
573 | 574 | keys = generic, color_formatter, color_formatter_sql |
|
574 | 575 | |
|
575 | 576 | ############# |
|
576 | 577 | ## LOGGERS ## |
|
577 | 578 | ############# |
|
578 | 579 | [logger_root] |
|
579 | 580 | level = NOTSET |
|
580 | 581 | handlers = console |
|
581 | 582 | |
|
582 | 583 | [logger_routes] |
|
583 | 584 | level = DEBUG |
|
584 | 585 | handlers = |
|
585 | 586 | qualname = routes.middleware |
|
586 | 587 | ## "level = DEBUG" logs the route matched and routing variables. |
|
587 | 588 | propagate = 1 |
|
588 | 589 | |
|
589 | 590 | [logger_beaker] |
|
590 | 591 | level = DEBUG |
|
591 | 592 | handlers = |
|
592 | 593 | qualname = beaker.container |
|
593 | 594 | propagate = 1 |
|
594 | 595 | |
|
595 | 596 | [logger_templates] |
|
596 | 597 | level = INFO |
|
597 | 598 | handlers = |
|
598 | 599 | qualname = pylons.templating |
|
599 | 600 | propagate = 1 |
|
600 | 601 | |
|
601 | 602 | [logger_rhodecode] |
|
602 | 603 | level = DEBUG |
|
603 | 604 | handlers = |
|
604 | 605 | qualname = rhodecode |
|
605 | 606 | propagate = 1 |
|
606 | 607 | |
|
607 | 608 | [logger_sqlalchemy] |
|
608 | 609 | level = INFO |
|
609 | 610 | handlers = console_sql |
|
610 | 611 | qualname = sqlalchemy.engine |
|
611 | 612 | propagate = 0 |
|
612 | 613 | |
|
613 | 614 | ############## |
|
614 | 615 | ## HANDLERS ## |
|
615 | 616 | ############## |
|
616 | 617 | |
|
617 | 618 | [handler_console] |
|
618 | 619 | class = StreamHandler |
|
619 | 620 | args = (sys.stderr, ) |
|
620 | 621 | level = INFO |
|
621 | 622 | formatter = generic |
|
622 | 623 | |
|
623 | 624 | [handler_console_sql] |
|
624 | 625 | class = StreamHandler |
|
625 | 626 | args = (sys.stderr, ) |
|
626 | 627 | level = WARN |
|
627 | 628 | formatter = generic |
|
628 | 629 | |
|
629 | 630 | ################ |
|
630 | 631 | ## FORMATTERS ## |
|
631 | 632 | ################ |
|
632 | 633 | |
|
633 | 634 | [formatter_generic] |
|
634 | 635 | class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter |
|
635 | 636 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
636 | 637 | datefmt = %Y-%m-%d %H:%M:%S |
|
637 | 638 | |
|
638 | 639 | [formatter_color_formatter] |
|
639 | 640 | class = rhodecode.lib.logging_formatter.ColorFormatter |
|
640 | 641 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
641 | 642 | datefmt = %Y-%m-%d %H:%M:%S |
|
642 | 643 | |
|
643 | 644 | [formatter_color_formatter_sql] |
|
644 | 645 | class = rhodecode.lib.logging_formatter.ColorFormatterSql |
|
645 | 646 | format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s |
|
646 | 647 | datefmt = %Y-%m-%d %H:%M:%S |
@@ -1,207 +1,207 b'' | |||
|
1 | 1 | .. _api: |
|
2 | 2 | |
|
3 | 3 | API Documentation |
|
4 | 4 | ================= |
|
5 | 5 | |
|
6 | 6 | The |RCE| API uses a single scheme for calling all API methods. The API is |
|
7 | 7 | implemented with a JSON protocol in both directions. To send API requests to |
|
8 | 8 | your instance of |RCE|, use the following URL format |
|
9 | 9 | ``<your_server>/_admin`` |
|
10 | 10 | |
|
11 | 11 | .. note:: |
|
12 | 12 | |
|
13 | 13 | To use the API, you should configure the :file:`~/.rhoderc` file with |
|
14 | 14 | access details per instance. For more information, see |
|
15 | 15 | :ref:`config-rhoderc`. |
|
16 | 16 | |
|
17 | 17 | |
|
18 | 18 | API ACCESS FOR WEB VIEWS |
|
19 | 19 | ------------------------ |
|
20 | 20 | |
|
21 | 21 | API access can also be turned on for each web view in |RCE| that is |
|
22 | 22 | decorated with a `@LoginRequired` decorator. To enable API access, change |
|
23 | 23 | the standard login decorator to `@LoginRequired(api_access=True)`. |
|
24 | 24 | |
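For illustration, a minimal sketch of such a view follows; the class and method names here are hypothetical, only the decorator usage is the point:

.. code-block:: python

    from rhodecode.lib.auth import LoginRequired

    class ExampleFilesView(object):

        # api_access=True additionally allows authentication via the
        # ?auth_token=<token> GET parameter described below
        @LoginRequired(api_access=True)
        def example_raw_file(self):
            return 'raw file content'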
|
25 | 25 | From |RCM| version 1.7.0 you can configure a whitelist |
|
26 | 26 | of views that have API access enabled by default. To enable these, |
|
27 | 27 | edit the |RCM| configuration ``.ini`` file. The default location is: |
|
28 | 28 | |
|
29 | 29 | * |RCM| Pre-2.2.7 :file:`root/rhodecode/data/production.ini` |
|
30 | 30 | * |RCM| 3.0 :file:`/home/{user}/.rccontrol/{instance-id}/rhodecode.ini` |
|
31 | 31 | |
|
32 | 32 | To configure the whitelist, edit this section of the file. In this |

33 | 33 | configuration example, API access is granted to the raw patch/diff file and |

34 | 34 | archive views. |
|
35 | 35 | |
|
36 | 36 | .. code-block:: ini |
|
37 | 37 | |
|
38 | 38 | ## List of controllers (using glob syntax) that AUTH TOKENS could be used for access. |
|
39 | 39 | ## Adding ?auth_token = <token> to the URL authenticates this request as if it |

40 | 40 | ## came from the logged-in user who owns this authentication token. |
|
41 | 41 | ## |
|
42 | 42 | ## Syntax is <ControllerClass>:<function_pattern>. |
|
43 | 43 | ## The list should be "," separated and on a single line. |
|
44 | 44 | ## |
|
45 | api_access_controllers_whitelist = ChangesetController:changeset_patch,ChangesetController:changeset_raw,FilesController:raw,FilesController:archivefile, | |
|
45 | api_access_controllers_whitelist = RepoCommitsView:repo_commit_raw,RepoCommitsView:repo_commit_patch,RepoCommitsView:repo_commit_download | |
|
46 | 46 | |
|
47 | 47 | After this change, a |RCE| view can be accessed without login by adding a |
|
48 | 48 | GET parameter ``?auth_token=<auth_token>`` to a URL. For example, to |

49 | 49 | access the raw diff: |
|
50 | 50 | |
|
51 | 51 | .. code-block:: html |
|
52 | 52 | |
|
53 | 53 | http://<server>/<repo>/changeset-diff/<sha>?auth_token=<auth_token> |
|
54 | 54 | |
|
55 | 55 | By default this is only enabled on RSS/ATOM feed views. Exposing raw diffs is a |
|
56 | 56 | good way to integrate with 3rd party services like code review, or build farms |
|
57 | 57 | that could download archives. |
|
58 | 58 | |
|
59 | 59 | API ACCESS |
|
60 | 60 | ---------- |
|
61 | 61 | |
|
62 | 62 | All clients are required to send JSON data that follows the JSON-RPC spec. |
|
63 | 63 | |
|
64 | 64 | .. code-block:: bash |
|
65 | 65 | |
|
66 | 66 | { |
|
67 | 67 | "id:"<id>", |
|
68 | 68 | "auth_token":"<auth_token>", |
|
69 | 69 | "method":"<method_name>", |
|
70 | 70 | "args":{"<arg_key>":"<arg_val>"} |
|
71 | 71 | } |
|
72 | 72 | |
|
73 | 73 | Example call for auto pulling from remote repositories using curl: |
|
74 | 74 | |
|
75 | 75 | .. code-block:: bash |
|
76 | 76 | |
|
77 | 77 | curl https://server.com/_admin/api -X POST -H 'content-type:text/plain' --data-binary '{"id":1, |
|
78 | 78 | "auth_token":"xe7cdb2v278e4evbdf5vs04v832v0efvcbcve4a3","method":"pull", "args":{"repoid":"CPython"}}' |
|
79 | 79 | |
|
80 | 80 | Provide these parameters: |

81 | 81 | - **id** A value of any type, which is used to match the response with the |

82 | 82 | request that it is replying to. |

83 | 83 | - **auth_token** for access and permission validation. |

84 | 84 | - **method** is the name of the method to call |

85 | 85 | - **args** is a ``key:value`` map of arguments to pass to the method |
|
86 | 86 | |
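The same call can also be made from Python. A minimal sketch using only the
Python 2 standard library; the server URL and auth token below are
placeholders, and the method and arguments mirror the curl example above:

.. code-block:: python

    import json
    import urllib2

    # JSON-RPC payload in the format described above
    payload = {
        "id": 1,
        "auth_token": "<auth_token>",
        "method": "pull",
        "args": {"repoid": "CPython"},
    }
    request = urllib2.Request(
        "https://server.com/_admin/api",
        data=json.dumps(payload),
        headers={"content-type": "text/plain"},
    )
    response = json.loads(urllib2.urlopen(request).read())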
|
87 | 87 | .. note:: |
|
88 | 88 | |
|
89 | 89 | To get your |authtoken|, from the |RCE| interface, |
|
90 | 90 | go to: |
|
91 | 91 | :menuselection:`username --> My account --> Auth tokens` |
|
92 | 92 | |
|
93 | 93 | For security reasons you should always create a dedicated |authtoken| for |
|
94 | 94 | API use only. |
|
95 | 95 | |
|
96 | 96 | |
|
97 | 97 | The |RCE| API will always return a JSON-RPC response: |
|
98 | 98 | |
|
99 | 99 | .. code-block:: bash |
|
100 | 100 | |
|
101 | 101 | { |
|
102 | 102 | "id": <id>, # matching id sent by request |
|
103 | 103 | "result": "<result>"|null, # JSON formatted result, null if any errors |
|
104 | 104 | "error": "null"|<error_message> # JSON formatted error (if any) |
|
105 | 105 | } |
|
106 | 106 | |
|
107 | 107 | All responses from the API are returned with an `HTTP/1.0 200 OK` status code. |
|
108 | 108 | If there is an error when calling the API, the *error* key will contain a |
|
109 | 109 | failure description and the *result* will be `null`. |
|
110 | 110 | |
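Building on the sketch above, a client must therefore detect failures by
inspecting the JSON body rather than the HTTP status code, for example:

.. code-block:: python

    # ``response`` is the decoded JSON-RPC reply from the previous sketch;
    # the ``error`` key is JSON null (Python None) on success.
    if response["error"] is not None:
        raise RuntimeError("API call failed: %s" % response["error"])
    result = response["result"]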
|
111 | 111 | API CLIENT |
|
112 | 112 | ---------- |
|
113 | 113 | |
|
114 | 114 | To install the |RCE| API, see :ref:`install-tools`. To configure the API per |
|
115 | 115 | instance, see the :ref:`rc-tools` section as you need to configure a |
|
116 | 116 | :file:`~/.rhoderc` file with your |authtokens|. |
|
117 | 117 | |
|
118 | 118 | Once you have set up your instance API access, use the following examples to |
|
119 | 119 | get started. |
|
120 | 120 | |
|
121 | 121 | .. code-block:: bash |
|
122 | 122 | |
|
123 | 123 | # Getting the 'rhodecode' repository |
|
124 | 124 | # from a RhodeCode Enterprise instance |
|
125 | 125 | rhodecode-api --instance-name=enterprise-1 get_repo repoid:rhodecode |
|
126 | 126 | |
|
127 | 127 | Calling method get_repo => http://127.0.0.1:5000 |
|
128 | 128 | Server response |
|
129 | 129 | { |
|
130 | 130 | <json data> |
|
131 | 131 | } |
|
132 | 132 | |
|
133 | 133 | # Creating a new mercurial repository called 'brand-new' |
|
134 | 134 | # with a description 'Repo-description' |
|
135 | 135 | rhodecode-api --instance-name=enterprise-1 create_repo repo_name:brand-new repo_type:hg description:Repo-description |
|
136 | 136 | { |
|
137 | 137 | "error": null, |
|
138 | 138 | "id": 1110, |
|
139 | 139 | "result": { |
|
140 | 140 | "msg": "Created new repository `brand-new`", |
|
141 | 141 | "success": true, |
|
142 | 142 | "task": null |
|
143 | 143 | } |
|
144 | 144 | } |
|
145 | 145 | |
|
146 | 146 | A broken example of what not to do: |
|
147 | 147 | |
|
148 | 148 | .. code-block:: bash |
|
149 | 149 | |
|
150 | 150 | # A call missing the required arguments |
|
151 | 151 | # and not specifying the instance |
|
152 | 152 | rhodecode-api get_repo |
|
153 | 153 | |
|
154 | 154 | Calling method get_repo => http://127.0.0.1:5000 |
|
155 | 155 | Server response |
|
156 | 156 | "Missing non optional `repoid` arg in JSON DATA" |
|
157 | 157 | |
|
158 | 158 | You can specify pure JSON using the ``--format`` parameter. |
|
159 | 159 | |
|
160 | 160 | .. code-block:: bash |
|
161 | 161 | |
|
162 | 162 | rhodecode-api --format=json get_repo repoid:rhodecode |
|
163 | 163 | |
|
164 | 164 | In that case, the only output the command prints is pure JSON, with no |

165 | 165 | extra status lines around it. |

166 | 166 | |

167 | 167 | Since the output is pure JSON, you can pipe it to a JSON formatter. |
|
168 | 168 | |
|
169 | 169 | .. code-block:: bash |
|
170 | 170 | |
|
171 | 171 | rhodecode-api --instance-name=enterprise-1 --format=json get_repo repoid:rhodecode | python -m json.tool |
|
172 | 172 | |
|
173 | 173 | API METHODS |
|
174 | 174 | ----------- |
|
175 | 175 | |
|
176 | 176 | Each method requires the following arguments by default. |
|
177 | 177 | |
|
178 | 178 | .. code-block:: bash |
|
179 | 179 | |
|
180 | 180 | id : "<id_for_response>" |
|
181 | 181 | auth_token : "<auth_token>" |
|
182 | 182 | method : "<method name>" |
|
183 | 183 | args : {} |
|
184 | 184 | |
|
185 | 185 | Use each **param** from the docs and put it in args. Optional parameters |

186 | 186 | are not required in args. |
|
187 | 187 | |
|
188 | 188 | .. code-block:: bash |
|
189 | 189 | |
|
190 | 190 | args: {"repoid": "rhodecode"} |
|
191 | 191 | |
|
192 | 192 | .. Note: From this point on things are generated by the script in |
|
193 | 193 | `scripts/fabfile.py`. To change things below, update the docstrings in the |
|
194 | 194 | ApiController. |
|
195 | 195 | |
|
196 | 196 | .. --- API DEFS MARKER --- |
|
197 | 197 | .. toctree:: |
|
198 | 198 | |
|
199 | 199 | methods/license-methods |
|
200 | 200 | methods/deprecated-methods |
|
201 | 201 | methods/gist-methods |
|
202 | 202 | methods/pull-request-methods |
|
203 | 203 | methods/repo-methods |
|
204 | 204 | methods/repo-group-methods |
|
205 | 205 | methods/server-methods |
|
206 | 206 | methods/user-methods |
|
207 | 207 | methods/user-group-methods |
@@ -1,304 +1,304 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import re |
|
22 | 22 | import logging |
|
23 | 23 | |
|
24 | 24 | from pyramid.view import view_config |
|
25 | 25 | |
|
26 | 26 | from rhodecode.apps._base import BaseAppView |
|
27 | 27 | from rhodecode.lib import helpers as h |
|
28 | 28 | from rhodecode.lib.auth import LoginRequired, NotAnonymous, \ |
|
29 | 29 | HasRepoGroupPermissionAnyDecorator |
|
30 | 30 | from rhodecode.lib.index import searcher_from_config |
|
31 | 31 | from rhodecode.lib.utils2 import safe_unicode, str2bool |
|
32 | 32 | from rhodecode.lib.ext_json import json |
|
33 | 33 | from rhodecode.model.db import func, Repository, RepoGroup |
|
34 | 34 | from rhodecode.model.repo import RepoModel |
|
35 | 35 | from rhodecode.model.repo_group import RepoGroupModel |
|
36 | 36 | from rhodecode.model.scm import ScmModel, RepoGroupList, RepoList |
|
37 | 37 | from rhodecode.model.user import UserModel |
|
38 | 38 | from rhodecode.model.user_group import UserGroupModel |
|
39 | 39 | |
|
40 | 40 | log = logging.getLogger(__name__) |
|
41 | 41 | |
|
42 | 42 | |
|
43 | 43 | class HomeView(BaseAppView): |
|
44 | 44 | |
|
45 | 45 | def load_default_context(self): |
|
46 | 46 | c = self._get_local_tmpl_context() |
|
47 | 47 | c.user = c.auth_user.get_instance() |
|
48 | 48 | self._register_global_c(c) |
|
49 | 49 | return c |
|
50 | 50 | |
|
51 | 51 | @LoginRequired() |
|
52 | 52 | @view_config( |
|
53 | 53 | route_name='user_autocomplete_data', request_method='GET', |
|
54 | 54 | renderer='json_ext', xhr=True) |
|
55 | 55 | def user_autocomplete_data(self): |
|
56 | 56 | query = self.request.GET.get('query') |
|
57 | 57 | active = str2bool(self.request.GET.get('active') or True) |
|
58 | 58 | include_groups = str2bool(self.request.GET.get('user_groups')) |
|
59 | 59 | expand_groups = str2bool(self.request.GET.get('user_groups_expand')) |
|
60 | 60 | skip_default_user = str2bool(self.request.GET.get('skip_default_user')) |
|
61 | 61 | |
|
62 | 62 | log.debug('generating user list, query:%s, active:%s, with_groups:%s', |
|
63 | 63 | query, active, include_groups) |
|
64 | 64 | |
|
65 | 65 | _users = UserModel().get_users( |
|
66 | 66 | name_contains=query, only_active=active) |
|
67 | 67 | |
|
68 | 68 | def maybe_skip_default_user(usr): |
|
69 | 69 | if skip_default_user and usr['username'] == UserModel.cls.DEFAULT_USER: |
|
70 | 70 | return False |
|
71 | 71 | return True |
|
72 | 72 | _users = filter(maybe_skip_default_user, _users) |
|
73 | 73 | |
|
74 | 74 | if include_groups: |
|
75 | 75 | # extend with user groups |
|
76 | 76 | _user_groups = UserGroupModel().get_user_groups( |
|
77 | 77 | name_contains=query, only_active=active, |
|
78 | 78 | expand_groups=expand_groups) |
|
79 | 79 | _users = _users + _user_groups |
|
80 | 80 | |
|
81 | 81 | return {'suggestions': _users} |
|
82 | 82 | |
|
83 | 83 | @LoginRequired() |
|
84 | 84 | @NotAnonymous() |
|
85 | 85 | @view_config( |
|
86 | 86 | route_name='user_group_autocomplete_data', request_method='GET', |
|
87 | 87 | renderer='json_ext', xhr=True) |
|
88 | 88 | def user_group_autocomplete_data(self): |
|
89 | 89 | query = self.request.GET.get('query') |
|
90 | 90 | active = str2bool(self.request.GET.get('active') or True) |
|
91 | 91 | expand_groups = str2bool(self.request.GET.get('user_groups_expand')) |
|
92 | 92 | |
|
93 | 93 | log.debug('generating user group list, query:%s, active:%s', |
|
94 | 94 | query, active) |
|
95 | 95 | |
|
96 | 96 | _user_groups = UserGroupModel().get_user_groups( |
|
97 | 97 | name_contains=query, only_active=active, |
|
98 | 98 | expand_groups=expand_groups) |
|
99 | 99 | _user_groups = _user_groups |
|
100 | 100 | |
|
101 | 101 | return {'suggestions': _user_groups} |
|
102 | 102 | |
|
103 | 103 | def _get_repo_list(self, name_contains=None, repo_type=None, limit=20): |
|
104 | 104 | query = Repository.query()\ |
|
105 | 105 | .order_by(func.length(Repository.repo_name))\ |
|
106 | 106 | .order_by(Repository.repo_name) |
|
107 | 107 | |
|
108 | 108 | if repo_type: |
|
109 | 109 | query = query.filter(Repository.repo_type == repo_type) |
|
110 | 110 | |
|
111 | 111 | if name_contains: |
|
112 | 112 | ilike_expression = u'%{}%'.format(safe_unicode(name_contains)) |
|
113 | 113 | query = query.filter( |
|
114 | 114 | Repository.repo_name.ilike(ilike_expression)) |
|
115 | 115 | query = query.limit(limit) |
|
116 | 116 | |
|
117 | 117 | all_repos = query.all() |
|
118 | 118 | # permission checks are inside this function |
|
119 | 119 | repo_iter = ScmModel().get_repos(all_repos) |
|
120 | 120 | return [ |
|
121 | 121 | { |
|
122 | 122 | 'id': obj['name'], |
|
123 | 123 | 'text': obj['name'], |
|
124 | 124 | 'type': 'repo', |
|
125 | 125 | 'obj': obj['dbrepo'], |
|
126 | 126 | 'url': h.route_path('repo_summary', repo_name=obj['name']) |
|
127 | 127 | } |
|
128 | 128 | for obj in repo_iter] |
|
129 | 129 | |
|
130 | 130 | def _get_repo_group_list(self, name_contains=None, limit=20): |
|
131 | 131 | query = RepoGroup.query()\ |
|
132 | 132 | .order_by(func.length(RepoGroup.group_name))\ |
|
133 | 133 | .order_by(RepoGroup.group_name) |
|
134 | 134 | |
|
135 | 135 | if name_contains: |
|
136 | 136 | ilike_expression = u'%{}%'.format(safe_unicode(name_contains)) |
|
137 | 137 | query = query.filter( |
|
138 | 138 | RepoGroup.group_name.ilike(ilike_expression)) |
|
139 | 139 | query = query.limit(limit) |
|
140 | 140 | |
|
141 | 141 | all_groups = query.all() |
|
142 | 142 | repo_groups_iter = ScmModel().get_repo_groups(all_groups) |
|
143 | 143 | return [ |
|
144 | 144 | { |
|
145 | 145 | 'id': obj.group_name, |
|
146 | 146 | 'text': obj.group_name, |
|
147 | 147 | 'type': 'group', |
|
148 | 148 | 'obj': {}, |
|
149 | 149 | 'url': h.route_path('repo_group_home', repo_group_name=obj.group_name) |
|
150 | 150 | } |
|
151 | 151 | for obj in repo_groups_iter] |
|
152 | 152 | |
|
153 | 153 | def _get_hash_commit_list(self, auth_user, hash_starts_with=None): |
|
154 | 154 | if not hash_starts_with or len(hash_starts_with) < 3: |
|
155 | 155 | return [] |
|
156 | 156 | |
|
157 | 157 | commit_hashes = re.compile('([0-9a-f]{2,40})').findall(hash_starts_with) |
|
158 | 158 | |
|
159 | 159 | if len(commit_hashes) != 1: |
|
160 | 160 | return [] |
|
161 | 161 | |
|
162 | 162 | commit_hash_prefix = commit_hashes[0] |
|
163 | 163 | |
|
164 | 164 | searcher = searcher_from_config(self.request.registry.settings) |
|
165 | 165 | result = searcher.search( |
|
166 | 166 | 'commit_id:%s*' % commit_hash_prefix, 'commit', auth_user, |
|
167 | 167 | raise_on_exc=False) |
|
168 | 168 | |
|
169 | 169 | return [ |
|
170 | 170 | { |
|
171 | 171 | 'id': entry['commit_id'], |
|
172 | 172 | 'text': entry['commit_id'], |
|
173 | 173 | 'type': 'commit', |
|
174 | 174 | 'obj': {'repo': entry['repository']}, |
|
175 | 'url': h. | |
|
175 | 'url': h.route_path('repo_commit', | |
|
176 | 176 | repo_name=entry['repository'], |
|
177 | | |
|
177 | commit_id=entry['commit_id']) | |
|
178 | 178 | } |
|
179 | 179 | for entry in result['results']] |
|
180 | 180 | |
|
181 | 181 | @LoginRequired() |
|
182 | 182 | @view_config( |
|
183 | 183 | route_name='repo_list_data', request_method='GET', |
|
184 | 184 | renderer='json_ext', xhr=True) |
|
185 | 185 | def repo_list_data(self): |
|
186 | 186 | _ = self.request.translate |
|
187 | 187 | |
|
188 | 188 | query = self.request.GET.get('query') |
|
189 | 189 | repo_type = self.request.GET.get('repo_type') |
|
190 | 190 | log.debug('generating repo list, query:%s, repo_type:%s', |
|
191 | 191 | query, repo_type) |
|
192 | 192 | |
|
193 | 193 | res = [] |
|
194 | 194 | repos = self._get_repo_list(query, repo_type=repo_type) |
|
195 | 195 | if repos: |
|
196 | 196 | res.append({ |
|
197 | 197 | 'text': _('Repositories'), |
|
198 | 198 | 'children': repos |
|
199 | 199 | }) |
|
200 | 200 | |
|
201 | 201 | data = { |
|
202 | 202 | 'more': False, |
|
203 | 203 | 'results': res |
|
204 | 204 | } |
|
205 | 205 | return data |
|
206 | 206 | |
|
207 | 207 | @LoginRequired() |
|
208 | 208 | @view_config( |
|
209 | 209 | route_name='goto_switcher_data', request_method='GET', |
|
210 | 210 | renderer='json_ext', xhr=True) |
|
211 | 211 | def goto_switcher_data(self): |
|
212 | 212 | c = self.load_default_context() |
|
213 | 213 | |
|
214 | 214 | _ = self.request.translate |
|
215 | 215 | |
|
216 | 216 | query = self.request.GET.get('query') |
|
217 | 217 | log.debug('generating goto switcher list, query %s', query) |
|
218 | 218 | |
|
219 | 219 | res = [] |
|
220 | 220 | repo_groups = self._get_repo_group_list(query) |
|
221 | 221 | if repo_groups: |
|
222 | 222 | res.append({ |
|
223 | 223 | 'text': _('Groups'), |
|
224 | 224 | 'children': repo_groups |
|
225 | 225 | }) |
|
226 | 226 | |
|
227 | 227 | repos = self._get_repo_list(query) |
|
228 | 228 | if repos: |
|
229 | 229 | res.append({ |
|
230 | 230 | 'text': _('Repositories'), |
|
231 | 231 | 'children': repos |
|
232 | 232 | }) |
|
233 | 233 | |
|
234 | 234 | commits = self._get_hash_commit_list(c.auth_user, query) |
|
235 | 235 | if commits: |
|
236 | 236 | unique_repos = {} |
|
237 | 237 | for commit in commits: |
|
238 | 238 | unique_repos.setdefault(commit['obj']['repo'], [] |
|
239 | 239 | ).append(commit) |
|
240 | 240 | |
|
241 | 241 | for repo in unique_repos: |
|
242 | 242 | res.append({ |
|
243 | 243 | 'text': _('Commits in %(repo)s') % {'repo': repo}, |
|
244 | 244 | 'children': unique_repos[repo] |
|
245 | 245 | }) |
|
246 | 246 | |
|
247 | 247 | data = { |
|
248 | 248 | 'more': False, |
|
249 | 249 | 'results': res |
|
250 | 250 | } |
|
251 | 251 | return data |
|
252 | 252 | |
|
253 | 253 | def _get_groups_and_repos(self, repo_group_id=None): |
|
254 | 254 | # repo groups |
|
255 | 255 | repo_group_list = RepoGroup.get_all_repo_groups(group_id=repo_group_id) |
|
256 | 256 | _perms = ['group.read', 'group.write', 'group.admin'] |
|
257 | 257 | repo_group_list_acl = RepoGroupList(repo_group_list, perm_set=_perms) |
|
258 | 258 | repo_group_data = RepoGroupModel().get_repo_groups_as_dict( |
|
259 | 259 | repo_group_list=repo_group_list_acl, admin=False) |
|
260 | 260 | |
|
261 | 261 | # repositories |
|
262 | 262 | repo_list = Repository.get_all_repos(group_id=repo_group_id) |
|
263 | 263 | _perms = ['repository.read', 'repository.write', 'repository.admin'] |
|
264 | 264 | repo_list_acl = RepoList(repo_list, perm_set=_perms) |
|
265 | 265 | repo_data = RepoModel().get_repos_as_dict( |
|
266 | 266 | repo_list=repo_list_acl, admin=False) |
|
267 | 267 | |
|
268 | 268 | return repo_data, repo_group_data |
|
269 | 269 | |
|
270 | 270 | @LoginRequired() |
|
271 | 271 | @view_config( |
|
272 | 272 | route_name='home', request_method='GET', |
|
273 | 273 | renderer='rhodecode:templates/index.mako') |
|
274 | 274 | def main_page(self): |
|
275 | 275 | c = self.load_default_context() |
|
276 | 276 | c.repo_group = None |
|
277 | 277 | |
|
278 | 278 | repo_data, repo_group_data = self._get_groups_and_repos() |
|
279 | 279 | # json used to render the grids |
|
280 | 280 | c.repos_data = json.dumps(repo_data) |
|
281 | 281 | c.repo_groups_data = json.dumps(repo_group_data) |
|
282 | 282 | |
|
283 | 283 | return self._get_template_context(c) |
|
284 | 284 | |
|
285 | 285 | @LoginRequired() |
|
286 | 286 | @HasRepoGroupPermissionAnyDecorator( |
|
287 | 287 | 'group.read', 'group.write', 'group.admin') |
|
288 | 288 | @view_config( |
|
289 | 289 | route_name='repo_group_home', request_method='GET', |
|
290 | 290 | renderer='rhodecode:templates/index_repo_group.mako') |
|
291 | 291 | @view_config( |
|
292 | 292 | route_name='repo_group_home_slash', request_method='GET', |
|
293 | 293 | renderer='rhodecode:templates/index_repo_group.mako') |
|
294 | 294 | def repo_group_main_page(self): |
|
295 | 295 | c = self.load_default_context() |
|
296 | 296 | c.repo_group = self.request.db_repo_group |
|
297 | 297 | repo_data, repo_group_data = self._get_groups_and_repos( |
|
298 | 298 | c.repo_group.group_id) |
|
299 | 299 | |
|
300 | 300 | # json used to render the grids |
|
301 | 301 | c.repos_data = json.dumps(repo_data) |
|
302 | 302 | c.repo_groups_data = json.dumps(repo_group_data) |
|
303 | 303 | |
|
304 | 304 | return self._get_template_context(c) |
@@ -1,530 +1,529 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import urlparse |
|
22 | 22 | |
|
23 | 23 | import mock |
|
24 | 24 | import pytest |
|
25 | 25 | |
|
26 | 26 | from rhodecode.tests import ( |
|
27 | 27 | assert_session_flash, HG_REPO, TEST_USER_ADMIN_LOGIN, |
|
28 | 28 | no_newline_id_generator) |
|
29 | 29 | from rhodecode.tests.fixture import Fixture |
|
30 | 30 | from rhodecode.lib.auth import check_password |
|
31 | 31 | from rhodecode.lib import helpers as h |
|
32 | 32 | from rhodecode.model.auth_token import AuthTokenModel |
|
33 | 33 | from rhodecode.model import validators |
|
34 | 34 | from rhodecode.model.db import User, Notification, UserApiKeys |
|
35 | 35 | from rhodecode.model.meta import Session |
|
36 | 36 | |
|
37 | 37 | fixture = Fixture() |
|
38 | 38 | |
|
39 | whitelist_view = ['RepoCommitsView:repo_commit_raw'] | |
|
40 | ||
|
39 | 41 | |
|
40 | 42 | def route_path(name, params=None, **kwargs): |
|
41 | 43 | import urllib |
|
42 | 44 | from rhodecode.apps._base import ADMIN_PREFIX |
|
43 | 45 | |
|
44 | 46 | base_url = { |
|
45 | 47 | 'login': ADMIN_PREFIX + '/login', |
|
46 | 48 | 'logout': ADMIN_PREFIX + '/logout', |
|
47 | 49 | 'register': ADMIN_PREFIX + '/register', |
|
48 | 50 | 'reset_password': |
|
49 | 51 | ADMIN_PREFIX + '/password_reset', |
|
50 | 52 | 'reset_password_confirmation': |
|
51 | 53 | ADMIN_PREFIX + '/password_reset_confirmation', |
|
52 | 54 | |
|
53 | 55 | 'admin_permissions_application': |
|
54 | 56 | ADMIN_PREFIX + '/permissions/application', |
|
55 | 57 | 'admin_permissions_application_update': |
|
56 | 58 | ADMIN_PREFIX + '/permissions/application/update', |
|
57 | 59 | |
|
58 | 60 | 'repo_commit_raw': '/{repo_name}/raw-changeset/{commit_id}' |
|
59 | 61 | |
|
60 | 62 | }[name].format(**kwargs) |
|
61 | 63 | |
|
62 | 64 | if params: |
|
63 | 65 | base_url = '{}?{}'.format(base_url, urllib.urlencode(params)) |
|
64 | 66 | return base_url |
|
65 | 67 | |
|
66 | 68 | |
|
67 | 69 | @pytest.mark.usefixtures('app') |
|
68 | 70 | class TestLoginController(object): |
|
69 | 71 | destroy_users = set() |
|
70 | 72 | |
|
71 | 73 | @classmethod |
|
72 | 74 | def teardown_class(cls): |
|
73 | 75 | fixture.destroy_users(cls.destroy_users) |
|
74 | 76 | |
|
75 | 77 | def teardown_method(self, method): |
|
76 | 78 | for n in Notification.query().all(): |
|
77 | 79 | Session().delete(n) |
|
78 | 80 | |
|
79 | 81 | Session().commit() |
|
80 | 82 | assert Notification.query().all() == [] |
|
81 | 83 | |
|
82 | 84 | def test_index(self): |
|
83 | 85 | response = self.app.get(route_path('login')) |
|
84 | 86 | assert response.status == '200 OK' |
|
85 | 87 | # Test response... |
|
86 | 88 | |
|
87 | 89 | def test_login_admin_ok(self): |
|
88 | 90 | response = self.app.post(route_path('login'), |
|
89 | 91 | {'username': 'test_admin', |
|
90 | 92 | 'password': 'test12'}) |
|
91 | 93 | assert response.status == '302 Found' |
|
92 | 94 | session = response.get_session_from_response() |
|
93 | 95 | username = session['rhodecode_user'].get('username') |
|
94 | 96 | assert username == 'test_admin' |
|
95 | 97 | response = response.follow() |
|
96 | 98 | response.mustcontain('/%s' % HG_REPO) |
|
97 | 99 | |
|
98 | 100 | def test_login_regular_ok(self): |
|
99 | 101 | response = self.app.post(route_path('login'), |
|
100 | 102 | {'username': 'test_regular', |
|
101 | 103 | 'password': 'test12'}) |
|
102 | 104 | |
|
103 | 105 | assert response.status == '302 Found' |
|
104 | 106 | session = response.get_session_from_response() |
|
105 | 107 | username = session['rhodecode_user'].get('username') |
|
106 | 108 | assert username == 'test_regular' |
|
107 | 109 | response = response.follow() |
|
108 | 110 | response.mustcontain('/%s' % HG_REPO) |
|
109 | 111 | |
|
110 | 112 | def test_login_ok_came_from(self): |
|
111 | 113 | test_came_from = '/_admin/users?branch=stable' |
|
112 | 114 | _url = '{}?came_from={}'.format(route_path('login'), test_came_from) |
|
113 | 115 | response = self.app.post( |
|
114 | 116 | _url, {'username': 'test_admin', 'password': 'test12'}) |
|
115 | 117 | assert response.status == '302 Found' |
|
116 | 118 | assert 'branch=stable' in response.location |
|
117 | 119 | response = response.follow() |
|
118 | 120 | |
|
119 | 121 | assert response.status == '200 OK' |
|
120 | 122 | response.mustcontain('Users administration') |
|
121 | 123 | |
|
122 | 124 | def test_redirect_to_login_with_get_args(self): |
|
123 | 125 | with fixture.anon_access(False): |
|
124 | 126 | kwargs = {'branch': 'stable'} |
|
125 | 127 | response = self.app.get( |
|
126 | 128 | h.route_path('repo_summary', repo_name=HG_REPO, _query=kwargs)) |
|
127 | 129 | assert response.status == '302 Found' |
|
128 | 130 | |
|
129 | 131 | response_query = urlparse.parse_qsl(response.location) |
|
130 | 132 | assert 'branch=stable' in response_query[0][1] |
|
131 | 133 | |
|
132 | 134 | def test_login_form_with_get_args(self): |
|
133 | 135 | _url = '{}?came_from=/_admin/users,branch=stable'.format(route_path('login')) |
|
134 | 136 | response = self.app.get(_url) |
|
135 | 137 | assert 'branch%3Dstable' in response.form.action |
|
136 | 138 | |
|
137 | 139 | @pytest.mark.parametrize("url_came_from", [ |
|
138 | 140 | 'data:text/html,<script>window.alert("xss")</script>', |
|
139 | 141 | 'mailto:test@rhodecode.org', |
|
140 | 142 | 'file:///etc/passwd', |
|
141 | 143 | 'ftp://some.ftp.server', |
|
142 | 144 | 'http://other.domain', |
|
143 | 145 | '/\r\nX-Forwarded-Host: http://example.org', |
|
144 | 146 | ], ids=no_newline_id_generator) |
|
145 | 147 | def test_login_bad_came_froms(self, url_came_from): |
|
146 | 148 | _url = '{}?came_from={}'.format(route_path('login'), url_came_from) |
|
147 | 149 | response = self.app.post( |
|
148 | 150 | _url, |
|
149 | 151 | {'username': 'test_admin', 'password': 'test12'}) |
|
150 | 152 | assert response.status == '302 Found' |
|
151 | 153 | response = response.follow() |
|
152 | 154 | assert response.status == '200 OK' |
|
153 | 155 | assert response.request.path == '/' |
|
154 | 156 | |
|
155 | 157 | def test_login_short_password(self): |
|
156 | 158 | response = self.app.post(route_path('login'), |
|
157 | 159 | {'username': 'test_admin', |
|
158 | 160 | 'password': 'as'}) |
|
159 | 161 | assert response.status == '200 OK' |
|
160 | 162 | |
|
161 | 163 | response.mustcontain('Enter 3 characters or more') |
|
162 | 164 | |
|
163 | 165 | def test_login_wrong_non_ascii_password(self, user_regular): |
|
164 | 166 | response = self.app.post( |
|
165 | 167 | route_path('login'), |
|
166 | 168 | {'username': user_regular.username, |
|
167 | 169 | 'password': u'invalid-non-asci\xe4'.encode('utf8')}) |
|
168 | 170 | |
|
169 | 171 | response.mustcontain('invalid user name') |
|
170 | 172 | response.mustcontain('invalid password') |
|
171 | 173 | |
|
172 | 174 | def test_login_with_non_ascii_password(self, user_util): |
|
173 | 175 | password = u'valid-non-ascii\xe4' |
|
174 | 176 | user = user_util.create_user(password=password) |
|
175 | 177 | response = self.app.post( |
|
176 | 178 | route_path('login'), |
|
177 | 179 | {'username': user.username, |
|
178 | 180 | 'password': password.encode('utf-8')}) |
|
179 | 181 | assert response.status_code == 302 |
|
180 | 182 | |
|
181 | 183 | def test_login_wrong_username_password(self): |
|
182 | 184 | response = self.app.post(route_path('login'), |
|
183 | 185 | {'username': 'error', |
|
184 | 186 | 'password': 'test12'}) |
|
185 | 187 | |
|
186 | 188 | response.mustcontain('invalid user name') |
|
187 | 189 | response.mustcontain('invalid password') |
|
188 | 190 | |
|
189 | 191 | def test_login_admin_ok_password_migration(self, real_crypto_backend): |
|
190 | 192 | from rhodecode.lib import auth |
|
191 | 193 | |
|
192 | 194 | # create new user, with sha256 password |
|
193 | 195 | temp_user = 'test_admin_sha256' |
|
194 | 196 | user = fixture.create_user(temp_user) |
|
195 | 197 | user.password = auth._RhodeCodeCryptoSha256().hash_create( |
|
196 | 198 | b'test123') |
|
197 | 199 | Session().add(user) |
|
198 | 200 | Session().commit() |
|
199 | 201 | self.destroy_users.add(temp_user) |
|
200 | 202 | response = self.app.post(route_path('login'), |
|
201 | 203 | {'username': temp_user, |
|
202 | 204 | 'password': 'test123'}) |
|
203 | 205 | |
|
204 | 206 | assert response.status == '302 Found' |
|
205 | 207 | session = response.get_session_from_response() |
|
206 | 208 | username = session['rhodecode_user'].get('username') |
|
207 | 209 | assert username == temp_user |
|
208 | 210 | response = response.follow() |
|
209 | 211 | response.mustcontain('/%s' % HG_REPO) |
|
210 | 212 | |
|
211 | 213 | # new password should be bcrypted, after log-in and transfer |
|
212 | 214 | user = User.get_by_username(temp_user) |
|
213 | 215 | assert user.password.startswith('$') |
|
214 | 216 | |
|
215 | 217 | # REGISTRATIONS |
|
216 | 218 | def test_register(self): |
|
217 | 219 | response = self.app.get(route_path('register')) |
|
218 | 220 | response.mustcontain('Create an Account') |
|
219 | 221 | |
|
220 | 222 | def test_register_err_same_username(self): |
|
221 | 223 | uname = 'test_admin' |
|
222 | 224 | response = self.app.post( |
|
223 | 225 | route_path('register'), |
|
224 | 226 | { |
|
225 | 227 | 'username': uname, |
|
226 | 228 | 'password': 'test12', |
|
227 | 229 | 'password_confirmation': 'test12', |
|
228 | 230 | 'email': 'goodmail@domain.com', |
|
229 | 231 | 'firstname': 'test', |
|
230 | 232 | 'lastname': 'test' |
|
231 | 233 | } |
|
232 | 234 | ) |
|
233 | 235 | |
|
234 | 236 | assertr = response.assert_response() |
|
235 | 237 | msg = validators.ValidUsername()._messages['username_exists'] |
|
236 | 238 | msg = msg % {'username': uname} |
|
237 | 239 | assertr.element_contains('#username+.error-message', msg) |
|
238 | 240 | |
|
239 | 241 | def test_register_err_same_email(self): |
|
240 | 242 | response = self.app.post( |
|
241 | 243 | route_path('register'), |
|
242 | 244 | { |
|
243 | 245 | 'username': 'test_admin_0', |
|
244 | 246 | 'password': 'test12', |
|
245 | 247 | 'password_confirmation': 'test12', |
|
246 | 248 | 'email': 'test_admin@mail.com', |
|
247 | 249 | 'firstname': 'test', |
|
248 | 250 | 'lastname': 'test' |
|
249 | 251 | } |
|
250 | 252 | ) |
|
251 | 253 | |
|
252 | 254 | assertr = response.assert_response() |
|
253 | 255 | msg = validators.UniqSystemEmail()()._messages['email_taken'] |
|
254 | 256 | assertr.element_contains('#email+.error-message', msg) |
|
255 | 257 | |
|
256 | 258 | def test_register_err_same_email_case_sensitive(self): |
|
257 | 259 | response = self.app.post( |
|
258 | 260 | route_path('register'), |
|
259 | 261 | { |
|
260 | 262 | 'username': 'test_admin_1', |
|
261 | 263 | 'password': 'test12', |
|
262 | 264 | 'password_confirmation': 'test12', |
|
263 | 265 | 'email': 'TesT_Admin@mail.COM', |
|
264 | 266 | 'firstname': 'test', |
|
265 | 267 | 'lastname': 'test' |
|
266 | 268 | } |
|
267 | 269 | ) |
|
268 | 270 | assertr = response.assert_response() |
|
269 | 271 | msg = validators.UniqSystemEmail()()._messages['email_taken'] |
|
270 | 272 | assertr.element_contains('#email+.error-message', msg) |
|
271 | 273 | |
|
272 | 274 | def test_register_err_wrong_data(self): |
|
273 | 275 | response = self.app.post( |
|
274 | 276 | route_path('register'), |
|
275 | 277 | { |
|
276 | 278 | 'username': 'xs', |
|
277 | 279 | 'password': 'test', |
|
278 | 280 | 'password_confirmation': 'test', |
|
279 | 281 | 'email': 'goodmailm', |
|
280 | 282 | 'firstname': 'test', |
|
281 | 283 | 'lastname': 'test' |
|
282 | 284 | } |
|
283 | 285 | ) |
|
284 | 286 | assert response.status == '200 OK' |
|
285 | 287 | response.mustcontain('An email address must contain a single @') |
|
286 | 288 | response.mustcontain('Enter a value 6 characters long or more') |
|
287 | 289 | |
|
288 | 290 | def test_register_err_username(self): |
|
289 | 291 | response = self.app.post( |
|
290 | 292 | route_path('register'), |
|
291 | 293 | { |
|
292 | 294 | 'username': 'error user', |
|
293 | 295 | 'password': 'test12', |
|
294 | 296 | 'password_confirmation': 'test12', |
|
295 | 297 | 'email': 'goodmailm', |
|
296 | 298 | 'firstname': 'test', |
|
297 | 299 | 'lastname': 'test' |
|
298 | 300 | } |
|
299 | 301 | ) |
|
300 | 302 | |
|
301 | 303 | response.mustcontain('An email address must contain a single @') |
|
302 | 304 | response.mustcontain( |
|
303 | 305 | 'Username may only contain ' |
|
304 | 306 | 'alphanumeric characters underscores, ' |
|
305 | 307 | 'periods or dashes and must begin with ' |
|
306 | 308 | 'alphanumeric character') |
|
307 | 309 | |
|
308 | 310 | def test_register_err_case_sensitive(self): |
|
309 | 311 | usr = 'Test_Admin' |
|
310 | 312 | response = self.app.post( |
|
311 | 313 | route_path('register'), |
|
312 | 314 | { |
|
313 | 315 | 'username': usr, |
|
314 | 316 | 'password': 'test12', |
|
315 | 317 | 'password_confirmation': 'test12', |
|
316 | 318 | 'email': 'goodmailm', |
|
317 | 319 | 'firstname': 'test', |
|
318 | 320 | 'lastname': 'test' |
|
319 | 321 | } |
|
320 | 322 | ) |
|
321 | 323 | |
|
322 | 324 | assertr = response.assert_response() |
|
323 | 325 | msg = validators.ValidUsername()._messages['username_exists'] |
|
324 | 326 | msg = msg % {'username': usr} |
|
325 | 327 | assertr.element_contains('#username+.error-message', msg) |
|
326 | 328 | |
|
327 | 329 | def test_register_special_chars(self): |
|
328 | 330 | response = self.app.post( |
|
329 | 331 | route_path('register'), |
|
330 | 332 | { |
|
331 | 333 | 'username': 'xxxaxn', |
|
332 | 334 | 'password': 'ąćźżąśśśś', |
|
333 | 335 | 'password_confirmation': 'ąćźżąśśśś', |
|
334 | 336 | 'email': 'goodmailm@test.plx', |
|
335 | 337 | 'firstname': 'test', |
|
336 | 338 | 'lastname': 'test' |
|
337 | 339 | } |
|
338 | 340 | ) |
|
339 | 341 | |
|
340 | 342 | msg = validators.ValidPassword()._messages['invalid_password'] |
|
341 | 343 | response.mustcontain(msg) |
|
342 | 344 | |
|
343 | 345 | def test_register_password_mismatch(self): |
|
344 | 346 | response = self.app.post( |
|
345 | 347 | route_path('register'), |
|
346 | 348 | { |
|
347 | 349 | 'username': 'xs', |
|
348 | 350 | 'password': '123qwe', |
|
349 | 351 | 'password_confirmation': 'qwe123', |
|
350 | 352 | 'email': 'goodmailm@test.plxa', |
|
351 | 353 | 'firstname': 'test', |
|
352 | 354 | 'lastname': 'test' |
|
353 | 355 | } |
|
354 | 356 | ) |
|
355 | 357 | msg = validators.ValidPasswordsMatch()._messages['password_mismatch'] |
|
356 | 358 | response.mustcontain(msg) |
|
357 | 359 | |
|
358 | 360 | def test_register_ok(self): |
|
359 | 361 | username = 'test_regular4' |
|
360 | 362 | password = 'qweqwe' |
|
361 | 363 | email = 'marcin@test.com' |
|
362 | 364 | name = 'testname' |
|
363 | 365 | lastname = 'testlastname' |
|
364 | 366 | |
|
365 | 367 | response = self.app.post( |
|
366 | 368 | route_path('register'), |
|
367 | 369 | { |
|
368 | 370 | 'username': username, |
|
369 | 371 | 'password': password, |
|
370 | 372 | 'password_confirmation': password, |
|
371 | 373 | 'email': email, |
|
372 | 374 | 'firstname': name, |
|
373 | 375 | 'lastname': lastname, |
|
374 | 376 | 'admin': True |
|
375 | 377 | } |
|
376 | 378 | ) # This should be overridden |
|
377 | 379 | assert response.status == '302 Found' |
|
378 | 380 | assert_session_flash( |
|
379 | 381 | response, 'You have successfully registered with RhodeCode') |
|
380 | 382 | |
|
381 | 383 | ret = Session().query(User).filter( |
|
382 | 384 | User.username == 'test_regular4').one() |
|
383 | 385 | assert ret.username == username |
|
384 | 386 | assert check_password(password, ret.password) |
|
385 | 387 | assert ret.email == email |
|
386 | 388 | assert ret.name == name |
|
387 | 389 | assert ret.lastname == lastname |
|
388 | 390 | assert ret.auth_tokens is not None |
|
389 | 391 | assert not ret.admin |
|
390 | 392 | |
|
391 | 393 | def test_forgot_password_wrong_mail(self): |
|
392 | 394 | bad_email = 'marcin@wrongmail.org' |
|
393 | 395 | response = self.app.post( |
|
394 | 396 | route_path('reset_password'), {'email': bad_email, } |
|
395 | 397 | ) |
|
396 | 398 | assert_session_flash(response, |
|
397 | 399 | 'If such email exists, a password reset link was sent to it.') |
|
398 | 400 | |
|
399 | 401 | def test_forgot_password(self, user_util): |
|
400 | 402 | response = self.app.get(route_path('reset_password')) |
|
401 | 403 | assert response.status == '200 OK' |
|
402 | 404 | |
|
403 | 405 | user = user_util.create_user() |
|
404 | 406 | user_id = user.user_id |
|
405 | 407 | email = user.email |
|
406 | 408 | |
|
407 | 409 | response = self.app.post(route_path('reset_password'), {'email': email, }) |
|
408 | 410 | |
|
409 | 411 | assert_session_flash(response, |
|
410 | 412 | 'If such email exists, a password reset link was sent to it.') |
|
411 | 413 | |
|
412 | 414 | # BAD KEY |
|
413 | 415 | confirm_url = '{}?key={}'.format(route_path('reset_password_confirmation'), 'badkey') |
|
414 | 416 | response = self.app.get(confirm_url) |
|
415 | 417 | assert response.status == '302 Found' |
|
416 | 418 | assert response.location.endswith(route_path('reset_password')) |
|
417 | 419 | assert_session_flash(response, 'Given reset token is invalid') |
|
418 | 420 | |
|
419 | 421 | response.follow() # cleanup flash |
|
420 | 422 | |
|
421 | 423 | # GOOD KEY |
|
422 | 424 | key = UserApiKeys.query()\ |
|
423 | 425 | .filter(UserApiKeys.user_id == user_id)\ |
|
424 | 426 | .filter(UserApiKeys.role == UserApiKeys.ROLE_PASSWORD_RESET)\ |
|
425 | 427 | .first() |
|
426 | 428 | |
|
427 | 429 | assert key |
|
428 | 430 | |
|
429 | 431 | confirm_url = '{}?key={}'.format(route_path('reset_password_confirmation'), key.api_key) |
|
430 | 432 | response = self.app.get(confirm_url) |
|
431 | 433 | assert response.status == '302 Found' |
|
432 | 434 | assert response.location.endswith(route_path('login')) |
|
433 | 435 | |
|
434 | 436 | assert_session_flash( |
|
435 | 437 | response, |
|
436 | 438 | 'Your password reset was successful, ' |
|
437 | 439 | 'a new password has been sent to your email') |
|
438 | 440 | |
|
439 | 441 | response.follow() |
|
440 | 442 | |
|
441 | 443 | def _get_api_whitelist(self, values=None): |
|
442 | 444 | config = {'api_access_controllers_whitelist': values or []} |
|
443 | 445 | return config |
|
444 | 446 | |
|
445 | 447 | @pytest.mark.parametrize("test_name, auth_token", [ |
|
446 | 448 | ('none', None), |
|
447 | 449 | ('empty_string', ''), |
|
448 | 450 | ('fake_number', '123456'), |
|
449 | 451 | ('proper_auth_token', None) |
|
450 | 452 | ]) |
|
451 | 453 | def test_access_not_whitelisted_page_via_auth_token( |
|
452 | 454 | self, test_name, auth_token, user_admin): |
|
453 | 455 | |
|
454 | 456 | whitelist = self._get_api_whitelist([]) |
|
455 | 457 | with mock.patch.dict('rhodecode.CONFIG', whitelist): |
|
456 | 458 | assert [] == whitelist['api_access_controllers_whitelist'] |
|
457 | 459 | if test_name == 'proper_auth_token': |
|
458 | 460 | # use builtin if api_key is None |
|
459 | 461 | auth_token = user_admin.api_key |
|
460 | 462 | |
|
461 | 463 | with fixture.anon_access(False): |
|
462 | 464 | self.app.get( |
|
463 | 465 | route_path('repo_commit_raw', |
|
464 | 466 | repo_name=HG_REPO, commit_id='tip', |
|
465 | 467 | params=dict(api_key=auth_token)), |
|
466 | 468 | status=302) |
|
467 | 469 | |
|
468 | 470 | @pytest.mark.parametrize("test_name, auth_token, code", [ |
|
469 | 471 | ('none', None, 302), |
|
470 | 472 | ('empty_string', '', 302), |
|
471 | 473 | ('fake_number', '123456', 302), |
|
472 | 474 | ('proper_auth_token', None, 200) |
|
473 | 475 | ]) |
|
474 | 476 | def test_access_whitelisted_page_via_auth_token( |
|
475 | 477 | self, test_name, auth_token, code, user_admin): |
|
476 | 478 | |
|
477 | whitelist_entry = ['ChangesetController:changeset_raw'] | |
|
478 | whitelist = self._get_api_whitelist(whitelist_entry) | |
|
479 | whitelist = self._get_api_whitelist(whitelist_view) | |
|
479 | 480 | |
|
480 | 481 | with mock.patch.dict('rhodecode.CONFIG', whitelist): |
|
481 | assert whitelist_entry == whitelist['api_access_controllers_whitelist'] | |
|
482 | assert whitelist_view == whitelist['api_access_controllers_whitelist'] | |
|
482 | 483 | |
|
483 | 484 | if test_name == 'proper_auth_token': |
|
484 | 485 | auth_token = user_admin.api_key |
|
485 | 486 | assert auth_token |
|
486 | 487 | |
|
487 | 488 | with fixture.anon_access(False): |
|
488 | 489 | self.app.get( |
|
489 | 490 | route_path('repo_commit_raw', |
|
490 | 491 | repo_name=HG_REPO, commit_id='tip', |
|
491 | 492 | params=dict(api_key=auth_token)), |
|
492 | 493 | status=code) |
|
493 | 494 | |
|
494 | 495 | def test_access_page_via_extra_auth_token(self): |
|
495 | whitelist = self._get_api_whitelist( | |
|
496 | ['ChangesetController:changeset_raw']) | |
|
496 | whitelist = self._get_api_whitelist(whitelist_view) | |
|
497 | 497 | with mock.patch.dict('rhodecode.CONFIG', whitelist): |
|
498 | assert ['ChangesetController:changeset_raw'] == \ | |
|
498 | assert whitelist_view == \ | |
|
499 | 499 | whitelist['api_access_controllers_whitelist'] |
|
500 | 500 | |
|
501 | 501 | new_auth_token = AuthTokenModel().create( |
|
502 | 502 | TEST_USER_ADMIN_LOGIN, 'test') |
|
503 | 503 | Session().commit() |
|
504 | 504 | with fixture.anon_access(False): |
|
505 | 505 | self.app.get( |
|
506 | 506 | route_path('repo_commit_raw', |
|
507 | 507 | repo_name=HG_REPO, commit_id='tip', |
|
508 | 508 | params=dict(api_key=new_auth_token.api_key)), |
|
509 | 509 | status=200) |
|
510 | 510 | |
|
511 | 511 | def test_access_page_via_expired_auth_token(self): |
|
512 | whitelist = self._get_api_whitelist( | |
|
513 | ['ChangesetController:changeset_raw']) | |
|
512 | whitelist = self._get_api_whitelist(whitelist_view) | |
|
514 | 513 | with mock.patch.dict('rhodecode.CONFIG', whitelist): |
|
515 | assert ['ChangesetController:changeset_raw'] == \ | |
|
514 | assert whitelist_view == \ | |
|
516 | 515 | whitelist['api_access_controllers_whitelist'] |
|
517 | 516 | |
|
518 | 517 | new_auth_token = AuthTokenModel().create( |
|
519 | 518 | TEST_USER_ADMIN_LOGIN, 'test') |
|
520 | 519 | Session().commit() |
|
521 | 520 | # patch the api key and make it expired |
|
522 | 521 | new_auth_token.expires = 0 |
|
523 | 522 | Session().add(new_auth_token) |
|
524 | 523 | Session().commit() |
|
525 | 524 | with fixture.anon_access(False): |
|
526 | 525 | self.app.get( |
|
527 | 526 | route_path('repo_commit_raw', |
|
528 | 527 | repo_name=HG_REPO, commit_id='tip', |
|
529 | 528 | params=dict(api_key=new_auth_token.api_key)), |
|
530 | 529 | status=302) |
@@ -1,277 +1,304 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | from rhodecode.apps._base import add_route_with_slash |
|
21 | 21 | |
|
22 | 22 | |
|
23 | 23 | def includeme(config): |
|
24 | 24 | |
|
25 | 25 | # Summary |
|
26 | 26 | # NOTE(marcink): one additional route is defined in very bottom, catch |
|
27 | 27 | # all pattern |
|
28 | 28 | config.add_route( |
|
29 | 29 | name='repo_summary_explicit', |
|
30 | 30 | pattern='/{repo_name:.*?[^/]}/summary', repo_route=True) |
|
31 | 31 | config.add_route( |
|
32 | 32 | name='repo_summary_commits', |
|
33 | 33 | pattern='/{repo_name:.*?[^/]}/summary-commits', repo_route=True) |
|
34 | 34 | |
|
35 | 35 | # repo commits |
|
36 | ||
|
36 | 37 | config.add_route( |
|
37 | 38 | name='repo_commit', |
|
38 | 39 | pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}', repo_route=True) |
|
39 | 40 | |
|
41 | config.add_route( | |
|
42 | name='repo_commit_children', | |
|
43 | pattern='/{repo_name:.*?[^/]}/changeset_children/{commit_id}', repo_route=True) | |
|
44 | ||
|
45 | config.add_route( | |
|
46 | name='repo_commit_parents', | |
|
47 | pattern='/{repo_name:.*?[^/]}/changeset_parents/{commit_id}', repo_route=True) | |
|
48 | ||
|
49 | # still working url for backward compat. | |
|
50 | config.add_route( | |
|
51 | name='repo_commit_raw_deprecated', | |
|
52 | pattern='/{repo_name:.*?[^/]}/raw-changeset/{commit_id}', repo_route=True) | |
|
53 | ||
|
54 | config.add_route( | |
|
55 | name='repo_commit_raw', | |
|
56 | pattern='/{repo_name:.*?[^/]}/changeset-diff/{commit_id}', repo_route=True) | |
|
57 | ||
|
58 | config.add_route( | |
|
59 | name='repo_commit_patch', | |
|
60 | pattern='/{repo_name:.*?[^/]}/changeset-patch/{commit_id}', repo_route=True) | |
|
61 | ||
|
62 | config.add_route( | |
|
63 | name='repo_commit_download', | |
|
64 | pattern='/{repo_name:.*?[^/]}/changeset-download/{commit_id}', repo_route=True) | |
|
65 | ||
|
66 | config.add_route( | |
|
67 | name='repo_commit_data', | |
|
68 | pattern='/{repo_name:.*?[^/]}/changeset-data/{commit_id}', repo_route=True) | |
|
69 | ||
|
70 | config.add_route( | |
|
71 | name='repo_commit_comment_create', | |
|
72 | pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/create', repo_route=True) | |
|
73 | ||
|
74 | config.add_route( | |
|
75 | name='repo_commit_comment_preview', | |
|
76 | pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/preview', repo_route=True) | |
|
77 | ||
|
78 | config.add_route( | |
|
79 | name='repo_commit_comment_delete', | |
|
80 | pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/{comment_id}/delete', repo_route=True) | |
|
81 | ||
|
40 | 82 | # repo files |
|
41 | 83 | config.add_route( |
|
42 | 84 | name='repo_archivefile', |
|
43 | 85 | pattern='/{repo_name:.*?[^/]}/archive/{fname}', repo_route=True) |
|
44 | 86 | |
|
45 | 87 | config.add_route( |
|
46 | 88 | name='repo_files_diff', |
|
47 | 89 | pattern='/{repo_name:.*?[^/]}/diff/{f_path:.*}', repo_route=True) |
|
48 | 90 | config.add_route( # legacy route to make old links work |
|
49 | 91 | name='repo_files_diff_2way_redirect', |
|
50 | 92 | pattern='/{repo_name:.*?[^/]}/diff-2way/{f_path:.*}', repo_route=True) |
|
51 | 93 | |
|
52 | 94 | config.add_route( |
|
53 | 95 | name='repo_files', |
|
54 | 96 | pattern='/{repo_name:.*?[^/]}/files/{commit_id}/{f_path:.*}', repo_route=True) |
|
55 | 97 | config.add_route( |
|
56 | 98 | name='repo_files:default_path', |
|
57 | 99 | pattern='/{repo_name:.*?[^/]}/files/{commit_id}/', repo_route=True) |
|
58 | 100 | config.add_route( |
|
59 | 101 | name='repo_files:default_commit', |
|
60 | 102 | pattern='/{repo_name:.*?[^/]}/files', repo_route=True) |
|
61 | 103 | |
|
62 | 104 | config.add_route( |
|
63 | 105 | name='repo_files:rendered', |
|
64 | 106 | pattern='/{repo_name:.*?[^/]}/render/{commit_id}/{f_path:.*}', repo_route=True) |
|
65 | 107 | |
|
66 | 108 | config.add_route( |
|
67 | 109 | name='repo_files:annotated', |
|
68 | 110 | pattern='/{repo_name:.*?[^/]}/annotate/{commit_id}/{f_path:.*}', repo_route=True) |
|
69 | 111 | config.add_route( |
|
70 | 112 | name='repo_files:annotated_previous', |
|
71 | 113 | pattern='/{repo_name:.*?[^/]}/annotate-previous/{commit_id}/{f_path:.*}', repo_route=True) |
|
72 | 114 | |
|
73 | 115 | config.add_route( |
|
74 | 116 | name='repo_nodetree_full', |
|
75 | 117 | pattern='/{repo_name:.*?[^/]}/nodetree_full/{commit_id}/{f_path:.*}', repo_route=True) |
|
76 | 118 | config.add_route( |
|
77 | 119 | name='repo_nodetree_full:default_path', |
|
78 | 120 | pattern='/{repo_name:.*?[^/]}/nodetree_full/{commit_id}/', repo_route=True) |
|
79 | 121 | |
|
80 | 122 | config.add_route( |
|
81 | 123 | name='repo_files_nodelist', |
|
82 | 124 | pattern='/{repo_name:.*?[^/]}/nodelist/{commit_id}/{f_path:.*}', repo_route=True) |
|
83 | 125 | |
|
84 | 126 | config.add_route( |
|
85 | 127 | name='repo_file_raw', |
|
86 | 128 | pattern='/{repo_name:.*?[^/]}/raw/{commit_id}/{f_path:.*}', repo_route=True) |
|
87 | 129 | |
|
88 | 130 | config.add_route( |
|
89 | 131 | name='repo_file_download', |
|
90 | 132 | pattern='/{repo_name:.*?[^/]}/download/{commit_id}/{f_path:.*}', repo_route=True) |
|
91 | 133 | config.add_route( # backward compat to keep old links working |
|
92 | 134 | name='repo_file_download:legacy', |
|
93 | 135 | pattern='/{repo_name:.*?[^/]}/rawfile/{commit_id}/{f_path:.*}', |
|
94 | 136 | repo_route=True) |
|
95 | 137 | |
|
96 | 138 | config.add_route( |
|
97 | 139 | name='repo_file_history', |
|
98 | 140 | pattern='/{repo_name:.*?[^/]}/history/{commit_id}/{f_path:.*}', repo_route=True) |
|
99 | 141 | |
|
100 | 142 | config.add_route( |
|
101 | 143 | name='repo_file_authors', |
|
102 | 144 | pattern='/{repo_name:.*?[^/]}/authors/{commit_id}/{f_path:.*}', repo_route=True) |
|
103 | 145 | |
|
104 | 146 | config.add_route( |
|
105 | 147 | name='repo_files_remove_file', |
|
106 | 148 | pattern='/{repo_name:.*?[^/]}/remove_file/{commit_id}/{f_path:.*}', |
|
107 | 149 | repo_route=True) |
|
108 | 150 | config.add_route( |
|
109 | 151 | name='repo_files_delete_file', |
|
110 | 152 | pattern='/{repo_name:.*?[^/]}/delete_file/{commit_id}/{f_path:.*}', |
|
111 | 153 | repo_route=True) |
|
112 | 154 | config.add_route( |
|
113 | 155 | name='repo_files_edit_file', |
|
114 | 156 | pattern='/{repo_name:.*?[^/]}/edit_file/{commit_id}/{f_path:.*}', |
|
115 | 157 | repo_route=True) |
|
116 | 158 | config.add_route( |
|
117 | 159 | name='repo_files_update_file', |
|
118 | 160 | pattern='/{repo_name:.*?[^/]}/update_file/{commit_id}/{f_path:.*}', |
|
119 | 161 | repo_route=True) |
|
120 | 162 | config.add_route( |
|
121 | 163 | name='repo_files_add_file', |
|
122 | 164 | pattern='/{repo_name:.*?[^/]}/add_file/{commit_id}/{f_path:.*}', |
|
123 | 165 | repo_route=True) |
|
124 | 166 | config.add_route( |
|
125 | 167 | name='repo_files_create_file', |
|
126 | 168 | pattern='/{repo_name:.*?[^/]}/create_file/{commit_id}/{f_path:.*}', |
|
127 | 169 | repo_route=True) |
|
128 | 170 | |
|
129 | 171 | # refs data |
|
130 | 172 | config.add_route( |
|
131 | 173 | name='repo_refs_data', |
|
132 | 174 | pattern='/{repo_name:.*?[^/]}/refs-data', repo_route=True) |
|
133 | 175 | |
|
134 | 176 | config.add_route( |
|
135 | 177 | name='repo_refs_changelog_data', |
|
136 | 178 | pattern='/{repo_name:.*?[^/]}/refs-data-changelog', repo_route=True) |
|
137 | 179 | |
|
138 | 180 | config.add_route( |
|
139 | 181 | name='repo_stats', |
|
140 | 182 | pattern='/{repo_name:.*?[^/]}/repo_stats/{commit_id}', repo_route=True) |
|
141 | 183 | |
|
142 | 184 | # Changelog |
|
143 | 185 | config.add_route( |
|
144 | 186 | name='repo_changelog', |
|
145 | 187 | pattern='/{repo_name:.*?[^/]}/changelog', repo_route=True) |
|
146 | 188 | config.add_route( |
|
147 | 189 | name='repo_changelog_file', |
|
148 | 190 | pattern='/{repo_name:.*?[^/]}/changelog/{commit_id}/{f_path:.*}', repo_route=True) |
|
149 | 191 | config.add_route( |
|
150 | 192 | name='repo_changelog_elements', |
|
151 | 193 | pattern='/{repo_name:.*?[^/]}/changelog_elements', repo_route=True) |
|
152 | 194 | |
|
153 | 195 | # Tags |
|
154 | 196 | config.add_route( |
|
155 | 197 | name='tags_home', |
|
156 | 198 | pattern='/{repo_name:.*?[^/]}/tags', repo_route=True) |
|
157 | 199 | |
|
158 | 200 | # Branches |
|
159 | 201 | config.add_route( |
|
160 | 202 | name='branches_home', |
|
161 | 203 | pattern='/{repo_name:.*?[^/]}/branches', repo_route=True) |
|
162 | 204 | |
|
163 | 205 | config.add_route( |
|
164 | 206 | name='bookmarks_home', |
|
165 | 207 | pattern='/{repo_name:.*?[^/]}/bookmarks', repo_route=True) |
|
166 | 208 | |
|
167 | 209 | # Pull Requests |
|
168 | 210 | config.add_route( |
|
169 | 211 | name='pullrequest_show', |
|
170 | 212 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id}', |
|
171 | 213 | repo_route=True) |
|
172 | 214 | |
|
173 | 215 | config.add_route( |
|
174 | 216 | name='pullrequest_show_all', |
|
175 | 217 | pattern='/{repo_name:.*?[^/]}/pull-request', |
|
176 | 218 | repo_route=True, repo_accepted_types=['hg', 'git']) |
|
177 | 219 | |
|
178 | 220 | config.add_route( |
|
179 | 221 | name='pullrequest_show_all_data', |
|
180 | 222 | pattern='/{repo_name:.*?[^/]}/pull-request-data', |
|
181 | 223 | repo_route=True, repo_accepted_types=['hg', 'git']) |
|
182 | 224 | |
|
183 | # commits aka changesets | |
|
184 | # TODO(dan): handle default landing revision ? | |
|
185 | config.add_route( | |
|
186 | name='changeset_home', | |
|
187 | pattern='/{repo_name:.*?[^/]}/changeset/{revision}', | |
|
188 | repo_route=True) | |
|
189 | config.add_route( | |
|
190 | name='changeset_children', | |
|
191 | pattern='/{repo_name:.*?[^/]}/changeset_children/{revision}', | |
|
192 | repo_route=True) | |
|
193 | config.add_route( | |
|
194 | name='changeset_parents', | |
|
195 | pattern='/{repo_name:.*?[^/]}/changeset_parents/{revision}', | |
|
196 | repo_route=True) | |
|
197 | ||
|
198 | 225 | # Settings |
|
199 | 226 | config.add_route( |
|
200 | 227 | name='edit_repo', |
|
201 | 228 | pattern='/{repo_name:.*?[^/]}/settings', repo_route=True) |
|
202 | 229 | |
|
203 | 230 | # Settings advanced |
|
204 | 231 | config.add_route( |
|
205 | 232 | name='edit_repo_advanced', |
|
206 | 233 | pattern='/{repo_name:.*?[^/]}/settings/advanced', repo_route=True) |
|
207 | 234 | config.add_route( |
|
208 | 235 | name='edit_repo_advanced_delete', |
|
209 | 236 | pattern='/{repo_name:.*?[^/]}/settings/advanced/delete', repo_route=True) |
|
210 | 237 | config.add_route( |
|
211 | 238 | name='edit_repo_advanced_locking', |
|
212 | 239 | pattern='/{repo_name:.*?[^/]}/settings/advanced/locking', repo_route=True) |
|
213 | 240 | config.add_route( |
|
214 | 241 | name='edit_repo_advanced_journal', |
|
215 | 242 | pattern='/{repo_name:.*?[^/]}/settings/advanced/journal', repo_route=True) |
|
216 | 243 | config.add_route( |
|
217 | 244 | name='edit_repo_advanced_fork', |
|
218 | 245 | pattern='/{repo_name:.*?[^/]}/settings/advanced/fork', repo_route=True) |
|
219 | 246 | |
|
220 | 247 | # Caches |
|
221 | 248 | config.add_route( |
|
222 | 249 | name='edit_repo_caches', |
|
223 | 250 | pattern='/{repo_name:.*?[^/]}/settings/caches', repo_route=True) |
|
224 | 251 | |
|
225 | 252 | # Permissions |
|
226 | 253 | config.add_route( |
|
227 | 254 | name='edit_repo_perms', |
|
228 | 255 | pattern='/{repo_name:.*?[^/]}/settings/permissions', repo_route=True) |
|
229 | 256 | |
|
230 | 257 | # Repo Review Rules |
|
231 | 258 | config.add_route( |
|
232 | 259 | name='repo_reviewers', |
|
233 | 260 | pattern='/{repo_name:.*?[^/]}/settings/review/rules', repo_route=True) |
|
234 | 261 | |
|
235 | 262 | config.add_route( |
|
236 | 263 | name='repo_default_reviewers_data', |
|
237 | 264 | pattern='/{repo_name:.*?[^/]}/settings/review/default-reviewers', repo_route=True) |
|
238 | 265 | |
|
239 | 266 | # Maintenance |
|
240 | 267 | config.add_route( |
|
241 | 268 | name='repo_maintenance', |
|
242 | 269 | pattern='/{repo_name:.*?[^/]}/settings/maintenance', repo_route=True) |
|
243 | 270 | |
|
244 | 271 | config.add_route( |
|
245 | 272 | name='repo_maintenance_execute', |
|
246 | 273 | pattern='/{repo_name:.*?[^/]}/settings/maintenance/execute', repo_route=True) |
|
247 | 274 | |
|
248 | 275 | # Strip |
|
249 | 276 | config.add_route( |
|
250 | 277 | name='strip', |
|
251 | 278 | pattern='/{repo_name:.*?[^/]}/settings/strip', repo_route=True) |
|
252 | 279 | |
|
253 | 280 | config.add_route( |
|
254 | 281 | name='strip_check', |
|
255 | 282 | pattern='/{repo_name:.*?[^/]}/settings/strip_check', repo_route=True) |
|
256 | 283 | |
|
257 | 284 | config.add_route( |
|
258 | 285 | name='strip_execute', |
|
259 | 286 | pattern='/{repo_name:.*?[^/]}/settings/strip_execute', repo_route=True) |
|
260 | 287 | |
|
261 | 288 | # ATOM/RSS Feed |
|
262 | 289 | config.add_route( |
|
263 | 290 | name='rss_feed_home', |
|
264 | 291 | pattern='/{repo_name:.*?[^/]}/feed/rss', repo_route=True) |
|
265 | 292 | |
|
266 | 293 | config.add_route( |
|
267 | 294 | name='atom_feed_home', |
|
268 | 295 | pattern='/{repo_name:.*?[^/]}/feed/atom', repo_route=True) |
|
269 | 296 | |
|
270 | 297 | # NOTE(marcink): needs to be at the end for catch-all |
|
271 | 298 | add_route_with_slash( |
|
272 | 299 | config, |
|
273 | 300 | name='repo_summary', |
|
274 | 301 | pattern='/{repo_name:.*?[^/]}', repo_route=True) |
|
275 | 302 | |
|
276 | 303 | # Scan module for configuration decorators. |
|
277 | 304 | config.scan() |
@@ -1,288 +1,313 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | from pylons.i18n import ungettext | |
|
22 | 21 | import pytest |
|
23 | 22 | |
|
24 | from rhodecode.tests import url, TestController | |
|
23 | from rhodecode.tests import TestController | |
|
24 | ||
|
25 | 25 | from rhodecode.model.db import ( |
|
26 | 26 | ChangesetComment, Notification, UserNotification) |
|
27 | 27 | from rhodecode.model.meta import Session |
|
28 | 28 | from rhodecode.lib import helpers as h |
|
29 | 29 | |
|
30 | 30 | |
|
31 | def route_path(name, params=None, **kwargs): | |
|
32 | import urllib | |
|
33 | ||
|
34 | base_url = { | |
|
35 | 'repo_commit': '/{repo_name}/changeset/{commit_id}', | |
|
36 | 'repo_commit_comment_create': '/{repo_name}/changeset/{commit_id}/comment/create', | |
|
37 | 'repo_commit_comment_preview': '/{repo_name}/changeset/{commit_id}/comment/preview', | |
|
38 | 'repo_commit_comment_delete': '/{repo_name}/changeset/{commit_id}/comment/{comment_id}/delete', | |
|
39 | }[name].format(**kwargs) | |
|
40 | ||
|
41 | if params: | |
|
42 | base_url = '{}?{}'.format(base_url, urllib.urlencode(params)) | |
|
43 | return base_url | |
|
44 | ||
|
45 | ||
|
31 | 46 | @pytest.mark.backends("git", "hg", "svn") |
|
32 | class TestCommitCommentsController(TestController): | |
|
47 | class TestRepoCommitCommentsView(TestController): | |
|
33 | 48 | |
|
34 | 49 | @pytest.fixture(autouse=True) |
|
35 | 50 | def prepare(self, request, pylonsapp): |
|
36 | 51 | for x in ChangesetComment.query().all(): |
|
37 | 52 | Session().delete(x) |
|
38 | 53 | Session().commit() |
|
39 | 54 | |
|
40 | 55 | for x in Notification.query().all(): |
|
41 | 56 | Session().delete(x) |
|
42 | 57 | Session().commit() |
|
43 | 58 | |
|
44 | 59 | request.addfinalizer(self.cleanup) |
|
45 | 60 | |
|
46 | 61 | def cleanup(self): |
|
47 | 62 | for x in ChangesetComment.query().all(): |
|
48 | 63 | Session().delete(x) |
|
49 | 64 | Session().commit() |
|
50 | 65 | |
|
51 | 66 | for x in Notification.query().all(): |
|
52 | 67 | Session().delete(x) |
|
53 | 68 | Session().commit() |
|
54 | 69 | |
|
55 | 70 | @pytest.mark.parametrize('comment_type', ChangesetComment.COMMENT_TYPES) |
|
56 | 71 | def test_create(self, comment_type, backend): |
|
57 | 72 | self.log_user() |
|
58 | 73 | commit = backend.repo.get_commit('300') |
|
59 | 74 | commit_id = commit.raw_id |
|
60 | 75 | text = u'CommentOnCommit' |
|
61 | 76 | |
|
62 | 77 | params = {'text': text, 'csrf_token': self.csrf_token, |
|
63 | 78 | 'comment_type': comment_type} |
|
64 | 79 | self.app.post( |
|
65 | url(controller='changeset', action='comment', | |
|
66 | repo_name=backend.repo_name, revision=commit_id), params=params) | |
|
80 | route_path('repo_commit_comment_create', | |
|
81 | repo_name=backend.repo_name, commit_id=commit_id), | |
|
82 | params=params) | |
|
67 | 83 | |
|
68 | 84 | response = self.app.get( |
|
69 | url(controller='changeset', action='index', | |
|
70 | repo_name=backend.repo_name, revision=commit_id)) | |
|
85 | route_path('repo_commit', | |
|
86 | repo_name=backend.repo_name, commit_id=commit_id)) | |
|
71 | 87 | |
|
72 | 88 | # test DB |
|
73 | 89 | assert ChangesetComment.query().count() == 1 |
|
74 | 90 | assert_comment_links(response, ChangesetComment.query().count(), 0) |
|
75 | 91 | |
|
76 | 92 | assert Notification.query().count() == 1 |
|
77 | 93 | assert ChangesetComment.query().count() == 1 |
|
78 | 94 | |
|
79 | 95 | notification = Notification.query().all()[0] |
|
80 | 96 | |
|
81 | 97 | comment_id = ChangesetComment.query().first().comment_id |
|
82 | 98 | assert notification.type_ == Notification.TYPE_CHANGESET_COMMENT |
|
83 | 99 | |
|
84 | 100 | sbj = 'left {0} on commit `{1}` in the {2} repository'.format( |
|
85 | 101 | comment_type, h.show_id(commit), backend.repo_name) |
|
86 | 102 | assert sbj in notification.subject |
|
87 | 103 | |
|
88 | 104 | lnk = (u'/{0}/changeset/{1}#comment-{2}'.format( |
|
89 | 105 | backend.repo_name, commit_id, comment_id)) |
|
90 | 106 | assert lnk in notification.body |
|
91 | 107 | |
|
92 | 108 | @pytest.mark.parametrize('comment_type', ChangesetComment.COMMENT_TYPES) |
|
93 | 109 | def test_create_inline(self, comment_type, backend): |
|
94 | 110 | self.log_user() |
|
95 | 111 | commit = backend.repo.get_commit('300') |
|
96 | 112 | commit_id = commit.raw_id |
|
97 | 113 | text = u'CommentOnCommit' |
|
98 | 114 | f_path = 'vcs/web/simplevcs/views/repository.py' |
|
99 | 115 | line = 'n1' |
|
100 | 116 | |
|
101 | 117 | params = {'text': text, 'f_path': f_path, 'line': line, |
|
102 | 118 | 'comment_type': comment_type, |
|
103 | 119 | 'csrf_token': self.csrf_token} |
|
104 | 120 | |
|
105 | 121 | self.app.post( |
|
106 | url(controller='changeset', action='comment', | |
|
107 | repo_name=backend.repo_name, revision=commit_id), params=params) | |
|
122 | route_path('repo_commit_comment_create', | |
|
123 | repo_name=backend.repo_name, commit_id=commit_id), | |
|
124 | params=params) | |
|
108 | 125 | |
|
109 | 126 | response = self.app.get( |
|
110 | url(controller='changeset', action='index', | |
|
111 | repo_name=backend.repo_name, revision=commit_id)) | |
|
127 | route_path('repo_commit', | |
|
128 | repo_name=backend.repo_name, commit_id=commit_id)) | |
|
112 | 129 | |
|
113 | 130 | # test DB |
|
114 | 131 | assert ChangesetComment.query().count() == 1 |
|
115 | 132 | assert_comment_links(response, 0, ChangesetComment.query().count()) |
|
116 | 133 | |
|
117 | 134 | if backend.alias == 'svn': |
|
118 | 135 | response.mustcontain( |
|
119 | 136 | '''data-f-path="vcs/commands/summary.py" ''' |
|
120 | 137 | '''id="a_c--ad05457a43f8"''' |
|
121 | 138 | ) |
|
122 | 139 | else: |
|
123 | 140 | response.mustcontain( |
|
124 | 141 | '''data-f-path="vcs/backends/hg.py" ''' |
|
125 | 142 | '''id="a_c--9c390eb52cd6"''' |
|
126 | 143 | ) |
|
127 | 144 | |
|
128 | 145 | assert Notification.query().count() == 1 |
|
129 | 146 | assert ChangesetComment.query().count() == 1 |
|
130 | 147 | |
|
131 | 148 | notification = Notification.query().all()[0] |
|
132 | 149 | comment = ChangesetComment.query().first() |
|
133 | 150 | assert notification.type_ == Notification.TYPE_CHANGESET_COMMENT |
|
134 | 151 | |
|
135 | 152 | assert comment.revision == commit_id |
|
136 | 153 | sbj = 'left {comment_type} on commit `{commit}` ' \ |
|
137 | 154 | '(file: `{f_path}`) in the {repo} repository'.format( |
|
138 | 155 | commit=h.show_id(commit), |
|
139 | 156 | f_path=f_path, line=line, repo=backend.repo_name, |
|
140 | 157 | comment_type=comment_type) |
|
141 | 158 | assert sbj in notification.subject |
|
142 | 159 | |
|
143 | 160 | lnk = (u'/{0}/changeset/{1}#comment-{2}'.format( |
|
144 | 161 | backend.repo_name, commit_id, comment.comment_id)) |
|
145 | 162 | assert lnk in notification.body |
|
146 | 163 | assert 'on line n1' in notification.body |
|
147 | 164 | |
|
148 | 165 | def test_create_with_mention(self, backend): |
|
149 | 166 | self.log_user() |
|
150 | 167 | |
|
151 | 168 | commit_id = backend.repo.get_commit('300').raw_id |
|
152 | 169 | text = u'@test_regular check CommentOnCommit' |
|
153 | 170 | |
|
154 | 171 | params = {'text': text, 'csrf_token': self.csrf_token} |
|
155 | 172 | self.app.post( |
|
156 | url(controller='changeset', action='comment', | |
|
157 | repo_name=backend.repo_name, revision=commit_id), params=params) | |
|
173 | route_path('repo_commit_comment_create', | |
|
174 | repo_name=backend.repo_name, commit_id=commit_id), | |
|
175 | params=params) | |
|
158 | 176 | |
|
159 | 177 | response = self.app.get( |
|
160 | url(controller='changeset', action='index', | |
|
161 | repo_name=backend.repo_name, revision=commit_id)) | |
|
178 | route_path('repo_commit', | |
|
179 | repo_name=backend.repo_name, commit_id=commit_id)) | |
|
162 | 180 | # test DB |
|
163 | 181 | assert ChangesetComment.query().count() == 1 |
|
164 | 182 | assert_comment_links(response, ChangesetComment.query().count(), 0) |
|
165 | 183 | |
|
166 | 184 | notification = Notification.query().one() |
|
167 | 185 | |
|
168 | 186 | assert len(notification.recipients) == 2 |
|
169 | 187 | users = [x.username for x in notification.recipients] |
|
170 | 188 | |
|
171 | 189 | # test_regular gets notification by @mention |
|
172 | 190 | assert sorted(users) == [u'test_admin', u'test_regular'] |
|
173 | 191 | |
|
174 | 192 | def test_create_with_status_change(self, backend): |
|
175 | 193 | self.log_user() |
|
176 | 194 | commit = backend.repo.get_commit('300') |
|
177 | 195 | commit_id = commit.raw_id |
|
178 | 196 | text = u'CommentOnCommit' |
|
179 | 197 | f_path = 'vcs/web/simplevcs/views/repository.py' |
|
180 | 198 | line = 'n1' |
|
181 | 199 | |
|
182 | 200 | params = {'text': text, 'changeset_status': 'approved', |
|
183 | 201 | 'csrf_token': self.csrf_token} |
|
184 | 202 | |
|
185 | 203 | self.app.post( |
|
186 | url(controller='changeset', action='comment', | |
|
187 | repo_name=backend.repo_name, revision=commit_id), params=params) | |
|
204 | route_path( | |
|
205 | 'repo_commit_comment_create', | |
|
206 | repo_name=backend.repo_name, commit_id=commit_id), | |
|
207 | params=params) | |
|
188 | 208 | |
|
189 | 209 | response = self.app.get( |
|
190 | url(controller='changeset', action='index', | |
|
191 | repo_name=backend.repo_name, revision=commit_id)) | |
|
210 | route_path('repo_commit', | |
|
211 | repo_name=backend.repo_name, commit_id=commit_id)) | |
|
192 | 212 | |
|
193 | 213 | # test DB |
|
194 | 214 | assert ChangesetComment.query().count() == 1 |
|
195 | 215 | assert_comment_links(response, ChangesetComment.query().count(), 0) |
|
196 | 216 | |
|
197 | 217 | assert Notification.query().count() == 1 |
|
198 | 218 | assert ChangesetComment.query().count() == 1 |
|
199 | 219 | |
|
200 | 220 | notification = Notification.query().all()[0] |
|
201 | 221 | |
|
202 | 222 | comment_id = ChangesetComment.query().first().comment_id |
|
203 | 223 | assert notification.type_ == Notification.TYPE_CHANGESET_COMMENT |
|
204 | 224 | |
|
205 | 225 | sbj = 'left note on commit `{0}` (status: Approved) ' \ |
|
206 | 226 | 'in the {1} repository'.format( |
|
207 | 227 | h.show_id(commit), backend.repo_name) |
|
208 | 228 | assert sbj in notification.subject |
|
209 | 229 | |
|
210 | 230 | lnk = (u'/{0}/changeset/{1}#comment-{2}'.format( |
|
211 | 231 | backend.repo_name, commit_id, comment_id)) |
|
212 | 232 | assert lnk in notification.body |
|
213 | 233 | |
|
214 | 234 | def test_delete(self, backend): |
|
215 | 235 | self.log_user() |
|
216 | 236 | commit_id = backend.repo.get_commit('300').raw_id |
|
217 | 237 | text = u'CommentOnCommit' |
|
218 | 238 | |
|
219 | 239 | params = {'text': text, 'csrf_token': self.csrf_token} |
|
220 | 240 | self.app.post( |
|
221 | url( | |
|
222 | controller='changeset', action='comment', | |
|
223 | repo_name=backend.repo_name, revision=commit_id), | |
|
241 | route_path( | |
|
242 | 'repo_commit_comment_create', | |
|
243 | repo_name=backend.repo_name, commit_id=commit_id), | |
|
224 | 244 | params=params) |
|
225 | 245 | |
|
226 | 246 | comments = ChangesetComment.query().all() |
|
227 | 247 | assert len(comments) == 1 |
|
228 | 248 | comment_id = comments[0].comment_id |
|
229 | 249 | |
|
230 | 250 | self.app.post( |
|
231 | url(controller='changeset', action='delete_comment', | |
|
232 | repo_name=backend.repo_name, comment_id=comment_id), | |
|
233 | params={'_method': 'delete', 'csrf_token': self.csrf_token}) | |
|
251 | route_path('repo_commit_comment_delete', | |
|
252 | repo_name=backend.repo_name, | |
|
253 | commit_id=commit_id, | |
|
254 | comment_id=comment_id), | |
|
255 | params={'csrf_token': self.csrf_token}) | |
|
234 | 256 | |
|
235 | 257 | comments = ChangesetComment.query().all() |
|
236 | 258 | assert len(comments) == 0 |
|
237 | 259 | |
|
238 | 260 | response = self.app.get( |
|
239 | url(controller='changeset', action='index', | |
|
240 | repo_name=backend.repo_name, revision=commit_id)) | |
|
261 | route_path('repo_commit', | |
|
262 | repo_name=backend.repo_name, commit_id=commit_id)) | |
|
241 | 263 | assert_comment_links(response, 0, 0) |
|
242 | 264 | |
|
243 | 265 | @pytest.mark.parametrize('renderer, input, output', [ |
|
244 | 266 | ('rst', 'plain text', '<p>plain text</p>'), |
|
245 | 267 | ('rst', 'header\n======', '<h1 class="title">header</h1>'), |
|
246 | 268 | ('rst', '*italics*', '<em>italics</em>'), |
|
247 | 269 | ('rst', '**bold**', '<strong>bold</strong>'), |
|
248 | 270 | ('markdown', 'plain text', '<p>plain text</p>'), |
|
249 | 271 | ('markdown', '# header', '<h1>header</h1>'), |
|
250 | 272 | ('markdown', '*italics*', '<em>italics</em>'), |
|
251 | 273 | ('markdown', '**bold**', '<strong>bold</strong>'), |
|
252 | 274 | ], ids=['rst-plain', 'rst-header', 'rst-italics', 'rst-bold', 'md-plain', |
|
253 | 275 | 'md-header', 'md-italics', 'md-bold', ]) |
|
254 | def test_preview(self, renderer, input, output, backend): | |
|
276 | def test_preview(self, renderer, input, output, backend, xhr_header): | |
|
255 | 277 | self.log_user() |
|
256 | 278 | params = { |
|
257 | 279 | 'renderer': renderer, |
|
258 | 280 | 'text': input, |
|
259 | 281 | 'csrf_token': self.csrf_token |
|
260 | 282 | } |
|
261 | environ = { | |
|
262 | 'HTTP_X_PARTIAL_XHR': 'true' | |
|
263 | } | |
|
283 | commit_id = '0' * 16 # fake this for tests | |
|
264 | 284 | response = self.app.post( |
|
265 | url(controller='changeset', | |
|
266 | action='preview_comment', | |
|
267 | repo_name=backend.repo_name), | |
|
285 | route_path('repo_commit_comment_preview', | |
|
286 | repo_name=backend.repo_name, commit_id=commit_id,), | |
|
268 | 287 | params=params, |
|
269 | extra_environ=environ) | |
|
288 | extra_environ=xhr_header) | |
|
270 | 289 | |
|
271 | 290 | response.mustcontain(output) |
|
272 | 291 | |
|
273 | 292 | |
|
274 | 293 | def assert_comment_links(response, comments, inline_comments): |
|
275 | comments_text = ungettext("%d Commit comment", | |
|
276 | "%d Commit comments", comments) % comments | |
|
294 | if comments == 1: | |
|
295 | comments_text = "%d Commit comment" % comments | |
|
296 | else: | |
|
297 | comments_text = "%d Commit comments" % comments | |
|
298 | ||
|
299 | if inline_comments == 1: | |
|
300 | inline_comments_text = "%d Inline Comment" % inline_comments | |
|
301 | else: | |
|
302 | inline_comments_text = "%d Inline Comments" % inline_comments | |
|
303 | ||
|
277 | 304 | if comments: |
|
278 | 305 | response.mustcontain('<a href="#comments">%s</a>,' % comments_text) |
|
279 | 306 | else: |
|
280 | 307 | response.mustcontain(comments_text) |
|
281 | 308 | |
|
282 | inline_comments_text = ungettext("%d Inline Comment", "%d Inline Comments", | |
|
283 | inline_comments) % inline_comments | |
|
284 | 309 | if inline_comments: |
|
285 | 310 | response.mustcontain( |
|
286 | 311 | 'id="inline-comments-counter">%s</' % inline_comments_text) |
|
287 | 312 | else: |
|
288 | 313 | response.mustcontain(inline_comments_text) |
@@ -1,301 +1,318 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import pytest |
|
22 | 22 | |
|
23 | 23 | from rhodecode.lib.helpers import _shorten_commit_id |
|
24 | from rhodecode.tests import url | |
|
24 | ||
|
25 | ||
|
26 | def route_path(name, params=None, **kwargs): | |
|
27 | import urllib | |
|
28 | ||
|
29 | base_url = { | |
|
30 | 'repo_commit': '/{repo_name}/changeset/{commit_id}', | |
|
31 | 'repo_commit_children': '/{repo_name}/changeset_children/{commit_id}', | |
|
32 | 'repo_commit_parents': '/{repo_name}/changeset_parents/{commit_id}', | |
|
33 | 'repo_commit_raw': '/{repo_name}/changeset-diff/{commit_id}', | |
|
34 | 'repo_commit_patch': '/{repo_name}/changeset-patch/{commit_id}', | |
|
35 | 'repo_commit_download': '/{repo_name}/changeset-download/{commit_id}', | |
|
36 | 'repo_commit_data': '/{repo_name}/changeset-data/{commit_id}', | |
|
37 | 'repo_compare': '/{repo_name}/compare/{source_ref_type}@{source_ref}...{target_ref_type}@{target_ref}', | |
|
38 | }[name].format(**kwargs) | |
|
39 | ||
|
40 | if params: | |
|
41 | base_url = '{}?{}'.format(base_url, urllib.urlencode(params)) | |
|
42 | return base_url | |
|
25 | 43 | |
|
26 | 44 | |
|
27 | 45 | @pytest.mark.usefixtures("app") |
|
28 | class TestChangesetController(object): | |
|
46 | class TestRepoCommitView(object): | |
|
29 | 47 | |
|
30 | def test_index(self, backend): | |
|
48 | def test_show_commit(self, backend): | |
|
31 | 49 | commit_id = self.commit_id[backend.alias] |
|
32 | response = self.app.get(url( | |
|
33 | controller='changeset', action='index', | |
|
34 | repo_name=backend.repo_name, revision=commit_id)) | |
|
50 | response = self.app.get(route_path( | |
|
51 | 'repo_commit', repo_name=backend.repo_name, commit_id=commit_id)) | |
|
35 | 52 | response.mustcontain('Added a symlink') |
|
36 | 53 | response.mustcontain(commit_id) |
|
37 | 54 | response.mustcontain('No newline at end of file') |
|
38 | 55 | |
|
39 | def test_index_raw(self, backend): | |
|
56 | def test_show_raw(self, backend): | |
|
40 | 57 | commit_id = self.commit_id[backend.alias] |
|
41 | response = self.app.get(url( | |
|
42 | controller='changeset', action='changeset_raw', | |
|
43 | repo_name=backend.repo_name, revision=commit_id)) | |
|
58 | response = self.app.get(route_path( | |
|
59 | 'repo_commit_raw', | |
|
60 | repo_name=backend.repo_name, commit_id=commit_id)) | |
|
44 | 61 | assert response.body == self.diffs[backend.alias] |
|
45 | 62 | |
|
46 | def test_index_raw_patch(self, backend): | |
|
47 | response = self.app.get(url( | |
|
48 | controller='changeset', action='changeset_patch', | |
|
49 | repo_name=backend.repo_name, | |
|
50 | revision=self.commit_id[backend.alias])) | |
|
63 | def test_show_raw_patch(self, backend): | |
|
64 | response = self.app.get(route_path( | |
|
65 | 'repo_commit_patch', repo_name=backend.repo_name, | |
|
66 | commit_id=self.commit_id[backend.alias])) | |
|
51 | 67 | assert response.body == self.patches[backend.alias] |
|
52 | 68 | |
|
53 | def test_index_download(self, backend): | |
|
54 | response = self.app.get(url( | |
|
55 | controller='changeset', action='changeset_download', | |
|
69 | def test_commit_download(self, backend): | |
|
70 | response = self.app.get(route_path( | |
|
71 | 'repo_commit_download', | |
|
56 | 72 | repo_name=backend.repo_name, |
|
57 | revision=self.commit_id[backend.alias])) | |
|
73 | commit_id=self.commit_id[backend.alias])) | |
|
58 | 74 | assert response.body == self.diffs[backend.alias] |
|
59 | 75 | |
|
60 | 76 | def test_single_commit_page_different_ops(self, backend): |
|
61 | 77 | commit_id = { |
|
62 | 78 | 'hg': '603d6c72c46d953420c89d36372f08d9f305f5dd', |
|
63 | 79 | 'git': '03fa803d7e9fb14daa9a3089e0d1494eda75d986', |
|
64 | 80 | 'svn': '337', |
|
65 | 81 | } |
|
66 | 82 | commit_id = commit_id[backend.alias] |
|
67 | response = self.app.get(url( | |
|
68 | controller='changeset', action='index', | |
|
69 | repo_name=backend.repo_name, revision=commit_id)) | |
|
83 | response = self.app.get(route_path( | |
|
84 | 'repo_commit', | |
|
85 | repo_name=backend.repo_name, commit_id=commit_id)) | |
|
70 | 86 | |
|
71 | 87 | response.mustcontain(_shorten_commit_id(commit_id)) |
|
72 | 88 | response.mustcontain('21 files changed: 943 inserted, 288 deleted') |
|
73 | 89 | |
|
74 | 90 | # files op files |
|
75 | 91 | response.mustcontain('File no longer present at commit: %s' % |
|
76 | 92 | _shorten_commit_id(commit_id)) |
|
77 | 93 | |
|
78 | 94 | # svn uses a different filename |
|
79 | 95 | if backend.alias == 'svn': |
|
80 | 96 | response.mustcontain('new file 10644') |
|
81 | 97 | else: |
|
82 | 98 | response.mustcontain('new file 100644') |
|
83 | 99 | response.mustcontain('Changed theme to ADC theme') # commit msg |
|
84 | 100 | |
|
85 | 101 | self._check_new_diff_menus(response, right_menu=True) |
|
86 | 102 | |
|
87 | 103 | def test_commit_range_page_different_ops(self, backend): |
|
88 | 104 | commit_id_range = { |
|
89 | 105 | 'hg': ( |
|
90 | 106 | '25d7e49c18b159446cadfa506a5cf8ad1cb04067', |
|
91 | 107 | '603d6c72c46d953420c89d36372f08d9f305f5dd'), |
|
92 | 108 | 'git': ( |
|
93 | 109 | '6fc9270775aaf5544c1deb014f4ddd60c952fcbb', |
|
94 | 110 | '03fa803d7e9fb14daa9a3089e0d1494eda75d986'), |
|
95 | 111 | 'svn': ( |
|
96 | 112 | '335', |
|
97 | 113 | '337'), |
|
98 | 114 | } |
|
99 | 115 | commit_ids = commit_id_range[backend.alias] |
|
100 | 116 | commit_id = '%s...%s' % (commit_ids[0], commit_ids[1]) |
|
101 | response = self.app.get(

102 | controller='changeset', action='index', | |
|
103 | repo_name=backend.repo_name,

117 | response = self.app.get(route_path( | |
|
118 | 'repo_commit', | |
|
119 | repo_name=backend.repo_name, commit_id=commit_id)) | |
|
104 | 120 | |
|
105 | 121 | response.mustcontain(_shorten_commit_id(commit_ids[0])) |
|
106 | 122 | response.mustcontain(_shorten_commit_id(commit_ids[1])) |
|
107 | 123 | |
|
108 | 124 | # svn is special |
|
109 | 125 | if backend.alias == 'svn': |
|
110 | 126 | response.mustcontain('new file 10644') |
|
111 | 127 | response.mustcontain('1 file changed: 5 inserted, 1 deleted') |
|
112 | 128 | response.mustcontain('12 files changed: 236 inserted, 22 deleted') |
|
113 | 129 | response.mustcontain('21 files changed: 943 inserted, 288 deleted') |
|
114 | 130 | else: |
|
115 | 131 | response.mustcontain('new file 100644') |
|
116 | 132 | response.mustcontain('12 files changed: 222 inserted, 20 deleted') |
|
117 | 133 | response.mustcontain('21 files changed: 943 inserted, 288 deleted') |
|
118 | 134 | |
|
119 | 135 | # files op files |
|
120 | 136 | response.mustcontain('File no longer present at commit: %s' % |
|
121 | 137 | _shorten_commit_id(commit_ids[1])) |
|
122 | 138 | response.mustcontain('Added docstrings to vcs.cli') # commit msg |
|
123 | 139 | response.mustcontain('Changed theme to ADC theme') # commit msg |
|
124 | 140 | |
|
125 | 141 | self._check_new_diff_menus(response) |
|
126 | 142 | |
|
127 | 143 | def test_combined_compare_commit_page_different_ops(self, backend): |
|
128 | 144 | commit_id_range = { |
|
129 | 145 | 'hg': ( |
|
130 | 146 | '4fdd71e9427417b2e904e0464c634fdee85ec5a7', |
|
131 | 147 | '603d6c72c46d953420c89d36372f08d9f305f5dd'), |
|
132 | 148 | 'git': ( |
|
133 | 149 | 'f5fbf9cfd5f1f1be146f6d3b38bcd791a7480c13', |
|
134 | 150 | '03fa803d7e9fb14daa9a3089e0d1494eda75d986'), |
|
135 | 151 | 'svn': ( |
|
136 | 152 | '335', |
|
137 | 153 | '337'), |
|
138 | 154 | } |
|
139 | 155 | commit_ids = commit_id_range[backend.alias] |
|
140 | response = self.app.get(

141 | controller='compare', action='compare', | |
|
156 | response = self.app.get(route_path( | |
|
157 | 'repo_compare', | |
|
142 | 158 | repo_name=backend.repo_name, |
|
143 | 159 | source_ref_type='rev', source_ref=commit_ids[0], |
|
144 | 160 | target_ref_type='rev', target_ref=commit_ids[1], )) |
|
145 | 161 | |
|
146 | 162 | response.mustcontain(_shorten_commit_id(commit_ids[0])) |
|
147 | 163 | response.mustcontain(_shorten_commit_id(commit_ids[1])) |
|
148 | 164 | |
|
149 | 165 | # files op files |
|
150 | 166 | response.mustcontain('File no longer present at commit: %s' % |
|
151 | 167 | _shorten_commit_id(commit_ids[1])) |
|
152 | 168 | |
|
153 | 169 | # svn is special |
|
154 | 170 | if backend.alias == 'svn': |
|
155 | 171 | response.mustcontain('new file 10644') |
|
156 | 172 | response.mustcontain('32 files changed: 1179 inserted, 310 deleted') |
|
157 | 173 | else: |
|
158 | 174 | response.mustcontain('new file 100644') |
|
159 | 175 | response.mustcontain('32 files changed: 1165 inserted, 308 deleted') |
|
160 | 176 | |
|
161 | 177 | response.mustcontain('Added docstrings to vcs.cli') # commit msg |
|
162 | 178 | response.mustcontain('Changed theme to ADC theme') # commit msg |
|
163 | 179 | |
|
164 | 180 | self._check_new_diff_menus(response) |
|
165 | 181 | |
|
166 | 182 | def test_changeset_range(self, backend): |
|
167 | 183 | self._check_changeset_range( |
|
168 | 184 | backend, self.commit_id_range, self.commit_id_range_result) |
|
169 | 185 | |
|
170 | 186 | def test_changeset_range_with_initial_commit(self, backend): |
|
171 | 187 | commit_id_range = { |
|
172 | 188 | 'hg': ( |
|
173 | 189 | 'b986218ba1c9b0d6a259fac9b050b1724ed8e545' |
|
174 | 190 | '...6cba7170863a2411822803fa77a0a264f1310b35'), |
|
175 | 191 | 'git': ( |
|
176 | 192 | 'c1214f7e79e02fc37156ff215cd71275450cffc3' |
|
177 | 193 | '...fa6600f6848800641328adbf7811fd2372c02ab2'), |
|
178 | 194 | 'svn': '1...3', |
|
179 | 195 | } |
|
180 | 196 | commit_id_range_result = { |
|
181 | 197 | 'hg': ['b986218ba1c9', '3d8f361e72ab', '6cba7170863a'], |
|
182 | 198 | 'git': ['c1214f7e79e0', '38b5fe81f109', 'fa6600f68488'], |
|
183 | 199 | 'svn': ['1', '2', '3'], |
|
184 | 200 | } |
|
185 | 201 | self._check_changeset_range( |
|
186 | 202 | backend, commit_id_range, commit_id_range_result) |
|
187 | 203 | |
|
188 | 204 | def _check_changeset_range( |
|
189 | 205 | self, backend, commit_id_ranges, commit_id_range_result): |
|
190 | 206 | response = self.app.get( |
|
191 | url(controller='changeset', action='index', | |
|
192 | repo_name=backend.repo_name, | |
|
193 |

207 | route_path('repo_commit', | |
|
208 | repo_name=backend.repo_name, | |
|
209 | commit_id=commit_id_ranges[backend.alias])) | |
|
210 | ||
|
194 | 211 | expected_result = commit_id_range_result[backend.alias] |
|
195 | 212 | response.mustcontain('{} commits'.format(len(expected_result))) |
|
196 | 213 | for commit_id in expected_result: |
|
197 | 214 | response.mustcontain(commit_id) |
|
198 | 215 | |
|
199 | 216 | commit_id = { |
|
200 | 217 | 'hg': '2062ec7beeeaf9f44a1c25c41479565040b930b2', |
|
201 | 218 | 'svn': '393', |
|
202 | 219 | 'git': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7', |
|
203 | 220 | } |
|
204 | 221 | |
|
205 | 222 | commit_id_range = { |
|
206 | 223 | 'hg': ( |
|
207 | 224 | 'a53d9201d4bc278910d416d94941b7ea007ecd52' |
|
208 | 225 | '...2062ec7beeeaf9f44a1c25c41479565040b930b2'), |
|
209 | 226 | 'git': ( |
|
210 | 227 | '7ab37bc680b4aa72c34d07b230c866c28e9fc204' |
|
211 | 228 | '...fd627b9e0dd80b47be81af07c4a98518244ed2f7'), |
|
212 | 229 | 'svn': '391...393', |
|
213 | 230 | } |
|
214 | 231 | |
|
215 | 232 | commit_id_range_result = { |
|
216 | 233 | 'hg': ['a53d9201d4bc', '96507bd11ecc', '2062ec7beeea'], |
|
217 | 234 | 'git': ['7ab37bc680b4', '5f2c6ee19592', 'fd627b9e0dd8'], |
|
218 | 235 | 'svn': ['391', '392', '393'], |
|
219 | 236 | } |
|
220 | 237 | |
|
221 | 238 | diffs = { |
|
222 | 239 | 'hg': r"""diff --git a/README b/README |
|
223 | 240 | new file mode 120000 |
|
224 | 241 | --- /dev/null |
|
225 | 242 | +++ b/README |
|
226 | 243 | @@ -0,0 +1,1 @@ |
|
227 | 244 | +README.rst |
|
228 | 245 | \ No newline at end of file |
|
229 | 246 | """, |
|
230 | 247 | 'git': r"""diff --git a/README b/README |
|
231 | 248 | new file mode 120000 |
|
232 | 249 | index 0000000000000000000000000000000000000000..92cacd285355271487b7e379dba6ca60f9a554a4 |
|
233 | 250 | --- /dev/null |
|
234 | 251 | +++ b/README |
|
235 | 252 | @@ -0,0 +1 @@ |
|
236 | 253 | +README.rst |
|
237 | 254 | \ No newline at end of file |
|
238 | 255 | """, |
|
239 | 256 | 'svn': """Index: README |
|
240 | 257 | =================================================================== |
|
241 | 258 | diff --git a/README b/README |
|
242 | 259 | new file mode 10644 |
|
243 | 260 | --- /dev/null\t(revision 0) |
|
244 | 261 | +++ b/README\t(revision 393) |
|
245 | 262 | @@ -0,0 +1 @@ |
|
246 | 263 | +link README.rst |
|
247 | 264 | \\ No newline at end of file |
|
248 | 265 | """, |
|
249 | 266 | } |
|
250 | 267 | |
|
251 | 268 | patches = { |
|
252 | 269 | 'hg': r"""# HG changeset patch |
|
253 | 270 | # User Marcin Kuzminski <marcin@python-works.com> |
|
254 | 271 | # Date 2014-01-07 12:21:40 |
|
255 | 272 | # Node ID 2062ec7beeeaf9f44a1c25c41479565040b930b2 |
|
256 | 273 | # Parent 96507bd11ecc815ebc6270fdf6db110928c09c1e |
|
257 | 274 | |
|
258 | 275 | Added a symlink |
|
259 | 276 | |
|
260 | 277 | """ + diffs['hg'], |
|
261 | 278 | 'git': r"""From fd627b9e0dd80b47be81af07c4a98518244ed2f7 2014-01-07 12:22:20 |
|
262 | 279 | From: Marcin Kuzminski <marcin@python-works.com> |
|
263 | 280 | Date: 2014-01-07 12:22:20 |
|
264 | 281 | Subject: [PATCH] Added a symlink |
|
265 | 282 | |
|
266 | 283 | --- |
|
267 | 284 | |
|
268 | 285 | """ + diffs['git'], |
|
269 | 286 | 'svn': r"""# SVN changeset patch |
|
270 | 287 | # User marcin |
|
271 | 288 | # Date 2014-09-02 12:25:22.071142 |
|
272 | 289 | # Revision 393 |
|
273 | 290 | |
|
274 | 291 | Added a symlink |
|
275 | 292 | |
|
276 | 293 | """ + diffs['svn'], |
|
277 | 294 | } |
|
278 | 295 | |
|
279 | 296 | def _check_diff_menus(self, response, right_menu=False,): |
|
280 | 297 | # diff menus |
|
281 | 298 | for elem in ['Show File', 'Unified Diff', 'Side-by-side Diff', |
|
282 | 299 | 'Raw Diff', 'Download Diff']: |
|
283 | 300 | response.mustcontain(elem) |
|
284 | 301 | |
|
285 | 302 | # right pane diff menus |
|
286 | 303 | if right_menu: |
|
287 | 304 | for elem in ['Ignore whitespace', 'Increase context', |
|
288 | 305 | 'Hide comments']: |
|
289 | 306 | response.mustcontain(elem) |
|
290 | 307 | |
|
291 | 308 | def _check_new_diff_menus(self, response, right_menu=False,): |
|
292 | 309 | # diff menus |
|
293 | 310 | for elem in ['Show file before', 'Show file after', |
|
294 | 311 | 'Raw diff', 'Download diff']: |
|
295 | 312 | response.mustcontain(elem) |
|
296 | 313 | |
|
297 | 314 | # right pane diff menus |
|
298 | 315 | if right_menu: |
|
299 | 316 | for elem in ['Ignore whitespace', 'Increase context', |
|
300 | 317 | 'Hide comments']: |
|
301 | 318 | response.mustcontain(elem) |
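
Note: the changes above replace the old pylons-style url(controller=..., action=...) calls with named routes resolved by the test module's route_path() helper. A minimal sketch of that helper, repeating only the route entries visible in this diff (the real map in the test module carries more entries):

    import urllib

    def route_path(name, params=None, **kwargs):
        # reduced route map; entries copied from the diff above
        base_url = {
            'repo_commit_download':
                '/{repo_name}/changeset-download/{commit_id}',
            'repo_commit_data': '/{repo_name}/changeset-data/{commit_id}',
            'repo_compare': ('/{repo_name}/compare/{source_ref_type}@{source_ref}'
                             '...{target_ref_type}@{target_ref}'),
        }[name].format(**kwargs)
        if params:
            base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
        return base_url

    # e.g. route_path('repo_commit_download', repo_name='my-repo',
    #                 commit_id='603d6c72c46d')
    # -> '/my-repo/changeset-download/603d6c72c46d'
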
@@ -1,203 +1,203 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2017-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import pytz |
|
22 | 22 | import logging |
|
23 | 23 | |
|
24 | 24 | from beaker.cache import cache_region |
|
25 | 25 | from pyramid.view import view_config |
|
26 | 26 | from pyramid.response import Response |
|
27 | 27 | from webhelpers.feedgenerator import Rss201rev2Feed, Atom1Feed |
|
28 | 28 | |
|
29 | 29 | from rhodecode.apps._base import RepoAppView |
|
30 | 30 | from rhodecode.lib import audit_logger |
|
31 | 31 | from rhodecode.lib import helpers as h |
|
32 | 32 | from rhodecode.lib.auth import (LoginRequired, HasRepoPermissionAnyDecorator, |
|
33 | 33 | NotAnonymous, CSRFRequired) |
|
34 | 34 | from rhodecode.lib.diffs import DiffProcessor, LimitedDiffContainer |
|
35 | 35 | from rhodecode.lib.ext_json import json |
|
36 | 36 | from rhodecode.lib.utils2 import str2bool, safe_int |
|
37 | 37 | from rhodecode.model.db import UserApiKeys, CacheKey |
|
38 | 38 | |
|
39 | 39 | log = logging.getLogger(__name__) |
|
40 | 40 | |
|
41 | 41 | |
|
42 | 42 | class RepoFeedView(RepoAppView): |
|
43 | 43 | def load_default_context(self): |
|
44 | 44 | c = self._get_local_tmpl_context() |
|
45 | 45 | |
|
46 | 46 | # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead |
|
47 | 47 | c.repo_info = self.db_repo |
|
48 | 48 | |
|
49 | 49 | self._register_global_c(c) |
|
50 | 50 | self._load_defaults() |
|
51 | 51 | return c |
|
52 | 52 | |
|
53 | 53 | def _get_config(self): |
|
54 | 54 | import rhodecode |
|
55 | 55 | config = rhodecode.CONFIG |
|
56 | 56 | |
|
57 | 57 | return { |
|
58 | 58 | 'language': 'en-us', |
|
59 | 59 | 'feed_ttl': '5', # TTL of feed, |
|
60 | 60 | 'feed_include_diff': |
|
61 | 61 | str2bool(config.get('rss_include_diff', False)), |
|
62 | 62 | 'feed_items_per_page': |
|
63 | 63 | safe_int(config.get('rss_items_per_page', 20)), |
|
64 | 64 | 'feed_diff_limit': |
|
65 | 65 | # we need to protect from parsing huge diffs here, otherwise
|
66 | 66 | # we can kill the server |
|
67 | 67 | safe_int(config.get('rss_cut_off_limit', 32 * 1024)), |
|
68 | 68 | } |
|
69 | 69 | |
|
70 | 70 | def _load_defaults(self): |
|
71 | 71 | _ = self.request.translate |
|
72 | 72 | config = self._get_config() |
|
73 | 73 | # common values for feeds |
|
74 | 74 | self.description = _('Changes on %s repository') |
|
75 | 75 | self.title = _('%s %s feed') % (self.db_repo_name, '%s')
|
76 | 76 | self.language = config["language"] |
|
77 | 77 | self.ttl = config["feed_ttl"] |
|
78 | 78 | self.feed_include_diff = config['feed_include_diff'] |
|
79 | 79 | self.feed_diff_limit = config['feed_diff_limit'] |
|
80 | 80 | self.feed_items_per_page = config['feed_items_per_page'] |
|
81 | 81 | |
|
82 | 82 | def _changes(self, commit): |
|
83 | 83 | diff_processor = DiffProcessor( |
|
84 | 84 | commit.diff(), diff_limit=self.feed_diff_limit) |
|
85 | 85 | _parsed = diff_processor.prepare(inline_diff=False) |
|
86 | 86 | limited_diff = isinstance(_parsed, LimitedDiffContainer) |
|
87 | 87 | |
|
88 | 88 | return _parsed, limited_diff |
|
89 | 89 | |
|
90 | 90 | def _get_title(self, commit): |
|
91 | 91 | return h.shorter(commit.message, 160) |
|
92 | 92 | |
|
93 | 93 | def _get_description(self, commit): |
|
94 | 94 | _renderer = self.request.get_partial_renderer( |
|
95 | 95 | 'feed/atom_feed_entry.mako') |
|
96 | 96 | parsed_diff, limited_diff = self._changes(commit) |
|
97 | 97 | return _renderer( |
|
98 | 98 | 'body', |
|
99 | 99 | commit=commit, |
|
100 | 100 | parsed_diff=parsed_diff, |
|
101 | 101 | limited_diff=limited_diff, |
|
102 | 102 | feed_include_diff=self.feed_include_diff, |
|
103 | 103 | ) |
|
104 | 104 | |
|
105 | 105 | def _set_timezone(self, date, tzinfo=pytz.utc): |
|
106 | 106 | if not getattr(date, "tzinfo", None): |
|
107 | 107 | date = date.replace(tzinfo=tzinfo)
|
108 | 108 | return date |
|
109 | 109 | |
|
110 | 110 | def _get_commits(self): |
|
111 | 111 | return list(self.rhodecode_vcs_repo[-self.feed_items_per_page:]) |
|
112 | 112 | |
|
113 | 113 | @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED]) |
|
114 | 114 | @HasRepoPermissionAnyDecorator( |
|
115 | 115 | 'repository.read', 'repository.write', 'repository.admin') |
|
116 | 116 | @view_config( |
|
117 | 117 | route_name='atom_feed_home', request_method='GET', |
|
118 | 118 | renderer=None) |
|
119 | 119 | def atom(self): |
|
120 | 120 | """ |
|
121 | 121 | Produce an atom-1.0 feed via feedgenerator module |
|
122 | 122 | """ |
|
123 | 123 | self.load_default_context() |
|
124 | 124 | |
|
125 | 125 | @cache_region('long_term') |
|
126 | 126 | def _generate_feed(cache_key): |
|
127 | 127 | feed = Atom1Feed( |
|
128 | 128 | title=self.title % self.db_repo_name, |
|
129 | 129 | link=h.route_url('repo_summary', repo_name=self.db_repo_name), |
|
130 | 130 | description=self.description % self.db_repo_name, |
|
131 | 131 | language=self.language, |
|
132 | 132 | ttl=self.ttl |
|
133 | 133 | ) |
|
134 | 134 | |
|
135 | 135 | for commit in reversed(self._get_commits()): |
|
136 | 136 | date = self._set_timezone(commit.date) |
|
137 | 137 | feed.add_item( |
|
138 | 138 | title=self._get_title(commit), |
|
139 | 139 | author_name=commit.author, |
|
140 | 140 | description=self._get_description(commit), |
|
141 | 141 | link=h.route_url( |
|
142 | ' |

143 |

142 | 'repo_commit', repo_name=self.db_repo_name, | |
|
143 | commit_id=commit.raw_id), | |
|
144 | 144 | pubdate=date,) |
|
145 | 145 | |
|
146 | 146 | return feed.mime_type, feed.writeString('utf-8') |
|
147 | 147 | |
|
148 | 148 | invalidator_context = CacheKey.repo_context_cache( |
|
149 | 149 | _generate_feed, self.db_repo_name, CacheKey.CACHE_TYPE_ATOM) |
|
150 | 150 | |
|
151 | 151 | with invalidator_context as context: |
|
152 | 152 | context.invalidate() |
|
153 | 153 | mime_type, feed = context.compute() |
|
154 | 154 | |
|
155 | 155 | response = Response(feed) |
|
156 | 156 | response.content_type = mime_type |
|
157 | 157 | return response |
|
158 | 158 | |
|
159 | 159 | @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED]) |
|
160 | 160 | @HasRepoPermissionAnyDecorator( |
|
161 | 161 | 'repository.read', 'repository.write', 'repository.admin') |
|
162 | 162 | @view_config( |
|
163 | 163 | route_name='rss_feed_home', request_method='GET', |
|
164 | 164 | renderer=None) |
|
165 | 165 | def rss(self): |
|
166 | 166 | """ |
|
167 | 167 | Produce an rss2 feed via feedgenerator module |
|
168 | 168 | """ |
|
169 | 169 | self.load_default_context() |
|
170 | 170 | |
|
171 | 171 | @cache_region('long_term') |
|
172 | 172 | def _generate_feed(cache_key): |
|
173 | 173 | feed = Rss201rev2Feed( |
|
174 | 174 | title=self.title % self.db_repo_name, |
|
175 | 175 | link=h.route_url('repo_summary', repo_name=self.db_repo_name), |
|
176 | 176 | description=self.description % self.db_repo_name, |
|
177 | 177 | language=self.language, |
|
178 | 178 | ttl=self.ttl |
|
179 | 179 | ) |
|
180 | 180 | |
|
181 | 181 | for commit in reversed(self._get_commits()): |
|
182 | 182 | date = self._set_timezone(commit.date) |
|
183 | 183 | feed.add_item( |
|
184 | 184 | title=self._get_title(commit), |
|
185 | 185 | author_name=commit.author, |
|
186 | 186 | description=self._get_description(commit), |
|
187 | 187 | link=h.route_url( |
|
188 | ' |

189 |

188 | 'repo_commit', repo_name=self.db_repo_name, | |
|
189 | commit_id=commit.raw_id), | |
|
190 | 190 | pubdate=date,) |
|
191 | 191 | |
|
192 | 192 | return feed.mime_type, feed.writeString('utf-8') |
|
193 | 193 | |
|
194 | 194 | invalidator_context = CacheKey.repo_context_cache( |
|
195 | 195 | _generate_feed, self.db_repo_name, CacheKey.CACHE_TYPE_RSS) |
|
196 | 196 | |
|
197 | 197 | with invalidator_context as context: |
|
198 | 198 | context.invalidate() |
|
199 | 199 | mime_type, feed = context.compute() |
|
200 | 200 | |
|
201 | 201 | response = Response(feed) |
|
202 | 202 | response.content_type = mime_type |
|
203 | 203 | return response |
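
Note: both feed views above normalize commit dates through _set_timezone() before handing them to feedgenerator. Since datetime.replace() returns a new object rather than mutating in place, the result must be reassigned. A small self-contained sketch of the intended behaviour, using pytz as the view does:

    import datetime
    import pytz

    def set_timezone(date, tzinfo=pytz.utc):
        # attach tzinfo only when the datetime is naive; replace() is not in-place
        if not getattr(date, 'tzinfo', None):
            date = date.replace(tzinfo=tzinfo)
        return date

    naive = datetime.datetime(2014, 1, 7, 12, 21, 40)
    assert set_timezone(naive).tzinfo is pytz.utc
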
@@ -1,1278 +1,1278 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2011-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import itertools |
|
22 | 22 | import logging |
|
23 | 23 | import os |
|
24 | 24 | import shutil |
|
25 | 25 | import tempfile |
|
26 | 26 | import collections |
|
27 | 27 | |
|
28 | 28 | from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest, HTTPFound |
|
29 | 29 | from pyramid.view import view_config |
|
30 | 30 | from pyramid.renderers import render |
|
31 | 31 | from pyramid.response import Response |
|
32 | 32 | |
|
33 | 33 | from rhodecode.apps._base import RepoAppView |
|
34 | 34 | |
|
35 | 35 | from rhodecode.controllers.utils import parse_path_ref |
|
36 | 36 | from rhodecode.lib import diffs, helpers as h, caches |
|
37 | 37 | from rhodecode.lib import audit_logger |
|
38 | 38 | from rhodecode.lib.exceptions import NonRelativePathError |
|
39 | 39 | from rhodecode.lib.codeblocks import ( |
|
40 | 40 | filenode_as_lines_tokens, filenode_as_annotated_lines_tokens) |
|
41 | 41 | from rhodecode.lib.utils2 import ( |
|
42 | 42 | convert_line_endings, detect_mode, safe_str, str2bool) |
|
43 | 43 | from rhodecode.lib.auth import ( |
|
44 | 44 | LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired) |
|
45 | 45 | from rhodecode.lib.vcs import path as vcspath |
|
46 | 46 | from rhodecode.lib.vcs.backends.base import EmptyCommit |
|
47 | 47 | from rhodecode.lib.vcs.conf import settings |
|
48 | 48 | from rhodecode.lib.vcs.nodes import FileNode |
|
49 | 49 | from rhodecode.lib.vcs.exceptions import ( |
|
50 | 50 | RepositoryError, CommitDoesNotExistError, EmptyRepositoryError, |
|
51 | 51 | ImproperArchiveTypeError, VCSError, NodeAlreadyExistsError, |
|
52 | 52 | NodeDoesNotExistError, CommitError, NodeError) |
|
53 | 53 | |
|
54 | 54 | from rhodecode.model.scm import ScmModel |
|
55 | 55 | from rhodecode.model.db import Repository |
|
56 | 56 | |
|
57 | 57 | log = logging.getLogger(__name__) |
|
58 | 58 | |
|
59 | 59 | |
|
60 | 60 | class RepoFilesView(RepoAppView): |
|
61 | 61 | |
|
62 | 62 | @staticmethod |
|
63 | 63 | def adjust_file_path_for_svn(f_path, repo): |
|
64 | 64 | """ |
|
65 | 65 | Computes the relative path of `f_path`. |
|
66 | 66 | |
|
67 | 67 | This is mainly based on prefix matching of the recognized tags and |
|
68 | 68 | branches in the underlying repository. |
|
69 | 69 | """ |
|
70 | 70 | tags_and_branches = itertools.chain( |
|
71 | 71 | repo.branches.iterkeys(), |
|
72 | 72 | repo.tags.iterkeys()) |
|
73 | 73 | tags_and_branches = sorted(tags_and_branches, key=len, reverse=True) |
|
74 | 74 | |
|
75 | 75 | for name in tags_and_branches: |
|
76 | 76 | if f_path.startswith('{}/'.format(name)): |
|
77 | 77 | f_path = vcspath.relpath(f_path, name) |
|
78 | 78 | break |
|
79 | 79 | return f_path |
|
80 | 80 | |
|
81 | 81 | def load_default_context(self): |
|
82 | 82 | c = self._get_local_tmpl_context(include_app_defaults=True) |
|
83 | 83 | |
|
84 | 84 | # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead |
|
85 | 85 | c.repo_info = self.db_repo |
|
86 | 86 | c.rhodecode_repo = self.rhodecode_vcs_repo |
|
87 | 87 | |
|
88 | 88 | self._register_global_c(c) |
|
89 | 89 | return c |
|
90 | 90 | |
|
91 | 91 | def _ensure_not_locked(self): |
|
92 | 92 | _ = self.request.translate |
|
93 | 93 | |
|
94 | 94 | repo = self.db_repo |
|
95 | 95 | if repo.enable_locking and repo.locked[0]: |
|
96 | 96 | h.flash(_('This repository has been locked by %s on %s') |
|
97 | 97 | % (h.person_by_id(repo.locked[0]), |
|
98 | 98 | h.format_date(h.time_to_datetime(repo.locked[1]))), |
|
99 | 99 | 'warning') |
|
100 | 100 | files_url = h.route_path( |
|
101 | 101 | 'repo_files:default_path', |
|
102 | 102 | repo_name=self.db_repo_name, commit_id='tip') |
|
103 | 103 | raise HTTPFound(files_url) |
|
104 | 104 | |
|
105 | 105 | def _get_commit_and_path(self): |
|
106 | 106 | default_commit_id = self.db_repo.landing_rev[1] |
|
107 | 107 | default_f_path = '/' |
|
108 | 108 | |
|
109 | 109 | commit_id = self.request.matchdict.get( |
|
110 | 110 | 'commit_id', default_commit_id) |
|
111 | 111 | f_path = self._get_f_path(self.request.matchdict, default_f_path) |
|
112 | 112 | return commit_id, f_path |
|
113 | 113 | |
|
114 | 114 | def _get_default_encoding(self, c): |
|
115 | 115 | enc_list = getattr(c, 'default_encodings', []) |
|
116 | 116 | return enc_list[0] if enc_list else 'UTF-8' |
|
117 | 117 | |
|
118 | 118 | def _get_commit_or_redirect(self, commit_id, redirect_after=True): |
|
119 | 119 | """ |
|
120 | 120 | This is a safe way to get a commit. If an error occurs, it redirects to

121 | 121 | tip with a proper message
|
122 | 122 | |
|
123 | 123 | :param commit_id: id of commit to fetch |
|
124 | 124 | :param redirect_after: toggle redirection |
|
125 | 125 | """ |
|
126 | 126 | _ = self.request.translate |
|
127 | 127 | |
|
128 | 128 | try: |
|
129 | 129 | return self.rhodecode_vcs_repo.get_commit(commit_id) |
|
130 | 130 | except EmptyRepositoryError: |
|
131 | 131 | if not redirect_after: |
|
132 | 132 | return None |
|
133 | 133 | |
|
134 | 134 | _url = h.route_path( |
|
135 | 135 | 'repo_files_add_file', |
|
136 | 136 | repo_name=self.db_repo_name, commit_id=0, f_path='', |
|
137 | 137 | _anchor='edit') |
|
138 | 138 | |
|
139 | 139 | if h.HasRepoPermissionAny( |
|
140 | 140 | 'repository.write', 'repository.admin')(self.db_repo_name): |
|
141 | 141 | add_new = h.link_to( |
|
142 | 142 | _('Click here to add a new file.'), _url, class_="alert-link") |
|
143 | 143 | else: |
|
144 | 144 | add_new = "" |
|
145 | 145 | |
|
146 | 146 | h.flash(h.literal( |
|
147 | 147 | _('There are no files yet. %s') % add_new), category='warning') |
|
148 | 148 | raise HTTPFound( |
|
149 | 149 | h.route_path('repo_summary', repo_name=self.db_repo_name)) |
|
150 | 150 | |
|
151 | 151 | except (CommitDoesNotExistError, LookupError): |
|
152 | 152 | msg = _('No such commit exists for this repository') |
|
153 | 153 | h.flash(msg, category='error') |
|
154 | 154 | raise HTTPNotFound() |
|
155 | 155 | except RepositoryError as e: |
|
156 | 156 | h.flash(safe_str(h.escape(e)), category='error') |
|
157 | 157 | raise HTTPNotFound() |
|
158 | 158 | |
|
159 | 159 | def _get_filenode_or_redirect(self, commit_obj, path): |
|
160 | 160 | """ |
|
161 | 161 | Returns file_node. If an error occurs, or the given path is a directory,

162 | 162 | it'll redirect to the top-level path
|
163 | 163 | """ |
|
164 | 164 | _ = self.request.translate |
|
165 | 165 | |
|
166 | 166 | try: |
|
167 | 167 | file_node = commit_obj.get_node(path) |
|
168 | 168 | if file_node.is_dir(): |
|
169 | 169 | raise RepositoryError('The given path is a directory') |
|
170 | 170 | except CommitDoesNotExistError: |
|
171 | 171 | log.exception('No such commit exists for this repository') |
|
172 | 172 | h.flash(_('No such commit exists for this repository'), category='error') |
|
173 | 173 | raise HTTPNotFound() |
|
174 | 174 | except RepositoryError as e: |
|
175 | 175 | log.warning('Repository error while fetching ' |
|
176 | 176 | 'filenode `%s`. Err:%s', path, e) |
|
177 | 177 | h.flash(safe_str(h.escape(e)), category='error') |
|
178 | 178 | raise HTTPNotFound() |
|
179 | 179 | |
|
180 | 180 | return file_node |
|
181 | 181 | |
|
182 | 182 | def _is_valid_head(self, commit_id, repo): |
|
183 | 183 | # check if commit is a branch identifier - basically we cannot
|
184 | 184 | # create multiple heads via file editing |
|
185 | 185 | valid_heads = repo.branches.keys() + repo.branches.values() |
|
186 | 186 | |
|
187 | 187 | if h.is_svn(repo) and not repo.is_empty(): |
|
188 | 188 | # Note: Subversion only has one head, we add it here in case there |
|
189 | 189 | # is no branch matched. |
|
190 | 190 | valid_heads.append(repo.get_commit(commit_idx=-1).raw_id) |
|
191 | 191 | |
|
192 | 192 | # check if commit is a branch name or branch hash |
|
193 | 193 | return commit_id in valid_heads |
|
194 | 194 | |
|
195 | 195 | def _get_tree_cache_manager(self, namespace_type): |
|
196 | 196 | _namespace = caches.get_repo_namespace_key( |
|
197 | 197 | namespace_type, self.db_repo_name) |
|
198 | 198 | return caches.get_cache_manager('repo_cache_long', _namespace) |
|
199 | 199 | |
|
200 | 200 | def _get_tree_at_commit( |
|
201 | 201 | self, c, commit_id, f_path, full_load=False, force=False): |
|
202 | 202 | def _cached_tree(): |
|
203 | 203 | log.debug('Generating cached file tree for %s, %s, %s', |
|
204 | 204 | self.db_repo_name, commit_id, f_path) |
|
205 | 205 | |
|
206 | 206 | c.full_load = full_load |
|
207 | 207 | return render( |
|
208 | 208 | 'rhodecode:templates/files/files_browser_tree.mako', |
|
209 | 209 | self._get_template_context(c), self.request) |
|
210 | 210 | |
|
211 | 211 | cache_manager = self._get_tree_cache_manager(caches.FILE_TREE) |
|
212 | 212 | |
|
213 | 213 | cache_key = caches.compute_key_from_params( |
|
214 | 214 | self.db_repo_name, commit_id, f_path) |
|
215 | 215 | |
|
216 | 216 | if force: |
|
217 | 217 | # we want to force recompute of caches |
|
218 | 218 | cache_manager.remove_value(cache_key) |
|
219 | 219 | |
|
220 | 220 | return cache_manager.get(cache_key, createfunc=_cached_tree) |
|
221 | 221 | |
|
222 | 222 | def _get_archive_spec(self, fname): |
|
223 | 223 | log.debug('Detecting archive spec for: `%s`', fname) |
|
224 | 224 | |
|
225 | 225 | fileformat = None |
|
226 | 226 | ext = None |
|
227 | 227 | content_type = None |
|
228 | 228 | for a_type, ext_data in settings.ARCHIVE_SPECS.items(): |
|
229 | 229 | content_type, extension = ext_data |
|
230 | 230 | |
|
231 | 231 | if fname.endswith(extension): |
|
232 | 232 | fileformat = a_type |
|
233 | 233 | log.debug('archive is of type: %s', fileformat) |
|
234 | 234 | ext = extension |
|
235 | 235 | break |
|
236 | 236 | |
|
237 | 237 | if not fileformat: |
|
238 | 238 | raise ValueError() |
|
239 | 239 | |
|
240 | 240 | # left over part of whole fname is the commit |
|
241 | 241 | commit_id = fname[:-len(ext)] |
|
242 | 242 | |
|
243 | 243 | return commit_id, ext, fileformat, content_type |
|
244 | 244 | |
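
Note: _get_archive_spec() above resolves an archive filename into a commit id and format purely by suffix matching against settings.ARCHIVE_SPECS. A hedged usage sketch, assuming the spec table holds a 'tgz' entry of the form ('application/x-gzip', '.tar.gz') (the actual table is not shown in this diff):

    commit_id, ext, fileformat, content_type = \
        self._get_archive_spec('603d6c72c46d.tar.gz')
    # commit_id == '603d6c72c46d', ext == '.tar.gz',
    # fileformat == 'tgz', content_type == 'application/x-gzip'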
|
245 | 245 | @LoginRequired() |
|
246 | 246 | @HasRepoPermissionAnyDecorator( |
|
247 | 247 | 'repository.read', 'repository.write', 'repository.admin') |
|
248 | 248 | @view_config( |
|
249 | 249 | route_name='repo_archivefile', request_method='GET', |
|
250 | 250 | renderer=None) |
|
251 | 251 | def repo_archivefile(self): |
|
252 | 252 | # archive cache config |
|
253 | 253 | from rhodecode import CONFIG |
|
254 | 254 | _ = self.request.translate |
|
255 | 255 | self.load_default_context() |
|
256 | 256 | |
|
257 | 257 | fname = self.request.matchdict['fname'] |
|
258 | 258 | subrepos = self.request.GET.get('subrepos') == 'true' |
|
259 | 259 | |
|
260 | 260 | if not self.db_repo.enable_downloads: |
|
261 | 261 | return Response(_('Downloads disabled')) |
|
262 | 262 | |
|
263 | 263 | try: |
|
264 | 264 | commit_id, ext, fileformat, content_type = \ |
|
265 | 265 | self._get_archive_spec(fname) |
|
266 | 266 | except ValueError: |
|
267 | 267 | return Response(_('Unknown archive type for: `{}`').format(fname)) |
|
268 | 268 | |
|
269 | 269 | try: |
|
270 | 270 | commit = self.rhodecode_vcs_repo.get_commit(commit_id) |
|
271 | 271 | except CommitDoesNotExistError: |
|
272 | 272 | return Response(_('Unknown commit_id %s') % commit_id) |
|
273 | 273 | except EmptyRepositoryError: |
|
274 | 274 | return Response(_('Empty repository')) |
|
275 | 275 | |
|
276 | 276 | archive_name = '%s-%s%s%s' % ( |
|
277 | 277 | safe_str(self.db_repo_name.replace('/', '_')), |
|
278 | 278 | '-sub' if subrepos else '', |
|
279 | 279 | safe_str(commit.short_id), ext) |
|
280 | 280 | |
|
281 | 281 | use_cached_archive = False |
|
282 | 282 | archive_cache_enabled = CONFIG.get( |
|
283 | 283 | 'archive_cache_dir') and not self.request.GET.get('no_cache') |
|
284 | 284 | |
|
285 | 285 | if archive_cache_enabled: |
|
286 | 286 | # check if it's ok to write
|
287 | 287 | if not os.path.isdir(CONFIG['archive_cache_dir']): |
|
288 | 288 | os.makedirs(CONFIG['archive_cache_dir']) |
|
289 | 289 | cached_archive_path = os.path.join( |
|
290 | 290 | CONFIG['archive_cache_dir'], archive_name) |
|
291 | 291 | if os.path.isfile(cached_archive_path): |
|
292 | 292 | log.debug('Found cached archive in %s', cached_archive_path) |
|
293 | 293 | fd, archive = None, cached_archive_path |
|
294 | 294 | use_cached_archive = True |
|
295 | 295 | else: |
|
296 | 296 | log.debug('Archive %s is not yet cached', archive_name) |
|
297 | 297 | |
|
298 | 298 | if not use_cached_archive: |
|
299 | 299 | # generate new archive |
|
300 | 300 | fd, archive = tempfile.mkstemp() |
|
301 | 301 | log.debug('Creating new temp archive in %s', archive) |
|
302 | 302 | try: |
|
303 | 303 | commit.archive_repo(archive, kind=fileformat, subrepos=subrepos) |
|
304 | 304 | except ImproperArchiveTypeError: |
|
305 | 305 | return _('Unknown archive type') |
|
306 | 306 | if archive_cache_enabled: |
|
307 | 307 | # if we generated the archive and we have cache enabled |
|
308 | 308 | # let's use this for future |
|
309 | 309 | log.debug('Storing new archive in %s', cached_archive_path) |
|
310 | 310 | shutil.move(archive, cached_archive_path) |
|
311 | 311 | archive = cached_archive_path |
|
312 | 312 | |
|
313 | 313 | # store download action |
|
314 | 314 | audit_logger.store_web( |
|
315 | 315 | 'repo.archive.download', action_data={ |
|
316 | 316 | 'user_agent': self.request.user_agent, |
|
317 | 317 | 'archive_name': archive_name, |
|
318 | 318 | 'archive_spec': fname, |
|
319 | 319 | 'archive_cached': use_cached_archive}, |
|
320 | 320 | user=self._rhodecode_user, |
|
321 | 321 | repo=self.db_repo, |
|
322 | 322 | commit=True |
|
323 | 323 | ) |
|
324 | 324 | |
|
325 | 325 | def get_chunked_archive(archive): |
|
326 | 326 | with open(archive, 'rb') as stream: |
|
327 | 327 | while True: |
|
328 | 328 | data = stream.read(16 * 1024) |
|
329 | 329 | if not data: |
|
330 | 330 | if fd: # fd means we used temporary file |
|
331 | 331 | os.close(fd) |
|
332 | 332 | if not archive_cache_enabled: |
|
333 | 333 | log.debug('Destroying temp archive %s', archive) |
|
334 | 334 | os.remove(archive) |
|
335 | 335 | break |
|
336 | 336 | yield data |
|
337 | 337 | |
|
338 | 338 | response = Response(app_iter=get_chunked_archive(archive)) |
|
339 | 339 | response.content_disposition = str( |
|
340 | 340 | 'attachment; filename=%s' % archive_name) |
|
341 | 341 | response.content_type = str(content_type) |
|
342 | 342 | |
|
343 | 343 | return response |
|
344 | 344 | |
|
345 | 345 | def _get_file_node(self, commit_id, f_path): |
|
346 | 346 | if commit_id not in ['', None, 'None', '0' * 12, '0' * 40]: |
|
347 | 347 | commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id) |
|
348 | 348 | try: |
|
349 | 349 | node = commit.get_node(f_path) |
|
350 | 350 | if node.is_dir(): |
|
351 | 351 | raise NodeError('%s path is a %s not a file' |
|
352 | 352 | % (node, type(node))) |
|
353 | 353 | except NodeDoesNotExistError: |
|
354 | 354 | commit = EmptyCommit( |
|
355 | 355 | commit_id=commit_id, |
|
356 | 356 | idx=commit.idx, |
|
357 | 357 | repo=commit.repository, |
|
358 | 358 | alias=commit.repository.alias, |
|
359 | 359 | message=commit.message, |
|
360 | 360 | author=commit.author, |
|
361 | 361 | date=commit.date) |
|
362 | 362 | node = FileNode(f_path, '', commit=commit) |
|
363 | 363 | else: |
|
364 | 364 | commit = EmptyCommit( |
|
365 | 365 | repo=self.rhodecode_vcs_repo, |
|
366 | 366 | alias=self.rhodecode_vcs_repo.alias) |
|
367 | 367 | node = FileNode(f_path, '', commit=commit) |
|
368 | 368 | return node |
|
369 | 369 | |
|
370 | 370 | @LoginRequired() |
|
371 | 371 | @HasRepoPermissionAnyDecorator( |
|
372 | 372 | 'repository.read', 'repository.write', 'repository.admin') |
|
373 | 373 | @view_config( |
|
374 | 374 | route_name='repo_files_diff', request_method='GET', |
|
375 | 375 | renderer=None) |
|
376 | 376 | def repo_files_diff(self): |
|
377 | 377 | c = self.load_default_context() |
|
378 | 378 | f_path = self._get_f_path(self.request.matchdict) |
|
379 | 379 | diff1 = self.request.GET.get('diff1', '') |
|
380 | 380 | diff2 = self.request.GET.get('diff2', '') |
|
381 | 381 | |
|
382 | 382 | path1, diff1 = parse_path_ref(diff1, default_path=f_path) |
|
383 | 383 | |
|
384 | 384 | ignore_whitespace = str2bool(self.request.GET.get('ignorews')) |
|
385 | 385 | line_context = self.request.GET.get('context', 3) |
|
386 | 386 | |
|
387 | 387 | if not any((diff1, diff2)): |
|
388 | 388 | h.flash( |
|
389 | 389 | 'Need query parameter "diff1" or "diff2" to generate a diff.', |
|
390 | 390 | category='error') |
|
391 | 391 | raise HTTPBadRequest() |
|
392 | 392 | |
|
393 | 393 | c.action = self.request.GET.get('diff') |
|
394 | 394 | if c.action not in ['download', 'raw']: |
|
395 | 395 | compare_url = h.url( |
|
396 | 396 | 'compare_url', repo_name=self.db_repo_name, |
|
397 | 397 | source_ref_type='rev', |
|
398 | 398 | source_ref=diff1, |
|
399 | 399 | target_repo=self.db_repo_name, |
|
400 | 400 | target_ref_type='rev', |
|
401 | 401 | target_ref=diff2, |
|
402 | 402 | f_path=f_path) |
|
403 | 403 | # redirect to new view if we render diff |
|
404 | 404 | raise HTTPFound(compare_url) |
|
405 | 405 | |
|
406 | 406 | try: |
|
407 | 407 | node1 = self._get_file_node(diff1, path1) |
|
408 | 408 | node2 = self._get_file_node(diff2, f_path) |
|
409 | 409 | except (RepositoryError, NodeError): |
|
410 | 410 | log.exception("Exception while trying to get node from repository") |
|
411 | 411 | raise HTTPFound( |
|
412 | 412 | h.route_path('repo_files', repo_name=self.db_repo_name, |
|
413 | 413 | commit_id='tip', f_path=f_path)) |
|
414 | 414 | |
|
415 | 415 | if all(isinstance(node.commit, EmptyCommit) |
|
416 | 416 | for node in (node1, node2)): |
|
417 | 417 | raise HTTPNotFound() |
|
418 | 418 | |
|
419 | 419 | c.commit_1 = node1.commit |
|
420 | 420 | c.commit_2 = node2.commit |
|
421 | 421 | |
|
422 | 422 | if c.action == 'download': |
|
423 | 423 | _diff = diffs.get_gitdiff(node1, node2, |
|
424 | 424 | ignore_whitespace=ignore_whitespace, |
|
425 | 425 | context=line_context) |
|
426 | 426 | diff = diffs.DiffProcessor(_diff, format='gitdiff') |
|
427 | 427 | |
|
428 | 428 | response = Response(diff.as_raw()) |
|
429 | 429 | response.content_type = 'text/plain' |
|
430 | 430 | response.content_disposition = ( |
|
431 | 431 | 'attachment; filename=%s_%s_vs_%s.diff' % (f_path, diff1, diff2) |
|
432 | 432 | ) |
|
433 | 433 | charset = self._get_default_encoding(c) |
|
434 | 434 | if charset: |
|
435 | 435 | response.charset = charset |
|
436 | 436 | return response |
|
437 | 437 | |
|
438 | 438 | elif c.action == 'raw': |
|
439 | 439 | _diff = diffs.get_gitdiff(node1, node2, |
|
440 | 440 | ignore_whitespace=ignore_whitespace, |
|
441 | 441 | context=line_context) |
|
442 | 442 | diff = diffs.DiffProcessor(_diff, format='gitdiff') |
|
443 | 443 | |
|
444 | 444 | response = Response(diff.as_raw()) |
|
445 | 445 | response.content_type = 'text/plain' |
|
446 | 446 | charset = self._get_default_encoding(c) |
|
447 | 447 | if charset: |
|
448 | 448 | response.charset = charset |
|
449 | 449 | return response |
|
450 | 450 | |
|
451 | 451 | # in case we ever end up here |
|
452 | 452 | raise HTTPNotFound() |
|
453 | 453 | |
|
454 | 454 | @LoginRequired() |
|
455 | 455 | @HasRepoPermissionAnyDecorator( |
|
456 | 456 | 'repository.read', 'repository.write', 'repository.admin') |
|
457 | 457 | @view_config( |
|
458 | 458 | route_name='repo_files_diff_2way_redirect', request_method='GET', |
|
459 | 459 | renderer=None) |
|
460 | 460 | def repo_files_diff_2way_redirect(self): |
|
461 | 461 | """ |
|
462 | 462 | Kept only to make OLD links work |
|
463 | 463 | """ |
|
464 | 464 | f_path = self._get_f_path(self.request.matchdict) |
|
465 | 465 | diff1 = self.request.GET.get('diff1', '') |
|
466 | 466 | diff2 = self.request.GET.get('diff2', '') |
|
467 | 467 | |
|
468 | 468 | if not any((diff1, diff2)): |
|
469 | 469 | h.flash( |
|
470 | 470 | 'Need query parameter "diff1" or "diff2" to generate a diff.', |
|
471 | 471 | category='error') |
|
472 | 472 | raise HTTPBadRequest() |
|
473 | 473 | |
|
474 | 474 | compare_url = h.url( |
|
475 | 475 | 'compare_url', repo_name=self.db_repo_name, |
|
476 | 476 | source_ref_type='rev', |
|
477 | 477 | source_ref=diff1, |
|
478 | 478 | target_repo=self.db_repo_name, |
|
479 | 479 | target_ref_type='rev', |
|
480 | 480 | target_ref=diff2, |
|
481 | 481 | f_path=f_path, |
|
482 | 482 | diffmode='sideside') |
|
483 | 483 | raise HTTPFound(compare_url) |
|
484 | 484 | |
|
485 | 485 | @LoginRequired() |
|
486 | 486 | @HasRepoPermissionAnyDecorator( |
|
487 | 487 | 'repository.read', 'repository.write', 'repository.admin') |
|
488 | 488 | @view_config( |
|
489 | 489 | route_name='repo_files', request_method='GET', |
|
490 | 490 | renderer=None) |
|
491 | 491 | @view_config( |
|
492 | 492 | route_name='repo_files:default_path', request_method='GET', |
|
493 | 493 | renderer=None) |
|
494 | 494 | @view_config( |
|
495 | 495 | route_name='repo_files:default_commit', request_method='GET', |
|
496 | 496 | renderer=None) |
|
497 | 497 | @view_config( |
|
498 | 498 | route_name='repo_files:rendered', request_method='GET', |
|
499 | 499 | renderer=None) |
|
500 | 500 | @view_config( |
|
501 | 501 | route_name='repo_files:annotated', request_method='GET', |
|
502 | 502 | renderer=None) |
|
503 | 503 | def repo_files(self): |
|
504 | 504 | c = self.load_default_context() |
|
505 | 505 | |
|
506 | 506 | view_name = getattr(self.request.matched_route, 'name', None) |
|
507 | 507 | |
|
508 | 508 | c.annotate = view_name == 'repo_files:annotated' |
|
509 | 509 | # default is false, but .rst/.md files later are auto rendered, we can |
|
510 | 510 | # overwrite auto rendering by setting this GET flag |
|
511 | 511 | c.renderer = view_name == 'repo_files:rendered' or \ |
|
512 | 512 | not self.request.GET.get('no-render', False) |
|
513 | 513 | |
|
514 | 514 | # redirect to given commit_id from form if given |
|
515 | 515 | get_commit_id = self.request.GET.get('at_rev', None) |
|
516 | 516 | if get_commit_id: |
|
517 | 517 | self._get_commit_or_redirect(get_commit_id) |
|
518 | 518 | |
|
519 | 519 | commit_id, f_path = self._get_commit_and_path() |
|
520 | 520 | c.commit = self._get_commit_or_redirect(commit_id) |
|
521 | 521 | c.branch = self.request.GET.get('branch', None) |
|
522 | 522 | c.f_path = f_path |
|
523 | 523 | |
|
524 | 524 | # prev link |
|
525 | 525 | try: |
|
526 | 526 | prev_commit = c.commit.prev(c.branch) |
|
527 | 527 | c.prev_commit = prev_commit |
|
528 | 528 | c.url_prev = h.route_path( |
|
529 | 529 | 'repo_files', repo_name=self.db_repo_name, |
|
530 | 530 | commit_id=prev_commit.raw_id, f_path=f_path) |
|
531 | 531 | if c.branch: |
|
532 | 532 | c.url_prev += '?branch=%s' % c.branch |
|
533 | 533 | except (CommitDoesNotExistError, VCSError): |
|
534 | 534 | c.url_prev = '#' |
|
535 | 535 | c.prev_commit = EmptyCommit() |
|
536 | 536 | |
|
537 | 537 | # next link |
|
538 | 538 | try: |
|
539 | 539 | next_commit = c.commit.next(c.branch) |
|
540 | 540 | c.next_commit = next_commit |
|
541 | 541 | c.url_next = h.route_path( |
|
542 | 542 | 'repo_files', repo_name=self.db_repo_name, |
|
543 | 543 | commit_id=next_commit.raw_id, f_path=f_path) |
|
544 | 544 | if c.branch: |
|
545 | 545 | c.url_next += '?branch=%s' % c.branch |
|
546 | 546 | except (CommitDoesNotExistError, VCSError): |
|
547 | 547 | c.url_next = '#' |
|
548 | 548 | c.next_commit = EmptyCommit() |
|
549 | 549 | |
|
550 | 550 | # files or dirs |
|
551 | 551 | try: |
|
552 | 552 | c.file = c.commit.get_node(f_path) |
|
553 | 553 | c.file_author = True |
|
554 | 554 | c.file_tree = '' |
|
555 | 555 | |
|
556 | 556 | # load file content |
|
557 | 557 | if c.file.is_file(): |
|
558 | 558 | c.lf_node = c.file.get_largefile_node() |
|
559 | 559 | |
|
560 | 560 | c.file_source_page = 'true' |
|
561 | 561 | c.file_last_commit = c.file.last_commit |
|
562 | 562 | if c.file.size < c.visual.cut_off_limit_diff: |
|
563 | 563 | if c.annotate: # annotation has precedence over renderer |
|
564 | 564 | c.annotated_lines = filenode_as_annotated_lines_tokens( |
|
565 | 565 | c.file |
|
566 | 566 | ) |
|
567 | 567 | else: |
|
568 | 568 | c.renderer = ( |
|
569 | 569 | c.renderer and h.renderer_from_filename(c.file.path) |
|
570 | 570 | ) |
|
571 | 571 | if not c.renderer: |
|
572 | 572 | c.lines = filenode_as_lines_tokens(c.file) |
|
573 | 573 | |
|
574 | 574 | c.on_branch_head = self._is_valid_head( |
|
575 | 575 | commit_id, self.rhodecode_vcs_repo) |
|
576 | 576 | |
|
577 | 577 | branch = c.commit.branch if ( |
|
578 | 578 | c.commit.branch and '/' not in c.commit.branch) else None |
|
579 | 579 | c.branch_or_raw_id = branch or c.commit.raw_id |
|
580 | 580 | c.branch_name = c.commit.branch or h.short_id(c.commit.raw_id) |
|
581 | 581 | |
|
582 | 582 | author = c.file_last_commit.author |
|
583 | 583 | c.authors = [[ |
|
584 | 584 | h.email(author), |
|
585 | 585 | h.person(author, 'username_or_name_or_email'), |
|
586 | 586 | 1 |
|
587 | 587 | ]] |
|
588 | 588 | |
|
589 | 589 | else: # load tree content at path |
|
590 | 590 | c.file_source_page = 'false' |
|
591 | 591 | c.authors = [] |
|
592 | 592 | # this loads a simple tree without metadata to speed things up |
|
593 | 593 | # later via ajax we call repo_nodetree_full and fetch whole |
|
594 | 594 | c.file_tree = self._get_tree_at_commit( |
|
595 | 595 | c, c.commit.raw_id, f_path) |
|
596 | 596 | |
|
597 | 597 | except RepositoryError as e: |
|
598 | 598 | h.flash(safe_str(h.escape(e)), category='error') |
|
599 | 599 | raise HTTPNotFound() |
|
600 | 600 | |
|
601 | 601 | if self.request.environ.get('HTTP_X_PJAX'): |
|
602 | 602 | html = render('rhodecode:templates/files/files_pjax.mako', |
|
603 | 603 | self._get_template_context(c), self.request) |
|
604 | 604 | else: |
|
605 | 605 | html = render('rhodecode:templates/files/files.mako', |
|
606 | 606 | self._get_template_context(c), self.request) |
|
607 | 607 | return Response(html) |
|
608 | 608 | |
|
609 | 609 | @HasRepoPermissionAnyDecorator( |
|
610 | 610 | 'repository.read', 'repository.write', 'repository.admin') |
|
611 | 611 | @view_config( |
|
612 | 612 | route_name='repo_files:annotated_previous', request_method='GET', |
|
613 | 613 | renderer=None) |
|
614 | 614 | def repo_files_annotated_previous(self): |
|
615 | 615 | self.load_default_context() |
|
616 | 616 | |
|
617 | 617 | commit_id, f_path = self._get_commit_and_path() |
|
618 | 618 | commit = self._get_commit_or_redirect(commit_id) |
|
619 | 619 | prev_commit_id = commit.raw_id |
|
620 | 620 | line_anchor = self.request.GET.get('line_anchor') |
|
621 | 621 | is_file = False |
|
622 | 622 | try: |
|
623 | 623 | _file = commit.get_node(f_path) |
|
624 | 624 | is_file = _file.is_file() |
|
625 | 625 | except (NodeDoesNotExistError, CommitDoesNotExistError, VCSError): |
|
626 | 626 | pass |
|
627 | 627 | |
|
628 | 628 | if is_file: |
|
629 | 629 | history = commit.get_file_history(f_path) |
|
630 | 630 | prev_commit_id = history[1].raw_id \ |
|
631 | 631 | if len(history) > 1 else prev_commit_id |
|
632 | 632 | prev_url = h.route_path( |
|
633 | 633 | 'repo_files:annotated', repo_name=self.db_repo_name, |
|
634 | 634 | commit_id=prev_commit_id, f_path=f_path, |
|
635 | 635 | _anchor='L{}'.format(line_anchor)) |
|
636 | 636 | |
|
637 | 637 | raise HTTPFound(prev_url) |
|
638 | 638 | |
|
639 | 639 | @LoginRequired() |
|
640 | 640 | @HasRepoPermissionAnyDecorator( |
|
641 | 641 | 'repository.read', 'repository.write', 'repository.admin') |
|
642 | 642 | @view_config( |
|
643 | 643 | route_name='repo_nodetree_full', request_method='GET', |
|
644 | 644 | renderer=None, xhr=True) |
|
645 | 645 | @view_config( |
|
646 | 646 | route_name='repo_nodetree_full:default_path', request_method='GET', |
|
647 | 647 | renderer=None, xhr=True) |
|
648 | 648 | def repo_nodetree_full(self): |
|
649 | 649 | """ |
|
650 | 650 | Returns rendered html of file tree that contains commit date, |
|
651 | 651 | author, commit_id for the specified combination of |
|
652 | 652 | repo, commit_id and file path |
|
653 | 653 | """ |
|
654 | 654 | c = self.load_default_context() |
|
655 | 655 | |
|
656 | 656 | commit_id, f_path = self._get_commit_and_path() |
|
657 | 657 | commit = self._get_commit_or_redirect(commit_id) |
|
658 | 658 | try: |
|
659 | 659 | dir_node = commit.get_node(f_path) |
|
660 | 660 | except RepositoryError as e: |
|
661 | 661 | return Response('error: {}'.format(safe_str(e))) |
|
662 | 662 | |
|
663 | 663 | if dir_node.is_file(): |
|
664 | 664 | return Response('') |
|
665 | 665 | |
|
666 | 666 | c.file = dir_node |
|
667 | 667 | c.commit = commit |
|
668 | 668 | |
|
669 | 669 | # using force=True here is a little trick: we flush the cache and

670 | 670 | # recompute it under the same key that the earlier partial load used,

671 | 671 | # so the fully loaded tree replaces the partial one,

672 | 672 | # and that is what gets stored in the cache
|
673 | 673 | html = self._get_tree_at_commit( |
|
674 | 674 | c, commit.raw_id, dir_node.path, full_load=True, force=True) |
|
675 | 675 | |
|
676 | 676 | return Response(html) |
|
677 | 677 | |
|
678 | 678 | def _get_attachement_disposition(self, f_path): |
|
679 | 679 | return 'attachment; filename=%s' % \ |
|
680 | 680 | safe_str(f_path.split(Repository.NAME_SEP)[-1]) |
|
681 | 681 | |
|
682 | 682 | @LoginRequired() |
|
683 | 683 | @HasRepoPermissionAnyDecorator( |
|
684 | 684 | 'repository.read', 'repository.write', 'repository.admin') |
|
685 | 685 | @view_config( |
|
686 | 686 | route_name='repo_file_raw', request_method='GET', |
|
687 | 687 | renderer=None) |
|
688 | 688 | def repo_file_raw(self): |
|
689 | 689 | """ |
|
690 | 690 | Action for show as raw; some mimetypes are "rendered" inline,

691 | 691 | e.g. images and icons.
|
692 | 692 | """ |
|
693 | 693 | c = self.load_default_context() |
|
694 | 694 | |
|
695 | 695 | commit_id, f_path = self._get_commit_and_path() |
|
696 | 696 | commit = self._get_commit_or_redirect(commit_id) |
|
697 | 697 | file_node = self._get_filenode_or_redirect(commit, f_path) |
|
698 | 698 | |
|
699 | 699 | raw_mimetype_mapping = { |
|
700 | 700 | # map original mimetype to a mimetype used for "show as raw" |
|
701 | 701 | # you can also provide a content-disposition to override the |
|
702 | 702 | # default "attachment" disposition. |
|
703 | 703 | # orig_type: (new_type, new_dispo) |
|
704 | 704 | |
|
705 | 705 | # show images inline: |
|
706 | 706 | # Do not re-add SVG: it is unsafe and permits XSS attacks. One can |
|
707 | 707 | # for example render an SVG with javascript inside or even render |
|
708 | 708 | # HTML. |
|
709 | 709 | 'image/x-icon': ('image/x-icon', 'inline'), |
|
710 | 710 | 'image/png': ('image/png', 'inline'), |
|
711 | 711 | 'image/gif': ('image/gif', 'inline'), |
|
712 | 712 | 'image/jpeg': ('image/jpeg', 'inline'), |
|
713 | 713 | 'application/pdf': ('application/pdf', 'inline'), |
|
714 | 714 | } |
|
715 | 715 | |
|
716 | 716 | mimetype = file_node.mimetype |
|
717 | 717 | try: |
|
718 | 718 | mimetype, disposition = raw_mimetype_mapping[mimetype] |
|
719 | 719 | except KeyError: |
|
720 | 720 | # we don't know anything special about this, handle it safely |
|
721 | 721 | if file_node.is_binary: |
|
722 | 722 | # do same as download raw for binary files |
|
723 | 723 | mimetype, disposition = 'application/octet-stream', 'attachment' |
|
724 | 724 | else: |
|
725 | 725 | # do not just use the original mimetype, but force text/plain, |
|
726 | 726 | # otherwise it would serve text/html and that might be unsafe. |
|
727 | 727 | # Note: underlying vcs library fakes text/plain mimetype if the |
|
728 | 728 | # mimetype cannot be determined and it thinks it is not

729 | 729 | # binary. This might lead to erroneous text display in some
|
730 | 730 | # cases, but helps in other cases, like with text files |
|
731 | 731 | # without extension. |
|
732 | 732 | mimetype, disposition = 'text/plain', 'inline' |
|
733 | 733 | |
|
734 | 734 | if disposition == 'attachment': |
|
735 | 735 | disposition = self._get_attachement_disposition(f_path) |
|
736 | 736 | |
|
737 | 737 | def stream_node(): |
|
738 | 738 | yield file_node.raw_bytes |
|
739 | 739 | |
|
740 | 740 | response = Response(app_iter=stream_node()) |
|
741 | 741 | response.content_disposition = disposition |
|
742 | 742 | response.content_type = mimetype |
|
743 | 743 | |
|
744 | 744 | charset = self._get_default_encoding(c) |
|
745 | 745 | if charset: |
|
746 | 746 | response.charset = charset |
|
747 | 747 | |
|
748 | 748 | return response |
|
749 | 749 | |
|
750 | 750 | @LoginRequired() |
|
751 | 751 | @HasRepoPermissionAnyDecorator( |
|
752 | 752 | 'repository.read', 'repository.write', 'repository.admin') |
|
753 | 753 | @view_config( |
|
754 | 754 | route_name='repo_file_download', request_method='GET', |
|
755 | 755 | renderer=None) |
|
756 | 756 | @view_config( |
|
757 | 757 | route_name='repo_file_download:legacy', request_method='GET', |
|
758 | 758 | renderer=None) |
|
759 | 759 | def repo_file_download(self): |
|
760 | 760 | c = self.load_default_context() |
|
761 | 761 | |
|
762 | 762 | commit_id, f_path = self._get_commit_and_path() |
|
763 | 763 | commit = self._get_commit_or_redirect(commit_id) |
|
764 | 764 | file_node = self._get_filenode_or_redirect(commit, f_path) |
|
765 | 765 | |
|
766 | 766 | if self.request.GET.get('lf'): |
|
767 | 767 | # only if lf get flag is passed, we download this file |
|
768 | 768 | # as LFS/Largefile |
|
769 | 769 | lf_node = file_node.get_largefile_node() |
|
770 | 770 | if lf_node: |
|
771 | 771 | # overwrite our pointer with the REAL large-file |
|
772 | 772 | file_node = lf_node |
|
773 | 773 | |
|
774 | 774 | disposition = self._get_attachement_disposition(f_path) |
|
775 | 775 | |
|
776 | 776 | def stream_node(): |
|
777 | 777 | yield file_node.raw_bytes |
|
778 | 778 | |
|
779 | 779 | response = Response(app_iter=stream_node()) |
|
780 | 780 | response.content_disposition = disposition |
|
781 | 781 | response.content_type = file_node.mimetype |
|
782 | 782 | |
|
783 | 783 | charset = self._get_default_encoding(c) |
|
784 | 784 | if charset: |
|
785 | 785 | response.charset = charset |
|
786 | 786 | |
|
787 | 787 | return response |
|
788 | 788 | |
|
789 | 789 | def _get_nodelist_at_commit(self, repo_name, commit_id, f_path): |
|
790 | 790 | def _cached_nodes(): |
|
791 | 791 | log.debug('Generating cached nodelist for %s, %s, %s', |
|
792 | 792 | repo_name, commit_id, f_path) |
|
793 | 793 | _d, _f = ScmModel().get_nodes( |
|
794 | 794 | repo_name, commit_id, f_path, flat=False) |
|
795 | 795 | return _d + _f |
|
796 | 796 | |
|
797 | 797 | cache_manager = self._get_tree_cache_manager(caches.FILE_SEARCH_TREE_META) |
|
798 | 798 | |
|
799 | 799 | cache_key = caches.compute_key_from_params( |
|
800 | 800 | repo_name, commit_id, f_path) |
|
801 | 801 | return cache_manager.get(cache_key, createfunc=_cached_nodes) |
|
802 | 802 | |
|
803 | 803 | @LoginRequired() |
|
804 | 804 | @HasRepoPermissionAnyDecorator( |
|
805 | 805 | 'repository.read', 'repository.write', 'repository.admin') |
|
806 | 806 | @view_config( |
|
807 | 807 | route_name='repo_files_nodelist', request_method='GET', |
|
808 | 808 | renderer='json_ext', xhr=True) |
|
809 | 809 | def repo_nodelist(self): |
|
810 | 810 | self.load_default_context() |
|
811 | 811 | |
|
812 | 812 | commit_id, f_path = self._get_commit_and_path() |
|
813 | 813 | commit = self._get_commit_or_redirect(commit_id) |
|
814 | 814 | |
|
815 | 815 | metadata = self._get_nodelist_at_commit( |
|
816 | 816 | self.db_repo_name, commit.raw_id, f_path) |
|
817 | 817 | return {'nodes': metadata} |
|
818 | 818 | |
|
819 | 819 | def _create_references( |
|
820 | 820 | self, branches_or_tags, symbolic_reference, f_path): |
|
821 | 821 | items = [] |
|
822 | 822 | for name, commit_id in branches_or_tags.items(): |
|
823 | 823 | sym_ref = symbolic_reference(commit_id, name, f_path) |
|
824 | 824 | items.append((sym_ref, name)) |
|
825 | 825 | return items |
|
826 | 826 | |
|
827 | 827 | def _symbolic_reference(self, commit_id, name, f_path): |
|
828 | 828 | return commit_id |
|
829 | 829 | |
|
830 | 830 | def _symbolic_reference_svn(self, commit_id, name, f_path): |
|
831 | 831 | new_f_path = vcspath.join(name, f_path) |
|
832 | 832 | return u'%s@%s' % (new_f_path, commit_id) |
|
833 | 833 | |
|
834 | 834 | def _get_node_history(self, commit_obj, f_path, commits=None): |
|
835 | 835 | """ |
|
836 | 836 | get commit history for given node |
|
837 | 837 | |
|
838 | 838 | :param commit_obj: commit to calculate history |
|
839 | 839 | :param f_path: path for node to calculate history for |
|
840 | 840 | :param commits: if passed don't calculate history and take |
|
841 | 841 | commits defined in this list |
|
842 | 842 | """ |
|
843 | 843 | _ = self.request.translate |
|
844 | 844 | |
|
845 | 845 | # calculate history based on tip |
|
846 | 846 | tip = self.rhodecode_vcs_repo.get_commit() |
|
847 | 847 | if commits is None: |
|
848 | 848 | pre_load = ["author", "branch"] |
|
849 | 849 | try: |
|
850 | 850 | commits = tip.get_file_history(f_path, pre_load=pre_load) |
|
851 | 851 | except (NodeDoesNotExistError, CommitError): |
|
852 | 852 | # this node is not present at tip! |
|
853 | 853 | commits = commit_obj.get_file_history(f_path, pre_load=pre_load) |
|
854 | 854 | |
|
855 | 855 | history = [] |
|
856 | 856 | commits_group = ([], _("Changesets")) |
|
857 | 857 | for commit in commits: |
|
858 | 858 | branch = ' (%s)' % commit.branch if commit.branch else '' |
|
859 | 859 | n_desc = 'r%s:%s%s' % (commit.idx, commit.short_id, branch) |
|
860 | 860 | commits_group[0].append((commit.raw_id, n_desc,)) |
|
861 | 861 | history.append(commits_group) |
|
862 | 862 | |
|
863 | 863 | symbolic_reference = self._symbolic_reference |
|
864 | 864 | |
|
865 | 865 | if self.rhodecode_vcs_repo.alias == 'svn': |
|
866 | 866 | adjusted_f_path = RepoFilesView.adjust_file_path_for_svn( |
|
867 | 867 | f_path, self.rhodecode_vcs_repo) |
|
868 | 868 | if adjusted_f_path != f_path: |
|
869 | 869 | log.debug( |
|
870 | 870 | 'Recognized svn tag or branch in file "%s", using svn ' |
|
871 | 871 | 'specific symbolic references', f_path) |
|
872 | 872 | f_path = adjusted_f_path |
|
873 | 873 | symbolic_reference = self._symbolic_reference_svn |
|
874 | 874 | |
|
875 | 875 | branches = self._create_references( |
|
876 | 876 | self.rhodecode_vcs_repo.branches, symbolic_reference, f_path) |
|
877 | 877 | branches_group = (branches, _("Branches")) |
|
878 | 878 | |
|
879 | 879 | tags = self._create_references( |
|
880 | 880 | self.rhodecode_vcs_repo.tags, symbolic_reference, f_path) |
|
881 | 881 | tags_group = (tags, _("Tags")) |
|
882 | 882 | |
|
883 | 883 | history.append(branches_group) |
|
884 | 884 | history.append(tags_group) |
|
885 | 885 | |
|
886 | 886 | return history, commits |
|
887 | 887 | |
|
888 | 888 | @LoginRequired() |
|
889 | 889 | @HasRepoPermissionAnyDecorator( |
|
890 | 890 | 'repository.read', 'repository.write', 'repository.admin') |
|
891 | 891 | @view_config( |
|
892 | 892 | route_name='repo_file_history', request_method='GET', |
|
893 | 893 | renderer='json_ext') |
|
894 | 894 | def repo_file_history(self): |
|
895 | 895 | self.load_default_context() |
|
896 | 896 | |
|
897 | 897 | commit_id, f_path = self._get_commit_and_path() |
|
898 | 898 | commit = self._get_commit_or_redirect(commit_id) |
|
899 | 899 | file_node = self._get_filenode_or_redirect(commit, f_path) |
|
900 | 900 | |
|
901 | 901 | if file_node.is_file(): |
|
902 | 902 | file_history, _hist = self._get_node_history(commit, f_path) |
|
903 | 903 | |
|
904 | 904 | res = [] |
|
905 | 905 | for obj in file_history: |
|
906 | 906 | res.append({ |
|
907 | 907 | 'text': obj[1], |
|
908 | 908 | 'children': [{'id': o[0], 'text': o[1]} for o in obj[0]] |
|
909 | 909 | }) |
|
910 | 910 | |
|
911 | 911 | data = { |
|
912 | 912 | 'more': False, |
|
913 | 913 | 'results': res |
|
914 | 914 | } |
|
915 | 915 | return data |
|
916 | 916 | |
|
917 | 917 | log.warning('Cannot fetch history for directory') |
|
918 | 918 | raise HTTPBadRequest() |
|
919 | 919 | |
|
920 | 920 | @LoginRequired() |
|
921 | 921 | @HasRepoPermissionAnyDecorator( |
|
922 | 922 | 'repository.read', 'repository.write', 'repository.admin') |
|
923 | 923 | @view_config( |
|
924 | 924 | route_name='repo_file_authors', request_method='GET', |
|
925 | 925 | renderer='rhodecode:templates/files/file_authors_box.mako') |
|
926 | 926 | def repo_file_authors(self): |
|
927 | 927 | c = self.load_default_context() |
|
928 | 928 | |
|
929 | 929 | commit_id, f_path = self._get_commit_and_path() |
|
930 | 930 | commit = self._get_commit_or_redirect(commit_id) |
|
931 | 931 | file_node = self._get_filenode_or_redirect(commit, f_path) |
|
932 | 932 | |
|
933 | 933 | if not file_node.is_file(): |
|
934 | 934 | raise HTTPBadRequest() |
|
935 | 935 | |
|
936 | 936 | c.file_last_commit = file_node.last_commit |
|
937 | 937 | if self.request.GET.get('annotate') == '1': |
|
938 | 938 | # use _hist from annotation if annotation mode is on |
|
939 | 939 | commit_ids = set(x[1] for x in file_node.annotate) |
|
940 | 940 | _hist = ( |
|
941 | 941 | self.rhodecode_vcs_repo.get_commit(commit_id) |
|
942 | 942 | for commit_id in commit_ids) |
|
943 | 943 | else: |
|
944 | 944 | _f_history, _hist = self._get_node_history(commit, f_path) |
|
945 | 945 | c.file_author = False |
|
946 | 946 | |
|
947 | 947 | unique = collections.OrderedDict() |
|
948 | 948 | for commit in _hist: |
|
949 | 949 | author = commit.author |
|
950 | 950 | if author not in unique: |
|
951 | 951 | unique[commit.author] = [ |
|
952 | 952 | h.email(author), |
|
953 | 953 | h.person(author, 'username_or_name_or_email'), |
|
954 | 954 | 1 # counter |
|
955 | 955 | ] |
|
956 | 956 | |
|
957 | 957 | else: |
|
958 | 958 | # increase counter |
|
959 | 959 | unique[commit.author][2] += 1 |
|
960 | 960 | |
|
961 | 961 | c.authors = [val for val in unique.values()] |
|
962 | 962 | |
|
963 | 963 | return self._get_template_context(c) |
|
964 | 964 | |
|
965 | 965 | @LoginRequired() |
|
966 | 966 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') |
|
967 | 967 | @view_config( |
|
968 | 968 | route_name='repo_files_remove_file', request_method='GET', |
|
969 | 969 | renderer='rhodecode:templates/files/files_delete.mako') |
|
970 | 970 | def repo_files_remove_file(self): |
|
971 | 971 | _ = self.request.translate |
|
972 | 972 | c = self.load_default_context() |
|
973 | 973 | commit_id, f_path = self._get_commit_and_path() |
|
974 | 974 | |
|
975 | 975 | self._ensure_not_locked() |
|
976 | 976 | |
|
977 | 977 | if not self._is_valid_head(commit_id, self.rhodecode_vcs_repo): |
|
978 | 978 | h.flash(_('You can only delete files with commit ' |
|
979 | 979 | 'being a valid branch '), category='warning') |
|
980 | 980 | raise HTTPFound( |
|
981 | 981 | h.route_path('repo_files', |
|
982 | 982 | repo_name=self.db_repo_name, commit_id='tip', |
|
983 | 983 | f_path=f_path)) |
|
984 | 984 | |
|
985 | 985 | c.commit = self._get_commit_or_redirect(commit_id) |
|
986 | 986 | c.file = self._get_filenode_or_redirect(c.commit, f_path) |
|
987 | 987 | |
|
988 | 988 | c.default_message = _( |
|
989 | 989 | 'Deleted file {} via RhodeCode Enterprise').format(f_path) |
|
990 | 990 | c.f_path = f_path |
|
991 | 991 | |
|
992 | 992 | return self._get_template_context(c) |
|
993 | 993 | |
|
994 | 994 | @LoginRequired() |
|
995 | 995 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') |
|
996 | 996 | @CSRFRequired() |
|
997 | 997 | @view_config( |
|
998 | 998 | route_name='repo_files_delete_file', request_method='POST', |
|
999 | 999 | renderer=None) |
|
1000 | 1000 | def repo_files_delete_file(self): |
|
1001 | 1001 | _ = self.request.translate |
|
1002 | 1002 | |
|
1003 | 1003 | c = self.load_default_context() |
|
1004 | 1004 | commit_id, f_path = self._get_commit_and_path() |
|
1005 | 1005 | |
|
1006 | 1006 | self._ensure_not_locked() |
|
1007 | 1007 | |
|
1008 | 1008 | if not self._is_valid_head(commit_id, self.rhodecode_vcs_repo): |
|
1009 | 1009 | h.flash(_('You can only delete files with commit ' |
|
1010 | 1010 | 'being a valid branch '), category='warning') |
|
1011 | 1011 | raise HTTPFound( |
|
1012 | 1012 | h.route_path('repo_files', |
|
1013 | 1013 | repo_name=self.db_repo_name, commit_id='tip', |
|
1014 | 1014 | f_path=f_path)) |
|
1015 | 1015 | |
|
1016 | 1016 | c.commit = self._get_commit_or_redirect(commit_id) |
|
1017 | 1017 | c.file = self._get_filenode_or_redirect(c.commit, f_path) |
|
1018 | 1018 | |
|
1019 | 1019 | c.default_message = _( |
|
1020 | 1020 | 'Deleted file {} via RhodeCode Enterprise').format(f_path) |
|
1021 | 1021 | c.f_path = f_path |
|
1022 | 1022 | node_path = f_path |
|
1023 | 1023 | author = self._rhodecode_db_user.full_contact |
|
1024 | 1024 | message = self.request.POST.get('message') or c.default_message |
|
1025 | 1025 | try: |
|
1026 | 1026 | nodes = { |
|
1027 | 1027 | node_path: { |
|
1028 | 1028 | 'content': '' |
|
1029 | 1029 | } |
|
1030 | 1030 | } |
|
1031 | 1031 | ScmModel().delete_nodes( |
|
1032 | 1032 | user=self._rhodecode_db_user.user_id, repo=self.db_repo, |
|
1033 | 1033 | message=message, |
|
1034 | 1034 | nodes=nodes, |
|
1035 | 1035 | parent_commit=c.commit, |
|
1036 | 1036 | author=author, |
|
1037 | 1037 | ) |
|
1038 | 1038 | |
|
1039 | 1039 | h.flash( |
|
1040 | 1040 | _('Successfully deleted file `{}`').format( |
|
1041 | 1041 | h.escape(f_path)), category='success') |
|
1042 | 1042 | except Exception: |
|
1043 | 1043 | log.exception('Error during commit operation') |
|
1044 | 1044 | h.flash(_('Error occurred during commit'), category='error') |
|
1045 | 1045 | raise HTTPFound( |
|
1046 | h.route_path('

1047 |

1046 | h.route_path('repo_commit', repo_name=self.db_repo_name, |

1047 | commit_id='tip')) |
|
1048 | 1048 | |
|
1049 | 1049 | @LoginRequired() |
|
1050 | 1050 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') |
|
1051 | 1051 | @view_config( |
|
1052 | 1052 | route_name='repo_files_edit_file', request_method='GET', |
|
1053 | 1053 | renderer='rhodecode:templates/files/files_edit.mako') |
|
1054 | 1054 | def repo_files_edit_file(self): |
|
1055 | 1055 | _ = self.request.translate |
|
1056 | 1056 | c = self.load_default_context() |
|
1057 | 1057 | commit_id, f_path = self._get_commit_and_path() |
|
1058 | 1058 | |
|
1059 | 1059 | self._ensure_not_locked() |
|
1060 | 1060 | |
|
1061 | 1061 | if not self._is_valid_head(commit_id, self.rhodecode_vcs_repo): |
|
1062 | 1062 | h.flash(_('You can only edit files with commit ' |
|
1063 | 1063 | 'being a valid branch '), category='warning') |
|
1064 | 1064 | raise HTTPFound( |
|
1065 | 1065 | h.route_path('repo_files', |
|
1066 | 1066 | repo_name=self.db_repo_name, commit_id='tip', |
|
1067 | 1067 | f_path=f_path)) |
|
1068 | 1068 | |
|
1069 | 1069 | c.commit = self._get_commit_or_redirect(commit_id) |
|
1070 | 1070 | c.file = self._get_filenode_or_redirect(c.commit, f_path) |
|
1071 | 1071 | |
|
1072 | 1072 | if c.file.is_binary: |
|
1073 | 1073 | files_url = h.route_path( |
|
1074 | 1074 | 'repo_files', |
|
1075 | 1075 | repo_name=self.db_repo_name, |
|
1076 | 1076 | commit_id=c.commit.raw_id, f_path=f_path) |
|
1077 | 1077 | raise HTTPFound(files_url) |
|
1078 | 1078 | |
|
1079 | 1079 | c.default_message = _( |
|
1080 | 1080 | 'Edited file {} via RhodeCode Enterprise').format(f_path) |
|
1081 | 1081 | c.f_path = f_path |
|
1082 | 1082 | |
|
1083 | 1083 | return self._get_template_context(c) |
|
1084 | 1084 | |
|
1085 | 1085 | @LoginRequired() |
|
1086 | 1086 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') |
|
1087 | 1087 | @CSRFRequired() |
|
1088 | 1088 | @view_config( |
|
1089 | 1089 | route_name='repo_files_update_file', request_method='POST', |
|
1090 | 1090 | renderer=None) |
|
1091 | 1091 | def repo_files_update_file(self): |
|
1092 | 1092 | _ = self.request.translate |
|
1093 | 1093 | c = self.load_default_context() |
|
1094 | 1094 | commit_id, f_path = self._get_commit_and_path() |
|
1095 | 1095 | |
|
1096 | 1096 | self._ensure_not_locked() |
|
1097 | 1097 | |
|
1098 | 1098 | if not self._is_valid_head(commit_id, self.rhodecode_vcs_repo): |
|
1099 | 1099 | h.flash(_('You can only edit files with commit ' |
|
1100 | 1100 | 'being a valid branch '), category='warning') |
|
1101 | 1101 | raise HTTPFound( |
|
1102 | 1102 | h.route_path('repo_files', |
|
1103 | 1103 | repo_name=self.db_repo_name, commit_id='tip', |
|
1104 | 1104 | f_path=f_path)) |
|
1105 | 1105 | |
|
1106 | 1106 | c.commit = self._get_commit_or_redirect(commit_id) |
|
1107 | 1107 | c.file = self._get_filenode_or_redirect(c.commit, f_path) |
|
1108 | 1108 | |
|
1109 | 1109 | if c.file.is_binary: |
|
1110 | 1110 | raise HTTPFound( |
|
1111 | 1111 | h.route_path('repo_files', |
|
1112 | 1112 | repo_name=self.db_repo_name, |
|
1113 | 1113 | commit_id=c.commit.raw_id, |
|
1114 | 1114 | f_path=f_path)) |
|
1115 | 1115 | |
|
1116 | 1116 | c.default_message = _( |
|
1117 | 1117 | 'Edited file {} via RhodeCode Enterprise').format(f_path) |
|
1118 | 1118 | c.f_path = f_path |
|
1119 | 1119 | old_content = c.file.content |
|
1120 | 1120 | sl = old_content.splitlines(1) |
|
1121 | 1121 | first_line = sl[0] if sl else '' |
|
1122 | 1122 | |
|
1123 | 1123 | r_post = self.request.POST |
|
1124 | 1124 | # modes: 0 - Unix, 1 - Mac, 2 - DOS |
|
1125 | 1125 | mode = detect_mode(first_line, 0) |
|
1126 | 1126 | content = convert_line_endings(r_post.get('content', ''), mode) |
|
1127 | 1127 | |
|
1128 | 1128 | message = r_post.get('message') or c.default_message |
|
1129 | 1129 | org_f_path = c.file.unicode_path |
|
1130 | 1130 | filename = r_post['filename'] |
|
1131 | 1131 | org_filename = c.file.name |
|
1132 | 1132 | |
|
1133 | 1133 | if content == old_content and filename == org_filename: |
|
1134 | 1134 | h.flash(_('No changes'), category='warning') |
|
1135 | 1135 | raise HTTPFound( |
|
1136 | h.route_path('

1137 |

1136 | h.route_path('repo_commit', repo_name=self.db_repo_name, |

1137 | commit_id='tip')) |
|
1138 | 1138 | try: |
|
1139 | 1139 | mapping = { |
|
1140 | 1140 | org_f_path: { |
|
1141 | 1141 | 'org_filename': org_f_path, |
|
1142 | 1142 | 'filename': os.path.join(c.file.dir_path, filename), |
|
1143 | 1143 | 'content': content, |
|
1144 | 1144 | 'lexer': '', |
|
1145 | 1145 | 'op': 'mod', |
|
1146 | 1146 | } |
|
1147 | 1147 | } |
|
1148 | 1148 | |
|
1149 | 1149 | ScmModel().update_nodes( |
|
1150 | 1150 | user=self._rhodecode_db_user.user_id, |
|
1151 | 1151 | repo=self.db_repo, |
|
1152 | 1152 | message=message, |
|
1153 | 1153 | nodes=mapping, |
|
1154 | 1154 | parent_commit=c.commit, |
|
1155 | 1155 | ) |
|
1156 | 1156 | |
|
1157 | 1157 | h.flash( |
|
1158 | 1158 | _('Successfully committed changes to file `{}`').format( |
|
1159 | 1159 | h.escape(f_path)), category='success') |
|
1160 | 1160 | except Exception: |
|
1161 | 1161 | log.exception('Error occurred during commit') |
|
1162 | 1162 | h.flash(_('Error occurred during commit'), category='error') |
|
1163 | 1163 | raise HTTPFound( |
|
1164 | h.route_path('

1165 |

1164 | h.route_path('repo_commit', repo_name=self.db_repo_name, |

1165 | commit_id='tip')) |
|
1166 | 1166 | |
|
1167 | 1167 | @LoginRequired() |
|
1168 | 1168 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') |
|
1169 | 1169 | @view_config( |
|
1170 | 1170 | route_name='repo_files_add_file', request_method='GET', |
|
1171 | 1171 | renderer='rhodecode:templates/files/files_add.mako') |
|
1172 | 1172 | def repo_files_add_file(self): |
|
1173 | 1173 | _ = self.request.translate |
|
1174 | 1174 | c = self.load_default_context() |
|
1175 | 1175 | commit_id, f_path = self._get_commit_and_path() |
|
1176 | 1176 | |
|
1177 | 1177 | self._ensure_not_locked() |
|
1178 | 1178 | |
|
1179 | 1179 | c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False) |
|
1180 | 1180 | if c.commit is None: |
|
1181 | 1181 | c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias) |
|
1182 | 1182 | c.default_message = (_('Added file via RhodeCode Enterprise')) |
|
1183 | 1183 | c.f_path = f_path |
|
1184 | 1184 | |
|
1185 | 1185 | return self._get_template_context(c) |
|
1186 | 1186 | |
|
1187 | 1187 | @LoginRequired() |
|
1188 | 1188 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') |
|
1189 | 1189 | @CSRFRequired() |
|
1190 | 1190 | @view_config( |
|
1191 | 1191 | route_name='repo_files_create_file', request_method='POST', |
|
1192 | 1192 | renderer=None) |
|
1193 | 1193 | def repo_files_create_file(self): |
|
1194 | 1194 | _ = self.request.translate |
|
1195 | 1195 | c = self.load_default_context() |
|
1196 | 1196 | commit_id, f_path = self._get_commit_and_path() |
|
1197 | 1197 | |
|
1198 | 1198 | self._ensure_not_locked() |
|
1199 | 1199 | |
|
1200 | 1200 | r_post = self.request.POST |
|
1201 | 1201 | |
|
1202 | 1202 | c.commit = self._get_commit_or_redirect( |
|
1203 | 1203 | commit_id, redirect_after=False) |
|
1204 | 1204 | if c.commit is None: |
|
1205 | 1205 | c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias) |
|
1206 | 1206 | c.default_message = (_('Added file via RhodeCode Enterprise')) |
|
1207 | 1207 | c.f_path = f_path |
|
1208 | 1208 | unix_mode = 0 |
|
1209 | 1209 | content = convert_line_endings(r_post.get('content', ''), unix_mode) |
|
1210 | 1210 | |
|
1211 | 1211 | message = r_post.get('message') or c.default_message |
|
1212 | 1212 | filename = r_post.get('filename') |
|
1213 | 1213 | location = r_post.get('location', '') # dir location |
|
1214 | 1214 | file_obj = r_post.get('upload_file', None) |
|
1215 | 1215 | |
|
1216 | 1216 | if file_obj is not None and hasattr(file_obj, 'filename'): |
|
1217 | 1217 | filename = r_post.get('filename_upload') |
|
1218 | 1218 | content = file_obj.file |
|
1219 | 1219 | |
|
1220 | 1220 | if hasattr(content, 'file'): |
|
1221 | 1221 | # non posix systems store real file under file attr |
|
1222 | 1222 | content = content.file |
|
1223 | 1223 | |
|
1224 | 1224 | default_redirect_url = h.route_path( |
|
1225 | '

1225 | 'repo_commit', repo_name=self.db_repo_name, commit_id='tip') |
|
1226 | 1226 | |
|
1227 | 1227 | # If there's no commit, redirect to repo summary |
|
1228 | 1228 | if type(c.commit) is EmptyCommit: |
|
1229 | 1229 | redirect_url = h.route_path( |
|
1230 | 1230 | 'repo_summary', repo_name=self.db_repo_name) |
|
1231 | 1231 | else: |
|
1232 | 1232 | redirect_url = default_redirect_url |
|
1233 | 1233 | |
|
1234 | 1234 | if not filename: |
|
1235 | 1235 | h.flash(_('No filename'), category='warning') |
|
1236 | 1236 | raise HTTPFound(redirect_url) |
|
1237 | 1237 | |
|
1238 | 1238 | # extract the location from filename, |
|
1239 | 1239 | # allows using foo/bar.txt syntax to create subdirectories |
|
1240 | 1240 | subdir_loc = filename.rsplit('/', 1) |
|
1241 | 1241 | if len(subdir_loc) == 2: |
|
1242 | 1242 | location = os.path.join(location, subdir_loc[0]) |
|
1243 | 1243 | |
|
1244 | 1244 | # strip all crap out of file, just leave the basename |
|
1245 | 1245 | filename = os.path.basename(filename) |
|
1246 | 1246 | node_path = os.path.join(location, filename) |
|
1247 | 1247 | author = self._rhodecode_db_user.full_contact |
|
1248 | 1248 | |
|
1249 | 1249 | try: |
|
1250 | 1250 | nodes = { |
|
1251 | 1251 | node_path: { |
|
1252 | 1252 | 'content': content |
|
1253 | 1253 | } |
|
1254 | 1254 | } |
|
1255 | 1255 | ScmModel().create_nodes( |
|
1256 | 1256 | user=self._rhodecode_db_user.user_id, |
|
1257 | 1257 | repo=self.db_repo, |
|
1258 | 1258 | message=message, |
|
1259 | 1259 | nodes=nodes, |
|
1260 | 1260 | parent_commit=c.commit, |
|
1261 | 1261 | author=author, |
|
1262 | 1262 | ) |
|
1263 | 1263 | |
|
1264 | 1264 | h.flash( |
|
1265 | 1265 | _('Successfully committed new file `{}`').format( |
|
1266 | 1266 | h.escape(node_path)), category='success') |
|
1267 | 1267 | except NonRelativePathError: |
|
1268 | 1268 | h.flash(_( |
|
1269 | 1269 | 'The location specified must be a relative path and must not ' |
|
1270 | 1270 | 'contain .. in the path'), category='warning') |
|
1271 | 1271 | raise HTTPFound(default_redirect_url) |
|
1272 | 1272 | except (NodeError, NodeAlreadyExistsError) as e: |
|
1273 | 1273 | h.flash(_(h.escape(e)), category='error') |
|
1274 | 1274 | except Exception: |
|
1275 | 1275 | log.exception('Error occurred during commit') |
|
1276 | 1276 | h.flash(_('Error occurred during commit'), category='error') |
|
1277 | 1277 | |
|
1278 | 1278 | raise HTTPFound(default_redirect_url) |
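
A minimal sketch, not part of the diff, of the node-path normalization that repo_files_create_file performs above: a filename such as foo/bar.txt is split so its directory part extends the posted location and only the basename survives. build_node_path is a hypothetical name used only for illustration, and POSIX-style '/' separators are assumed, as in the handler itself.

    import os

    def build_node_path(location, filename):
        # hypothetical helper mirroring repo_files_create_file above;
        # 'foo/bar.txt' syntax creates subdirectories under `location`
        subdir_loc = filename.rsplit('/', 1)
        if len(subdir_loc) == 2:
            location = os.path.join(location, subdir_loc[0])
        # strip everything but the basename, then re-join with the location
        filename = os.path.basename(filename)
        return os.path.join(location, filename)

    assert build_node_path('', 'docs/readme.rst') == 'docs/readme.rst'
    assert build_node_path('src', 'pkg/mod.py') == 'src/pkg/mod.py'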
@@ -1,660 +1,599 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Routes configuration |
|
23 | 23 | |
|
24 | 24 | The more specific and detailed routes should be defined first so they |
|
25 | 25 | may take precedent over the more generic routes. For more information |
|
26 | 26 | refer to the routes manual at http://routes.groovie.org/docs/ |
|
27 | 27 | |
|
28 | 28 | IMPORTANT: if you change any routing here, make sure to take a look at lib/base.py |
|
29 | 29 | and _route_name variable which uses some of stored naming here to do redirects. |
|
30 | 30 | """ |
|
31 | 31 | import os |
|
32 | 32 | import re |
|
33 | 33 | from routes import Mapper |
|
34 | 34 | |
|
35 | 35 | # prefix for non repository related links needs to be prefixed with `/` |
|
36 | 36 | ADMIN_PREFIX = '/_admin' |
|
37 | 37 | STATIC_FILE_PREFIX = '/_static' |
|
38 | 38 | |
|
39 | 39 | # Default requirements for URL parts |
|
40 | 40 | URL_NAME_REQUIREMENTS = { |
|
41 | 41 | # group name can have a slash in them, but they must not end with a slash |
|
42 | 42 | 'group_name': r'.*?[^/]', |
|
43 | 43 | 'repo_group_name': r'.*?[^/]', |
|
44 | 44 | # repo names can have a slash in them, but they must not end with a slash |
|
45 | 45 | 'repo_name': r'.*?[^/]', |
|
46 | 46 | # file path eats up everything at the end |
|
47 | 47 | 'f_path': r'.*', |
|
48 | 48 | # reference types |
|
49 | 49 | 'source_ref_type': '(branch|book|tag|rev|\%\(source_ref_type\)s)', |
|
50 | 50 | 'target_ref_type': '(branch|book|tag|rev|\%\(target_ref_type\)s)', |
|
51 | 51 | } |
|
52 | 52 | |
|
53 | 53 | |
|
54 | 54 | class JSRoutesMapper(Mapper): |
|
55 | 55 | """ |
|
56 | 56 | Wrapper for routes.Mapper to make pyroutes compatible url definitions |
|
57 | 57 | """ |
|
58 | 58 | _named_route_regex = re.compile(r'^[a-z-_0-9A-Z]+$') |
|
59 | 59 | _argument_prog = re.compile('\{(.*?)\}|:\((.*)\)') |
|
60 | 60 | def __init__(self, *args, **kw): |
|
61 | 61 | super(JSRoutesMapper, self).__init__(*args, **kw) |
|
62 | 62 | self._jsroutes = [] |
|
63 | 63 | |
|
64 | 64 | def connect(self, *args, **kw): |
|
65 | 65 | """ |
|
66 | 66 | Wrapper for connect to take an extra argument jsroute=True |
|
67 | 67 | |
|
68 | 68 | :param jsroute: boolean, if True will add the route to the pyroutes list |
|
69 | 69 | """ |
|
70 | 70 | if kw.pop('jsroute', False): |
|
71 | 71 | if not self._named_route_regex.match(args[0]): |
|
72 | 72 | raise Exception('only named routes can be added to pyroutes') |
|
73 | 73 | self._jsroutes.append(args[0]) |
|
74 | 74 | |
|
75 | 75 | super(JSRoutesMapper, self).connect(*args, **kw) |
|
76 | 76 | |
|
77 | 77 | def _extract_route_information(self, route): |
|
78 | 78 | """ |
|
79 | 79 | Convert a route into tuple(name, path, args), eg: |
|
80 | 80 | ('show_user', '/profile/%(username)s', ['username']) |
|
81 | 81 | """ |
|
82 | 82 | routepath = route.routepath |
|
83 | 83 | def replace(matchobj): |
|
84 | 84 | if matchobj.group(1): |
|
85 | 85 | return "%%(%s)s" % matchobj.group(1).split(':')[0] |
|
86 | 86 | else: |
|
87 | 87 | return "%%(%s)s" % matchobj.group(2) |
|
88 | 88 | |
|
89 | 89 | routepath = self._argument_prog.sub(replace, routepath) |
|
90 | 90 | return ( |
|
91 | 91 | route.name, |
|
92 | 92 | routepath, |
|
93 | 93 | [(arg[0].split(':')[0] if arg[0] != '' else arg[1]) |
|
94 | 94 | for arg in self._argument_prog.findall(route.routepath)] |
|
95 | 95 | ) |
|
96 | 96 | |
|
97 | 97 | def jsroutes(self): |
|
98 | 98 | """ |
|
99 | 99 | Return a list of pyroutes.js compatible routes |
|
100 | 100 | """ |
|
101 | 101 | for route_name in self._jsroutes: |
|
102 | 102 | yield self._extract_route_information(self._routenames[route_name]) |
|
103 | 103 | |
|
104 | 104 | |
|
105 | 105 | def make_map(config): |
|
106 | 106 | """Create, configure and return the routes Mapper""" |
|
107 | 107 | rmap = JSRoutesMapper( |
|
108 | 108 | directory=config['pylons.paths']['controllers'], |
|
109 | 109 | always_scan=config['debug']) |
|
110 | 110 | rmap.minimization = False |
|
111 | 111 | rmap.explicit = False |
|
112 | 112 | |
|
113 | 113 | from rhodecode.lib.utils2 import str2bool |
|
114 | 114 | from rhodecode.model import repo, repo_group |
|
115 | 115 | |
|
116 | 116 | def check_repo(environ, match_dict): |
|
117 | 117 | """ |
|
118 | 118 | check for valid repository for proper 404 handling |
|
119 | 119 | |
|
120 | 120 | :param environ: |
|
121 | 121 | :param match_dict: |
|
122 | 122 | """ |
|
123 | 123 | repo_name = match_dict.get('repo_name') |
|
124 | 124 | |
|
125 | 125 | if match_dict.get('f_path'): |
|
126 | 126 | # fix for multiple initial slashes that causes errors |
|
127 | 127 | match_dict['f_path'] = match_dict['f_path'].lstrip('/') |
|
128 | 128 | repo_model = repo.RepoModel() |
|
129 | 129 | by_name_match = repo_model.get_by_repo_name(repo_name) |
|
130 | 130 | # if we match quickly from database, short circuit the operation, |
|
131 | 131 | # and validate repo based on the type. |
|
132 | 132 | if by_name_match: |
|
133 | 133 | return True |
|
134 | 134 | |
|
135 | 135 | by_id_match = repo_model.get_repo_by_id(repo_name) |
|
136 | 136 | if by_id_match: |
|
137 | 137 | repo_name = by_id_match.repo_name |
|
138 | 138 | match_dict['repo_name'] = repo_name |
|
139 | 139 | return True |
|
140 | 140 | |
|
141 | 141 | return False |
|
142 | 142 | |
|
143 | 143 | def check_group(environ, match_dict): |
|
144 | 144 | """ |
|
145 | 145 | check for valid repository group path for proper 404 handling |
|
146 | 146 | |
|
147 | 147 | :param environ: |
|
148 | 148 | :param match_dict: |
|
149 | 149 | """ |
|
150 | 150 | repo_group_name = match_dict.get('group_name') |
|
151 | 151 | repo_group_model = repo_group.RepoGroupModel() |
|
152 | 152 | by_name_match = repo_group_model.get_by_group_name(repo_group_name) |
|
153 | 153 | if by_name_match: |
|
154 | 154 | return True |
|
155 | 155 | |
|
156 | 156 | return False |
|
157 | 157 | |
|
158 | 158 | def check_user_group(environ, match_dict): |
|
159 | 159 | """ |
|
160 | 160 | check for valid user group for proper 404 handling |
|
161 | 161 | |
|
162 | 162 | :param environ: |
|
163 | 163 | :param match_dict: |
|
164 | 164 | """ |
|
165 | 165 | return True |
|
166 | 166 | |
|
167 | 167 | def check_int(environ, match_dict): |
|
168 | 168 | return match_dict.get('id').isdigit() |
|
169 | 169 | |
|
170 | 170 | |
|
171 | 171 | #========================================================================== |
|
172 | 172 | # CUSTOM ROUTES HERE |
|
173 | 173 | #========================================================================== |
|
174 | 174 | |
|
175 | 175 | # ping and pylons error test |
|
176 | 176 | rmap.connect('ping', '%s/ping' % (ADMIN_PREFIX,), controller='home', action='ping') |
|
177 | 177 | rmap.connect('error_test', '%s/error_test' % (ADMIN_PREFIX,), controller='home', action='error_test') |
|
178 | 178 | |
|
179 | 179 | # ADMIN REPOSITORY ROUTES |
|
180 | 180 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
181 | 181 | controller='admin/repos') as m: |
|
182 | 182 | m.connect('repos', '/repos', |
|
183 | 183 | action='create', conditions={'method': ['POST']}) |
|
184 | 184 | m.connect('repos', '/repos', |
|
185 | 185 | action='index', conditions={'method': ['GET']}) |
|
186 | 186 | m.connect('new_repo', '/create_repository', jsroute=True, |
|
187 | 187 | action='create_repository', conditions={'method': ['GET']}) |
|
188 | 188 | m.connect('delete_repo', '/repos/{repo_name}', |
|
189 | 189 | action='delete', conditions={'method': ['DELETE']}, |
|
190 | 190 | requirements=URL_NAME_REQUIREMENTS) |
|
191 | 191 | m.connect('repo', '/repos/{repo_name}', |
|
192 | 192 | action='show', conditions={'method': ['GET'], |
|
193 | 193 | 'function': check_repo}, |
|
194 | 194 | requirements=URL_NAME_REQUIREMENTS) |
|
195 | 195 | |
|
196 | 196 | # ADMIN REPOSITORY GROUPS ROUTES |
|
197 | 197 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
198 | 198 | controller='admin/repo_groups') as m: |
|
199 | 199 | m.connect('repo_groups', '/repo_groups', |
|
200 | 200 | action='create', conditions={'method': ['POST']}) |
|
201 | 201 | m.connect('repo_groups', '/repo_groups', |
|
202 | 202 | action='index', conditions={'method': ['GET']}) |
|
203 | 203 | m.connect('new_repo_group', '/repo_groups/new', |
|
204 | 204 | action='new', conditions={'method': ['GET']}) |
|
205 | 205 | m.connect('update_repo_group', '/repo_groups/{group_name}', |
|
206 | 206 | action='update', conditions={'method': ['PUT'], |
|
207 | 207 | 'function': check_group}, |
|
208 | 208 | requirements=URL_NAME_REQUIREMENTS) |
|
209 | 209 | |
|
210 | 210 | # EXTRAS REPO GROUP ROUTES |
|
211 | 211 | m.connect('edit_repo_group', '/repo_groups/{group_name}/edit', |
|
212 | 212 | action='edit', |
|
213 | 213 | conditions={'method': ['GET'], 'function': check_group}, |
|
214 | 214 | requirements=URL_NAME_REQUIREMENTS) |
|
215 | 215 | m.connect('edit_repo_group', '/repo_groups/{group_name}/edit', |
|
216 | 216 | action='edit', |
|
217 | 217 | conditions={'method': ['PUT'], 'function': check_group}, |
|
218 | 218 | requirements=URL_NAME_REQUIREMENTS) |
|
219 | 219 | |
|
220 | 220 | m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced', |
|
221 | 221 | action='edit_repo_group_advanced', |
|
222 | 222 | conditions={'method': ['GET'], 'function': check_group}, |
|
223 | 223 | requirements=URL_NAME_REQUIREMENTS) |
|
224 | 224 | m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced', |
|
225 | 225 | action='edit_repo_group_advanced', |
|
226 | 226 | conditions={'method': ['PUT'], 'function': check_group}, |
|
227 | 227 | requirements=URL_NAME_REQUIREMENTS) |
|
228 | 228 | |
|
229 | 229 | m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions', |
|
230 | 230 | action='edit_repo_group_perms', |
|
231 | 231 | conditions={'method': ['GET'], 'function': check_group}, |
|
232 | 232 | requirements=URL_NAME_REQUIREMENTS) |
|
233 | 233 | m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions', |
|
234 | 234 | action='update_perms', |
|
235 | 235 | conditions={'method': ['PUT'], 'function': check_group}, |
|
236 | 236 | requirements=URL_NAME_REQUIREMENTS) |
|
237 | 237 | |
|
238 | 238 | m.connect('delete_repo_group', '/repo_groups/{group_name}', |
|
239 | 239 | action='delete', conditions={'method': ['DELETE'], |
|
240 | 240 | 'function': check_group}, |
|
241 | 241 | requirements=URL_NAME_REQUIREMENTS) |
|
242 | 242 | |
|
243 | 243 | # ADMIN USER ROUTES |
|
244 | 244 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
245 | 245 | controller='admin/users') as m: |
|
246 | 246 | m.connect('users', '/users', |
|
247 | 247 | action='create', conditions={'method': ['POST']}) |
|
248 | 248 | m.connect('new_user', '/users/new', |
|
249 | 249 | action='new', conditions={'method': ['GET']}) |
|
250 | 250 | m.connect('update_user', '/users/{user_id}', |
|
251 | 251 | action='update', conditions={'method': ['PUT']}) |
|
252 | 252 | m.connect('delete_user', '/users/{user_id}', |
|
253 | 253 | action='delete', conditions={'method': ['DELETE']}) |
|
254 | 254 | m.connect('edit_user', '/users/{user_id}/edit', |
|
255 | 255 | action='edit', conditions={'method': ['GET']}, jsroute=True) |
|
256 | 256 | m.connect('user', '/users/{user_id}', |
|
257 | 257 | action='show', conditions={'method': ['GET']}) |
|
258 | 258 | m.connect('force_password_reset_user', '/users/{user_id}/password_reset', |
|
259 | 259 | action='reset_password', conditions={'method': ['POST']}) |
|
260 | 260 | m.connect('create_personal_repo_group', '/users/{user_id}/create_repo_group', |
|
261 | 261 | action='create_personal_repo_group', conditions={'method': ['POST']}) |
|
262 | 262 | |
|
263 | 263 | # EXTRAS USER ROUTES |
|
264 | 264 | m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced', |
|
265 | 265 | action='edit_advanced', conditions={'method': ['GET']}) |
|
266 | 266 | m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced', |
|
267 | 267 | action='update_advanced', conditions={'method': ['PUT']}) |
|
268 | 268 | |
|
269 | 269 | m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions', |
|
270 | 270 | action='edit_global_perms', conditions={'method': ['GET']}) |
|
271 | 271 | m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions', |
|
272 | 272 | action='update_global_perms', conditions={'method': ['PUT']}) |
|
273 | 273 | |
|
274 | 274 | m.connect('edit_user_perms_summary', '/users/{user_id}/edit/permissions_summary', |
|
275 | 275 | action='edit_perms_summary', conditions={'method': ['GET']}) |
|
276 | 276 | |
|
277 | 277 | # ADMIN USER GROUPS REST ROUTES |
|
278 | 278 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
279 | 279 | controller='admin/user_groups') as m: |
|
280 | 280 | m.connect('users_groups', '/user_groups', |
|
281 | 281 | action='create', conditions={'method': ['POST']}) |
|
282 | 282 | m.connect('users_groups', '/user_groups', |
|
283 | 283 | action='index', conditions={'method': ['GET']}) |
|
284 | 284 | m.connect('new_users_group', '/user_groups/new', |
|
285 | 285 | action='new', conditions={'method': ['GET']}) |
|
286 | 286 | m.connect('update_users_group', '/user_groups/{user_group_id}', |
|
287 | 287 | action='update', conditions={'method': ['PUT']}) |
|
288 | 288 | m.connect('delete_users_group', '/user_groups/{user_group_id}', |
|
289 | 289 | action='delete', conditions={'method': ['DELETE']}) |
|
290 | 290 | m.connect('edit_users_group', '/user_groups/{user_group_id}/edit', |
|
291 | 291 | action='edit', conditions={'method': ['GET']}, |
|
292 | 292 | function=check_user_group) |
|
293 | 293 | |
|
294 | 294 | # EXTRAS USER GROUP ROUTES |
|
295 | 295 | m.connect('edit_user_group_global_perms', |
|
296 | 296 | '/user_groups/{user_group_id}/edit/global_permissions', |
|
297 | 297 | action='edit_global_perms', conditions={'method': ['GET']}) |
|
298 | 298 | m.connect('edit_user_group_global_perms', |
|
299 | 299 | '/user_groups/{user_group_id}/edit/global_permissions', |
|
300 | 300 | action='update_global_perms', conditions={'method': ['PUT']}) |
|
301 | 301 | m.connect('edit_user_group_perms_summary', |
|
302 | 302 | '/user_groups/{user_group_id}/edit/permissions_summary', |
|
303 | 303 | action='edit_perms_summary', conditions={'method': ['GET']}) |
|
304 | 304 | |
|
305 | 305 | m.connect('edit_user_group_perms', |
|
306 | 306 | '/user_groups/{user_group_id}/edit/permissions', |
|
307 | 307 | action='edit_perms', conditions={'method': ['GET']}) |
|
308 | 308 | m.connect('edit_user_group_perms', |
|
309 | 309 | '/user_groups/{user_group_id}/edit/permissions', |
|
310 | 310 | action='update_perms', conditions={'method': ['PUT']}) |
|
311 | 311 | |
|
312 | 312 | m.connect('edit_user_group_advanced', |
|
313 | 313 | '/user_groups/{user_group_id}/edit/advanced', |
|
314 | 314 | action='edit_advanced', conditions={'method': ['GET']}) |
|
315 | 315 | |
|
316 | 316 | m.connect('edit_user_group_advanced_sync', |
|
317 | 317 | '/user_groups/{user_group_id}/edit/advanced/sync', |
|
318 | 318 | action='edit_advanced_set_synchronization', conditions={'method': ['POST']}) |
|
319 | 319 | |
|
320 | 320 | m.connect('edit_user_group_members', |
|
321 | 321 | '/user_groups/{user_group_id}/edit/members', jsroute=True, |
|
322 | 322 | action='user_group_members', conditions={'method': ['GET']}) |
|
323 | 323 | |
|
324 | 324 | # ADMIN DEFAULTS REST ROUTES |
|
325 | 325 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
326 | 326 | controller='admin/defaults') as m: |
|
327 | 327 | m.connect('admin_defaults_repositories', '/defaults/repositories', |
|
328 | 328 | action='update_repository_defaults', conditions={'method': ['POST']}) |
|
329 | 329 | m.connect('admin_defaults_repositories', '/defaults/repositories', |
|
330 | 330 | action='index', conditions={'method': ['GET']}) |
|
331 | 331 | |
|
332 | 332 | # ADMIN SETTINGS ROUTES |
|
333 | 333 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
334 | 334 | controller='admin/settings') as m: |
|
335 | 335 | |
|
336 | 336 | # default |
|
337 | 337 | m.connect('admin_settings', '/settings', |
|
338 | 338 | action='settings_global_update', |
|
339 | 339 | conditions={'method': ['POST']}) |
|
340 | 340 | m.connect('admin_settings', '/settings', |
|
341 | 341 | action='settings_global', conditions={'method': ['GET']}) |
|
342 | 342 | |
|
343 | 343 | m.connect('admin_settings_vcs', '/settings/vcs', |
|
344 | 344 | action='settings_vcs_update', |
|
345 | 345 | conditions={'method': ['POST']}) |
|
346 | 346 | m.connect('admin_settings_vcs', '/settings/vcs', |
|
347 | 347 | action='settings_vcs', |
|
348 | 348 | conditions={'method': ['GET']}) |
|
349 | 349 | m.connect('admin_settings_vcs', '/settings/vcs', |
|
350 | 350 | action='delete_svn_pattern', |
|
351 | 351 | conditions={'method': ['DELETE']}) |
|
352 | 352 | |
|
353 | 353 | m.connect('admin_settings_mapping', '/settings/mapping', |
|
354 | 354 | action='settings_mapping_update', |
|
355 | 355 | conditions={'method': ['POST']}) |
|
356 | 356 | m.connect('admin_settings_mapping', '/settings/mapping', |
|
357 | 357 | action='settings_mapping', conditions={'method': ['GET']}) |
|
358 | 358 | |
|
359 | 359 | m.connect('admin_settings_global', '/settings/global', |
|
360 | 360 | action='settings_global_update', |
|
361 | 361 | conditions={'method': ['POST']}) |
|
362 | 362 | m.connect('admin_settings_global', '/settings/global', |
|
363 | 363 | action='settings_global', conditions={'method': ['GET']}) |
|
364 | 364 | |
|
365 | 365 | m.connect('admin_settings_visual', '/settings/visual', |
|
366 | 366 | action='settings_visual_update', |
|
367 | 367 | conditions={'method': ['POST']}) |
|
368 | 368 | m.connect('admin_settings_visual', '/settings/visual', |
|
369 | 369 | action='settings_visual', conditions={'method': ['GET']}) |
|
370 | 370 | |
|
371 | 371 | m.connect('admin_settings_issuetracker', |
|
372 | 372 | '/settings/issue-tracker', action='settings_issuetracker', |
|
373 | 373 | conditions={'method': ['GET']}) |
|
374 | 374 | m.connect('admin_settings_issuetracker_save', |
|
375 | 375 | '/settings/issue-tracker/save', |
|
376 | 376 | action='settings_issuetracker_save', |
|
377 | 377 | conditions={'method': ['POST']}) |
|
378 | 378 | m.connect('admin_issuetracker_test', '/settings/issue-tracker/test', |
|
379 | 379 | action='settings_issuetracker_test', |
|
380 | 380 | conditions={'method': ['POST']}) |
|
381 | 381 | m.connect('admin_issuetracker_delete', |
|
382 | 382 | '/settings/issue-tracker/delete', |
|
383 | 383 | action='settings_issuetracker_delete', |
|
384 | 384 | conditions={'method': ['DELETE']}) |
|
385 | 385 | |
|
386 | 386 | m.connect('admin_settings_email', '/settings/email', |
|
387 | 387 | action='settings_email_update', |
|
388 | 388 | conditions={'method': ['POST']}) |
|
389 | 389 | m.connect('admin_settings_email', '/settings/email', |
|
390 | 390 | action='settings_email', conditions={'method': ['GET']}) |
|
391 | 391 | |
|
392 | 392 | m.connect('admin_settings_hooks', '/settings/hooks', |
|
393 | 393 | action='settings_hooks_update', |
|
394 | 394 | conditions={'method': ['POST', 'DELETE']}) |
|
395 | 395 | m.connect('admin_settings_hooks', '/settings/hooks', |
|
396 | 396 | action='settings_hooks', conditions={'method': ['GET']}) |
|
397 | 397 | |
|
398 | 398 | m.connect('admin_settings_search', '/settings/search', |
|
399 | 399 | action='settings_search', conditions={'method': ['GET']}) |
|
400 | 400 | |
|
401 | 401 | m.connect('admin_settings_supervisor', '/settings/supervisor', |
|
402 | 402 | action='settings_supervisor', conditions={'method': ['GET']}) |
|
403 | 403 | m.connect('admin_settings_supervisor_log', '/settings/supervisor/{procid}/log', |
|
404 | 404 | action='settings_supervisor_log', conditions={'method': ['GET']}) |
|
405 | 405 | |
|
406 | 406 | m.connect('admin_settings_labs', '/settings/labs', |
|
407 | 407 | action='settings_labs_update', |
|
408 | 408 | conditions={'method': ['POST']}) |
|
409 | 409 | m.connect('admin_settings_labs', '/settings/labs', |
|
410 | 410 | action='settings_labs', conditions={'method': ['GET']}) |
|
411 | 411 | |
|
412 | 412 | # ADMIN MY ACCOUNT |
|
413 | 413 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
414 | 414 | controller='admin/my_account') as m: |
|
415 | 415 | |
|
416 | 416 | # NOTE(marcink): this needs to be kept for password force flag to be |
|
417 | 417 | # handled in pylons controllers, remove after full migration to pyramid |
|
418 | 418 | m.connect('my_account_password', '/my_account/password', |
|
419 | 419 | action='my_account_password', conditions={'method': ['GET']}) |
|
420 | 420 | |
|
421 | 421 | #========================================================================== |
|
422 | 422 | # REPOSITORY ROUTES |
|
423 | 423 | #========================================================================== |
|
424 | 424 | |
|
425 | 425 | rmap.connect('repo_creating_home', '/{repo_name}/repo_creating', |
|
426 | 426 | controller='admin/repos', action='repo_creating', |
|
427 | 427 | requirements=URL_NAME_REQUIREMENTS) |
|
428 | 428 | rmap.connect('repo_check_home', '/{repo_name}/crepo_check', |
|
429 | 429 | controller='admin/repos', action='repo_check', |
|
430 | 430 | requirements=URL_NAME_REQUIREMENTS) |
|
431 | 431 | |
|
432 | rmap.connect('changeset_home', '/{repo_name}/changeset/{revision}', | |
|
433 | controller='changeset', revision='tip', | |
|
434 | conditions={'function': check_repo}, | |
|
435 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
|
436 | rmap.connect('changeset_children', '/{repo_name}/changeset_children/{revision}', | |
|
437 | controller='changeset', revision='tip', action='changeset_children', | |
|
438 | conditions={'function': check_repo}, | |
|
439 | requirements=URL_NAME_REQUIREMENTS) | |
|
440 | rmap.connect('changeset_parents', '/{repo_name}/changeset_parents/{revision}', | |
|
441 | controller='changeset', revision='tip', action='changeset_parents', | |
|
442 | conditions={'function': check_repo}, | |
|
443 | requirements=URL_NAME_REQUIREMENTS) | |
|
444 | ||
|
445 | 432 | # repo edit options |
|
446 | 433 | rmap.connect('edit_repo_fields', '/{repo_name}/settings/fields', |
|
447 | 434 | controller='admin/repos', action='edit_fields', |
|
448 | 435 | conditions={'method': ['GET'], 'function': check_repo}, |
|
449 | 436 | requirements=URL_NAME_REQUIREMENTS) |
|
450 | 437 | rmap.connect('create_repo_fields', '/{repo_name}/settings/fields/new', |
|
451 | 438 | controller='admin/repos', action='create_repo_field', |
|
452 | 439 | conditions={'method': ['PUT'], 'function': check_repo}, |
|
453 | 440 | requirements=URL_NAME_REQUIREMENTS) |
|
454 | 441 | rmap.connect('delete_repo_fields', '/{repo_name}/settings/fields/{field_id}', |
|
455 | 442 | controller='admin/repos', action='delete_repo_field', |
|
456 | 443 | conditions={'method': ['DELETE'], 'function': check_repo}, |
|
457 | 444 | requirements=URL_NAME_REQUIREMENTS) |
|
458 | 445 | |
|
459 | 446 | rmap.connect('toggle_locking', '/{repo_name}/settings/advanced/locking_toggle', |
|
460 | 447 | controller='admin/repos', action='toggle_locking', |
|
461 | 448 | conditions={'method': ['GET'], 'function': check_repo}, |
|
462 | 449 | requirements=URL_NAME_REQUIREMENTS) |
|
463 | 450 | |
|
464 | 451 | rmap.connect('edit_repo_remote', '/{repo_name}/settings/remote', |
|
465 | 452 | controller='admin/repos', action='edit_remote_form', |
|
466 | 453 | conditions={'method': ['GET'], 'function': check_repo}, |
|
467 | 454 | requirements=URL_NAME_REQUIREMENTS) |
|
468 | 455 | rmap.connect('edit_repo_remote', '/{repo_name}/settings/remote', |
|
469 | 456 | controller='admin/repos', action='edit_remote', |
|
470 | 457 | conditions={'method': ['PUT'], 'function': check_repo}, |
|
471 | 458 | requirements=URL_NAME_REQUIREMENTS) |
|
472 | 459 | |
|
473 | 460 | rmap.connect('edit_repo_statistics', '/{repo_name}/settings/statistics', |
|
474 | 461 | controller='admin/repos', action='edit_statistics_form', |
|
475 | 462 | conditions={'method': ['GET'], 'function': check_repo}, |
|
476 | 463 | requirements=URL_NAME_REQUIREMENTS) |
|
477 | 464 | rmap.connect('edit_repo_statistics', '/{repo_name}/settings/statistics', |
|
478 | 465 | controller='admin/repos', action='edit_statistics', |
|
479 | 466 | conditions={'method': ['PUT'], 'function': check_repo}, |
|
480 | 467 | requirements=URL_NAME_REQUIREMENTS) |
|
481 | 468 | rmap.connect('repo_settings_issuetracker', |
|
482 | 469 | '/{repo_name}/settings/issue-tracker', |
|
483 | 470 | controller='admin/repos', action='repo_issuetracker', |
|
484 | 471 | conditions={'method': ['GET'], 'function': check_repo}, |
|
485 | 472 | requirements=URL_NAME_REQUIREMENTS) |
|
486 | 473 | rmap.connect('repo_issuetracker_test', |
|
487 | 474 | '/{repo_name}/settings/issue-tracker/test', |
|
488 | 475 | controller='admin/repos', action='repo_issuetracker_test', |
|
489 | 476 | conditions={'method': ['POST'], 'function': check_repo}, |
|
490 | 477 | requirements=URL_NAME_REQUIREMENTS) |
|
491 | 478 | rmap.connect('repo_issuetracker_delete', |
|
492 | 479 | '/{repo_name}/settings/issue-tracker/delete', |
|
493 | 480 | controller='admin/repos', action='repo_issuetracker_delete', |
|
494 | 481 | conditions={'method': ['DELETE'], 'function': check_repo}, |
|
495 | 482 | requirements=URL_NAME_REQUIREMENTS) |
|
496 | 483 | rmap.connect('repo_issuetracker_save', |
|
497 | 484 | '/{repo_name}/settings/issue-tracker/save', |
|
498 | 485 | controller='admin/repos', action='repo_issuetracker_save', |
|
499 | 486 | conditions={'method': ['POST'], 'function': check_repo}, |
|
500 | 487 | requirements=URL_NAME_REQUIREMENTS) |
|
501 | 488 | rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs', |
|
502 | 489 | controller='admin/repos', action='repo_settings_vcs_update', |
|
503 | 490 | conditions={'method': ['POST'], 'function': check_repo}, |
|
504 | 491 | requirements=URL_NAME_REQUIREMENTS) |
|
505 | 492 | rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs', |
|
506 | 493 | controller='admin/repos', action='repo_settings_vcs', |
|
507 | 494 | conditions={'method': ['GET'], 'function': check_repo}, |
|
508 | 495 | requirements=URL_NAME_REQUIREMENTS) |
|
509 | 496 | rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs', |
|
510 | 497 | controller='admin/repos', action='repo_delete_svn_pattern', |
|
511 | 498 | conditions={'method': ['DELETE'], 'function': check_repo}, |
|
512 | 499 | requirements=URL_NAME_REQUIREMENTS) |
|
513 | 500 | rmap.connect('repo_pullrequest_settings', '/{repo_name}/settings/pullrequest', |
|
514 | 501 | controller='admin/repos', action='repo_settings_pullrequest', |
|
515 | 502 | conditions={'method': ['GET', 'POST'], 'function': check_repo}, |
|
516 | 503 | requirements=URL_NAME_REQUIREMENTS) |
|
517 | 504 | |
|
518 | # still working url for backward compat. | |
|
519 | rmap.connect('raw_changeset_home_depraced', | |
|
520 | '/{repo_name}/raw-changeset/{revision}', | |
|
521 | controller='changeset', action='changeset_raw', | |
|
522 | revision='tip', conditions={'function': check_repo}, | |
|
523 | requirements=URL_NAME_REQUIREMENTS) | |
|
524 | ||
|
525 | # new URLs | |
|
526 | rmap.connect('changeset_raw_home', | |
|
527 | '/{repo_name}/changeset-diff/{revision}', | |
|
528 | controller='changeset', action='changeset_raw', | |
|
529 | revision='tip', conditions={'function': check_repo}, | |
|
530 | requirements=URL_NAME_REQUIREMENTS) | |
|
531 | ||
|
532 | rmap.connect('changeset_patch_home', | |
|
533 | '/{repo_name}/changeset-patch/{revision}', | |
|
534 | controller='changeset', action='changeset_patch', | |
|
535 | revision='tip', conditions={'function': check_repo}, | |
|
536 | requirements=URL_NAME_REQUIREMENTS) | |
|
537 | ||
|
538 | rmap.connect('changeset_download_home', | |
|
539 | '/{repo_name}/changeset-download/{revision}', | |
|
540 | controller='changeset', action='changeset_download', | |
|
541 | revision='tip', conditions={'function': check_repo}, | |
|
542 | requirements=URL_NAME_REQUIREMENTS) | |
|
543 | ||
|
544 | rmap.connect('changeset_comment', | |
|
545 | '/{repo_name}/changeset/{revision}/comment', jsroute=True, | |
|
546 | controller='changeset', revision='tip', action='comment', | |
|
547 | conditions={'function': check_repo}, | |
|
548 | requirements=URL_NAME_REQUIREMENTS) | |
|
549 | ||
|
550 | rmap.connect('changeset_comment_preview', | |
|
551 | '/{repo_name}/changeset/comment/preview', jsroute=True, | |
|
552 | controller='changeset', action='preview_comment', | |
|
553 | conditions={'function': check_repo, 'method': ['POST']}, | |
|
554 | requirements=URL_NAME_REQUIREMENTS) | |
|
555 | ||
|
556 | rmap.connect('changeset_comment_delete', | |
|
557 | '/{repo_name}/changeset/comment/{comment_id}/delete', | |
|
558 | controller='changeset', action='delete_comment', | |
|
559 | conditions={'function': check_repo, 'method': ['DELETE']}, | |
|
560 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
|
561 | ||
|
562 | rmap.connect('changeset_info', '/{repo_name}/changeset_info/{revision}', | |
|
563 | controller='changeset', action='changeset_info', | |
|
564 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
|
565 | ||
|
566 | 505 | rmap.connect('compare_home', |
|
567 | 506 | '/{repo_name}/compare', |
|
568 | 507 | controller='compare', action='index', |
|
569 | 508 | conditions={'function': check_repo}, |
|
570 | 509 | requirements=URL_NAME_REQUIREMENTS) |
|
571 | 510 | |
|
572 | 511 | rmap.connect('compare_url', |
|
573 | 512 | '/{repo_name}/compare/{source_ref_type}@{source_ref:.*?}...{target_ref_type}@{target_ref:.*?}', |
|
574 | 513 | controller='compare', action='compare', |
|
575 | 514 | conditions={'function': check_repo}, |
|
576 | 515 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
577 | 516 | |
|
578 | 517 | rmap.connect('pullrequest_home', |
|
579 | 518 | '/{repo_name}/pull-request/new', controller='pullrequests', |
|
580 | 519 | action='index', conditions={'function': check_repo, |
|
581 | 520 | 'method': ['GET']}, |
|
582 | 521 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
583 | 522 | |
|
584 | 523 | rmap.connect('pullrequest', |
|
585 | 524 | '/{repo_name}/pull-request/new', controller='pullrequests', |
|
586 | 525 | action='create', conditions={'function': check_repo, |
|
587 | 526 | 'method': ['POST']}, |
|
588 | 527 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
589 | 528 | |
|
590 | 529 | rmap.connect('pullrequest_repo_refs', |
|
591 | 530 | '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}', |
|
592 | 531 | controller='pullrequests', |
|
593 | 532 | action='get_repo_refs', |
|
594 | 533 | conditions={'function': check_repo, 'method': ['GET']}, |
|
595 | 534 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
596 | 535 | |
|
597 | 536 | rmap.connect('pullrequest_repo_destinations', |
|
598 | 537 | '/{repo_name}/pull-request/repo-destinations', |
|
599 | 538 | controller='pullrequests', |
|
600 | 539 | action='get_repo_destinations', |
|
601 | 540 | conditions={'function': check_repo, 'method': ['GET']}, |
|
602 | 541 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
603 | 542 | |
|
604 | 543 | rmap.connect('pullrequest_show', |
|
605 | 544 | '/{repo_name}/pull-request/{pull_request_id}', |
|
606 | 545 | controller='pullrequests', |
|
607 | 546 | action='show', conditions={'function': check_repo, |
|
608 | 547 | 'method': ['GET']}, |
|
609 | 548 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
610 | 549 | |
|
611 | 550 | rmap.connect('pullrequest_update', |
|
612 | 551 | '/{repo_name}/pull-request/{pull_request_id}', |
|
613 | 552 | controller='pullrequests', |
|
614 | 553 | action='update', conditions={'function': check_repo, |
|
615 | 554 | 'method': ['PUT']}, |
|
616 | 555 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
617 | 556 | |
|
618 | 557 | rmap.connect('pullrequest_merge', |
|
619 | 558 | '/{repo_name}/pull-request/{pull_request_id}', |
|
620 | 559 | controller='pullrequests', |
|
621 | 560 | action='merge', conditions={'function': check_repo, |
|
622 | 561 | 'method': ['POST']}, |
|
623 | 562 | requirements=URL_NAME_REQUIREMENTS) |
|
624 | 563 | |
|
625 | 564 | rmap.connect('pullrequest_delete', |
|
626 | 565 | '/{repo_name}/pull-request/{pull_request_id}', |
|
627 | 566 | controller='pullrequests', |
|
628 | 567 | action='delete', conditions={'function': check_repo, |
|
629 | 568 | 'method': ['DELETE']}, |
|
630 | 569 | requirements=URL_NAME_REQUIREMENTS) |
|
631 | 570 | |
|
632 | 571 | rmap.connect('pullrequest_comment', |
|
633 | 572 | '/{repo_name}/pull-request-comment/{pull_request_id}', |
|
634 | 573 | controller='pullrequests', |
|
635 | 574 | action='comment', conditions={'function': check_repo, |
|
636 | 575 | 'method': ['POST']}, |
|
637 | 576 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
638 | 577 | |
|
639 | 578 | rmap.connect('pullrequest_comment_delete', |
|
640 | 579 | '/{repo_name}/pull-request-comment/{comment_id}/delete', |
|
641 | 580 | controller='pullrequests', action='delete_comment', |
|
642 | 581 | conditions={'function': check_repo, 'method': ['DELETE']}, |
|
643 | 582 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
644 | 583 | |
|
645 | 584 | rmap.connect('repo_fork_create_home', '/{repo_name}/fork', |
|
646 | 585 | controller='forks', action='fork_create', |
|
647 | 586 | conditions={'function': check_repo, 'method': ['POST']}, |
|
648 | 587 | requirements=URL_NAME_REQUIREMENTS) |
|
649 | 588 | |
|
650 | 589 | rmap.connect('repo_fork_home', '/{repo_name}/fork', |
|
651 | 590 | controller='forks', action='fork', |
|
652 | 591 | conditions={'function': check_repo}, |
|
653 | 592 | requirements=URL_NAME_REQUIREMENTS) |
|
654 | 593 | |
|
655 | 594 | rmap.connect('repo_forks_home', '/{repo_name}/forks', |
|
656 | 595 | controller='forks', action='forks', |
|
657 | 596 | conditions={'function': check_repo}, |
|
658 | 597 | requirements=URL_NAME_REQUIREMENTS) |
|
659 | 598 | |
|
660 | 599 | return rmap |
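
For reference, a self-contained sketch, not part of the diff, of the placeholder rewrite that JSRoutesMapper._extract_route_information applies when exporting routes for pyroutes.js. The regex and the replace() callback are copied from the class above; to_pyroutes_path is a hypothetical wrapper standing in for a real route object.

    import re

    _argument_prog = re.compile('\{(.*?)\}|:\((.*)\)')

    def to_pyroutes_path(routepath):
        # rewrite {username} / :(arg) placeholders into %(name)s form
        def replace(matchobj):
            if matchobj.group(1):
                return "%%(%s)s" % matchobj.group(1).split(':')[0]
            else:
                return "%%(%s)s" % matchobj.group(2)
        return _argument_prog.sub(replace, routepath)

    # matches the docstring example: '/profile/{username}' -> '/profile/%(username)s'
    assert to_pyroutes_path('/profile/{username}') == '/profile/%(username)s'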
@@ -1,349 +1,351 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import logging |
|
22 | 22 | |
|
23 | from pylons import url | |
|
24 | 23 | from pylons.i18n.translation import _ |
|
25 | 24 | from webhelpers.html.builder import literal |
|
26 | 25 | from webhelpers.html.tags import link_to |
|
27 | 26 | |
|
28 | 27 | from rhodecode.lib.utils2 import AttributeDict |
|
29 | 28 | from rhodecode.lib.vcs.backends.base import BaseCommit |
|
30 | 29 | from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError |
|
31 | 30 | |
|
32 | 31 | |
|
33 | 32 | log = logging.getLogger(__name__) |
|
34 | 33 | |
|
35 | 34 | |
|
36 | 35 | def action_parser(user_log, feed=False, parse_cs=False): |
|
37 | 36 | """ |
|
38 | 37 | This helper maps the specified action string (via action_map) into translated |
|
39 | 38 | fancy names with icons and links |
|
40 | 39 | |
|
41 | 40 | :param user_log: user log instance |
|
42 | 41 | :param feed: use output for feeds (no html and fancy icons) |
|
43 | 42 | :param parse_cs: parse Changesets into VCS instances |
|
44 | 43 | """ |
|
45 | 44 | if user_log.version == 'v2': |
|
46 | 45 | ap = AuditLogParser(user_log) |
|
47 | 46 | return ap.callbacks() |
|
48 | 47 | else: |
|
49 | 48 | # old style |
|
50 | 49 | ap = ActionParser(user_log, feed=feed, parse_commits=parse_cs) |
|
51 | 50 | return ap.callbacks() |
|
52 | 51 | |
|
53 | 52 | |
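For orientation, `action_parser` returns three zero-argument callables: one rendering the highlighted action text, one rendering the action parameters, and one rendering the icon markup. A minimal usage sketch (the `user_log` row is assumed to exist; templates may consume the callbacks in a different order):

    # hedged sketch: consuming the three callbacks from action_parser()
    action_cb, params_cb, icon_cb = action_parser(user_log)
    rendered = '%s %s %s' % (icon_cb(), action_cb(), params_cb())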
|
54 | 53 | class ActionParser(object): |
|
55 | 54 | |
|
56 | 55 | commits_limit = 3 # display this amount always |
|
57 | 56 | commits_top_limit = 50 # show up to this amount of commits hidden |
|
58 | 57 | |
|
59 | 58 | def __init__(self, user_log, feed=False, parse_commits=False): |
|
60 | 59 | self.user_log = user_log |
|
61 | 60 | self.feed = feed |
|
62 | 61 | self.parse_commits = parse_commits |
|
63 | 62 | |
|
64 | 63 | self.action = user_log.action |
|
65 | 64 | self.action_params = ' ' |
|
66 | 65 | x = self.action.split(':', 1) |
|
67 | 66 | if len(x) > 1: |
|
68 | 67 | self.action, self.action_params = x |
|
69 | 68 | |
|
70 | 69 | def callbacks(self): |
|
71 | 70 | action_str = self.action_map.get(self.action, self.action) |
|
72 | 71 | if self.feed: |
|
73 | 72 | action = action_str[0].replace('[', '').replace(']', '') |
|
74 | 73 | else: |
|
75 | 74 | action = action_str[0]\ |
|
76 | 75 | .replace('[', '<span class="journal_highlight">')\ |
|
77 | 76 | .replace(']', '</span>') |
|
78 | 77 | |
|
79 | 78 | action_params_func = _no_params_func |
|
80 | 79 | if callable(action_str[1]): |
|
81 | 80 | action_params_func = action_str[1] |
|
82 | 81 | |
|
83 | 82 | # returned callbacks we need to call to get |
|
84 | 83 | return [ |
|
85 | 84 | lambda: literal(action), action_params_func, |
|
86 | 85 | self.action_parser_icon] |
|
87 | 86 | |
|
88 | 87 | @property |
|
89 | 88 | def action_map(self): |
|
90 | 89 | |
|
91 | 90 | # action : translated str, callback(extractor), icon |
|
92 | 91 | action_map = { |
|
93 | 92 | 'user_deleted_repo': ( |
|
94 | 93 | _('[deleted] repository'), |
|
95 | 94 | None, 'icon-trash'), |
|
96 | 95 | 'user_created_repo': ( |
|
97 | 96 | _('[created] repository'), |
|
98 | 97 | None, 'icon-plus icon-plus-colored'), |
|
99 | 98 | 'user_created_fork': ( |
|
100 | 99 | _('[created] repository as fork'), |
|
101 | 100 | None, 'icon-code-fork'), |
|
102 | 101 | 'user_forked_repo': ( |
|
103 | 102 | _('[forked] repository'), |
|
104 | 103 | self.get_fork_name, 'icon-code-fork'), |
|
105 | 104 | 'user_updated_repo': ( |
|
106 | 105 | _('[updated] repository'), |
|
107 | 106 | None, 'icon-pencil icon-pencil-colored'), |
|
108 | 107 | 'user_downloaded_archive': ( |
|
109 | 108 | _('[downloaded] archive from repository'), |
|
110 | 109 | self.get_archive_name, 'icon-download-alt'), |
|
111 | 110 | 'admin_deleted_repo': ( |
|
112 | 111 | _('[delete] repository'), |
|
113 | 112 | None, 'icon-trash'), |
|
114 | 113 | 'admin_created_repo': ( |
|
115 | 114 | _('[created] repository'), |
|
116 | 115 | None, 'icon-plus icon-plus-colored'), |
|
117 | 116 | 'admin_forked_repo': ( |
|
118 | 117 | _('[forked] repository'), |
|
119 | 118 | None, 'icon-code-fork icon-fork-colored'), |
|
120 | 119 | 'admin_updated_repo': ( |
|
121 | 120 | _('[updated] repository'), |
|
122 | 121 | None, 'icon-pencil icon-pencil-colored'), |
|
123 | 122 | 'admin_created_user': ( |
|
124 | 123 | _('[created] user'), |
|
125 | 124 | self.get_user_name, 'icon-user icon-user-colored'), |
|
126 | 125 | 'admin_updated_user': ( |
|
127 | 126 | _('[updated] user'), |
|
128 | 127 | self.get_user_name, 'icon-user icon-user-colored'), |
|
129 | 128 | 'admin_created_users_group': ( |
|
130 | 129 | _('[created] user group'), |
|
131 | 130 | self.get_users_group, 'icon-pencil icon-pencil-colored'), |
|
132 | 131 | 'admin_updated_users_group': ( |
|
133 | 132 | _('[updated] user group'), |
|
134 | 133 | self.get_users_group, 'icon-pencil icon-pencil-colored'), |
|
135 | 134 | 'user_commented_revision': ( |
|
136 | 135 | _('[commented] on commit in repository'), |
|
137 | 136 | self.get_cs_links, 'icon-comment icon-comment-colored'), |
|
138 | 137 | 'user_commented_pull_request': ( |
|
139 | 138 | _('[commented] on pull request for'), |
|
140 | 139 | self.get_pull_request, 'icon-comment icon-comment-colored'), |
|
141 | 140 | 'user_closed_pull_request': ( |
|
142 | 141 | _('[closed] pull request for'), |
|
143 | 142 | self.get_pull_request, 'icon-check'), |
|
144 | 143 | 'user_merged_pull_request': ( |
|
145 | 144 | _('[merged] pull request for'), |
|
146 | 145 | self.get_pull_request, 'icon-check'), |
|
147 | 146 | 'push': ( |
|
148 | 147 | _('[pushed] into'), |
|
149 | 148 | self.get_cs_links, 'icon-arrow-up'), |
|
150 | 149 | 'push_local': ( |
|
151 | 150 | _('[committed via RhodeCode] into repository'), |
|
152 | 151 | self.get_cs_links, 'icon-pencil icon-pencil-colored'), |
|
153 | 152 | 'push_remote': ( |
|
154 | 153 | _('[pulled from remote] into repository'), |
|
155 | 154 | self.get_cs_links, 'icon-arrow-up'), |
|
156 | 155 | 'pull': ( |
|
157 | 156 | _('[pulled] from'), |
|
158 | 157 | None, 'icon-arrow-down'), |
|
159 | 158 | 'started_following_repo': ( |
|
160 | 159 | _('[started following] repository'), |
|
161 | 160 | None, 'icon-heart icon-heart-colored'), |
|
162 | 161 | 'stopped_following_repo': ( |
|
163 | 162 | _('[stopped following] repository'), |
|
164 | 163 | None, 'icon-heart-empty icon-heart-colored'), |
|
165 | 164 | } |
|
166 | 165 | return action_map |
|
167 | 166 | |
|
168 | 167 | def get_fork_name(self): |
|
169 | 168 | from rhodecode.lib import helpers as h |
|
170 | 169 | repo_name = self.action_params |
|
171 | 170 | _url = h.route_path('repo_summary', repo_name=repo_name) |
|
172 | 171 | return _('fork name %s') % link_to(self.action_params, _url) |
|
173 | 172 | |
|
174 | 173 | def get_user_name(self): |
|
175 | 174 | user_name = self.action_params |
|
176 | 175 | return user_name |
|
177 | 176 | |
|
178 | 177 | def get_users_group(self): |
|
179 | 178 | group_name = self.action_params |
|
180 | 179 | return group_name |
|
181 | 180 | |
|
182 | 181 | def get_pull_request(self): |
|
183 | 182 | from rhodecode.lib import helpers as h |
|
184 | 183 | pull_request_id = self.action_params |
|
185 | 184 | if self.is_deleted(): |
|
186 | 185 | repo_name = self.user_log.repository_name |
|
187 | 186 | else: |
|
188 | 187 | repo_name = self.user_log.repository.repo_name |
|
189 | 188 | return link_to( |
|
190 | 189 | _('Pull request #%s') % pull_request_id, |
|
191 | 190 | h.route_path('pullrequest_show', repo_name=repo_name, |
|
192 | 191 | pull_request_id=pull_request_id)) |
|
193 | 192 | |
|
194 | 193 | def get_archive_name(self): |
|
195 | 194 | archive_name = self.action_params |
|
196 | 195 | return archive_name |
|
197 | 196 | |
|
198 | 197 | def action_parser_icon(self): |
|
199 | 198 | tmpl = """<i class="%s" alt="%s"></i>""" |
|
200 | 199 | ico = self.action_map.get(self.action, ['', '', ''])[2] |
|
201 | 200 | return literal(tmpl % (ico, self.action)) |
|
202 | 201 | |
|
203 | 202 | def get_cs_links(self): |
|
203 | from rhodecode.lib import helpers as h | |
|
204 | 204 | if self.is_deleted(): |
|
205 | 205 | return self.action_params |
|
206 | 206 | |
|
207 | 207 | repo_name = self.user_log.repository.repo_name |
|
208 | 208 | commit_ids = self.action_params.split(',') |
|
209 | 209 | commits = self.get_commits(commit_ids) |
|
210 | 210 | |
|
211 | 211 | link_generator = ( |
|
212 | 212 | self.lnk(commit, repo_name) |
|
213 | 213 | for commit in commits[:self.commits_limit]) |
|
214 | 214 | commit_links = [" " + ', '.join(link_generator)] |
|
215 | 215 | _op1, _name1 = _get_op(commit_ids[0]) |
|
216 | 216 | _op2, _name2 = _get_op(commit_ids[-1]) |
|
217 | 217 | |
|
218 | 218 | commit_id_range = '%s...%s' % (_name1, _name2) |
|
219 | 219 | |
|
220 | 220 | compare_view = ( |
|
221 | 221 | ' <div class="compare_view tooltip" title="%s">' |
|
222 | 222 | '<a href="%s">%s</a> </div>' % ( |
|
223 | 223 | _('Show all combined commits %s->%s') % ( |
|
224 | 224 | commit_ids[0][:12], commit_ids[-1][:12] |
|
225 | 225 | ), |
|
226 | url('changeset_home', repo_name=repo_name, | |
|
227 | revision=commit_id_range), _('compare view') | |
|
226 | h.route_path( | |
|
227 | 'repo_commit', repo_name=repo_name, | |
|
228 | commit_id=commit_id_range), _('compare view') | |
|
228 | 229 | ) |
|
229 | 230 | ) |
|
230 | 231 | |
|
231 | 232 | if len(commit_ids) > self.commits_limit: |
|
232 | 233 | more_count = len(commit_ids) - self.commits_limit |
|
233 | 234 | commit_links.append( |
|
234 | 235 | _(' and %(num)s more commits') % {'num': more_count} |
|
235 | 236 | ) |
|
236 | 237 | |
|
237 | 238 | if len(commits) > 1: |
|
238 | 239 | commit_links.append(compare_view) |
|
239 | 240 | return ''.join(commit_links) |
|
240 | 241 | |
|
241 | 242 | def get_commits(self, commit_ids): |
|
242 | 243 | commits = [] |
|
243 | 244 | if not filter(lambda v: v != '', commit_ids): |
|
244 | 245 | return commits |
|
245 | 246 | |
|
246 | 247 | repo = None |
|
247 | 248 | if self.parse_commits: |
|
248 | 249 | repo = self.user_log.repository.scm_instance() |
|
249 | 250 | |
|
250 | 251 | for commit_id in commit_ids[:self.commits_top_limit]: |
|
251 | 252 | _op, _name = _get_op(commit_id) |
|
252 | 253 | |
|
253 | 254 | # we want parsed commits, or new log store format is bad |
|
254 | 255 | if self.parse_commits: |
|
255 | 256 | try: |
|
256 | 257 | commit = repo.get_commit(commit_id=commit_id) |
|
257 | 258 | commits.append(commit) |
|
258 | 259 | except CommitDoesNotExistError: |
|
259 | 260 | log.error( |
|
260 | 261 | 'cannot find commit id %s in this repository', |
|
261 | 262 | commit_id) |
|
262 | 263 | commits.append(commit_id) |
|
263 | 264 | continue |
|
264 | 265 | else: |
|
265 | 266 | fake_commit = AttributeDict({ |
|
266 | 267 | 'short_id': commit_id[:12], |
|
267 | 268 | 'raw_id': commit_id, |
|
268 | 269 | 'message': '', |
|
269 | 270 | 'op': _op, |
|
270 | 271 | 'ref_name': _name |
|
271 | 272 | }) |
|
272 | 273 | commits.append(fake_commit) |
|
273 | 274 | |
|
274 | 275 | return commits |
|
275 | 276 | |
|
276 | 277 | def lnk(self, commit_or_id, repo_name): |
|
277 | 278 | from rhodecode.lib.helpers import tooltip |
|
279 | from rhodecode.lib import helpers as h | |
|
278 | 280 | |
|
279 | 281 | if isinstance(commit_or_id, (BaseCommit, AttributeDict)): |
|
280 | 282 | lazy_cs = True |
|
281 | 283 | if (getattr(commit_or_id, 'op', None) and |
|
282 | 284 | getattr(commit_or_id, 'ref_name', None)): |
|
283 | 285 | lazy_cs = False |
|
284 | 286 | lbl = '?' |
|
285 | 287 | if commit_or_id.op == 'delete_branch': |
|
286 | 288 | lbl = '%s' % _('Deleted branch: %s') % commit_or_id.ref_name |
|
287 | 289 | title = '' |
|
288 | 290 | elif commit_or_id.op == 'tag': |
|
289 | 291 | lbl = '%s' % _('Created tag: %s') % commit_or_id.ref_name |
|
290 | 292 | title = '' |
|
291 | 293 | _url = '#' |
|
292 | 294 | |
|
293 | 295 | else: |
|
294 | 296 | lbl = '%s' % (commit_or_id.short_id[:8]) |
|
295 | _url = url('changeset_home', repo_name=repo_name, |

296 | revision=commit_or_id.raw_id) |

297 | _url = h.route_path('repo_commit', repo_name=repo_name, | |
|
298 | commit_id=commit_or_id.raw_id) | |
|
297 | 299 | title = tooltip(commit_or_id.message) |
|
298 | 300 | else: |
|
299 | 301 | # commit cannot be found/striped/removed etc. |
|
300 | 302 | lbl = ('%s' % commit_or_id)[:12] |
|
301 | 303 | _url = '#' |
|
302 | 304 | title = _('Commit not found') |
|
303 | 305 | if self.parse_commits: |
|
304 | 306 | return link_to(lbl, _url, title=title, class_='tooltip') |
|
305 | 307 | return link_to(lbl, _url, raw_id=commit_or_id.raw_id, repo_name=repo_name, |
|
306 | 308 | class_='lazy-cs' if lazy_cs else '') |
|
307 | 309 | |
|
308 | 310 | def is_deleted(self): |
|
309 | 311 | return self.user_log.repository is None |
|
310 | 312 | |
|
311 | 313 | |
|
312 | 314 | class AuditLogParser(object): |
|
313 | 315 | def __init__(self, audit_log_entry): |
|
314 | 316 | self.audit_log_entry = audit_log_entry |
|
315 | 317 | |
|
316 | 318 | def get_icon(self, action): |
|
317 | 319 | return 'icon-rhodecode' |
|
318 | 320 | |
|
319 | 321 | def callbacks(self): |
|
320 | 322 | action_str = self.audit_log_entry.action |
|
321 | 323 | |
|
322 | 324 | def callback(): |
|
323 | 325 | # returned callbacks we need to call to get |
|
324 | 326 | action = action_str \ |
|
325 | 327 | .replace('[', '<span class="journal_highlight">')\ |
|
326 | 328 | .replace(']', '</span>') |
|
327 | 329 | return literal(action) |
|
328 | 330 | |
|
329 | 331 | def icon(): |
|
330 | 332 | tmpl = """<i class="%s" alt="%s"></i>""" |
|
331 | 333 | ico = self.get_icon(action_str) |
|
332 | 334 | return literal(tmpl % (ico, action_str)) |
|
333 | 335 | |
|
334 | 336 | action_params_func = _no_params_func |
|
335 | 337 | |
|
336 | 338 | return [ |
|
337 | 339 | callback, action_params_func, icon] |
|
338 | 340 | |
|
339 | 341 | |
|
340 | 342 | def _no_params_func(): |
|
341 | 343 | return "" |
|
342 | 344 | |
|
343 | 345 | |
|
344 | 346 | def _get_op(commit_id): |
|
345 | 347 | _op = None |
|
346 | 348 | _name = commit_id |
|
347 | 349 | if len(commit_id.split('=>')) == 2: |
|
348 | 350 | _op, _name = commit_id.split('=>') |
|
349 | 351 | return _op, _name |
@@ -1,2027 +1,2027 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | authentication and permission libraries |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import os |
|
26 | 26 | import inspect |
|
27 | 27 | import collections |
|
28 | 28 | import fnmatch |
|
29 | 29 | import hashlib |
|
30 | 30 | import itertools |
|
31 | 31 | import logging |
|
32 | 32 | import random |
|
33 | 33 | import traceback |
|
34 | 34 | from functools import wraps |
|
35 | 35 | |
|
36 | 36 | import ipaddress |
|
37 | 37 | from pyramid.httpexceptions import HTTPForbidden, HTTPFound, HTTPNotFound |
|
38 | 38 | from pylons.i18n.translation import _ |
|
39 | 39 | # NOTE(marcink): this has to be removed only after pyramid migration, |
|
40 | 40 | # replace with _ = request.translate |
|
41 | 41 | from sqlalchemy.orm.exc import ObjectDeletedError |
|
42 | 42 | from sqlalchemy.orm import joinedload |
|
43 | 43 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
44 | 44 | |
|
45 | 45 | import rhodecode |
|
46 | 46 | from rhodecode.model import meta |
|
47 | 47 | from rhodecode.model.meta import Session |
|
48 | 48 | from rhodecode.model.user import UserModel |
|
49 | 49 | from rhodecode.model.db import ( |
|
50 | 50 | User, Repository, Permission, UserToPerm, UserGroupToPerm, UserGroupMember, |
|
51 | 51 | UserIpMap, UserApiKeys, RepoGroup) |
|
52 | 52 | from rhodecode.lib import caches |
|
53 | 53 | from rhodecode.lib.utils2 import safe_unicode, aslist, safe_str, md5 |
|
54 | 54 | from rhodecode.lib.utils import ( |
|
55 | 55 | get_repo_slug, get_repo_group_slug, get_user_group_slug) |
|
56 | 56 | from rhodecode.lib.caching_query import FromCache |
|
57 | 57 | |
|
58 | 58 | |
|
59 | 59 | if rhodecode.is_unix: |
|
60 | 60 | import bcrypt |
|
61 | 61 | |
|
62 | 62 | log = logging.getLogger(__name__) |
|
63 | 63 | |
|
64 | 64 | csrf_token_key = "csrf_token" |
|
65 | 65 | |
|
66 | 66 | |
|
67 | 67 | class PasswordGenerator(object): |
|
68 | 68 | """ |
|
69 | 69 | This is a simple class for generating passwords from different sets of |
|
70 | 70 | characters |
|
71 | 71 | usage:: |
|
72 | 72 | |
|
73 | 73 | passwd_gen = PasswordGenerator() |
|
74 | 74 | #print 8-letter password containing only big and small letters |
|
75 | 75 | of alphabet |
|
76 | 76 | passwd_gen.gen_password(8, passwd_gen.ALPHABETS_BIG_SMALL) |
|
77 | 77 | """ |
|
78 | 78 | ALPHABETS_NUM = r'''1234567890''' |
|
79 | 79 | ALPHABETS_SMALL = r'''qwertyuiopasdfghjklzxcvbnm''' |
|
80 | 80 | ALPHABETS_BIG = r'''QWERTYUIOPASDFGHJKLZXCVBNM''' |
|
81 | 81 | ALPHABETS_SPECIAL = r'''`-=[]\;',./~!@#$%^&*()_+{}|:"<>?''' |
|
82 | 82 | ALPHABETS_FULL = ALPHABETS_BIG + ALPHABETS_SMALL \ |
|
83 | 83 | + ALPHABETS_NUM + ALPHABETS_SPECIAL |
|
84 | 84 | ALPHABETS_ALPHANUM = ALPHABETS_BIG + ALPHABETS_SMALL + ALPHABETS_NUM |
|
85 | 85 | ALPHABETS_BIG_SMALL = ALPHABETS_BIG + ALPHABETS_SMALL |
|
86 | 86 | ALPHABETS_ALPHANUM_BIG = ALPHABETS_BIG + ALPHABETS_NUM |
|
87 | 87 | ALPHABETS_ALPHANUM_SMALL = ALPHABETS_SMALL + ALPHABETS_NUM |
|
88 | 88 | |
|
89 | 89 | def __init__(self, passwd=''): |
|
90 | 90 | self.passwd = passwd |
|
91 | 91 | |
|
92 | 92 | def gen_password(self, length, type_=None): |
|
93 | 93 | if type_ is None: |
|
94 | 94 | type_ = self.ALPHABETS_FULL |
|
95 | 95 | self.passwd = ''.join([random.choice(type_) for _ in xrange(length)]) |
|
96 | 96 | return self.passwd |
|
97 | 97 | |
|
98 | 98 | |
|
99 | 99 | class _RhodeCodeCryptoBase(object): |
|
100 | 100 | ENC_PREF = None |
|
101 | 101 | |
|
102 | 102 | def hash_create(self, str_): |
|
103 | 103 | """ |
|
104 | 104 | hash the string using |
|
105 | 105 | |
|
106 | 106 | :param str_: password to hash |
|
107 | 107 | """ |
|
108 | 108 | raise NotImplementedError |
|
109 | 109 | |
|
110 | 110 | def hash_check_with_upgrade(self, password, hashed): |
|
111 | 111 | """ |
|
112 | 112 | Returns tuple in which first element is boolean that states that |
|
113 | 113 | given password matches its hashed version, and the second is a new hash |
|
114 | 114 | of the password, in case this password should be migrated to new |
|
115 | 115 | cipher. |
|
116 | 116 | """ |
|
117 | 117 | checked_hash = self.hash_check(password, hashed) |
|
118 | 118 | return checked_hash, None |
|
119 | 119 | |
|
120 | 120 | def hash_check(self, password, hashed): |
|
121 | 121 | """ |
|
122 | 122 | Checks matching password with its hashed value. |
|
123 | 123 | |
|
124 | 124 | :param password: password |
|
125 | 125 | :param hashed: password in hashed form |
|
126 | 126 | """ |
|
127 | 127 | raise NotImplementedError |
|
128 | 128 | |
|
129 | 129 | def _assert_bytes(self, value): |
|
130 | 130 | """ |
|
131 | 131 | Passing in an `unicode` object can lead to hard to detect issues |
|
132 | 132 | if passwords contain non-ascii characters. Doing a type check |
|
133 | 133 | during runtime, so that such mistakes are detected early on. |
|
134 | 134 | """ |
|
135 | 135 | if not isinstance(value, str): |
|
136 | 136 | raise TypeError( |
|
137 | 137 | "Bytestring required as input, got %r." % (value, )) |
|
138 | 138 | |
|
139 | 139 | |
|
140 | 140 | class _RhodeCodeCryptoBCrypt(_RhodeCodeCryptoBase): |
|
141 | 141 | ENC_PREF = ('$2a$10', '$2b$10') |
|
142 | 142 | |
|
143 | 143 | def hash_create(self, str_): |
|
144 | 144 | self._assert_bytes(str_) |
|
145 | 145 | return bcrypt.hashpw(str_, bcrypt.gensalt(10)) |
|
146 | 146 | |
|
147 | 147 | def hash_check_with_upgrade(self, password, hashed): |
|
148 | 148 | """ |
|
149 | 149 | Returns tuple in which first element is boolean that states that |
|
150 | 150 | given password matches its hashed version, and the second is a new hash |
|
151 | 151 | of the password, in case this password should be migrated to new |
|
152 | 152 | cipher. |
|
153 | 153 | |
|
154 | 154 | This implements special upgrade logic, which works like this: |

155 | 155 | - check if the given password matches the bcrypt hash; if yes, the |

156 | 156 | password is correct and was already stored as bcrypt. Proceed |

157 | 157 | without any changes |

158 | 158 | - if the bcrypt check fails, try sha256. If that compare succeeds, |

159 | 159 | we are dealing with a correct but old hashed password; indicate a |

160 | 160 | hash change and proceed |
|
161 | 161 | """ |
|
162 | 162 | |
|
163 | 163 | new_hash = None |
|
164 | 164 | |
|
165 | 165 | # regular pw check |
|
166 | 166 | password_match_bcrypt = self.hash_check(password, hashed) |
|
167 | 167 | |
|
168 | 168 | # now we want to know if the password was maybe from sha256 |
|
169 | 169 | # basically calling _RhodeCodeCryptoSha256().hash_check() |
|
170 | 170 | if not password_match_bcrypt: |
|
171 | 171 | if _RhodeCodeCryptoSha256().hash_check(password, hashed): |
|
172 | 172 | new_hash = self.hash_create(password) # make new bcrypt hash |
|
173 | 173 | password_match_bcrypt = True |
|
174 | 174 | |
|
175 | 175 | return password_match_bcrypt, new_hash |
|
176 | 176 | |
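A minimal sketch of how a login path could consume this upgrade logic; the `user` object and its `password` attribute are assumptions for illustration, not RhodeCode's actual login code:

    # hedged sketch: transparent sha256 -> bcrypt migration on login
    crypto = _RhodeCodeCryptoBCrypt()
    valid, new_hash = crypto.hash_check_with_upgrade(password, user.password)
    if valid and new_hash:
        # the stored hash was the legacy sha256 form; persist the fresh
        # bcrypt hash so the next login takes the fast path
        user.password = new_hash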
|
177 | 177 | def hash_check(self, password, hashed): |
|
178 | 178 | """ |
|
179 | 179 | Checks matching password with its hashed value. |
|
180 | 180 | |
|
181 | 181 | :param password: password |
|
182 | 182 | :param hashed: password in hashed form |
|
183 | 183 | """ |
|
184 | 184 | self._assert_bytes(password) |
|
185 | 185 | try: |
|
186 | 186 | return bcrypt.hashpw(password, hashed) == hashed |
|
187 | 187 | except ValueError as e: |
|
188 | 188 | # we probably have an invalid salt here; we should not crash, |
|
189 | 189 | # just return with False as it would be a wrong password. |
|
190 | 190 | log.debug('Failed to check password hash using bcrypt %s', |
|
191 | 191 | safe_str(e)) |
|
192 | 192 | |
|
193 | 193 | return False |
|
194 | 194 | |
|
195 | 195 | |
|
196 | 196 | class _RhodeCodeCryptoSha256(_RhodeCodeCryptoBase): |
|
197 | 197 | ENC_PREF = '_' |
|
198 | 198 | |
|
199 | 199 | def hash_create(self, str_): |
|
200 | 200 | self._assert_bytes(str_) |
|
201 | 201 | return hashlib.sha256(str_).hexdigest() |
|
202 | 202 | |
|
203 | 203 | def hash_check(self, password, hashed): |
|
204 | 204 | """ |
|
205 | 205 | Checks matching password with its hashed value. |
|
206 | 206 | |
|
207 | 207 | :param password: password |
|
208 | 208 | :param hashed: password in hashed form |
|
209 | 209 | """ |
|
210 | 210 | self._assert_bytes(password) |
|
211 | 211 | return hashlib.sha256(password).hexdigest() == hashed |
|
212 | 212 | |
|
213 | 213 | |
|
214 | 214 | class _RhodeCodeCryptoMd5(_RhodeCodeCryptoBase): |
|
215 | 215 | ENC_PREF = '_' |
|
216 | 216 | |
|
217 | 217 | def hash_create(self, str_): |
|
218 | 218 | self._assert_bytes(str_) |
|
219 | 219 | return hashlib.md5(str_).hexdigest() |
|
220 | 220 | |
|
221 | 221 | def hash_check(self, password, hashed): |
|
222 | 222 | """ |
|
223 | 223 | Checks matching password with its hashed value. |
|
224 | 224 | |
|
225 | 225 | :param password: password |
|
226 | 226 | :param hashed: password in hashed form |
|
227 | 227 | """ |
|
228 | 228 | self._assert_bytes(password) |
|
229 | 229 | return hashlib.md5(password).hexdigest() == hashed |
|
230 | 230 | |
|
231 | 231 | |
|
232 | 232 | def crypto_backend(): |
|
233 | 233 | """ |
|
234 | 234 | Return the matching crypto backend. |
|
235 | 235 | |
|
236 | 236 | Selection is based on if we run tests or not, we pick md5 backend to run |
|
237 | 237 | tests faster since BCRYPT is expensive to calculate |
|
238 | 238 | """ |
|
239 | 239 | if rhodecode.is_test: |
|
240 | 240 | RhodeCodeCrypto = _RhodeCodeCryptoMd5() |
|
241 | 241 | else: |
|
242 | 242 | RhodeCodeCrypto = _RhodeCodeCryptoBCrypt() |
|
243 | 243 | |
|
244 | 244 | return RhodeCodeCrypto |
|
245 | 245 | |
|
246 | 246 | |
|
247 | 247 | def get_crypt_password(password): |
|
248 | 248 | """ |
|
249 | 249 | Create the hash of `password` with the active crypto backend. |
|
250 | 250 | |
|
251 | 251 | :param password: The cleartext password. |
|
252 | 252 | :type password: unicode |
|
253 | 253 | """ |
|
254 | 254 | password = safe_str(password) |
|
255 | 255 | return crypto_backend().hash_create(password) |
|
256 | 256 | |
|
257 | 257 | |
|
258 | 258 | def check_password(password, hashed): |
|
259 | 259 | """ |
|
260 | 260 | Check if the value in `password` matches the hash in `hashed`. |
|
261 | 261 | |
|
262 | 262 | :param password: The cleartext password. |
|
263 | 263 | :type password: unicode |
|
264 | 264 | |
|
265 | 265 | :param hashed: The expected hashed version of the password. |
|
266 | 266 | :type hashed: The hash has to be passed in in text representation. |
|
267 | 267 | """ |
|
268 | 268 | password = safe_str(password) |
|
269 | 269 | return crypto_backend().hash_check(password, hashed) |
|
270 | 270 | |
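The two module-level helpers round-trip against whichever backend `crypto_backend()` selects (md5 under tests, bcrypt otherwise); a quick self-check, assuming the module context:

    # round-trip through the active crypto backend
    hashed = get_crypt_password(u'secret')
    assert check_password(u'secret', hashed)
    assert not check_password(u'wrong', hashed)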
|
271 | 271 | |
|
272 | 272 | def generate_auth_token(data, salt=None): |
|
273 | 273 | """ |
|
274 | 274 | Generates API KEY from given string |
|
275 | 275 | """ |
|
276 | 276 | |
|
277 | 277 | if salt is None: |
|
278 | 278 | salt = os.urandom(16) |
|
279 | 279 | return hashlib.sha1(safe_str(data) + salt).hexdigest() |
|
280 | 280 | |
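Because the salt defaults to 16 fresh random bytes, two calls on the same input produce different tokens unless a salt is pinned; the result is always a 40-character sha1 hex digest:

    # tokens differ per call due to the random salt
    token_a = generate_auth_token('some-user')
    token_b = generate_auth_token('some-user')
    assert len(token_a) == 40
    assert token_a != token_b    # equal only with negligible probability
    # deterministic when the salt is fixed:
    assert generate_auth_token('x', salt='s') == generate_auth_token('x', salt='s')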
|
281 | 281 | |
|
282 | 282 | class CookieStoreWrapper(object): |
|
283 | 283 | |
|
284 | 284 | def __init__(self, cookie_store): |
|
285 | 285 | self.cookie_store = cookie_store |
|
286 | 286 | |
|
287 | 287 | def __repr__(self): |
|
288 | 288 | return 'CookieStore<%s>' % (self.cookie_store) |
|
289 | 289 | |
|
290 | 290 | def get(self, key, other=None): |
|
291 | 291 | if isinstance(self.cookie_store, dict): |
|
292 | 292 | return self.cookie_store.get(key, other) |
|
293 | 293 | elif isinstance(self.cookie_store, AuthUser): |
|
294 | 294 | return self.cookie_store.__dict__.get(key, other) |
|
295 | 295 | |
|
296 | 296 | |
|
297 | 297 | def _cached_perms_data(user_id, scope, user_is_admin, |
|
298 | 298 | user_inherit_default_permissions, explicit, algo): |
|
299 | 299 | |
|
300 | 300 | permissions = PermissionCalculator( |
|
301 | 301 | user_id, scope, user_is_admin, user_inherit_default_permissions, |
|
302 | 302 | explicit, algo) |
|
303 | 303 | return permissions.calculate() |
|
304 | 304 | |
|
305 | 305 | |
|
306 | 306 | class PermOrigin(object): |
|
307 | 307 | ADMIN = 'superadmin' |
|
308 | 308 | |
|
309 | 309 | REPO_USER = 'user:%s' |
|
310 | 310 | REPO_USERGROUP = 'usergroup:%s' |
|
311 | 311 | REPO_OWNER = 'repo.owner' |
|
312 | 312 | REPO_DEFAULT = 'repo.default' |
|
313 | 313 | REPO_PRIVATE = 'repo.private' |
|
314 | 314 | |
|
315 | 315 | REPOGROUP_USER = 'user:%s' |
|
316 | 316 | REPOGROUP_USERGROUP = 'usergroup:%s' |
|
317 | 317 | REPOGROUP_OWNER = 'group.owner' |
|
318 | 318 | REPOGROUP_DEFAULT = 'group.default' |
|
319 | 319 | |
|
320 | 320 | USERGROUP_USER = 'user:%s' |
|
321 | 321 | USERGROUP_USERGROUP = 'usergroup:%s' |
|
322 | 322 | USERGROUP_OWNER = 'usergroup.owner' |
|
323 | 323 | USERGROUP_DEFAULT = 'usergroup.default' |
|
324 | 324 | |
|
325 | 325 | |
|
326 | 326 | class PermOriginDict(dict): |
|
327 | 327 | """ |
|
328 | 328 | A special dict used for tracking permissions along with their origins. |
|
329 | 329 | |
|
330 | 330 | `__setitem__` has been overridden to expect a tuple(perm, origin) |
|
331 | 331 | `__getitem__` will return only the perm |
|
332 | 332 | `.perm_origin_stack` will return the stack of (perm, origin) set per key |
|
333 | 333 | |
|
334 | 334 | >>> perms = PermOriginDict() |
|
335 | 335 | >>> perms['resource'] = 'read', 'default' |
|
336 | 336 | >>> perms['resource'] |
|
337 | 337 | 'read' |
|
338 | 338 | >>> perms['resource'] = 'write', 'admin' |
|
339 | 339 | >>> perms['resource'] |
|
340 | 340 | 'write' |
|
341 | 341 | >>> perms.perm_origin_stack |
|
342 | 342 | {'resource': [('read', 'default'), ('write', 'admin')]} |
|
343 | 343 | """ |
|
344 | 344 | |
|
345 | 345 | def __init__(self, *args, **kw): |
|
346 | 346 | dict.__init__(self, *args, **kw) |
|
347 | 347 | self.perm_origin_stack = {} |
|
348 | 348 | |
|
349 | 349 | def __setitem__(self, key, (perm, origin)): |
|
350 | 350 | self.perm_origin_stack.setdefault(key, []).append((perm, origin)) |
|
351 | 351 | dict.__setitem__(self, key, perm) |
|
352 | 352 | |
|
353 | 353 | |
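Note that `def __setitem__(self, key, (perm, origin))` relies on Python 2 tuple parameter unpacking, which PEP 3113 removed; a forward-compatible sketch of the same method would unpack in the body instead:

    # Python 3 compatible variant of the same __setitem__ (sketch only)
    def __setitem__(self, key, perm_origin):
        perm, origin = perm_origin   # unpack explicitly, not in the signature
        self.perm_origin_stack.setdefault(key, []).append((perm, origin))
        dict.__setitem__(self, key, perm)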
|
354 | 354 | class PermissionCalculator(object): |
|
355 | 355 | |
|
356 | 356 | def __init__( |
|
357 | 357 | self, user_id, scope, user_is_admin, |
|
358 | 358 | user_inherit_default_permissions, explicit, algo): |
|
359 | 359 | self.user_id = user_id |
|
360 | 360 | self.user_is_admin = user_is_admin |
|
361 | 361 | self.inherit_default_permissions = user_inherit_default_permissions |
|
362 | 362 | self.explicit = explicit |
|
363 | 363 | self.algo = algo |
|
364 | 364 | |
|
365 | 365 | scope = scope or {} |
|
366 | 366 | self.scope_repo_id = scope.get('repo_id') |
|
367 | 367 | self.scope_repo_group_id = scope.get('repo_group_id') |
|
368 | 368 | self.scope_user_group_id = scope.get('user_group_id') |
|
369 | 369 | |
|
370 | 370 | self.default_user_id = User.get_default_user(cache=True).user_id |
|
371 | 371 | |
|
372 | 372 | self.permissions_repositories = PermOriginDict() |
|
373 | 373 | self.permissions_repository_groups = PermOriginDict() |
|
374 | 374 | self.permissions_user_groups = PermOriginDict() |
|
375 | 375 | self.permissions_global = set() |
|
376 | 376 | |
|
377 | 377 | self.default_repo_perms = Permission.get_default_repo_perms( |
|
378 | 378 | self.default_user_id, self.scope_repo_id) |
|
379 | 379 | self.default_repo_groups_perms = Permission.get_default_group_perms( |
|
380 | 380 | self.default_user_id, self.scope_repo_group_id) |
|
381 | 381 | self.default_user_group_perms = \ |
|
382 | 382 | Permission.get_default_user_group_perms( |
|
383 | 383 | self.default_user_id, self.scope_user_group_id) |
|
384 | 384 | |
|
385 | 385 | def calculate(self): |
|
386 | 386 | if self.user_is_admin: |
|
387 | 387 | return self._admin_permissions() |
|
388 | 388 | |
|
389 | 389 | self._calculate_global_default_permissions() |
|
390 | 390 | self._calculate_global_permissions() |
|
391 | 391 | self._calculate_default_permissions() |
|
392 | 392 | self._calculate_repository_permissions() |
|
393 | 393 | self._calculate_repository_group_permissions() |
|
394 | 394 | self._calculate_user_group_permissions() |
|
395 | 395 | return self._permission_structure() |
|
396 | 396 | |
|
397 | 397 | def _admin_permissions(self): |
|
398 | 398 | """ |
|
399 | 399 | admin users have all default rights for repositories |
|
400 | 400 | and groups set to admin |
|
401 | 401 | """ |
|
402 | 402 | self.permissions_global.add('hg.admin') |
|
403 | 403 | self.permissions_global.add('hg.create.write_on_repogroup.true') |
|
404 | 404 | |
|
405 | 405 | # repositories |
|
406 | 406 | for perm in self.default_repo_perms: |
|
407 | 407 | r_k = perm.UserRepoToPerm.repository.repo_name |
|
408 | 408 | p = 'repository.admin' |
|
409 | 409 | self.permissions_repositories[r_k] = p, PermOrigin.ADMIN |
|
410 | 410 | |
|
411 | 411 | # repository groups |
|
412 | 412 | for perm in self.default_repo_groups_perms: |
|
413 | 413 | rg_k = perm.UserRepoGroupToPerm.group.group_name |
|
414 | 414 | p = 'group.admin' |
|
415 | 415 | self.permissions_repository_groups[rg_k] = p, PermOrigin.ADMIN |
|
416 | 416 | |
|
417 | 417 | # user groups |
|
418 | 418 | for perm in self.default_user_group_perms: |
|
419 | 419 | u_k = perm.UserUserGroupToPerm.user_group.users_group_name |
|
420 | 420 | p = 'usergroup.admin' |
|
421 | 421 | self.permissions_user_groups[u_k] = p, PermOrigin.ADMIN |
|
422 | 422 | |
|
423 | 423 | return self._permission_structure() |
|
424 | 424 | |
|
425 | 425 | def _calculate_global_default_permissions(self): |
|
426 | 426 | """ |
|
427 | 427 | global permissions taken from the default user |
|
428 | 428 | """ |
|
429 | 429 | default_global_perms = UserToPerm.query()\ |
|
430 | 430 | .filter(UserToPerm.user_id == self.default_user_id)\ |
|
431 | 431 | .options(joinedload(UserToPerm.permission)) |
|
432 | 432 | |
|
433 | 433 | for perm in default_global_perms: |
|
434 | 434 | self.permissions_global.add(perm.permission.permission_name) |
|
435 | 435 | |
|
436 | 436 | def _calculate_global_permissions(self): |
|
437 | 437 | """ |
|
438 | 438 | Set global system permissions with user permissions or permissions |
|
439 | 439 | taken from the user groups of the current user. |
|
440 | 440 | |
|
441 | 441 | The permissions include repo creating, repo group creating, forking |
|
442 | 442 | etc. |
|
443 | 443 | """ |
|
444 | 444 | |
|
445 | 445 | # now we read the defined permissions and overwrite what we have set |
|
446 | 446 | # before those can be configured from groups or users explicitly. |
|
447 | 447 | |
|
448 | 448 | # TODO: johbo: This seems to be out of sync, find out the reason |
|
449 | 449 | # for the comment below and update it. |
|
450 | 450 | |
|
451 | 451 | # In case we want to extend this list we should be always in sync with |
|
452 | 452 | # User.DEFAULT_USER_PERMISSIONS definitions |
|
453 | 453 | _configurable = frozenset([ |
|
454 | 454 | 'hg.fork.none', 'hg.fork.repository', |
|
455 | 455 | 'hg.create.none', 'hg.create.repository', |
|
456 | 456 | 'hg.usergroup.create.false', 'hg.usergroup.create.true', |
|
457 | 457 | 'hg.repogroup.create.false', 'hg.repogroup.create.true', |
|
458 | 458 | 'hg.create.write_on_repogroup.false', |
|
459 | 459 | 'hg.create.write_on_repogroup.true', |
|
460 | 460 | 'hg.inherit_default_perms.false', 'hg.inherit_default_perms.true' |
|
461 | 461 | ]) |
|
462 | 462 | |
|
463 | 463 | # USER GROUPS comes first user group global permissions |
|
464 | 464 | user_perms_from_users_groups = Session().query(UserGroupToPerm)\ |
|
465 | 465 | .options(joinedload(UserGroupToPerm.permission))\ |
|
466 | 466 | .join((UserGroupMember, UserGroupToPerm.users_group_id == |
|
467 | 467 | UserGroupMember.users_group_id))\ |
|
468 | 468 | .filter(UserGroupMember.user_id == self.user_id)\ |
|
469 | 469 | .order_by(UserGroupToPerm.users_group_id)\ |
|
470 | 470 | .all() |
|
471 | 471 | |
|
472 | 472 | # need to group here by groups since user can be in more than |
|
473 | 473 | # one group, so we get all groups |
|
474 | 474 | _explicit_grouped_perms = [ |
|
475 | 475 | [x, list(y)] for x, y in |
|
476 | 476 | itertools.groupby(user_perms_from_users_groups, |
|
477 | 477 | lambda _x: _x.users_group)] |
|
478 | 478 | |
|
479 | 479 | for gr, perms in _explicit_grouped_perms: |
|
480 | 480 | # since user can be in multiple groups iterate over them and |
|
481 | 481 | # select the lowest permissions first (more explicit) |
|
482 | 482 | # TODO: marcink: do this^^ |
|
483 | 483 | |
|
484 | 484 | # group doesn't inherit default permissions so we actually set them |
|
485 | 485 | if not gr.inherit_default_permissions: |
|
486 | 486 | # NEED TO IGNORE all previously set configurable permissions |
|
487 | 487 | # and replace them with explicitly set from this user |
|
488 | 488 | # group permissions |
|
489 | 489 | self.permissions_global = self.permissions_global.difference( |
|
490 | 490 | _configurable) |
|
491 | 491 | for perm in perms: |
|
492 | 492 | self.permissions_global.add(perm.permission.permission_name) |
|
493 | 493 | |
|
494 | 494 | # user explicit global permissions |
|
495 | 495 | user_perms = Session().query(UserToPerm)\ |
|
496 | 496 | .options(joinedload(UserToPerm.permission))\ |
|
497 | 497 | .filter(UserToPerm.user_id == self.user_id).all() |
|
498 | 498 | |
|
499 | 499 | if not self.inherit_default_permissions: |
|
500 | 500 | # NEED TO IGNORE all configurable permissions and |
|
501 | 501 | # replace them with explicitly set from this user permissions |
|
502 | 502 | self.permissions_global = self.permissions_global.difference( |
|
503 | 503 | _configurable) |
|
504 | 504 | for perm in user_perms: |
|
505 | 505 | self.permissions_global.add(perm.permission.permission_name) |
|
506 | 506 | |
|
507 | 507 | def _calculate_default_permissions(self): |
|
508 | 508 | """ |
|
509 | 509 | Set default user permissions for repositories, repository groups |
|
510 | 510 | taken from the default user. |
|
511 | 511 | |
|
512 | 512 | Calculate inheritance of object permissions based on what we have now |
|
513 | 513 | in GLOBAL permissions. We check if .false is in GLOBAL since this is |
|
514 | 514 | explicitly set. Inherit is the opposite of .false being there. |
|
515 | 515 | |
|
516 | 516 | .. note:: |
|
517 | 517 | |
|
518 | 518 | the syntax is a little bit odd, but what we need to check here is |

519 | 519 | the opposite of the .false permission being in the list; so even in |

520 | 520 | the inconsistent state when both .true/.false are there, |

521 | 521 | .false is more important |
|
522 | 522 | |
|
523 | 523 | """ |
|
524 | 524 | user_inherit_object_permissions = not ('hg.inherit_default_perms.false' |
|
525 | 525 | in self.permissions_global) |
|
526 | 526 | |
|
527 | 527 | # defaults for repositories, taken from `default` user permissions |
|
528 | 528 | # on given repo |
|
529 | 529 | for perm in self.default_repo_perms: |
|
530 | 530 | r_k = perm.UserRepoToPerm.repository.repo_name |
|
531 | 531 | o = PermOrigin.REPO_DEFAULT |
|
532 | 532 | if perm.Repository.private and not ( |
|
533 | 533 | perm.Repository.user_id == self.user_id): |
|
534 | 534 | # disable defaults for private repos, |
|
535 | 535 | p = 'repository.none' |
|
536 | 536 | o = PermOrigin.REPO_PRIVATE |
|
537 | 537 | elif perm.Repository.user_id == self.user_id: |
|
538 | 538 | # set admin if owner |
|
539 | 539 | p = 'repository.admin' |
|
540 | 540 | o = PermOrigin.REPO_OWNER |
|
541 | 541 | else: |
|
542 | 542 | p = perm.Permission.permission_name |
|
543 | 543 | # if we decide this user isn't inheriting permissions from |
|
544 | 544 | # default user we set him to .none so only explicit |
|
545 | 545 | # permissions work |
|
546 | 546 | if not user_inherit_object_permissions: |
|
547 | 547 | p = 'repository.none' |
|
548 | 548 | self.permissions_repositories[r_k] = p, o |
|
549 | 549 | |
|
550 | 550 | # defaults for repository groups taken from `default` user permission |
|
551 | 551 | # on given group |
|
552 | 552 | for perm in self.default_repo_groups_perms: |
|
553 | 553 | rg_k = perm.UserRepoGroupToPerm.group.group_name |
|
554 | 554 | o = PermOrigin.REPOGROUP_DEFAULT |
|
555 | 555 | if perm.RepoGroup.user_id == self.user_id: |
|
556 | 556 | # set admin if owner |
|
557 | 557 | p = 'group.admin' |
|
558 | 558 | o = PermOrigin.REPOGROUP_OWNER |
|
559 | 559 | else: |
|
560 | 560 | p = perm.Permission.permission_name |
|
561 | 561 | |
|
562 | 562 | # if we decide this user isn't inheriting permissions from default |
|
563 | 563 | # user we set him to .none so only explicit permissions work |
|
564 | 564 | if not user_inherit_object_permissions: |
|
565 | 565 | p = 'group.none' |
|
566 | 566 | self.permissions_repository_groups[rg_k] = p, o |
|
567 | 567 | |
|
568 | 568 | # defaults for user groups taken from `default` user permission |
|
569 | 569 | # on given user group |
|
570 | 570 | for perm in self.default_user_group_perms: |
|
571 | 571 | u_k = perm.UserUserGroupToPerm.user_group.users_group_name |
|
572 | 572 | o = PermOrigin.USERGROUP_DEFAULT |
|
573 | 573 | if perm.UserGroup.user_id == self.user_id: |
|
574 | 574 | # set admin if owner |
|
575 | 575 | p = 'usergroup.admin' |
|
576 | 576 | o = PermOrigin.USERGROUP_OWNER |
|
577 | 577 | else: |
|
578 | 578 | p = perm.Permission.permission_name |
|
579 | 579 | |
|
580 | 580 | # if we decide this user isn't inheriting permissions from default |
|
581 | 581 | # user we set him to .none so only explicit permissions work |
|
582 | 582 | if not user_inherit_object_permissions: |
|
583 | 583 | p = 'usergroup.none' |
|
584 | 584 | self.permissions_user_groups[u_k] = p, o |
|
585 | 585 | |
|
586 | 586 | def _calculate_repository_permissions(self): |
|
587 | 587 | """ |
|
588 | 588 | Repository permissions for the current user. |
|
589 | 589 | |
|
590 | 590 | Check if the user is part of user groups for this repository and |
|
591 | 591 | fill in the permission from it. `_choose_permission` decides which |

592 | 592 | permission should be selected based on the selected method. |
|
593 | 593 | """ |
|
594 | 594 | |
|
595 | 595 | # user group for repositories permissions |
|
596 | 596 | user_repo_perms_from_user_group = Permission\ |
|
597 | 597 | .get_default_repo_perms_from_user_group( |
|
598 | 598 | self.user_id, self.scope_repo_id) |
|
599 | 599 | |
|
600 | 600 | multiple_counter = collections.defaultdict(int) |
|
601 | 601 | for perm in user_repo_perms_from_user_group: |
|
602 | 602 | r_k = perm.UserGroupRepoToPerm.repository.repo_name |
|
603 | 603 | ug_k = perm.UserGroupRepoToPerm.users_group.users_group_name |
|
604 | 604 | multiple_counter[r_k] += 1 |
|
605 | 605 | p = perm.Permission.permission_name |
|
606 | 606 | o = PermOrigin.REPO_USERGROUP % ug_k |
|
607 | 607 | |
|
608 | 608 | if perm.Repository.user_id == self.user_id: |
|
609 | 609 | # set admin if owner |
|
610 | 610 | p = 'repository.admin' |
|
611 | 611 | o = PermOrigin.REPO_OWNER |
|
612 | 612 | else: |
|
613 | 613 | if multiple_counter[r_k] > 1: |
|
614 | 614 | cur_perm = self.permissions_repositories[r_k] |
|
615 | 615 | p = self._choose_permission(p, cur_perm) |
|
616 | 616 | self.permissions_repositories[r_k] = p, o |
|
617 | 617 | |
|
618 | 618 | # user explicit permissions for repositories, overrides any specified |
|
619 | 619 | # by the group permission |
|
620 | 620 | user_repo_perms = Permission.get_default_repo_perms( |
|
621 | 621 | self.user_id, self.scope_repo_id) |
|
622 | 622 | for perm in user_repo_perms: |
|
623 | 623 | r_k = perm.UserRepoToPerm.repository.repo_name |
|
624 | 624 | o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username |
|
625 | 625 | # set admin if owner |
|
626 | 626 | if perm.Repository.user_id == self.user_id: |
|
627 | 627 | p = 'repository.admin' |
|
628 | 628 | o = PermOrigin.REPO_OWNER |
|
629 | 629 | else: |
|
630 | 630 | p = perm.Permission.permission_name |
|
631 | 631 | if not self.explicit: |
|
632 | 632 | cur_perm = self.permissions_repositories.get( |
|
633 | 633 | r_k, 'repository.none') |
|
634 | 634 | p = self._choose_permission(p, cur_perm) |
|
635 | 635 | self.permissions_repositories[r_k] = p, o |
|
636 | 636 | |
|
637 | 637 | def _calculate_repository_group_permissions(self): |
|
638 | 638 | """ |
|
639 | 639 | Repository group permissions for the current user. |
|
640 | 640 | |
|
641 | 641 | Check if the user is part of user groups for repository groups and |
|
642 | 642 | fill in the permissions from it. `_choose_permission` decides which |

643 | 643 | permission should be selected based on the selected method. |
|
644 | 644 | """ |
|
645 | 645 | # user group for repo groups permissions |
|
646 | 646 | user_repo_group_perms_from_user_group = Permission\ |
|
647 | 647 | .get_default_group_perms_from_user_group( |
|
648 | 648 | self.user_id, self.scope_repo_group_id) |
|
649 | 649 | |
|
650 | 650 | multiple_counter = collections.defaultdict(int) |
|
651 | 651 | for perm in user_repo_group_perms_from_user_group: |
|
652 | 652 | g_k = perm.UserGroupRepoGroupToPerm.group.group_name |
|
653 | 653 | ug_k = perm.UserGroupRepoGroupToPerm.users_group.users_group_name |
|
654 | 654 | o = PermOrigin.REPOGROUP_USERGROUP % ug_k |
|
655 | 655 | multiple_counter[g_k] += 1 |
|
656 | 656 | p = perm.Permission.permission_name |
|
657 | 657 | if perm.RepoGroup.user_id == self.user_id: |
|
658 | 658 | # set admin if owner, even for member of other user group |
|
659 | 659 | p = 'group.admin' |
|
660 | 660 | o = PermOrigin.REPOGROUP_OWNER |
|
661 | 661 | else: |
|
662 | 662 | if multiple_counter[g_k] > 1: |
|
663 | 663 | cur_perm = self.permissions_repository_groups[g_k] |
|
664 | 664 | p = self._choose_permission(p, cur_perm) |
|
665 | 665 | self.permissions_repository_groups[g_k] = p, o |
|
666 | 666 | |
|
667 | 667 | # user explicit permissions for repository groups |
|
668 | 668 | user_repo_groups_perms = Permission.get_default_group_perms( |
|
669 | 669 | self.user_id, self.scope_repo_group_id) |
|
670 | 670 | for perm in user_repo_groups_perms: |
|
671 | 671 | rg_k = perm.UserRepoGroupToPerm.group.group_name |
|
672 | 672 | u_k = perm.UserRepoGroupToPerm.user.username |
|
673 | 673 | o = PermOrigin.REPOGROUP_USER % u_k |
|
674 | 674 | |
|
675 | 675 | if perm.RepoGroup.user_id == self.user_id: |
|
676 | 676 | # set admin if owner |
|
677 | 677 | p = 'group.admin' |
|
678 | 678 | o = PermOrigin.REPOGROUP_OWNER |
|
679 | 679 | else: |
|
680 | 680 | p = perm.Permission.permission_name |
|
681 | 681 | if not self.explicit: |
|
682 | 682 | cur_perm = self.permissions_repository_groups.get( |
|
683 | 683 | rg_k, 'group.none') |
|
684 | 684 | p = self._choose_permission(p, cur_perm) |
|
685 | 685 | self.permissions_repository_groups[rg_k] = p, o |
|
686 | 686 | |
|
687 | 687 | def _calculate_user_group_permissions(self): |
|
688 | 688 | """ |
|
689 | 689 | User group permissions for the current user. |
|
690 | 690 | """ |
|
691 | 691 | # user group for user group permissions |
|
692 | 692 | user_group_from_user_group = Permission\ |
|
693 | 693 | .get_default_user_group_perms_from_user_group( |
|
694 | 694 | self.user_id, self.scope_user_group_id) |
|
695 | 695 | |
|
696 | 696 | multiple_counter = collections.defaultdict(int) |
|
697 | 697 | for perm in user_group_from_user_group: |
|
698 | 698 | g_k = perm.UserGroupUserGroupToPerm\ |
|
699 | 699 | .target_user_group.users_group_name |
|
700 | 700 | u_k = perm.UserGroupUserGroupToPerm\ |
|
701 | 701 | .user_group.users_group_name |
|
702 | 702 | o = PermOrigin.USERGROUP_USERGROUP % u_k |
|
703 | 703 | multiple_counter[g_k] += 1 |
|
704 | 704 | p = perm.Permission.permission_name |
|
705 | 705 | |
|
706 | 706 | if perm.UserGroup.user_id == self.user_id: |
|
707 | 707 | # set admin if owner, even for member of other user group |
|
708 | 708 | p = 'usergroup.admin' |
|
709 | 709 | o = PermOrigin.USERGROUP_OWNER |
|
710 | 710 | else: |
|
711 | 711 | if multiple_counter[g_k] > 1: |
|
712 | 712 | cur_perm = self.permissions_user_groups[g_k] |
|
713 | 713 | p = self._choose_permission(p, cur_perm) |
|
714 | 714 | self.permissions_user_groups[g_k] = p, o |
|
715 | 715 | |
|
716 | 716 | # user explicit permission for user groups |
|
717 | 717 | user_user_groups_perms = Permission.get_default_user_group_perms( |
|
718 | 718 | self.user_id, self.scope_user_group_id) |
|
719 | 719 | for perm in user_user_groups_perms: |
|
720 | 720 | ug_k = perm.UserUserGroupToPerm.user_group.users_group_name |
|
721 | 721 | u_k = perm.UserUserGroupToPerm.user.username |
|
722 | 722 | o = PermOrigin.USERGROUP_USER % u_k |
|
723 | 723 | |
|
724 | 724 | if perm.UserGroup.user_id == self.user_id: |
|
725 | 725 | # set admin if owner |
|
726 | 726 | p = 'usergroup.admin' |
|
727 | 727 | o = PermOrigin.USERGROUP_OWNER |
|
728 | 728 | else: |
|
729 | 729 | p = perm.Permission.permission_name |
|
730 | 730 | if not self.explicit: |
|
731 | 731 | cur_perm = self.permissions_user_groups.get( |
|
732 | 732 | ug_k, 'usergroup.none') |
|
733 | 733 | p = self._choose_permission(p, cur_perm) |
|
734 | 734 | self.permissions_user_groups[ug_k] = p, o |
|
735 | 735 | |
|
736 | 736 | def _choose_permission(self, new_perm, cur_perm): |
|
737 | 737 | new_perm_val = Permission.PERM_WEIGHTS[new_perm] |
|
738 | 738 | cur_perm_val = Permission.PERM_WEIGHTS[cur_perm] |
|
739 | 739 | if self.algo == 'higherwin': |
|
740 | 740 | if new_perm_val > cur_perm_val: |
|
741 | 741 | return new_perm |
|
742 | 742 | return cur_perm |
|
743 | 743 | elif self.algo == 'lowerwin': |
|
744 | 744 | if new_perm_val < cur_perm_val: |
|
745 | 745 | return new_perm |
|
746 | 746 | return cur_perm |
|
747 | 747 | |
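`_choose_permission` resolves conflicting grants by weight: 'higherwin' keeps the more permissive side, 'lowerwin' the more restrictive. A standalone sketch with hypothetical weights (the real values live in `Permission.PERM_WEIGHTS`):

    # hedged sketch of the weight-based conflict resolution;
    # the weights below are assumptions for illustration only
    PERM_WEIGHTS = {'repository.none': 0, 'repository.read': 1,
                    'repository.write': 3, 'repository.admin': 4}

    def choose_permission(new_perm, cur_perm, algo='higherwin'):
        new_val, cur_val = PERM_WEIGHTS[new_perm], PERM_WEIGHTS[cur_perm]
        if algo == 'higherwin':
            return new_perm if new_val > cur_val else cur_perm
        return new_perm if new_val < cur_val else cur_perm

    # a user in two groups granting read and write:
    # higherwin -> 'repository.write', lowerwin -> 'repository.read'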
|
748 | 748 | def _permission_structure(self): |
|
749 | 749 | return { |
|
750 | 750 | 'global': self.permissions_global, |
|
751 | 751 | 'repositories': self.permissions_repositories, |
|
752 | 752 | 'repositories_groups': self.permissions_repository_groups, |
|
753 | 753 | 'user_groups': self.permissions_user_groups, |
|
754 | 754 | } |
|
755 | 755 | |
|
756 | 756 | |
|
757 | def allowed_auth_token_access(controller_name, whitelist=None, auth_token=None): |

757 | def allowed_auth_token_access(view_name, whitelist=None, auth_token=None): | |
|
758 | 758 | """ |
|
759 | 759 | Check if given controller_name is in whitelist of auth token access |
|
760 | 760 | """ |
|
761 | 761 | if not whitelist: |
|
762 | 762 | from rhodecode import CONFIG |
|
763 | 763 | whitelist = aslist( |
|
764 | 764 | CONFIG.get('api_access_controllers_whitelist'), sep=',') |
|
765 | 765 | log.debug( |
|
766 | 766 | 'Allowed controllers for AUTH TOKEN access: %s' % (whitelist,)) |
|
767 | 767 | |
|
768 | 768 | auth_token_access_valid = False |
|
769 | 769 | for entry in whitelist: |
|
770 | if fnmatch.fnmatch(controller_name, entry): |

770 | if fnmatch.fnmatch(view_name, entry): | |
|
771 | 771 | auth_token_access_valid = True |
|
772 | 772 | break |
|
773 | 773 | |
|
774 | 774 | if auth_token_access_valid: |
|
775 | log.debug('controller: `%s` matches entry in whitelist: %s' |

776 | % (controller_name, whitelist)) |

775 | log.debug('view: `%s` matches entry in whitelist: %s' | |
|
776 | % (view_name, whitelist)) | |
|
777 | 777 | else: |
|
778 | msg = ('controller: `%s` does *NOT* match any entry in whitelist: %s' |

779 | % (controller_name, whitelist)) |

778 | msg = ('view: `%s` does *NOT* match any entry in whitelist: %s' | |
|
779 | % (view_name, whitelist)) | |
|
780 | 780 | if auth_token: |
|
781 | 781 | # if we use auth token key and don't have access it's a warning |
|
782 | 782 | log.warning(msg) |
|
783 | 783 | else: |
|
784 | 784 | log.debug(msg) |
|
785 | 785 | |
|
786 | 786 | return auth_token_access_valid |
|
787 | 787 | |
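The whitelist is read from the `api_access_controllers_whitelist` setting and matched with shell-style wildcards, so one entry can cover a whole family of views. An illustration with hypothetical entry and view names:

    import fnmatch
    # the entries and view names below are made-up examples
    whitelist = ['ChangesetController:changeset_raw', 'FilesController:*']
    assert fnmatch.fnmatch('FilesController:rawfile', whitelist[1])
    assert not fnmatch.fnmatch('SummaryController:index', whitelist[0])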
|
788 | 788 | |
|
789 | 789 | class AuthUser(object): |
|
790 | 790 | """ |
|
791 | 791 | A simple object that handles all attributes of user in RhodeCode |
|
792 | 792 | |
|
793 | 793 | It does lookup based on API key, given user, or user present in session. |
|
794 | 794 | Then it fills all required information for such user. It also checks if |
|
795 | 795 | anonymous access is enabled and if so, it returns default user as logged in |
|
796 | 796 | """ |
|
797 | 797 | GLOBAL_PERMS = [x[0] for x in Permission.PERMS] |
|
798 | 798 | |
|
799 | 799 | def __init__(self, user_id=None, api_key=None, username=None, ip_addr=None): |
|
800 | 800 | |
|
801 | 801 | self.user_id = user_id |
|
802 | 802 | self._api_key = api_key |
|
803 | 803 | |
|
804 | 804 | self.api_key = None |
|
805 | 805 | self.feed_token = '' |
|
806 | 806 | self.username = username |
|
807 | 807 | self.ip_addr = ip_addr |
|
808 | 808 | self.name = '' |
|
809 | 809 | self.lastname = '' |
|
810 | 810 | self.first_name = '' |
|
811 | 811 | self.last_name = '' |
|
812 | 812 | self.email = '' |
|
813 | 813 | self.is_authenticated = False |
|
814 | 814 | self.admin = False |
|
815 | 815 | self.inherit_default_permissions = False |
|
816 | 816 | self.password = '' |
|
817 | 817 | |
|
818 | 818 | self.anonymous_user = None # propagated on propagate_data |
|
819 | 819 | self.propagate_data() |
|
820 | 820 | self._instance = None |
|
821 | 821 | self._permissions_scoped_cache = {} # used to bind scoped calculation |
|
822 | 822 | |
|
823 | 823 | @LazyProperty |
|
824 | 824 | def permissions(self): |
|
825 | 825 | return self.get_perms(user=self, cache=False) |
|
826 | 826 | |
|
827 | 827 | def permissions_with_scope(self, scope): |
|
828 | 828 | """ |
|
829 | 829 | Call the get_perms function with scoped data. The scope in that function |
|
830 | 830 | narrows the SQL calls to the given ID of objects resulting in fetching |
|
831 | 831 | just the particular permission we want to obtain. If scope is an empty dict |
|
832 | 832 | then it basically narrows the scope to GLOBAL permissions only. |
|
833 | 833 | |
|
834 | 834 | :param scope: dict |
|
835 | 835 | """ |
|
836 | 836 | if 'repo_name' in scope: |
|
837 | 837 | obj = Repository.get_by_repo_name(scope['repo_name']) |
|
838 | 838 | if obj: |
|
839 | 839 | scope['repo_id'] = obj.repo_id |
|
840 | 840 | _scope = { |
|
841 | 841 | 'repo_id': -1, |
|
842 | 842 | 'user_group_id': -1, |
|
843 | 843 | 'repo_group_id': -1, |
|
844 | 844 | } |
|
845 | 845 | _scope.update(scope) |
|
846 | 846 | cache_key = "_".join(map(safe_str, reduce(lambda a, b: a+b, |
|
847 | 847 | _scope.items()))) |
|
848 | 848 | if cache_key not in self._permissions_scoped_cache: |
|
849 | 849 | # store in cache to mimic how the @LazyProperty works, |
|
850 | 850 | # the difference here is that we use the unique key calculated |
|
851 | 851 | # from params and values |
|
852 | 852 | res = self.get_perms(user=self, cache=False, scope=_scope) |
|
853 | 853 | self._permissions_scoped_cache[cache_key] = res |
|
854 | 854 | return self._permissions_scoped_cache[cache_key] |
|
855 | 855 | |
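The scoped cache key above is just the scope dict flattened into alternating key/value parts; roughly (the scope values are illustrative, and the part order follows dict iteration order):

    # illustration of the scope cache-key construction
    _scope = {'repo_id': 42, 'user_group_id': -1, 'repo_group_id': -1}
    flat = reduce(lambda a, b: a + b, _scope.items())   # ('repo_id', 42, ...)
    cache_key = '_'.join(map(str, flat))                # source uses safe_str
    # e.g. 'repo_id_42_user_group_id_-1_repo_group_id_-1'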
|
856 | 856 | def get_instance(self): |
|
857 | 857 | return User.get(self.user_id) |
|
858 | 858 | |
|
859 | 859 | def update_lastactivity(self): |
|
860 | 860 | if self.user_id: |
|
861 | 861 | User.get(self.user_id).update_lastactivity() |
|
862 | 862 | |
|
863 | 863 | def propagate_data(self): |
|
864 | 864 | """ |
|
865 | 865 | Fills in user data and propagates values to this instance. Maps fetched |
|
866 | 866 | user attributes to this class instance attributes |
|
867 | 867 | """ |
|
868 | 868 | log.debug('starting data propagation for new potential AuthUser') |
|
869 | 869 | user_model = UserModel() |
|
870 | 870 | anon_user = self.anonymous_user = User.get_default_user(cache=True) |
|
871 | 871 | is_user_loaded = False |
|
872 | 872 | |
|
873 | 873 | # lookup by userid |
|
874 | 874 | if self.user_id is not None and self.user_id != anon_user.user_id: |
|
875 | 875 | log.debug('Trying Auth User lookup by USER ID: `%s`' % self.user_id) |
|
876 | 876 | is_user_loaded = user_model.fill_data(self, user_id=self.user_id) |
|
877 | 877 | |
|
878 | 878 | # try to get user by api key |
|
879 | 879 | elif self._api_key and self._api_key != anon_user.api_key: |
|
880 | 880 | log.debug('Trying Auth User lookup by API KEY: `%s`' % self._api_key) |
|
881 | 881 | is_user_loaded = user_model.fill_data(self, api_key=self._api_key) |
|
882 | 882 | |
|
883 | 883 | # lookup by username |
|
884 | 884 | elif self.username: |
|
885 | 885 | log.debug('Trying Auth User lookup by USER NAME: `%s`' % self.username) |
|
886 | 886 | is_user_loaded = user_model.fill_data(self, username=self.username) |
|
887 | 887 | else: |
|
888 | 888 | log.debug('No data in %s that could be used to log in' % self) |
|
889 | 889 | |
|
890 | 890 | if not is_user_loaded: |
|
891 | 891 | log.debug('Failed to load user. Fallback to default user') |
|
892 | 892 | # if we cannot authenticate user try anonymous |
|
893 | 893 | if anon_user.active: |
|
894 | 894 | user_model.fill_data(self, user_id=anon_user.user_id) |
|
895 | 895 | # then we set this user is logged in |
|
896 | 896 | self.is_authenticated = True |
|
897 | 897 | else: |
|
898 | 898 | # in case of disabled anonymous user we reset some of the |
|
899 | 899 | # parameters so such user is "corrupted", skipping the fill_data |
|
900 | 900 | for attr in ['user_id', 'username', 'admin', 'active']: |
|
901 | 901 | setattr(self, attr, None) |
|
902 | 902 | self.is_authenticated = False |
|
903 | 903 | |
|
904 | 904 | if not self.username: |
|
905 | 905 | self.username = 'None' |
|
906 | 906 | |
|
907 | 907 | log.debug('Auth User is now %s' % self) |
|
908 | 908 | |
|
909 | 909 | def get_perms(self, user, scope=None, explicit=True, algo='higherwin', |
|
910 | 910 | cache=False): |
|
911 | 911 | """ |
|
912 | 912 | Fills user permission attribute with permissions taken from the database; |
|
913 | 913 | works for permissions given for repositories, and for permissions that |
|
914 | 914 | are granted to groups |
|
915 | 915 | |
|
916 | 916 | :param user: instance of User object from database |
|
917 | 917 | :param explicit: In case there are permissions both for user and a group |
|
918 | 918 | that user is part of, explicit flag will define if user will |
|
919 | 919 | explicitly override permissions from group, if it's False it will |
|
920 | 920 | make decision based on the algo |
|
921 | 921 | :param algo: algorithm to decide which permission should be chosen if |

922 | 922 | it is defined multiple times, e.g. user in two different groups. It also |
|
923 | 923 | decides if explicit flag is turned off how to specify the permission |
|
924 | 924 | for case when user is in a group + have defined separate permission |
|
925 | 925 | """ |
|
926 | 926 | user_id = user.user_id |
|
927 | 927 | user_is_admin = user.is_admin |
|
928 | 928 | |
|
929 | 929 | # inheritance of global permissions like create repo/fork repo etc |
|
930 | 930 | user_inherit_default_permissions = user.inherit_default_permissions |
|
931 | 931 | |
|
932 | 932 | log.debug('Computing PERMISSION tree for scope %s' % (scope, )) |
|
933 | 933 | compute = caches.conditional_cache( |
|
934 | 934 | 'short_term', 'cache_desc', |
|
935 | 935 | condition=cache, func=_cached_perms_data) |
|
936 | 936 | result = compute(user_id, scope, user_is_admin, |
|
937 | 937 | user_inherit_default_permissions, explicit, algo) |
|
938 | 938 | |
|
939 | 939 | result_repr = [] |
|
940 | 940 | for k in result: |
|
941 | 941 | result_repr.append((k, len(result[k]))) |
|
942 | 942 | |
|
943 | 943 | log.debug('PERMISSION tree computed %s' % (result_repr,)) |
|
944 | 944 | return result |
|
945 | 945 | |
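get_perms() delegates the expensive tree computation to caches.conditional_cache(), which memoizes only when the caller asked for caching. A minimal sketch of that behaviour (an assumption about its semantics, not the actual implementation; functools.lru_cache is Python 3 shorthand standing in for the real cache region):

    import functools

    def conditional_cache(region, prefix, condition, func):
        # call straight through when caching was not requested
        if not condition:
            return func
        # otherwise memoize results keyed by the (hashable) arguments
        return functools.lru_cache(maxsize=128)(func)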
|
946 | 946 | @property |
|
947 | 947 | def is_default(self): |
|
948 | 948 | return self.username == User.DEFAULT_USER |
|
949 | 949 | |
|
950 | 950 | @property |
|
951 | 951 | def is_admin(self): |
|
952 | 952 | return self.admin |
|
953 | 953 | |
|
954 | 954 | @property |
|
955 | 955 | def is_user_object(self): |
|
956 | 956 | return self.user_id is not None |
|
957 | 957 | |
|
958 | 958 | @property |
|
959 | 959 | def repositories_admin(self): |
|
960 | 960 | """ |
|
961 | 961 | Returns list of repositories you're an admin of |
|
962 | 962 | """ |
|
963 | 963 | return [ |
|
964 | 964 | x[0] for x in self.permissions['repositories'].iteritems() |
|
965 | 965 | if x[1] == 'repository.admin'] |
|
966 | 966 | |
|
967 | 967 | @property |
|
968 | 968 | def repository_groups_admin(self): |
|
969 | 969 | """ |
|
970 | 970 | Returns list of repository groups you're an admin of |
|
971 | 971 | """ |
|
972 | 972 | return [ |
|
973 | 973 | x[0] for x in self.permissions['repositories_groups'].iteritems() |
|
974 | 974 | if x[1] == 'group.admin'] |
|
975 | 975 | |
|
976 | 976 | @property |
|
977 | 977 | def user_groups_admin(self): |
|
978 | 978 | """ |
|
979 | 979 | Returns list of user groups you're an admin of |
|
980 | 980 | """ |
|
981 | 981 | return [ |
|
982 | 982 | x[0] for x in self.permissions['user_groups'].iteritems() |
|
983 | 983 | if x[1] == 'usergroup.admin'] |
|
984 | 984 | |
|
985 | 985 | @property |
|
986 | 986 | def ip_allowed(self): |
|
987 | 987 | """ |
|
988 | 988 | Checks if the ip_addr used in the constructor is within the defined |
|
989 | 989 | list of allowed IP addresses for the user |
|
990 | 990 | |
|
991 | 991 | :returns: boolean, True if ip is in allowed ip range |
|
992 | 992 | """ |
|
993 | 993 | # check IP |
|
994 | 994 | inherit = self.inherit_default_permissions |
|
995 | 995 | return AuthUser.check_ip_allowed(self.user_id, self.ip_addr, |
|
996 | 996 | inherit_from_default=inherit) |
|
997 | 997 | @property |
|
998 | 998 | def personal_repo_group(self): |
|
999 | 999 | return RepoGroup.get_user_personal_repo_group(self.user_id) |
|
1000 | 1000 | |
|
1001 | 1001 | @classmethod |
|
1002 | 1002 | def check_ip_allowed(cls, user_id, ip_addr, inherit_from_default): |
|
1003 | 1003 | allowed_ips = AuthUser.get_allowed_ips( |
|
1004 | 1004 | user_id, cache=True, inherit_from_default=inherit_from_default) |
|
1005 | 1005 | if check_ip_access(source_ip=ip_addr, allowed_ips=allowed_ips): |
|
1006 | 1006 | log.debug('IP:%s is in range of %s' % (ip_addr, allowed_ips)) |
|
1007 | 1007 | return True |
|
1008 | 1008 | else: |
|
1009 | 1009 | log.info('Access for IP:%s forbidden, ' |
|
1010 | 1010 | 'not in %s' % (ip_addr, allowed_ips)) |
|
1011 | 1011 | return False |
|
1012 | 1012 | |
|
1013 | 1013 | def __repr__(self): |
|
1014 | 1014 | return "<AuthUser('id:%s[%s] ip:%s auth:%s')>"\ |
|
1015 | 1015 | % (self.user_id, self.username, self.ip_addr, self.is_authenticated) |
|
1016 | 1016 | |
|
1017 | 1017 | def set_authenticated(self, authenticated=True): |
|
1018 | 1018 | if self.user_id != self.anonymous_user.user_id: |
|
1019 | 1019 | self.is_authenticated = authenticated |
|
1020 | 1020 | |
|
1021 | 1021 | def get_cookie_store(self): |
|
1022 | 1022 | return { |
|
1023 | 1023 | 'username': self.username, |
|
1024 | 1024 | 'password': md5(self.password), |
|
1025 | 1025 | 'user_id': self.user_id, |
|
1026 | 1026 | 'is_authenticated': self.is_authenticated |
|
1027 | 1027 | } |
|
1028 | 1028 | |
|
1029 | 1029 | @classmethod |
|
1030 | 1030 | def from_cookie_store(cls, cookie_store): |
|
1031 | 1031 | """ |
|
1032 | 1032 | Creates AuthUser from a cookie store |
|
1033 | 1033 | |
|
1034 | 1034 | :param cls: |
|
1035 | 1035 | :param cookie_store: |
|
1036 | 1036 | """ |
|
1037 | 1037 | user_id = cookie_store.get('user_id') |
|
1038 | 1038 | username = cookie_store.get('username') |
|
1039 | 1039 | api_key = cookie_store.get('api_key') |
|
1040 | 1040 | return AuthUser(user_id, api_key, username) |
|
1041 | 1041 | |
|
1042 | 1042 | @classmethod |
|
1043 | 1043 | def get_allowed_ips(cls, user_id, cache=False, inherit_from_default=False): |
|
1044 | 1044 | _set = set() |
|
1045 | 1045 | |
|
1046 | 1046 | if inherit_from_default: |
|
1047 | 1047 | default_ips = UserIpMap.query().filter( |
|
1048 | 1048 | UserIpMap.user == User.get_default_user(cache=True)) |
|
1049 | 1049 | if cache: |
|
1050 | 1050 | default_ips = default_ips.options( |
|
1051 | 1051 | FromCache("sql_cache_short", "get_user_ips_default")) |
|
1052 | 1052 | |
|
1053 | 1053 | # populate from default user |
|
1054 | 1054 | for ip in default_ips: |
|
1055 | 1055 | try: |
|
1056 | 1056 | _set.add(ip.ip_addr) |
|
1057 | 1057 | except ObjectDeletedError: |
|
1058 | 1058 | # since we use heavy caching sometimes it happens that |
|
1059 | 1059 | # we get deleted objects here, we just skip them |
|
1060 | 1060 | pass |
|
1061 | 1061 | |
|
1062 | 1062 | user_ips = UserIpMap.query().filter(UserIpMap.user_id == user_id) |
|
1063 | 1063 | if cache: |
|
1064 | 1064 | user_ips = user_ips.options( |
|
1065 | 1065 | FromCache("sql_cache_short", "get_user_ips_%s" % user_id)) |
|
1066 | 1066 | |
|
1067 | 1067 | for ip in user_ips: |
|
1068 | 1068 | try: |
|
1069 | 1069 | _set.add(ip.ip_addr) |
|
1070 | 1070 | except ObjectDeletedError: |
|
1071 | 1071 | # since we use heavy caching sometimes it happens that we get |
|
1072 | 1072 | # deleted objects here, we just skip them |
|
1073 | 1073 | pass |
|
1074 | 1074 | return _set or set(['0.0.0.0/0', '::/0']) |
|
1075 | 1075 | |
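When neither the user nor the default user has any IP restrictions defined, the catch-all networks are returned, which match every IPv4 and IPv6 address:

    import ipaddress

    # '0.0.0.0/0' and '::/0' are the "allow everything" networks
    assert ipaddress.ip_address(u'203.0.113.7') in ipaddress.ip_network(u'0.0.0.0/0')
    assert ipaddress.ip_address(u'2001:db8::1') in ipaddress.ip_network(u'::/0')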
|
1076 | 1076 | |
|
1077 | 1077 | def set_available_permissions(config): |
|
1078 | 1078 | """ |
|
1079 | 1079 | This function populates pylons globals with all available permissions |
|
1080 | 1080 | defined in the db. We don't want to check the db for new permissions |
|
1081 | 1081 | each time, since adding a new permission also requires an application |
|
1082 | 1082 | restart, i.e. to decorate new views with the newly created permission |
|
1083 | 1083 | |
|
1084 | 1084 | :param config: current pylons config instance |
|
1085 | 1085 | |
|
1086 | 1086 | """ |
|
1087 | 1087 | log.info('getting information about all available permissions') |
|
1088 | 1088 | try: |
|
1089 | 1089 | sa = meta.Session |
|
1090 | 1090 | all_perms = sa.query(Permission).all() |
|
1091 | 1091 | config['available_permissions'] = [x.permission_name for x in all_perms] |
|
1092 | 1092 | except Exception: |
|
1093 | 1093 | log.error(traceback.format_exc()) |
|
1094 | 1094 | finally: |
|
1095 | 1095 | meta.Session.remove() |
|
1096 | 1096 | |
|
1097 | 1097 | |
|
1098 | 1098 | def get_csrf_token(session=None, force_new=False, save_if_missing=True): |
|
1099 | 1099 | """ |
|
1100 | 1100 | Return the current authentication token, creating one if one doesn't |
|
1101 | 1101 | already exist and the save_if_missing flag is true. |
|
1102 | 1102 | |
|
1103 | 1103 | :param session: pass in the pylons session, else we use the global ones |
|
1104 | 1104 | :param force_new: force to re-generate the token and store it in session |
|
1105 | 1105 | :param save_if_missing: save the newly generated token if it's missing in |
|
1106 | 1106 | session |
|
1107 | 1107 | """ |
|
1108 | 1108 | # NOTE(marcink): probably should be replaced with below one from pyramid 1.9 |
|
1109 | 1109 | # from pyramid.csrf import get_csrf_token |
|
1110 | 1110 | |
|
1111 | 1111 | if not session: |
|
1112 | 1112 | from pylons import session |
|
1113 | 1113 | |
|
1114 | 1114 | if (csrf_token_key not in session and save_if_missing) or force_new: |
|
1115 | 1115 | token = hashlib.sha1(str(random.getrandbits(128))).hexdigest() |
|
1116 | 1116 | session[csrf_token_key] = token |
|
1117 | 1117 | if hasattr(session, 'save'): |
|
1118 | 1118 | session.save() |
|
1119 | 1119 | return session.get(csrf_token_key) |
|
1120 | 1120 | |
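Typical use from view code, a sketch only (render_form is hypothetical; csrf_token_key is the session/form key referenced above):

    def render_form(request):
        token = get_csrf_token(session=request.session)
        # embed the token as a hidden field; clients may instead send it
        # back in the X-CSRF-Token header
        return '<input type="hidden" name="%s" value="%s"/>' % (
            csrf_token_key, token)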
|
1121 | 1121 | |
|
1122 | 1122 | def get_request(perm_class): |
|
1123 | 1123 | from pyramid.threadlocal import get_current_request |
|
1124 | 1124 | pyramid_request = get_current_request() |
|
1125 | 1125 | if not pyramid_request: |
|
1126 | 1126 | # return global request of pylons in case pyramid isn't available |
|
1127 | 1127 | # NOTE(marcink): this should be removed after migration to pyramid |
|
1128 | 1128 | from pylons import request |
|
1129 | 1129 | return request |
|
1130 | 1130 | return pyramid_request |
|
1131 | 1131 | |
|
1132 | 1132 | |
|
1133 | 1133 | # CHECK DECORATORS |
|
1134 | 1134 | class CSRFRequired(object): |
|
1135 | 1135 | """ |
|
1136 | 1136 | Decorator for authenticating a form |
|
1137 | 1137 | |
|
1138 | 1138 | This decorator uses an authorization token stored in the client's |
|
1139 | 1139 | session for prevention of certain Cross-site request forgery (CSRF) |
|
1140 | 1140 | attacks (See |
|
1141 | 1141 | http://en.wikipedia.org/wiki/Cross-site_request_forgery for more |
|
1142 | 1142 | information). |
|
1143 | 1143 | |
|
1144 | 1144 | For use with the ``webhelpers.secure_form`` helper functions. |
|
1145 | 1145 | |
|
1146 | 1146 | """ |
|
1147 | 1147 | def __init__(self, token=csrf_token_key, header='X-CSRF-Token', |
|
1148 | 1148 | except_methods=None): |
|
1149 | 1149 | self.token = token |
|
1150 | 1150 | self.header = header |
|
1151 | 1151 | self.except_methods = except_methods or [] |
|
1152 | 1152 | |
|
1153 | 1153 | def __call__(self, func): |
|
1154 | 1154 | return get_cython_compat_decorator(self.__wrapper, func) |
|
1155 | 1155 | |
|
1156 | 1156 | def _get_csrf(self, _request): |
|
1157 | 1157 | return _request.POST.get(self.token, _request.headers.get(self.header)) |
|
1158 | 1158 | |
|
1159 | 1159 | def check_csrf(self, _request, cur_token): |
|
1160 | 1160 | supplied_token = self._get_csrf(_request) |
|
1161 | 1161 | return supplied_token and supplied_token == cur_token |
|
1162 | 1162 | |
|
1163 | 1163 | def _get_request(self): |
|
1164 | 1164 | return get_request(self) |
|
1165 | 1165 | |
|
1166 | 1166 | def __wrapper(self, func, *fargs, **fkwargs): |
|
1167 | 1167 | request = self._get_request() |
|
1168 | 1168 | |
|
1169 | 1169 | if request.method in self.except_methods: |
|
1170 | 1170 | return func(*fargs, **fkwargs) |
|
1171 | 1171 | |
|
1172 | 1172 | cur_token = get_csrf_token(save_if_missing=False) |
|
1173 | 1173 | if self.check_csrf(request, cur_token): |
|
1174 | 1174 | if request.POST.get(self.token): |
|
1175 | 1175 | del request.POST[self.token] |
|
1176 | 1176 | return func(*fargs, **fkwargs) |
|
1177 | 1177 | else: |
|
1178 | 1178 | reason = 'token-missing' |
|
1179 | 1179 | supplied_token = self._get_csrf(request) |
|
1180 | 1180 | if supplied_token and cur_token != supplied_token: |
|
1181 | 1181 | reason = 'token-mismatch [%s:%s]' % ( |
|
1182 | 1182 | (cur_token or '')[:6], (supplied_token or '')[:6]) |
|
1183 | 1183 | |
|
1184 | 1184 | csrf_message = \ |
|
1185 | 1185 | ("Cross-site request forgery detected, request denied. See " |
|
1186 | 1186 | "http://en.wikipedia.org/wiki/Cross-site_request_forgery for " |
|
1187 | 1187 | "more information.") |
|
1188 | 1188 | log.warn('Cross-site request forgery detected, request %r DENIED: %s ' |
|
1189 | 1189 | 'REMOTE_ADDR:%s, HEADERS:%s' % ( |
|
1190 | 1190 | request, reason, request.remote_addr, request.headers)) |
|
1191 | 1191 | |
|
1192 | 1192 | raise HTTPForbidden(explanation=csrf_message) |
|
1193 | 1193 | |
|
1194 | 1194 | |
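Usage mirrors the other decorators in this module; a hypothetical controller method might look like this (GET and HEAD are exempted because they should be side-effect free):

    class SettingsController(object):

        @CSRFRequired(except_methods=['GET', 'HEAD'])
        def settings_update(self):
            # reached only when a valid token arrived in the POST body
            # or in the X-CSRF-Token header
            ...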
|
1195 | 1195 | class LoginRequired(object): |
|
1196 | 1196 | """ |
|
1197 | 1197 | Must be logged in to execute this function, otherwise the request |
|
1198 | 1198 | is redirected to the login page |
|
1199 | 1199 | |
|
1200 | 1200 | :param auth_token_access: if set, access is also granted based on a |
|
1201 | 1201 | valid auth token with one of the given roles |
|
1202 | 1202 | """ |
|
1203 | 1203 | def __init__(self, auth_token_access=None): |
|
1204 | 1204 | self.auth_token_access = auth_token_access |
|
1205 | 1205 | |
|
1206 | 1206 | def __call__(self, func): |
|
1207 | 1207 | return get_cython_compat_decorator(self.__wrapper, func) |
|
1208 | 1208 | |
|
1209 | 1209 | def _get_request(self): |
|
1210 | 1210 | return get_request(self) |
|
1211 | 1211 | |
|
1212 | 1212 | def __wrapper(self, func, *fargs, **fkwargs): |
|
1213 | 1213 | from rhodecode.lib import helpers as h |
|
1214 | 1214 | cls = fargs[0] |
|
1215 | 1215 | user = cls._rhodecode_user |
|
1216 | 1216 | request = self._get_request() |
|
1217 | 1217 | |
|
1218 | 1218 | loc = "%s:%s" % (cls.__class__.__name__, func.__name__) |
|
1219 | 1219 | log.debug('Starting login restriction checks for user: %s' % (user,)) |
|
1220 | 1220 | # check if our IP is allowed |
|
1221 | 1221 | ip_access_valid = True |
|
1222 | 1222 | if not user.ip_allowed: |
|
1223 | 1223 | h.flash(h.literal(_('IP %s not allowed') % (user.ip_addr,)), |
|
1224 | 1224 | category='warning') |
|
1225 | 1225 | ip_access_valid = False |
|
1226 | 1226 | |
|
1227 | 1227 | # check if we used an APIKEY and it's a valid one |
|
1228 | 1228 | # against a defined whitelist of controllers for which API access is enabled |
|
1229 | 1229 | _auth_token = request.GET.get( |
|
1230 | 1230 | 'auth_token', '') or request.GET.get('api_key', '') |
|
1231 | 1231 | auth_token_access_valid = allowed_auth_token_access( |
|
1232 | 1232 | loc, auth_token=_auth_token) |
|
1233 | 1233 | |
|
1234 | 1234 | # explicit controller is enabled or API is in our whitelist |
|
1235 | 1235 | if self.auth_token_access or auth_token_access_valid: |
|
1236 | 1236 | log.debug('Checking AUTH TOKEN access for %s' % (cls,)) |
|
1237 | 1237 | db_user = user.get_instance() |
|
1238 | 1238 | |
|
1239 | 1239 | if db_user: |
|
1240 | 1240 | if self.auth_token_access: |
|
1241 | 1241 | roles = self.auth_token_access |
|
1242 | 1242 | else: |
|
1243 | 1243 | roles = [UserApiKeys.ROLE_HTTP] |
|
1244 | 1244 | token_match = db_user.authenticate_by_token( |
|
1245 | 1245 | _auth_token, roles=roles) |
|
1246 | 1246 | else: |
|
1247 | 1247 | log.debug('Unable to fetch db instance for auth user: %s', user) |
|
1248 | 1248 | token_match = False |
|
1249 | 1249 | |
|
1250 | 1250 | if _auth_token and token_match: |
|
1251 | 1251 | auth_token_access_valid = True |
|
1252 | 1252 | log.debug('AUTH TOKEN ****%s is VALID' % (_auth_token[-4:],)) |
|
1253 | 1253 | else: |
|
1254 | 1254 | auth_token_access_valid = False |
|
1255 | 1255 | if not _auth_token: |
|
1256 | 1256 | log.debug("AUTH TOKEN *NOT* present in request") |
|
1257 | 1257 | else: |
|
1258 | 1258 | log.warning( |
|
1259 | 1259 | "AUTH TOKEN ****%s *NOT* valid" % _auth_token[-4:]) |
|
1260 | 1260 | |
|
1261 | 1261 | log.debug('Checking if %s is authenticated @ %s' % (user.username, loc)) |
|
1262 | 1262 | reason = 'RHODECODE_AUTH' if user.is_authenticated \ |
|
1263 | 1263 | else 'AUTH_TOKEN_AUTH' |
|
1264 | 1264 | |
|
1265 | 1265 | if ip_access_valid and ( |
|
1266 | 1266 | user.is_authenticated or auth_token_access_valid): |
|
1267 | 1267 | log.info( |
|
1268 | 1268 | 'user %s authenticating with:%s IS authenticated on func %s' |
|
1269 | 1269 | % (user, reason, loc)) |
|
1270 | 1270 | |
|
1271 | 1271 | # update user data to check last activity |
|
1272 | 1272 | user.update_lastactivity() |
|
1273 | 1273 | Session().commit() |
|
1274 | 1274 | return func(*fargs, **fkwargs) |
|
1275 | 1275 | else: |
|
1276 | 1276 | log.warning( |
|
1277 | 1277 | 'user %s authenticating with:%s NOT authenticated on ' |
|
1278 | 1278 | 'func: %s: IP_ACCESS:%s AUTH_TOKEN_ACCESS:%s' |
|
1279 | 1279 | % (user, reason, loc, ip_access_valid, |
|
1280 | 1280 | auth_token_access_valid)) |
|
1281 | 1281 | # we preserve the GET params |
|
1282 | 1282 | came_from = request.path_qs |
|
1283 | 1283 | log.debug('redirecting to login page with %s' % (came_from,)) |
|
1284 | 1284 | raise HTTPFound( |
|
1285 | 1285 | h.route_path('login', _query={'came_from': came_from})) |
|
1286 | 1286 | |
|
1287 | 1287 | |
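A sketch of a view method combining login enforcement with token roles (RepoSummaryView is illustrative; UserApiKeys.ROLE_API is assumed to be one of the roles defined on the model):

    class RepoSummaryView(object):

        @LoginRequired(auth_token_access=[UserApiKeys.ROLE_API])
        def summary(self):
            # reachable by logged-in users, and also via
            # ?auth_token=<token> for tokens carrying the API role
            ...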
|
1288 | 1288 | class NotAnonymous(object): |
|
1289 | 1289 | """ |
|
1290 | 1290 | Must be a registered (non-anonymous) user to execute this function, |
|
1291 | 1291 | otherwise redirect to the login page |
|
1292 | 1292 | """ |
|
1293 | 1293 | |
|
1294 | 1294 | def __call__(self, func): |
|
1295 | 1295 | return get_cython_compat_decorator(self.__wrapper, func) |
|
1296 | 1296 | |
|
1297 | 1297 | def _get_request(self): |
|
1298 | 1298 | return get_request(self) |
|
1299 | 1299 | |
|
1300 | 1300 | def __wrapper(self, func, *fargs, **fkwargs): |
|
1301 | 1301 | import rhodecode.lib.helpers as h |
|
1302 | 1302 | cls = fargs[0] |
|
1303 | 1303 | self.user = cls._rhodecode_user |
|
1304 | 1304 | request = self._get_request() |
|
1305 | 1305 | |
|
1306 | 1306 | log.debug('Checking if user is not anonymous @%s' % cls) |
|
1307 | 1307 | |
|
1308 | 1308 | anonymous = self.user.username == User.DEFAULT_USER |
|
1309 | 1309 | |
|
1310 | 1310 | if anonymous: |
|
1311 | 1311 | came_from = request.path_qs |
|
1312 | 1312 | h.flash(_('You need to be a registered user to ' |
|
1313 | 1313 | 'perform this action'), |
|
1314 | 1314 | category='warning') |
|
1315 | 1315 | raise HTTPFound( |
|
1316 | 1316 | h.route_path('login', _query={'came_from': came_from})) |
|
1317 | 1317 | else: |
|
1318 | 1318 | return func(*fargs, **fkwargs) |
|
1319 | 1319 | |
|
1320 | 1320 | |
|
1321 | 1321 | class XHRRequired(object): |
|
1322 | 1322 | # TODO(marcink): remove this in favor of the predicates in pyramid routes |
|
1323 | 1323 | |
|
1324 | 1324 | def __call__(self, func): |
|
1325 | 1325 | return get_cython_compat_decorator(self.__wrapper, func) |
|
1326 | 1326 | |
|
1327 | 1327 | def _get_request(self): |
|
1328 | 1328 | return get_request(self) |
|
1329 | 1329 | |
|
1330 | 1330 | def __wrapper(self, func, *fargs, **fkwargs): |
|
1331 | 1331 | from pylons.controllers.util import abort |
|
1332 | 1332 | request = self._get_request() |
|
1333 | 1333 | |
|
1334 | 1334 | log.debug('Checking if request is XMLHttpRequest (XHR)') |
|
1335 | 1335 | xhr_message = 'This is not a valid XMLHttpRequest (XHR) request' |
|
1336 | 1336 | |
|
1337 | 1337 | if not request.is_xhr: |
|
1338 | 1338 | abort(400, detail=xhr_message) |
|
1339 | 1339 | |
|
1340 | 1340 | return func(*fargs, **fkwargs) |
|
1341 | 1341 | |
|
1342 | 1342 | |
|
1343 | 1343 | class HasAcceptedRepoType(object): |
|
1344 | 1344 | """ |
|
1345 | 1345 | Check if requested repo is within given repo type aliases |
|
1346 | 1346 | """ |
|
1347 | 1347 | |
|
1348 | 1348 | # TODO(marcink): remove this in favor of the predicates in pyramid routes |
|
1349 | 1349 | |
|
1350 | 1350 | def __init__(self, *repo_type_list): |
|
1351 | 1351 | self.repo_type_list = set(repo_type_list) |
|
1352 | 1352 | |
|
1353 | 1353 | def __call__(self, func): |
|
1354 | 1354 | return get_cython_compat_decorator(self.__wrapper, func) |
|
1355 | 1355 | |
|
1356 | 1356 | def __wrapper(self, func, *fargs, **fkwargs): |
|
1357 | 1357 | import rhodecode.lib.helpers as h |
|
1358 | 1358 | cls = fargs[0] |
|
1359 | 1359 | rhodecode_repo = cls.rhodecode_repo |
|
1360 | 1360 | |
|
1361 | 1361 | log.debug('%s checking repo type for %s in %s', |
|
1362 | 1362 | self.__class__.__name__, |
|
1363 | 1363 | rhodecode_repo.alias, self.repo_type_list) |
|
1364 | 1364 | |
|
1365 | 1365 | if rhodecode_repo.alias in self.repo_type_list: |
|
1366 | 1366 | return func(*fargs, **fkwargs) |
|
1367 | 1367 | else: |
|
1368 | 1368 | h.flash(h.literal( |
|
1369 | 1369 | _('Action not supported for %s.') % rhodecode_repo.alias), |
|
1370 | 1370 | category='warning') |
|
1371 | 1371 | raise HTTPFound( |
|
1372 | 1372 | h.route_path('repo_summary', |
|
1373 | 1373 | repo_name=cls.rhodecode_db_repo.repo_name)) |
|
1374 | 1374 | |
|
1375 | 1375 | |
|
1376 | 1376 | class PermsDecorator(object): |
|
1377 | 1377 | """ |
|
1378 | 1378 | Base class for controller decorators, we extract the current user from |
|
1379 | 1379 | the class itself, which has it stored in base controllers |
|
1380 | 1380 | """ |
|
1381 | 1381 | |
|
1382 | 1382 | def __init__(self, *required_perms): |
|
1383 | 1383 | self.required_perms = set(required_perms) |
|
1384 | 1384 | |
|
1385 | 1385 | def __call__(self, func): |
|
1386 | 1386 | return get_cython_compat_decorator(self.__wrapper, func) |
|
1387 | 1387 | |
|
1388 | 1388 | def _get_request(self): |
|
1389 | 1389 | return get_request(self) |
|
1390 | 1390 | |
|
1391 | 1391 | def _get_came_from(self): |
|
1392 | 1392 | _request = self._get_request() |
|
1393 | 1393 | |
|
1394 | 1394 | # both pylons/pyramid has this attribute |
|
1395 | 1395 | return _request.path_qs |
|
1396 | 1396 | |
|
1397 | 1397 | def __wrapper(self, func, *fargs, **fkwargs): |
|
1398 | 1398 | import rhodecode.lib.helpers as h |
|
1399 | 1399 | cls = fargs[0] |
|
1400 | 1400 | _user = cls._rhodecode_user |
|
1401 | 1401 | |
|
1402 | 1402 | log.debug('checking %s permissions %s for %s %s', |
|
1403 | 1403 | self.__class__.__name__, self.required_perms, cls, _user) |
|
1404 | 1404 | |
|
1405 | 1405 | if self.check_permissions(_user): |
|
1406 | 1406 | log.debug('Permission granted for %s %s', cls, _user) |
|
1407 | 1407 | return func(*fargs, **fkwargs) |
|
1408 | 1408 | |
|
1409 | 1409 | else: |
|
1410 | 1410 | log.debug('Permission denied for %s %s', cls, _user) |
|
1411 | 1411 | anonymous = _user.username == User.DEFAULT_USER |
|
1412 | 1412 | |
|
1413 | 1413 | if anonymous: |
|
1414 | 1414 | came_from = self._get_came_from() |
|
1415 | 1415 | h.flash(_('You need to be signed in to view this page'), |
|
1416 | 1416 | category='warning') |
|
1417 | 1417 | raise HTTPFound( |
|
1418 | 1418 | h.route_path('login', _query={'came_from': came_from})) |
|
1419 | 1419 | |
|
1420 | 1420 | else: |
|
1421 | 1421 | # redirect with 404 to prevent resource discovery |
|
1422 | 1422 | raise HTTPNotFound() |
|
1423 | 1423 | |
|
1424 | 1424 | def check_permissions(self, user): |
|
1425 | 1425 | """Dummy function for overriding""" |
|
1426 | 1426 | raise NotImplementedError( |
|
1427 | 1427 | 'You have to implement this method in a child class') |
|
1428 | 1428 | |
|
1429 | 1429 | |
|
1430 | 1430 | class HasPermissionAllDecorator(PermsDecorator): |
|
1431 | 1431 | """ |
|
1432 | 1432 | Checks for access permission for all given predicates. All of them |
|
1433 | 1433 | have to be met in order to fulfill the request |
|
1434 | 1434 | """ |
|
1435 | 1435 | |
|
1436 | 1436 | def check_permissions(self, user): |
|
1437 | 1437 | perms = user.permissions_with_scope({}) |
|
1438 | 1438 | if self.required_perms.issubset(perms['global']): |
|
1439 | 1439 | return True |
|
1440 | 1440 | return False |
|
1441 | 1441 | |
|
1442 | 1442 | |
|
1443 | 1443 | class HasPermissionAnyDecorator(PermsDecorator): |
|
1444 | 1444 | """ |
|
1445 | 1445 | Checks for access permission for any of given predicates. In order to |
|
1446 | 1446 | fulfill the request any of the predicates must be met |
|
1447 | 1447 | """ |
|
1448 | 1448 | |
|
1449 | 1449 | def check_permissions(self, user): |
|
1450 | 1450 | perms = user.permissions_with_scope({}) |
|
1451 | 1451 | if self.required_perms.intersection(perms['global']): |
|
1452 | 1452 | return True |
|
1453 | 1453 | return False |
|
1454 | 1454 | |
|
1455 | 1455 | |
|
1456 | 1456 | class HasRepoPermissionAllDecorator(PermsDecorator): |
|
1457 | 1457 | """ |
|
1458 | 1458 | Checks for access permission for all given predicates for specific |
|
1459 | 1459 | repository. All of them have to be met in order to fulfill the request |
|
1460 | 1460 | """ |
|
1461 | 1461 | def _get_repo_name(self): |
|
1462 | 1462 | _request = self._get_request() |
|
1463 | 1463 | return get_repo_slug(_request) |
|
1464 | 1464 | |
|
1465 | 1465 | def check_permissions(self, user): |
|
1466 | 1466 | perms = user.permissions |
|
1467 | 1467 | repo_name = self._get_repo_name() |
|
1468 | 1468 | |
|
1469 | 1469 | try: |
|
1470 | 1470 | user_perms = set([perms['repositories'][repo_name]]) |
|
1471 | 1471 | except KeyError: |
|
1472 | 1472 | log.debug('cannot locate repo with name: `%s` in permissions defs', |
|
1473 | 1473 | repo_name) |
|
1474 | 1474 | return False |
|
1475 | 1475 | |
|
1476 | 1476 | log.debug('checking `%s` permissions for repo `%s`', |
|
1477 | 1477 | user_perms, repo_name) |
|
1478 | 1478 | if self.required_perms.issubset(user_perms): |
|
1479 | 1479 | return True |
|
1480 | 1480 | return False |
|
1481 | 1481 | |
|
1482 | 1482 | |
|
1483 | 1483 | class HasRepoPermissionAnyDecorator(PermsDecorator): |
|
1484 | 1484 | """ |
|
1485 | 1485 | Checks for access permission for any of given predicates for specific |
|
1486 | 1486 | repository. In order to fulfill the request any of the predicates must be met |
|
1487 | 1487 | """ |
|
1488 | 1488 | def _get_repo_name(self): |
|
1489 | 1489 | _request = self._get_request() |
|
1490 | 1490 | return get_repo_slug(_request) |
|
1491 | 1491 | |
|
1492 | 1492 | def check_permissions(self, user): |
|
1493 | 1493 | perms = user.permissions |
|
1494 | 1494 | repo_name = self._get_repo_name() |
|
1495 | 1495 | |
|
1496 | 1496 | try: |
|
1497 | 1497 | user_perms = set([perms['repositories'][repo_name]]) |
|
1498 | 1498 | except KeyError: |
|
1499 | 1499 | log.debug('cannot locate repo with name: `%s` in permissions defs', |
|
1500 | 1500 | repo_name) |
|
1501 | 1501 | return False |
|
1502 | 1502 | |
|
1503 | 1503 | log.debug('checking `%s` permissions for repo `%s`', |
|
1504 | 1504 | user_perms, repo_name) |
|
1505 | 1505 | if self.required_perms.intersection(user_perms): |
|
1506 | 1506 | return True |
|
1507 | 1507 | return False |
|
1508 | 1508 | |
|
1509 | 1509 | |
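The repo-scoped decorators are typically stacked under LoginRequired; an illustrative guard granting access for any read-or-better permission:

    class RepoFilesView(object):

        @LoginRequired()
        @HasRepoPermissionAnyDecorator(
            'repository.read', 'repository.write', 'repository.admin')
        def repo_files(self):
            # the repo name is resolved from the request via get_repo_slug()
            ...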
|
1510 | 1510 | class HasRepoGroupPermissionAllDecorator(PermsDecorator): |
|
1511 | 1511 | """ |
|
1512 | 1512 | Checks for access permission for all given predicates for specific |
|
1513 | 1513 | repository group. All of them have to be meet in order to |
|
1514 | 1514 | fulfill the request |
|
1515 | 1515 | """ |
|
1516 | 1516 | def _get_repo_group_name(self): |
|
1517 | 1517 | _request = self._get_request() |
|
1518 | 1518 | return get_repo_group_slug(_request) |
|
1519 | 1519 | |
|
1520 | 1520 | def check_permissions(self, user): |
|
1521 | 1521 | perms = user.permissions |
|
1522 | 1522 | group_name = self._get_repo_group_name() |
|
1523 | 1523 | try: |
|
1524 | 1524 | user_perms = set([perms['repositories_groups'][group_name]]) |
|
1525 | 1525 | except KeyError: |
|
1526 | 1526 | log.debug('cannot locate repo group with name: `%s` in permissions defs', |
|
1527 | 1527 | group_name) |
|
1528 | 1528 | return False |
|
1529 | 1529 | |
|
1530 | 1530 | log.debug('checking `%s` permissions for repo group `%s`', |
|
1531 | 1531 | user_perms, group_name) |
|
1532 | 1532 | if self.required_perms.issubset(user_perms): |
|
1533 | 1533 | return True |
|
1534 | 1534 | return False |
|
1535 | 1535 | |
|
1536 | 1536 | |
|
1537 | 1537 | class HasRepoGroupPermissionAnyDecorator(PermsDecorator): |
|
1538 | 1538 | """ |
|
1539 | 1539 | Checks for access permission for any of given predicates for specific |
|
1540 | 1540 | repository group. In order to fulfill the request any |
|
1541 | 1541 | of predicates must be met |
|
1542 | 1542 | """ |
|
1543 | 1543 | def _get_repo_group_name(self): |
|
1544 | 1544 | _request = self._get_request() |
|
1545 | 1545 | return get_repo_group_slug(_request) |
|
1546 | 1546 | |
|
1547 | 1547 | def check_permissions(self, user): |
|
1548 | 1548 | perms = user.permissions |
|
1549 | 1549 | group_name = self._get_repo_group_name() |
|
1550 | 1550 | |
|
1551 | 1551 | try: |
|
1552 | 1552 | user_perms = set([perms['repositories_groups'][group_name]]) |
|
1553 | 1553 | except KeyError: |
|
1554 | 1554 | log.debug('cannot locate repo group with name: `%s` in permissions defs', |
|
1555 | 1555 | group_name) |
|
1556 | 1556 | return False |
|
1557 | 1557 | |
|
1558 | 1558 | log.debug('checking `%s` permissions for repo group `%s`', |
|
1559 | 1559 | user_perms, group_name) |
|
1560 | 1560 | if self.required_perms.intersection(user_perms): |
|
1561 | 1561 | return True |
|
1562 | 1562 | return False |
|
1563 | 1563 | |
|
1564 | 1564 | |
|
1565 | 1565 | class HasUserGroupPermissionAllDecorator(PermsDecorator): |
|
1566 | 1566 | """ |
|
1567 | 1567 | Checks for access permission for all given predicates for specific |
|
1568 | 1568 | user group. All of them have to be met in order to fulfill the request |
|
1569 | 1569 | """ |
|
1570 | 1570 | def _get_user_group_name(self): |
|
1571 | 1571 | _request = self._get_request() |
|
1572 | 1572 | return get_user_group_slug(_request) |
|
1573 | 1573 | |
|
1574 | 1574 | def check_permissions(self, user): |
|
1575 | 1575 | perms = user.permissions |
|
1576 | 1576 | group_name = self._get_user_group_name() |
|
1577 | 1577 | try: |
|
1578 | 1578 | user_perms = set([perms['user_groups'][group_name]]) |
|
1579 | 1579 | except KeyError: |
|
1580 | 1580 | return False |
|
1581 | 1581 | |
|
1582 | 1582 | if self.required_perms.issubset(user_perms): |
|
1583 | 1583 | return True |
|
1584 | 1584 | return False |
|
1585 | 1585 | |
|
1586 | 1586 | |
|
1587 | 1587 | class HasUserGroupPermissionAnyDecorator(PermsDecorator): |
|
1588 | 1588 | """ |
|
1589 | 1589 | Checks for access permission for any of given predicates for specific |
|
1590 | 1590 | user group. In order to fulfill the request any of the predicates must be met |
|
1591 | 1591 | """ |
|
1592 | 1592 | def _get_user_group_name(self): |
|
1593 | 1593 | _request = self._get_request() |
|
1594 | 1594 | return get_user_group_slug(_request) |
|
1595 | 1595 | |
|
1596 | 1596 | def check_permissions(self, user): |
|
1597 | 1597 | perms = user.permissions |
|
1598 | 1598 | group_name = self._get_user_group_name() |
|
1599 | 1599 | try: |
|
1600 | 1600 | user_perms = set([perms['user_groups'][group_name]]) |
|
1601 | 1601 | except KeyError: |
|
1602 | 1602 | return False |
|
1603 | 1603 | |
|
1604 | 1604 | if self.required_perms.intersection(user_perms): |
|
1605 | 1605 | return True |
|
1606 | 1606 | return False |
|
1607 | 1607 | |
|
1608 | 1608 | |
|
1609 | 1609 | # CHECK FUNCTIONS |
|
1610 | 1610 | class PermsFunction(object): |
|
1611 | 1611 | """Base function for other check functions""" |
|
1612 | 1612 | |
|
1613 | 1613 | def __init__(self, *perms): |
|
1614 | 1614 | self.required_perms = set(perms) |
|
1615 | 1615 | self.repo_name = None |
|
1616 | 1616 | self.repo_group_name = None |
|
1617 | 1617 | self.user_group_name = None |
|
1618 | 1618 | |
|
1619 | 1619 | def __bool__(self): |
|
1620 | 1620 | frame = inspect.currentframe() |
|
1621 | 1621 | stack_trace = traceback.format_stack(frame) |
|
1622 | 1622 | log.error('Checking bool value on a class instance of perm ' |
|
1623 | 1623 | 'function is not allowed: %s' % ''.join(stack_trace)) |
|
1624 | 1624 | # rather than raising an error, we always return False here, so if |
|
1625 | 1625 | # someone accidentally checks the truth value of a bare instance it |
|
1626 | 1626 | # always ends up returning False |
|
1627 | 1627 | return False |
|
1628 | 1628 | __nonzero__ = __bool__ |
|
1629 | 1629 | |
|
1630 | 1630 | def __call__(self, check_location='', user=None): |
|
1631 | 1631 | if not user: |
|
1632 | 1632 | log.debug('Using user attribute from global request') |
|
1633 | 1633 | # TODO: remove this someday, pass user as an attribute here |
|
1634 | 1634 | request = self._get_request() |
|
1635 | 1635 | user = request.user |
|
1636 | 1636 | |
|
1637 | 1637 | # init auth user if not already given |
|
1638 | 1638 | if not isinstance(user, AuthUser): |
|
1639 | 1639 | log.debug('Wrapping user %s into AuthUser', user) |
|
1640 | 1640 | user = AuthUser(user.user_id) |
|
1641 | 1641 | |
|
1642 | 1642 | cls_name = self.__class__.__name__ |
|
1643 | 1643 | check_scope = self._get_check_scope(cls_name) |
|
1644 | 1644 | check_location = check_location or 'unspecified location' |
|
1645 | 1645 | |
|
1646 | 1646 | log.debug('checking cls:%s %s usr:%s %s @ %s', cls_name, |
|
1647 | 1647 | self.required_perms, user, check_scope, check_location) |
|
1648 | 1648 | if not user: |
|
1649 | 1649 | log.warning('Empty user given for permission check') |
|
1650 | 1650 | return False |
|
1651 | 1651 | |
|
1652 | 1652 | if self.check_permissions(user): |
|
1653 | 1653 | log.debug('Permission to `%s` GRANTED for user:`%s` @ %s', |
|
1654 | 1654 | check_scope, user, check_location) |
|
1655 | 1655 | return True |
|
1656 | 1656 | |
|
1657 | 1657 | else: |
|
1658 | 1658 | log.debug('Permission to `%s` DENIED for user:`%s` @ %s', |
|
1659 | 1659 | check_scope, user, check_location) |
|
1660 | 1660 | return False |
|
1661 | 1661 | |
|
1662 | 1662 | def _get_request(self): |
|
1663 | 1663 | return get_request(self) |
|
1664 | 1664 | |
|
1665 | 1665 | def _get_check_scope(self, cls_name): |
|
1666 | 1666 | return { |
|
1667 | 1667 | 'HasPermissionAll': 'GLOBAL', |
|
1668 | 1668 | 'HasPermissionAny': 'GLOBAL', |
|
1669 | 1669 | 'HasRepoPermissionAll': 'repo:%s' % self.repo_name, |
|
1670 | 1670 | 'HasRepoPermissionAny': 'repo:%s' % self.repo_name, |
|
1671 | 1671 | 'HasRepoGroupPermissionAll': 'repo_group:%s' % self.repo_group_name, |
|
1672 | 1672 | 'HasRepoGroupPermissionAny': 'repo_group:%s' % self.repo_group_name, |
|
1673 | 1673 | 'HasUserGroupPermissionAll': 'user_group:%s' % self.user_group_name, |
|
1674 | 1674 | 'HasUserGroupPermissionAny': 'user_group:%s' % self.user_group_name, |
|
1675 | 1675 | }.get(cls_name, '?:%s' % cls_name) |
|
1676 | 1676 | |
|
1677 | 1677 | def check_permissions(self, user): |
|
1678 | 1678 | """Dummy function for overriding""" |
|
1679 | 1679 | raise NotImplementedError('Implement this method in a child class') |
|
1680 | 1680 | |
|
1681 | 1681 | |
|
1682 | 1682 | class HasPermissionAll(PermsFunction): |
|
1683 | 1683 | def check_permissions(self, user): |
|
1684 | 1684 | perms = user.permissions_with_scope({}) |
|
1685 | 1685 | if self.required_perms.issubset(perms.get('global')): |
|
1686 | 1686 | return True |
|
1687 | 1687 | return False |
|
1688 | 1688 | |
|
1689 | 1689 | |
|
1690 | 1690 | class HasPermissionAny(PermsFunction): |
|
1691 | 1691 | def check_permissions(self, user): |
|
1692 | 1692 | perms = user.permissions_with_scope({}) |
|
1693 | 1693 | if self.required_perms.intersection(perms.get('global')): |
|
1694 | 1694 | return True |
|
1695 | 1695 | return False |
|
1696 | 1696 | |
|
1697 | 1697 | |
|
1698 | 1698 | class HasRepoPermissionAll(PermsFunction): |
|
1699 | 1699 | def __call__(self, repo_name=None, check_location='', user=None): |
|
1700 | 1700 | self.repo_name = repo_name |
|
1701 | 1701 | return super(HasRepoPermissionAll, self).__call__(check_location, user) |
|
1702 | 1702 | |
|
1703 | 1703 | def _get_repo_name(self): |
|
1704 | 1704 | if not self.repo_name: |
|
1705 | 1705 | _request = self._get_request() |
|
1706 | 1706 | self.repo_name = get_repo_slug(_request) |
|
1707 | 1707 | return self.repo_name |
|
1708 | 1708 | |
|
1709 | 1709 | def check_permissions(self, user): |
|
1710 | 1710 | self.repo_name = self._get_repo_name() |
|
1711 | 1711 | perms = user.permissions |
|
1712 | 1712 | try: |
|
1713 | 1713 | user_perms = set([perms['repositories'][self.repo_name]]) |
|
1714 | 1714 | except KeyError: |
|
1715 | 1715 | return False |
|
1716 | 1716 | if self.required_perms.issubset(user_perms): |
|
1717 | 1717 | return True |
|
1718 | 1718 | return False |
|
1719 | 1719 | |
|
1720 | 1720 | |
|
1721 | 1721 | class HasRepoPermissionAny(PermsFunction): |
|
1722 | 1722 | def __call__(self, repo_name=None, check_location='', user=None): |
|
1723 | 1723 | self.repo_name = repo_name |
|
1724 | 1724 | return super(HasRepoPermissionAny, self).__call__(check_location, user) |
|
1725 | 1725 | |
|
1726 | 1726 | def _get_repo_name(self): |
|
1727 | 1727 | if not self.repo_name: |
|
1728 | 1728 | _request = self._get_request() |
|
1729 | 1729 | self.repo_name = get_repo_slug(_request) |
|
1730 | 1730 | return self.repo_name |
|
1731 | 1731 | |
|
1732 | 1732 | def check_permissions(self, user): |
|
1733 | 1733 | self.repo_name = self._get_repo_name() |
|
1734 | 1734 | perms = user.permissions |
|
1735 | 1735 | try: |
|
1736 | 1736 | user_perms = set([perms['repositories'][self.repo_name]]) |
|
1737 | 1737 | except KeyError: |
|
1738 | 1738 | return False |
|
1739 | 1739 | if self.required_perms.intersection(user_perms): |
|
1740 | 1740 | return True |
|
1741 | 1741 | return False |
|
1742 | 1742 | |
|
1743 | 1743 | |
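Unlike the decorators, the check functions are called inline, e.g. from templates or business logic running in a request context (the repo name below is made up):

    can_write = HasRepoPermissionAny(
        'repository.write', 'repository.admin')(
            repo_name='group/my-repo', check_location='pull_request_create')
    if can_write:
        ...  # e.g. render the "create pull request" button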
|
1744 | 1744 | class HasRepoGroupPermissionAny(PermsFunction): |
|
1745 | 1745 | def __call__(self, group_name=None, check_location='', user=None): |
|
1746 | 1746 | self.repo_group_name = group_name |
|
1747 | 1747 | return super(HasRepoGroupPermissionAny, self).__call__( |
|
1748 | 1748 | check_location, user) |
|
1749 | 1749 | |
|
1750 | 1750 | def check_permissions(self, user): |
|
1751 | 1751 | perms = user.permissions |
|
1752 | 1752 | try: |
|
1753 | 1753 | user_perms = set( |
|
1754 | 1754 | [perms['repositories_groups'][self.repo_group_name]]) |
|
1755 | 1755 | except KeyError: |
|
1756 | 1756 | return False |
|
1757 | 1757 | if self.required_perms.intersection(user_perms): |
|
1758 | 1758 | return True |
|
1759 | 1759 | return False |
|
1760 | 1760 | |
|
1761 | 1761 | |
|
1762 | 1762 | class HasRepoGroupPermissionAll(PermsFunction): |
|
1763 | 1763 | def __call__(self, group_name=None, check_location='', user=None): |
|
1764 | 1764 | self.repo_group_name = group_name |
|
1765 | 1765 | return super(HasRepoGroupPermissionAll, self).__call__( |
|
1766 | 1766 | check_location, user) |
|
1767 | 1767 | |
|
1768 | 1768 | def check_permissions(self, user): |
|
1769 | 1769 | perms = user.permissions |
|
1770 | 1770 | try: |
|
1771 | 1771 | user_perms = set( |
|
1772 | 1772 | [perms['repositories_groups'][self.repo_group_name]]) |
|
1773 | 1773 | except KeyError: |
|
1774 | 1774 | return False |
|
1775 | 1775 | if self.required_perms.issubset(user_perms): |
|
1776 | 1776 | return True |
|
1777 | 1777 | return False |
|
1778 | 1778 | |
|
1779 | 1779 | |
|
1780 | 1780 | class HasUserGroupPermissionAny(PermsFunction): |
|
1781 | 1781 | def __call__(self, user_group_name=None, check_location='', user=None): |
|
1782 | 1782 | self.user_group_name = user_group_name |
|
1783 | 1783 | return super(HasUserGroupPermissionAny, self).__call__( |
|
1784 | 1784 | check_location, user) |
|
1785 | 1785 | |
|
1786 | 1786 | def check_permissions(self, user): |
|
1787 | 1787 | perms = user.permissions |
|
1788 | 1788 | try: |
|
1789 | 1789 | user_perms = set([perms['user_groups'][self.user_group_name]]) |
|
1790 | 1790 | except KeyError: |
|
1791 | 1791 | return False |
|
1792 | 1792 | if self.required_perms.intersection(user_perms): |
|
1793 | 1793 | return True |
|
1794 | 1794 | return False |
|
1795 | 1795 | |
|
1796 | 1796 | |
|
1797 | 1797 | class HasUserGroupPermissionAll(PermsFunction): |
|
1798 | 1798 | def __call__(self, user_group_name=None, check_location='', user=None): |
|
1799 | 1799 | self.user_group_name = user_group_name |
|
1800 | 1800 | return super(HasUserGroupPermissionAll, self).__call__( |
|
1801 | 1801 | check_location, user) |
|
1802 | 1802 | |
|
1803 | 1803 | def check_permissions(self, user): |
|
1804 | 1804 | perms = user.permissions |
|
1805 | 1805 | try: |
|
1806 | 1806 | user_perms = set([perms['user_groups'][self.user_group_name]]) |
|
1807 | 1807 | except KeyError: |
|
1808 | 1808 | return False |
|
1809 | 1809 | if self.required_perms.issubset(user_perms): |
|
1810 | 1810 | return True |
|
1811 | 1811 | return False |
|
1812 | 1812 | |
|
1813 | 1813 | |
|
1814 | 1814 | # SPECIAL VERSION TO HANDLE MIDDLEWARE AUTH |
|
1815 | 1815 | class HasPermissionAnyMiddleware(object): |
|
1816 | 1816 | def __init__(self, *perms): |
|
1817 | 1817 | self.required_perms = set(perms) |
|
1818 | 1818 | |
|
1819 | 1819 | def __call__(self, user, repo_name): |
|
1820 | 1820 | # repo_name MUST be unicode, since we handle keys in permission |
|
1821 | 1821 | # dict by unicode |
|
1822 | 1822 | repo_name = safe_unicode(repo_name) |
|
1823 | 1823 | user = AuthUser(user.user_id) |
|
1824 | 1824 | log.debug( |
|
1825 | 1825 | 'Checking VCS protocol permissions %s for user:%s repo:`%s`', |
|
1826 | 1826 | self.required_perms, user, repo_name) |
|
1827 | 1827 | |
|
1828 | 1828 | if self.check_permissions(user, repo_name): |
|
1829 | 1829 | log.debug('Permission to repo:`%s` GRANTED for user:%s @ %s', |
|
1830 | 1830 | repo_name, user, 'PermissionMiddleware') |
|
1831 | 1831 | return True |
|
1832 | 1832 | |
|
1833 | 1833 | else: |
|
1834 | 1834 | log.debug('Permission to repo:`%s` DENIED for user:%s @ %s', |
|
1835 | 1835 | repo_name, user, 'PermissionMiddleware') |
|
1836 | 1836 | return False |
|
1837 | 1837 | |
|
1838 | 1838 | def check_permissions(self, user, repo_name): |
|
1839 | 1839 | perms = user.permissions_with_scope({'repo_name': repo_name}) |
|
1840 | 1840 | |
|
1841 | 1841 | try: |
|
1842 | 1842 | user_perms = set([perms['repositories'][repo_name]]) |
|
1843 | 1843 | except Exception: |
|
1844 | 1844 | log.exception('Error while accessing user permissions') |
|
1845 | 1845 | return False |
|
1846 | 1846 | |
|
1847 | 1847 | if self.required_perms.intersection(user_perms): |
|
1848 | 1848 | return True |
|
1849 | 1849 | return False |
|
1850 | 1850 | |
|
1851 | 1851 | |
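A sketch of how the middleware variant guards a VCS request; db_user and repo_name are assumed to come from the request environment:

    vcs_perm_check = HasPermissionAnyMiddleware(
        'repository.read', 'repository.write', 'repository.admin')

    if not vcs_perm_check(db_user, repo_name):
        raise HTTPForbidden()  # deny the clone/push at the protocol level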
|
1852 | 1852 | # SPECIAL VERSION TO HANDLE API AUTH |
|
1853 | 1853 | class _BaseApiPerm(object): |
|
1854 | 1854 | def __init__(self, *perms): |
|
1855 | 1855 | self.required_perms = set(perms) |
|
1856 | 1856 | |
|
1857 | 1857 | def __call__(self, check_location=None, user=None, repo_name=None, |
|
1858 | 1858 | group_name=None, user_group_name=None): |
|
1859 | 1859 | cls_name = self.__class__.__name__ |
|
1860 | 1860 | check_scope = 'global:%s' % (self.required_perms,) |
|
1861 | 1861 | if repo_name: |
|
1862 | 1862 | check_scope += ', repo_name:%s' % (repo_name,) |
|
1863 | 1863 | |
|
1864 | 1864 | if group_name: |
|
1865 | 1865 | check_scope += ', repo_group_name:%s' % (group_name,) |
|
1866 | 1866 | |
|
1867 | 1867 | if user_group_name: |
|
1868 | 1868 | check_scope += ', user_group_name:%s' % (user_group_name,) |
|
1869 | 1869 | |
|
1870 | 1870 | log.debug( |
|
1871 | 1871 | 'checking cls:%s %s %s @ %s' |
|
1872 | 1872 | % (cls_name, self.required_perms, check_scope, check_location)) |
|
1873 | 1873 | if not user: |
|
1874 | 1874 | log.debug('Empty User passed into arguments') |
|
1875 | 1875 | return False |
|
1876 | 1876 | |
|
1877 | 1877 | # process user |
|
1878 | 1878 | if not isinstance(user, AuthUser): |
|
1879 | 1879 | user = AuthUser(user.user_id) |
|
1880 | 1880 | if not check_location: |
|
1881 | 1881 | check_location = 'unspecified' |
|
1882 | 1882 | if self.check_permissions(user.permissions, repo_name, group_name, |
|
1883 | 1883 | user_group_name): |
|
1884 | 1884 | log.debug('Permission to `%s` GRANTED for user:`%s` @ %s', |
|
1885 | 1885 | check_scope, user, check_location) |
|
1886 | 1886 | return True |
|
1887 | 1887 | |
|
1888 | 1888 | else: |
|
1889 | 1889 | log.debug('Permission to `%s` DENIED for user:`%s` @ %s', |
|
1890 | 1890 | check_scope, user, check_location) |
|
1891 | 1891 | return False |
|
1892 | 1892 | |
|
1893 | 1893 | def check_permissions(self, perm_defs, repo_name=None, group_name=None, |
|
1894 | 1894 | user_group_name=None): |
|
1895 | 1895 | """ |
|
1896 | 1896 | Implement in a child class; should return True if permissions are ok, |
|
1897 | 1897 | False otherwise |
|
1898 | 1898 | |
|
1899 | 1899 | :param perm_defs: dict with permission definitions |
|
1900 | 1900 | :param repo_name: repo name |
|
1901 | 1901 | """ |
|
1902 | 1902 | raise NotImplementedError() |
|
1903 | 1903 | |
|
1904 | 1904 | |
|
1905 | 1905 | class HasPermissionAllApi(_BaseApiPerm): |
|
1906 | 1906 | def check_permissions(self, perm_defs, repo_name=None, group_name=None, |
|
1907 | 1907 | user_group_name=None): |
|
1908 | 1908 | if self.required_perms.issubset(perm_defs.get('global')): |
|
1909 | 1909 | return True |
|
1910 | 1910 | return False |
|
1911 | 1911 | |
|
1912 | 1912 | |
|
1913 | 1913 | class HasPermissionAnyApi(_BaseApiPerm): |
|
1914 | 1914 | def check_permissions(self, perm_defs, repo_name=None, group_name=None, |
|
1915 | 1915 | user_group_name=None): |
|
1916 | 1916 | if self.required_perms.intersection(perm_defs.get('global')): |
|
1917 | 1917 | return True |
|
1918 | 1918 | return False |
|
1919 | 1919 | |
|
1920 | 1920 | |
|
1921 | 1921 | class HasRepoPermissionAllApi(_BaseApiPerm): |
|
1922 | 1922 | def check_permissions(self, perm_defs, repo_name=None, group_name=None, |
|
1923 | 1923 | user_group_name=None): |
|
1924 | 1924 | try: |
|
1925 | 1925 | _user_perms = set([perm_defs['repositories'][repo_name]]) |
|
1926 | 1926 | except KeyError: |
|
1927 | 1927 | log.warning(traceback.format_exc()) |
|
1928 | 1928 | return False |
|
1929 | 1929 | if self.required_perms.issubset(_user_perms): |
|
1930 | 1930 | return True |
|
1931 | 1931 | return False |
|
1932 | 1932 | |
|
1933 | 1933 | |
|
1934 | 1934 | class HasRepoPermissionAnyApi(_BaseApiPerm): |
|
1935 | 1935 | def check_permissions(self, perm_defs, repo_name=None, group_name=None, |
|
1936 | 1936 | user_group_name=None): |
|
1937 | 1937 | try: |
|
1938 | 1938 | _user_perms = set([perm_defs['repositories'][repo_name]]) |
|
1939 | 1939 | except KeyError: |
|
1940 | 1940 | log.warning(traceback.format_exc()) |
|
1941 | 1941 | return False |
|
1942 | 1942 | if self.required_perms.intersection(_user_perms): |
|
1943 | 1943 | return True |
|
1944 | 1944 | return False |
|
1945 | 1945 | |
|
1946 | 1946 | |
|
1947 | 1947 | class HasRepoGroupPermissionAnyApi(_BaseApiPerm): |
|
1948 | 1948 | def check_permissions(self, perm_defs, repo_name=None, group_name=None, |
|
1949 | 1949 | user_group_name=None): |
|
1950 | 1950 | try: |
|
1951 | 1951 | _user_perms = set([perm_defs['repositories_groups'][group_name]]) |
|
1952 | 1952 | except KeyError: |
|
1953 | 1953 | log.warning(traceback.format_exc()) |
|
1954 | 1954 | return False |
|
1955 | 1955 | if self.required_perms.intersection(_user_perms): |
|
1956 | 1956 | return True |
|
1957 | 1957 | return False |
|
1958 | 1958 | |
|
1959 | 1959 | |
|
1960 | 1960 | class HasRepoGroupPermissionAllApi(_BaseApiPerm): |
|
1961 | 1961 | def check_permissions(self, perm_defs, repo_name=None, group_name=None, |
|
1962 | 1962 | user_group_name=None): |
|
1963 | 1963 | try: |
|
1964 | 1964 | _user_perms = set([perm_defs['repositories_groups'][group_name]]) |
|
1965 | 1965 | except KeyError: |
|
1966 | 1966 | log.warning(traceback.format_exc()) |
|
1967 | 1967 | return False |
|
1968 | 1968 | if self.required_perms.issubset(_user_perms): |
|
1969 | 1969 | return True |
|
1970 | 1970 | return False |
|
1971 | 1971 | |
|
1972 | 1972 | |
|
1973 | 1973 | class HasUserGroupPermissionAnyApi(_BaseApiPerm): |
|
1974 | 1974 | def check_permissions(self, perm_defs, repo_name=None, group_name=None, |
|
1975 | 1975 | user_group_name=None): |
|
1976 | 1976 | try: |
|
1977 | 1977 | _user_perms = set([perm_defs['user_groups'][user_group_name]]) |
|
1978 | 1978 | except KeyError: |
|
1979 | 1979 | log.warning(traceback.format_exc()) |
|
1980 | 1980 | return False |
|
1981 | 1981 | if self.required_perms.intersection(_user_perms): |
|
1982 | 1982 | return True |
|
1983 | 1983 | return False |
|
1984 | 1984 | |
|
1985 | 1985 | |
|
1986 | 1986 | def check_ip_access(source_ip, allowed_ips=None): |
|
1987 | 1987 | """ |
|
1988 | 1988 | Checks if source_ip falls within any of the allowed_ips networks. |
|
1989 | 1989 | |
|
1990 | 1990 | :param source_ip: |
|
1991 | 1991 | :param allowed_ips: list of allowed ips together with mask |
|
1992 | 1992 | """ |
|
1993 | 1993 | log.debug('checking if ip:%s is subnet of %s' % (source_ip, allowed_ips)) |
|
1994 | 1994 | source_ip_address = ipaddress.ip_address(safe_unicode(source_ip)) |
|
1995 | 1995 | if isinstance(allowed_ips, (tuple, list, set)): |
|
1996 | 1996 | for ip in allowed_ips: |
|
1997 | 1997 | ip = safe_unicode(ip) |
|
1998 | 1998 | try: |
|
1999 | 1999 | network_address = ipaddress.ip_network(ip, strict=False) |
|
2000 | 2000 | if source_ip_address in network_address: |
|
2001 | 2001 | log.debug('IP %s is in network %s' % |
|
2002 | 2002 | (source_ip_address, network_address)) |
|
2003 | 2003 | return True |
|
2004 | 2004 | # if for any reason we cannot determine the IP, don't crash, just |
|
2005 | 2005 | # skip it and log the error; we still want to respond forbidden when |
|
2006 | 2006 | # a bad IP is sent |
|
2007 | 2007 | except Exception: |
|
2008 | 2008 | log.error(traceback.format_exc()) |
|
2009 | 2009 | continue |
|
2010 | 2010 | return False |
|
2011 | 2011 | |
|
2012 | 2012 | |
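Behaviour for a few made-up inputs; note that strict=False above also accepts networks with host bits set, e.g. '10.0.5.7/16':

    assert check_ip_access('10.0.5.7', allowed_ips=['10.0.0.0/16']) is True
    assert check_ip_access('192.0.2.1', allowed_ips=['10.0.0.0/16']) is False
    # a malformed entry is logged and skipped rather than crashing:
    assert check_ip_access('10.0.5.7', allowed_ips=['not-an-ip']) is False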
|
2013 | 2013 | def get_cython_compat_decorator(wrapper, func): |
|
2014 | 2014 | """ |
|
2015 | 2015 | Creates a cython compatible decorator. The previously used |
|
2016 | 2016 | decorator.decorator() function seems to be incompatible with cython. |
|
2017 | 2017 | |
|
2018 | 2018 | :param wrapper: __wrapper method of the decorator class |
|
2019 | 2019 | :param func: decorated function |
|
2020 | 2020 | """ |
|
2021 | 2021 | @wraps(func) |
|
2022 | 2022 | def local_wrapper(*args, **kwds): |
|
2023 | 2023 | return wrapper(func, *args, **kwds) |
|
2024 | 2024 | local_wrapper.__wrapped__ = func |
|
2025 | 2025 | return local_wrapper |
|
2026 | 2026 | |
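A toy end-to-end check of the pattern; Guard is a stand-in for the decorator classes above, not real RhodeCode code:

    class Guard(object):
        def __call__(self, func):
            return get_cython_compat_decorator(self.__wrapper, func)

        def __wrapper(self, func, *fargs, **fkwargs):
            # a real decorator would run its permission checks here
            return func(*fargs, **fkwargs)

    @Guard()
    def greet(name):
        """Say hello."""
        return 'hello %s' % name

    assert greet.__doc__ == 'Say hello.'    # metadata kept by @wraps
    assert greet.__wrapped__ is not None    # the original stays reachable
    assert greet('world') == 'hello world'  # calls are routed via __wrapper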
|
2027 | 2027 |
@@ -1,2045 +1,2045 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Helper functions |
|
23 | 23 | |
|
24 | 24 | Consists of functions to typically be used within templates, but also |
|
25 | 25 | available to Controllers. This module is available to both as 'h'. |
|
26 | 26 | """ |
|
27 | 27 | |
|
28 | 28 | import random |
|
29 | 29 | import hashlib |
|
30 | 30 | import StringIO |
|
31 | 31 | import urllib |
|
32 | 32 | import math |
|
33 | 33 | import logging |
|
34 | 34 | import re |
|
35 | 35 | import urlparse |
|
36 | 36 | import time |
|
37 | 37 | import string |
|
38 | 38 | import hashlib |
|
39 | 39 | from collections import OrderedDict |
|
40 | 40 | |
|
41 | 41 | import pygments |
|
42 | 42 | import itertools |
|
43 | 43 | import fnmatch |
|
44 | 44 | |
|
45 | 45 | from datetime import datetime |
|
46 | 46 | from functools import partial |
|
47 | 47 | from pygments.formatters.html import HtmlFormatter |
|
48 | 48 | from pygments import highlight as code_highlight |
|
49 | 49 | from pygments.lexers import ( |
|
50 | 50 | get_lexer_by_name, get_lexer_for_filename, get_lexer_for_mimetype) |
|
51 | 51 | |
|
52 | 52 | from pyramid.threadlocal import get_current_request |
|
53 | 53 | |
|
54 | 54 | from webhelpers.html import literal, HTML, escape |
|
55 | 55 | from webhelpers.html.tools import * |
|
56 | 56 | from webhelpers.html.builder import make_tag |
|
57 | 57 | from webhelpers.html.tags import auto_discovery_link, checkbox, css_classes, \ |
|
58 | 58 | end_form, file, form as wh_form, hidden, image, javascript_link, link_to, \ |
|
59 | 59 | link_to_if, link_to_unless, ol, required_legend, select, stylesheet_link, \ |
|
60 | 60 | submit, text, password, textarea, title, ul, xml_declaration, radio |
|
61 | 61 | from webhelpers.html.tools import auto_link, button_to, highlight, \ |
|
62 | 62 | js_obfuscate, mail_to, strip_links, strip_tags, tag_re |
|
63 | 63 | from webhelpers.pylonslib import Flash as _Flash |
|
64 | 64 | from webhelpers.text import chop_at, collapse, convert_accented_entities, \ |
|
65 | 65 | convert_misc_entities, lchop, plural, rchop, remove_formatting, \ |
|
66 | 66 | replace_whitespace, urlify, truncate, wrap_paragraphs |
|
67 | 67 | from webhelpers.date import time_ago_in_words |
|
68 | 68 | from webhelpers.paginate import Page as _Page |
|
69 | 69 | from webhelpers.html.tags import _set_input_attrs, _set_id_attr, \ |
|
70 | 70 | convert_boolean_attrs, NotGiven, _make_safe_id_component |
|
71 | 71 | from webhelpers2.number import format_byte_size |
|
72 | 72 | |
|
73 | 73 | from rhodecode.lib.action_parser import action_parser |
|
74 | 74 | from rhodecode.lib.ext_json import json |
|
75 | 75 | from rhodecode.lib.utils import repo_name_slug, get_custom_lexer |
|
76 | 76 | from rhodecode.lib.utils2 import str2bool, safe_unicode, safe_str, \ |
|
77 | 77 | get_commit_safe, datetime_to_time, time_to_datetime, time_to_utcdatetime, \ |
|
78 | 78 | AttributeDict, safe_int, md5, md5_safe |
|
79 | 79 | from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links |
|
80 | 80 | from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError |
|
81 | 81 | from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyCommit |
|
82 | 82 | from rhodecode.config.conf import DATE_FORMAT, DATETIME_FORMAT |
|
83 | 83 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
84 | 84 | from rhodecode.model.db import Permission, User, Repository |
|
85 | 85 | from rhodecode.model.repo_group import RepoGroupModel |
|
86 | 86 | from rhodecode.model.settings import IssueTrackerSettingsModel |
|
87 | 87 | |
|
88 | 88 | log = logging.getLogger(__name__) |
|
89 | 89 | |
|
90 | 90 | |
|
91 | 91 | DEFAULT_USER = User.DEFAULT_USER |
|
92 | 92 | DEFAULT_USER_EMAIL = User.DEFAULT_USER_EMAIL |
|
93 | 93 | |
|
94 | 94 | |
|
95 | 95 | def url(*args, **kw): |
|
96 | 96 | from pylons import url as pylons_url |
|
97 | 97 | return pylons_url(*args, **kw) |
|
98 | 98 | |
|
99 | 99 | |
|
100 | 100 | def pylons_url_current(*args, **kw): |
|
101 | 101 | """ |
|
102 | 102 | This function overrides pylons.url.current() which returns the current |
|
103 | 103 | path so that it will also work from a pyramid only context. This |
|
104 | 104 | should be removed once port to pyramid is complete. |
|
105 | 105 | """ |
|
106 | 106 | from pylons import url as pylons_url |
|
107 | 107 | if not args and not kw: |
|
108 | 108 | request = get_current_request() |
|
109 | 109 | return request.path |
|
110 | 110 | return pylons_url.current(*args, **kw) |
|
111 | 111 | |
|
112 | 112 | url.current = pylons_url_current |
|
113 | 113 | |
|
114 | 114 | |
|
115 | 115 | def url_replace(**qargs): |
|
116 | 116 | """ Returns the current request url while replacing query string args """ |
|
117 | 117 | |
|
118 | 118 | request = get_current_request() |
|
119 | 119 | new_args = request.GET.mixed() |
|
120 | 120 | new_args.update(qargs) |
|
121 | 121 | return url('', **new_args) |
|
122 | 122 | |
|
123 | 123 | |
|
124 | 124 | def asset(path, ver=None, **kwargs): |
|
125 | 125 | """ |
|
126 | 126 | Helper to generate a static asset file path for rhodecode assets |
|
127 | 127 | |
|
128 | 128 | eg. h.asset('images/image.png', ver='3923') |
|
129 | 129 | |
|
130 | 130 | :param path: path of asset |
|
131 | 131 | :param ver: optional version query param to append as ?ver= |
|
132 | 132 | """ |
|
133 | 133 | request = get_current_request() |
|
134 | 134 | query = {} |
|
135 | 135 | query.update(kwargs) |
|
136 | 136 | if ver: |
|
137 | 137 | query = {'ver': ver} |
|
138 | 138 | return request.static_path( |
|
139 | 139 | 'rhodecode:public/{}'.format(path), _query=query) |
|
140 | 140 | |
|
141 | 141 | |
|
142 | 142 | default_html_escape_table = { |
|
143 | 143 | ord('&'): u'&amp;', |
|
144 | 144 | ord('<'): u'&lt;', |
|
145 | 145 | ord('>'): u'&gt;', |
|
146 | 146 | ord('"'): u'&quot;', |
|
147 | 147 | ord("'"): u'&#39;', |
|
148 | 148 | } |
|
149 | 149 | |
|
150 | 150 | |
|
151 | 151 | def html_escape(text, html_escape_table=default_html_escape_table): |
|
152 | 152 | """Produce entities within text.""" |
|
153 | 153 | return text.translate(html_escape_table) |
|
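# Editor's sketch (not part of the committed file): with the ord()-keyed
# table above, unicode.translate() maps each character to its entity:
#
#     >>> html_escape(u'<a href="#">x & y</a>')
#     u'&lt;a href=&quot;#&quot;&gt;x &amp; y&lt;/a&gt;'
#
# Input is expected to be unicode; a py2 byte string uses a different
# str.translate() signature and would reject this mapping table.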
154 | 154 | |
|
155 | 155 | |
|
156 | 156 | def chop_at_smart(s, sub, inclusive=False, suffix_if_chopped=None): |
|
157 | 157 | """ |
|
158 | 158 | Truncate string ``s`` at the first occurrence of ``sub``. |
|
159 | 159 | |
|
160 | 160 | If ``inclusive`` is true, truncate just after ``sub`` rather than at it. |
|
161 | 161 | """ |
|
162 | 162 | suffix_if_chopped = suffix_if_chopped or '' |
|
163 | 163 | pos = s.find(sub) |
|
164 | 164 | if pos == -1: |
|
165 | 165 | return s |
|
166 | 166 | |
|
167 | 167 | if inclusive: |
|
168 | 168 | pos += len(sub) |
|
169 | 169 | |
|
170 | 170 | chopped = s[:pos] |
|
171 | 171 | left = s[pos:].strip() |
|
172 | 172 | |
|
173 | 173 | if left and suffix_if_chopped: |
|
174 | 174 | chopped += suffix_if_chopped |
|
175 | 175 | |
|
176 | 176 | return chopped |
|
177 | 177 | |
|
178 | 178 | |
|
179 | 179 | def shorter(text, size=20): |
|
180 | 180 | postfix = '...' |
|
181 | 181 | if len(text) > size: |
|
182 | 182 | return text[:size - len(postfix)] + postfix |
|
183 | 183 | return text |
|
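# Editor's sketch: the postfix counts toward the size budget, so results
# never exceed `size` characters:
#
#     >>> shorter('abcdefghijklmnop', 10)
#     'abcdefg...'
#     >>> shorter('short', 10)
#     'short'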
184 | 184 | |
|
185 | 185 | |
|
186 | 186 | def _reset(name, value=None, id=NotGiven, type="reset", **attrs): |
|
187 | 187 | """ |
|
188 | 188 | Reset button |
|
189 | 189 | """ |
|
190 | 190 | _set_input_attrs(attrs, type, name, value) |
|
191 | 191 | _set_id_attr(attrs, id, name) |
|
192 | 192 | convert_boolean_attrs(attrs, ["disabled"]) |
|
193 | 193 | return HTML.input(**attrs) |
|
194 | 194 | |
|
195 | 195 | reset = _reset |
|
196 | 196 | safeid = _make_safe_id_component |
|
197 | 197 | |
|
198 | 198 | |
|
199 | 199 | def branding(name, length=40): |
|
200 | 200 | return truncate(name, length, indicator="") |
|
201 | 201 | |
|
202 | 202 | |
|
203 | 203 | def FID(raw_id, path): |
|
204 | 204 | """ |
|
205 | 205 | Creates a unique ID for a filenode based on a hash of its path and commit; |
|
206 | 206 | it's safe to use in urls |
|
207 | 207 | |
|
208 | 208 | :param raw_id: |
|
209 | 209 | :param path: |
|
210 | 210 | """ |
|
211 | 211 | |
|
212 | 212 | return 'c-%s-%s' % (short_id(raw_id), md5_safe(path)[:12]) |
|
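# Editor's sketch (hypothetical values): the id joins a 12-char short
# commit hash with the first 12 hex chars of the path's md5, e.g.
#
#     FID('1b2c3d4e5f6a7b8c9d0e', 'docs/index.rst')
#     # -> 'c-1b2c3d4e5f6a-' + md5_safe('docs/index.rst')[:12]
#
# so the id is stable for a given (commit, path) pair and url-safe.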
213 | 213 | |
|
214 | 214 | |
|
215 | 215 | class _GetError(object): |
|
216 | 216 | """Get error from form_errors, and represent it as span wrapped error |
|
217 | 217 | message |
|
218 | 218 | |
|
219 | 219 | :param field_name: field to fetch errors for |
|
220 | 220 | :param form_errors: form errors dict |
|
221 | 221 | """ |
|
222 | 222 | |
|
223 | 223 | def __call__(self, field_name, form_errors): |
|
224 | 224 | tmpl = """<span class="error_msg">%s</span>""" |
|
225 | 225 | if form_errors and field_name in form_errors: |
|
226 | 226 | return literal(tmpl % form_errors.get(field_name)) |
|
227 | 227 | |
|
228 | 228 | get_error = _GetError() |
|
229 | 229 | |
|
230 | 230 | |
|
231 | 231 | class _ToolTip(object): |
|
232 | 232 | |
|
233 | 233 | def __call__(self, tooltip_title, trim_at=50): |
|
234 | 234 | """ |
|
235 | 235 | Special function just to wrap our text into nicely formatted, |
|
236 | 236 | autowrapped text |
|
237 | 237 | |
|
238 | 238 | :param tooltip_title: |
|
239 | 239 | """ |
|
240 | 240 | tooltip_title = escape(tooltip_title) |
|
241 | 241 | tooltip_title = tooltip_title.replace('<', '&lt;').replace('>', '&gt;') |
|
242 | 242 | return tooltip_title |
|
243 | 243 | tooltip = _ToolTip() |
|
244 | 244 | |
|
245 | 245 | |
|
246 | 246 | def files_breadcrumbs(repo_name, commit_id, file_path): |
|
247 | 247 | if isinstance(file_path, str): |
|
248 | 248 | file_path = safe_unicode(file_path) |
|
249 | 249 | |
|
250 | 250 | # TODO: johbo: Is this always a url like path, or is this operating |
|
251 | 251 | # system dependent? |
|
252 | 252 | path_segments = file_path.split('/') |
|
253 | 253 | |
|
254 | 254 | repo_name_html = escape(repo_name) |
|
255 | 255 | if len(path_segments) == 1 and path_segments[0] == '': |
|
256 | 256 | url_segments = [repo_name_html] |
|
257 | 257 | else: |
|
258 | 258 | url_segments = [ |
|
259 | 259 | link_to( |
|
260 | 260 | repo_name_html, |
|
261 | 261 | route_path( |
|
262 | 262 | 'repo_files', |
|
263 | 263 | repo_name=repo_name, |
|
264 | 264 | commit_id=commit_id, |
|
265 | 265 | f_path=''), |
|
266 | 266 | class_='pjax-link')] |
|
267 | 267 | |
|
268 | 268 | last_cnt = len(path_segments) - 1 |
|
269 | 269 | for cnt, segment in enumerate(path_segments): |
|
270 | 270 | if not segment: |
|
271 | 271 | continue |
|
272 | 272 | segment_html = escape(segment) |
|
273 | 273 | |
|
274 | 274 | if cnt != last_cnt: |
|
275 | 275 | url_segments.append( |
|
276 | 276 | link_to( |
|
277 | 277 | segment_html, |
|
278 | 278 | route_path( |
|
279 | 279 | 'repo_files', |
|
280 | 280 | repo_name=repo_name, |
|
281 | 281 | commit_id=commit_id, |
|
282 | 282 | f_path='/'.join(path_segments[:cnt + 1])), |
|
283 | 283 | class_='pjax-link')) |
|
284 | 284 | else: |
|
285 | 285 | url_segments.append(segment_html) |
|
286 | 286 | |
|
287 | 287 | return literal('/'.join(url_segments)) |
|
288 | 288 | |
|
289 | 289 | |
|
290 | 290 | class CodeHtmlFormatter(HtmlFormatter): |
|
291 | 291 | """ |
|
292 | 292 | My code Html Formatter for source codes |
|
293 | 293 | """ |
|
294 | 294 | |
|
295 | 295 | def wrap(self, source, outfile): |
|
296 | 296 | return self._wrap_div(self._wrap_pre(self._wrap_code(source))) |
|
297 | 297 | |
|
298 | 298 | def _wrap_code(self, source): |
|
299 | 299 | for cnt, it in enumerate(source): |
|
300 | 300 | i, t = it |
|
301 | 301 | t = '<div id="L%s">%s</div>' % (cnt + 1, t) |
|
302 | 302 | yield i, t |
|
303 | 303 | |
|
304 | 304 | def _wrap_tablelinenos(self, inner): |
|
305 | 305 | dummyoutfile = StringIO.StringIO() |
|
306 | 306 | lncount = 0 |
|
307 | 307 | for t, line in inner: |
|
308 | 308 | if t: |
|
309 | 309 | lncount += 1 |
|
310 | 310 | dummyoutfile.write(line) |
|
311 | 311 | |
|
312 | 312 | fl = self.linenostart |
|
313 | 313 | mw = len(str(lncount + fl - 1)) |
|
314 | 314 | sp = self.linenospecial |
|
315 | 315 | st = self.linenostep |
|
316 | 316 | la = self.lineanchors |
|
317 | 317 | aln = self.anchorlinenos |
|
318 | 318 | nocls = self.noclasses |
|
319 | 319 | if sp: |
|
320 | 320 | lines = [] |
|
321 | 321 | |
|
322 | 322 | for i in range(fl, fl + lncount): |
|
323 | 323 | if i % st == 0: |
|
324 | 324 | if i % sp == 0: |
|
325 | 325 | if aln: |
|
326 | 326 | lines.append('<a href="#%s%d" class="special">%*d</a>' % |
|
327 | 327 | (la, i, mw, i)) |
|
328 | 328 | else: |
|
329 | 329 | lines.append('<span class="special">%*d</span>' % (mw, i)) |
|
330 | 330 | else: |
|
331 | 331 | if aln: |
|
332 | 332 | lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i)) |
|
333 | 333 | else: |
|
334 | 334 | lines.append('%*d' % (mw, i)) |
|
335 | 335 | else: |
|
336 | 336 | lines.append('') |
|
337 | 337 | ls = '\n'.join(lines) |
|
338 | 338 | else: |
|
339 | 339 | lines = [] |
|
340 | 340 | for i in range(fl, fl + lncount): |
|
341 | 341 | if i % st == 0: |
|
342 | 342 | if aln: |
|
343 | 343 | lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i)) |
|
344 | 344 | else: |
|
345 | 345 | lines.append('%*d' % (mw, i)) |
|
346 | 346 | else: |
|
347 | 347 | lines.append('') |
|
348 | 348 | ls = '\n'.join(lines) |
|
349 | 349 | |
|
350 | 350 | # in case you wonder about the seemingly redundant <div> here: since the |
|
351 | 351 | # content in the other cell also is wrapped in a div, some browsers in |
|
352 | 352 | # some configurations seem to mess up the formatting... |
|
353 | 353 | if nocls: |
|
354 | 354 | yield 0, ('<table class="%stable">' % self.cssclass + |
|
355 | 355 | '<tr><td><div class="linenodiv" ' |
|
356 | 356 | 'style="background-color: #f0f0f0; padding-right: 10px">' |
|
357 | 357 | '<pre style="line-height: 125%">' + |
|
358 | 358 | ls + '</pre></div></td><td id="hlcode" class="code">') |
|
359 | 359 | else: |
|
360 | 360 | yield 0, ('<table class="%stable">' % self.cssclass + |
|
361 | 361 | '<tr><td class="linenos"><div class="linenodiv"><pre>' + |
|
362 | 362 | ls + '</pre></div></td><td id="hlcode" class="code">') |
|
363 | 363 | yield 0, dummyoutfile.getvalue() |
|
364 | 364 | yield 0, '</td></tr></table>' |
|
365 | 365 | |
|
366 | 366 | |
|
367 | 367 | class SearchContentCodeHtmlFormatter(CodeHtmlFormatter): |
|
368 | 368 | def __init__(self, **kw): |
|
369 | 369 | # only show these line numbers if set |
|
370 | 370 | self.only_lines = kw.pop('only_line_numbers', []) |
|
371 | 371 | self.query_terms = kw.pop('query_terms', []) |
|
372 | 372 | self.max_lines = kw.pop('max_lines', 5) |
|
373 | 373 | self.line_context = kw.pop('line_context', 3) |
|
374 | 374 | self.url = kw.pop('url', None) |
|
375 | 375 | |
|
376 | 376 | super(SearchContentCodeHtmlFormatter, self).__init__(**kw) |
|
377 | 377 | |
|
378 | 378 | def _wrap_code(self, source): |
|
379 | 379 | for cnt, it in enumerate(source): |
|
380 | 380 | i, t = it |
|
381 | 381 | t = '<pre>%s</pre>' % t |
|
382 | 382 | yield i, t |
|
383 | 383 | |
|
384 | 384 | def _wrap_tablelinenos(self, inner): |
|
385 | 385 | yield 0, '<table class="code-highlight %stable">' % self.cssclass |
|
386 | 386 | |
|
387 | 387 | last_shown_line_number = 0 |
|
388 | 388 | current_line_number = 1 |
|
389 | 389 | |
|
390 | 390 | for t, line in inner: |
|
391 | 391 | if not t: |
|
392 | 392 | yield t, line |
|
393 | 393 | continue |
|
394 | 394 | |
|
395 | 395 | if current_line_number in self.only_lines: |
|
396 | 396 | if last_shown_line_number + 1 != current_line_number: |
|
397 | 397 | yield 0, '<tr>' |
|
398 | 398 | yield 0, '<td class="line">...</td>' |
|
399 | 399 | yield 0, '<td id="hlcode" class="code"></td>' |
|
400 | 400 | yield 0, '</tr>' |
|
401 | 401 | |
|
402 | 402 | yield 0, '<tr>' |
|
403 | 403 | if self.url: |
|
404 | 404 | yield 0, '<td class="line"><a href="%s#L%i">%i</a></td>' % ( |
|
405 | 405 | self.url, current_line_number, current_line_number) |
|
406 | 406 | else: |
|
407 | 407 | yield 0, '<td class="line"><a href="">%i</a></td>' % ( |
|
408 | 408 | current_line_number) |
|
409 | 409 | yield 0, '<td id="hlcode" class="code">' + line + '</td>' |
|
410 | 410 | yield 0, '</tr>' |
|
411 | 411 | |
|
412 | 412 | last_shown_line_number = current_line_number |
|
413 | 413 | |
|
414 | 414 | current_line_number += 1 |
|
415 | 415 | |
|
416 | 416 | |
|
417 | 417 | yield 0, '</table>' |
|
418 | 418 | |
|
419 | 419 | |
|
420 | 420 | def extract_phrases(text_query): |
|
421 | 421 | """ |
|
422 | 422 | Extracts phrases from a search term string, making sure phrases |
|
423 | 423 | contained in double quotes are kept together, and discarding empty |
|
424 | 424 | or whitespace-only values, eg. |
|
425 | 425 | |
|
426 | 426 | 'some text "a phrase" more' => ['some', 'text', 'a phrase', 'more'] |
|
427 | 427 | |
|
428 | 428 | """ |
|
429 | 429 | |
|
430 | 430 | in_phrase = False |
|
431 | 431 | buf = '' |
|
432 | 432 | phrases = [] |
|
433 | 433 | for char in text_query: |
|
434 | 434 | if in_phrase: |
|
435 | 435 | if char == '"': # end phrase |
|
436 | 436 | phrases.append(buf) |
|
437 | 437 | buf = '' |
|
438 | 438 | in_phrase = False |
|
439 | 439 | continue |
|
440 | 440 | else: |
|
441 | 441 | buf += char |
|
442 | 442 | continue |
|
443 | 443 | else: |
|
444 | 444 | if char == '"': # start phrase |
|
445 | 445 | in_phrase = True |
|
446 | 446 | phrases.append(buf) |
|
447 | 447 | buf = '' |
|
448 | 448 | continue |
|
449 | 449 | elif char == ' ': |
|
450 | 450 | phrases.append(buf) |
|
451 | 451 | buf = '' |
|
452 | 452 | continue |
|
453 | 453 | else: |
|
454 | 454 | buf += char |
|
455 | 455 | |
|
456 | 456 | phrases.append(buf) |
|
457 | 457 | phrases = [phrase.strip() for phrase in phrases if phrase.strip()] |
|
458 | 458 | return phrases |
|
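# Editor's sketch of the edge cases handled above: an unterminated quote
# simply flushes whatever was buffered, so odd input cannot break parsing:
#
#     >>> extract_phrases('some text "a phrase" more')
#     ['some', 'text', 'a phrase', 'more']
#     >>> extract_phrases('abc "def')
#     ['abc', 'def']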
459 | 459 | |
|
460 | 460 | |
|
461 | 461 | def get_matching_offsets(text, phrases): |
|
462 | 462 | """ |
|
463 | 463 | Returns a list of string offsets in `text` that the list of `phrases` match |
|
464 | 464 | |
|
465 | 465 | >>> get_matching_offsets('some text here', ['some', 'here']) |
|
466 | 466 | [(0, 4), (10, 14)] |
|
467 | 467 | |
|
468 | 468 | """ |
|
469 | 469 | offsets = [] |
|
470 | 470 | for phrase in phrases: |
|
471 | 471 | for match in re.finditer(phrase, text): |
|
472 | 472 | offsets.append((match.start(), match.end())) |
|
473 | 473 | |
|
474 | 474 | return offsets |
|
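# Editor's note: re.finditer() treats each phrase as a regular expression.
# That is only safe because callers first run phrases through
# normalize_text_for_matching(), which turns regex metacharacters into
# spaces; raw user input would need re.escape() here.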
475 | 475 | |
|
476 | 476 | |
|
477 | 477 | def normalize_text_for_matching(x): |
|
478 | 478 | """ |
|
479 | 479 | Replaces all non-alphanumeric characters with spaces and lower-cases the string; |
|
480 | 480 | useful for comparing two text strings without punctuation |
|
481 | 481 | """ |
|
482 | 482 | return re.sub(r'[^\w]', ' ', x.lower()) |
|
483 | 483 | |
|
484 | 484 | |
|
485 | 485 | def get_matching_line_offsets(lines, terms): |
|
486 | 486 | """ Return a dict of `lines` indices (starting from 1) that match a |
|
487 | 487 | text search query, mapped to lists of (start, end) match offsets |
|
488 | 488 | |
|
489 | 489 | :param lines: list of strings representing lines |
|
490 | 490 | :param terms: search term string to match in lines eg. 'some text' |
|
491 | 491 | :param context: lines of context around a match (documented, but not accepted by the current signature) |
|
492 | 492 | :param max_lines: cut off for lines of interest (documented, but not accepted by the current signature) |
|
493 | 493 | eg. |
|
494 | 494 | |
|
495 | 495 | text = ''' |
|
496 | 496 | words words words |
|
497 | 497 | words words words |
|
498 | 498 | some text some |
|
499 | 499 | words words words |
|
500 | 500 | words words words |
|
501 | 501 | text here what |
|
502 | 502 | ''' |
|
503 | 503 | get_matching_line_offsets(text.strip().splitlines(), 'text') |
|
504 | 504 | {3: [(5, 9)], 6: [(0, 4)]} |
|
505 | 505 | |
|
506 | 506 | """ |
|
507 | 507 | matching_lines = {} |
|
508 | 508 | phrases = [normalize_text_for_matching(phrase) |
|
509 | 509 | for phrase in extract_phrases(terms)] |
|
510 | 510 | |
|
511 | 511 | for line_index, line in enumerate(lines, start=1): |
|
512 | 512 | match_offsets = get_matching_offsets( |
|
513 | 513 | normalize_text_for_matching(line), phrases) |
|
514 | 514 | if match_offsets: |
|
515 | 515 | matching_lines[line_index] = match_offsets |
|
516 | 516 | |
|
517 | 517 | return matching_lines |
|
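# Editor's sketch of the returned shape (keys are 1-based line indices,
# values are (start, end) offsets within the normalized line):
#
#     >>> get_matching_line_offsets(
#     ...     ['words words', 'some text some', 'words'], 'text')
#     {2: [(5, 9)]}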
518 | 518 | |
|
519 | 519 | |
|
520 | 520 | def hsv_to_rgb(h, s, v): |
|
521 | 521 | """ Convert hsv color values to rgb """ |
|
522 | 522 | |
|
523 | 523 | if s == 0.0: |
|
524 | 524 | return v, v, v |
|
525 | 525 | i = int(h * 6.0) # XXX assume int() truncates! |
|
526 | 526 | f = (h * 6.0) - i |
|
527 | 527 | p = v * (1.0 - s) |
|
528 | 528 | q = v * (1.0 - s * f) |
|
529 | 529 | t = v * (1.0 - s * (1.0 - f)) |
|
530 | 530 | i = i % 6 |
|
531 | 531 | if i == 0: |
|
532 | 532 | return v, t, p |
|
533 | 533 | if i == 1: |
|
534 | 534 | return q, v, p |
|
535 | 535 | if i == 2: |
|
536 | 536 | return p, v, t |
|
537 | 537 | if i == 3: |
|
538 | 538 | return p, q, v |
|
539 | 539 | if i == 4: |
|
540 | 540 | return t, p, v |
|
541 | 541 | if i == 5: |
|
542 | 542 | return v, p, q |
|
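# Editor's sketch: all components are floats in the 0..1 range, e.g.
#
#     >>> hsv_to_rgb(0.0, 1.0, 1.0)   # hue 0, full saturation: pure red
#     (1.0, 0.0, 0.0)
#     >>> hsv_to_rgb(0.5, 0.0, 0.8)   # zero saturation: an even grey
#     (0.8, 0.8, 0.8)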
543 | 543 | |
|
544 | 544 | |
|
545 | 545 | def unique_color_generator(n=10000, saturation=0.10, lightness=0.95): |
|
546 | 546 | """ |
|
547 | 547 | Generator for getting n evenly distributed colors using |
|
548 | 548 | hsv color and the golden ratio. It always returns the same order of colors |
|
549 | 549 | |
|
550 | 550 | :param n: number of colors to generate |
|
551 | 551 | :param saturation: saturation of returned colors |
|
552 | 552 | :param lightness: lightness of returned colors |
|
553 | 553 | :returns: yields lists of RGB component strings |
|
554 | 554 | """ |
|
555 | 555 | |
|
556 | 556 | golden_ratio = 0.618033988749895 |
|
557 | 557 | h = 0.22717784590367374 |
|
558 | 558 | |
|
559 | 559 | for _ in xrange(n): |
|
560 | 560 | h += golden_ratio |
|
561 | 561 | h %= 1 |
|
562 | 562 | HSV_tuple = [h, saturation, lightness] |
|
563 | 563 | RGB_tuple = hsv_to_rgb(*HSV_tuple) |
|
564 | 564 | yield map(lambda x: str(int(x * 256)), RGB_tuple) |
|
565 | 565 | |
|
566 | 566 | |
|
567 | 567 | def color_hasher(n=10000, saturation=0.10, lightness=0.95): |
|
568 | 568 | """ |
|
569 | 569 | Returns a function which, when called with an argument, returns a unique |
|
570 | 570 | color for that argument, eg. |
|
571 | 571 | |
|
572 | 572 | :param n: number of colors to generate |
|
573 | 573 | :param saturation: saturation of returned colors |
|
574 | 574 | :param lightness: lightness of returned colors |
|
575 | 575 | :returns: css RGB string |
|
576 | 576 | |
|
577 | 577 | >>> color_hash = color_hasher() |
|
578 | 578 | >>> color_hash('hello') |
|
579 | 579 | 'rgb(34, 12, 59)' |
|
580 | 580 | >>> color_hash('hello') |
|
581 | 581 | 'rgb(34, 12, 59)' |
|
582 | 582 | >>> color_hash('other') |
|
583 | 583 | 'rgb(90, 224, 159)' |
|
584 | 584 | """ |
|
585 | 585 | |
|
586 | 586 | color_dict = {} |
|
587 | 587 | cgenerator = unique_color_generator( |
|
588 | 588 | saturation=saturation, lightness=lightness) |
|
589 | 589 | |
|
590 | 590 | def get_color_string(thing): |
|
591 | 591 | if thing in color_dict: |
|
592 | 592 | col = color_dict[thing] |
|
593 | 593 | else: |
|
594 | 594 | col = color_dict[thing] = cgenerator.next() |
|
595 | 595 | return "rgb(%s)" % (', '.join(col)) |
|
596 | 596 | |
|
597 | 597 | return get_color_string |
|
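# Editor's sketch: the closure memoizes one color per distinct key, so the
# mapping is stable for the lifetime of a hasher instance (exact rgb values
# depend on the order in which keys are first seen):
#
#     color_of = color_hasher()
#     color_of('branch/stable') == color_of('branch/stable')  # always True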
598 | 598 | |
|
599 | 599 | |
|
600 | 600 | def get_lexer_safe(mimetype=None, filepath=None): |
|
601 | 601 | """ |
|
602 | 602 | Tries to return a relevant pygments lexer using mimetype/filepath name, |
|
603 | 603 | defaulting to plain text if none could be found |
|
604 | 604 | """ |
|
605 | 605 | lexer = None |
|
606 | 606 | try: |
|
607 | 607 | if mimetype: |
|
608 | 608 | lexer = get_lexer_for_mimetype(mimetype) |
|
609 | 609 | if not lexer: |
|
610 | 610 | lexer = get_lexer_for_filename(filepath) |
|
611 | 611 | except pygments.util.ClassNotFound: |
|
612 | 612 | pass |
|
613 | 613 | |
|
614 | 614 | if not lexer: |
|
615 | 615 | lexer = get_lexer_by_name('text') |
|
616 | 616 | |
|
617 | 617 | return lexer |
|
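# Editor's sketch of the fallback chain above (lexer names as published
# by pygments):
#
#     get_lexer_safe(mimetype='text/x-python').name   # 'Python'
#     get_lexer_safe(filepath='setup.py').name        # 'Python'
#     get_lexer_safe(filepath='unknown.zzz').name     # 'Text only' fallback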
618 | 618 | |
|
619 | 619 | |
|
620 | 620 | def get_lexer_for_filenode(filenode): |
|
621 | 621 | lexer = get_custom_lexer(filenode.extension) or filenode.lexer |
|
622 | 622 | return lexer |
|
623 | 623 | |
|
624 | 624 | |
|
625 | 625 | def pygmentize(filenode, **kwargs): |
|
626 | 626 | """ |
|
627 | 627 | pygmentize function using pygments |
|
628 | 628 | |
|
629 | 629 | :param filenode: |
|
630 | 630 | """ |
|
631 | 631 | lexer = get_lexer_for_filenode(filenode) |
|
632 | 632 | return literal(code_highlight(filenode.content, lexer, |
|
633 | 633 | CodeHtmlFormatter(**kwargs))) |
|
634 | 634 | |
|
635 | 635 | |
|
636 | 636 | def is_following_repo(repo_name, user_id): |
|
637 | 637 | from rhodecode.model.scm import ScmModel |
|
638 | 638 | return ScmModel().is_following_repo(repo_name, user_id) |
|
639 | 639 | |
|
640 | 640 | |
|
641 | 641 | class _Message(object): |
|
642 | 642 | """A message returned by ``Flash.pop_messages()``. |
|
643 | 643 | |
|
644 | 644 | Converting the message to a string returns the message text. Instances |
|
645 | 645 | also have the following attributes: |
|
646 | 646 | |
|
647 | 647 | * ``message``: the message text. |
|
648 | 648 | * ``category``: the category specified when the message was created. |
|
649 | 649 | """ |
|
650 | 650 | |
|
651 | 651 | def __init__(self, category, message): |
|
652 | 652 | self.category = category |
|
653 | 653 | self.message = message |
|
654 | 654 | |
|
655 | 655 | def __str__(self): |
|
656 | 656 | return self.message |
|
657 | 657 | |
|
658 | 658 | __unicode__ = __str__ |
|
659 | 659 | |
|
660 | 660 | def __html__(self): |
|
661 | 661 | return escape(safe_unicode(self.message)) |
|
662 | 662 | |
|
663 | 663 | |
|
664 | 664 | class Flash(_Flash): |
|
665 | 665 | |
|
666 | 666 | def pop_messages(self, request=None): |
|
667 | 667 | """Return all accumulated messages and delete them from the session. |
|
668 | 668 | |
|
669 | 669 | The return value is a list of ``Message`` objects. |
|
670 | 670 | """ |
|
671 | 671 | messages = [] |
|
672 | 672 | |
|
673 | 673 | if request: |
|
674 | 674 | session = request.session |
|
675 | 675 | else: |
|
676 | 676 | from pylons import session |
|
677 | 677 | |
|
678 | 678 | # Pop the 'old' pylons flash messages. They are tuples of the form |
|
679 | 679 | # (category, message) |
|
680 | 680 | for cat, msg in session.pop(self.session_key, []): |
|
681 | 681 | messages.append(_Message(cat, msg)) |
|
682 | 682 | |
|
683 | 683 | # Pop the 'new' pyramid flash messages for each category as list |
|
684 | 684 | # of strings. |
|
685 | 685 | for cat in self.categories: |
|
686 | 686 | for msg in session.pop_flash(queue=cat): |
|
687 | 687 | messages.append(_Message(cat, msg)) |
|
688 | 688 | # Map messages from the default queue to the 'notice' category. |
|
689 | 689 | for msg in session.pop_flash(): |
|
690 | 690 | messages.append(_Message('notice', msg)) |
|
691 | 691 | |
|
692 | 692 | session.save() |
|
693 | 693 | return messages |
|
694 | 694 | |
|
695 | 695 | def json_alerts(self, request=None): |
|
696 | 696 | payloads = [] |
|
697 | 697 | messages = self.pop_messages(request=request) |
|
698 | 698 | if messages: |
|
699 | 699 | for message in messages: |
|
700 | 700 | subdata = {} |
|
701 | 701 | if hasattr(message.message, 'rsplit'): |
|
702 | 702 | flash_data = message.message.rsplit('|DELIM|', 1) |
|
703 | 703 | org_message = flash_data[0] |
|
704 | 704 | if len(flash_data) > 1: |
|
705 | 705 | subdata = json.loads(flash_data[1]) |
|
706 | 706 | else: |
|
707 | 707 | org_message = message.message |
|
708 | 708 | payloads.append({ |
|
709 | 709 | 'message': { |
|
710 | 710 | 'message': u'{}'.format(org_message), |
|
711 | 711 | 'level': message.category, |
|
712 | 712 | 'force': True, |
|
713 | 713 | 'subdata': subdata |
|
714 | 714 | } |
|
715 | 715 | }) |
|
716 | 716 | return json.dumps(payloads) |
|
717 | 717 | |
|
718 | 718 | flash = Flash() |
|
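# Editor's sketch of the '|DELIM|' convention consumed by json_alerts()
# above (payload values are illustrative):
#
#     flash(u'Stored|DELIM|{"action": "redirect"}', category='success')
#
# is serialized by json_alerts() roughly as
#
#     [{"message": {"message": "Stored", "level": "success",
#                   "force": true, "subdata": {"action": "redirect"}}}]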
719 | 719 | |
|
720 | 720 | #============================================================================== |
|
721 | 721 | # SCM FILTERS available via h. |
|
722 | 722 | #============================================================================== |
|
723 | 723 | from rhodecode.lib.vcs.utils import author_name, author_email |
|
724 | 724 | from rhodecode.lib.utils2 import credentials_filter, age as _age |
|
725 | 725 | from rhodecode.model.db import User, ChangesetStatus |
|
726 | 726 | |
|
727 | 727 | age = _age |
|
728 | 728 | capitalize = lambda x: x.capitalize() |
|
729 | 729 | email = author_email |
|
730 | 730 | short_id = lambda x: x[:12] |
|
731 | 731 | hide_credentials = lambda x: ''.join(credentials_filter(x)) |
|
732 | 732 | |
|
733 | 733 | |
|
734 | 734 | def age_component(datetime_iso, value=None, time_is_local=False): |
|
735 | 735 | title = value or format_date(datetime_iso) |
|
736 | 736 | tzinfo = '+00:00' |
|
737 | 737 | |
|
738 | 738 | # detect if we have a timezone info, otherwise, add it |
|
739 | 739 | if isinstance(datetime_iso, datetime) and not datetime_iso.tzinfo: |
|
740 | 740 | if time_is_local: |
|
741 | 741 | tzinfo = time.strftime("+%H:%M", |
|
742 | 742 | time.gmtime( |
|
743 | 743 | (datetime.now() - datetime.utcnow()).seconds + 1 |
|
744 | 744 | ) |
|
745 | 745 | ) |
|
746 | 746 | |
|
747 | 747 | return literal( |
|
748 | 748 | '<time class="timeago tooltip" ' |
|
749 | 749 | 'title="{1}{2}" datetime="{0}{2}">{1}</time>'.format( |
|
750 | 750 | datetime_iso, title, tzinfo)) |
|
751 | 751 | |
|
752 | 752 | |
|
753 | 753 | def _shorten_commit_id(commit_id): |
|
754 | 754 | from rhodecode import CONFIG |
|
755 | 755 | def_len = safe_int(CONFIG.get('rhodecode_show_sha_length', 12)) |
|
756 | 756 | return commit_id[:def_len] |
|
757 | 757 | |
|
758 | 758 | |
|
759 | 759 | def show_id(commit): |
|
760 | 760 | """ |
|
761 | 761 | Configurable function that shows the commit ID; |
|
762 | 762 | by default it's r123:fffeeefffeee |
|
763 | 763 | |
|
764 | 764 | :param commit: commit instance |
|
765 | 765 | """ |
|
766 | 766 | from rhodecode import CONFIG |
|
767 | 767 | show_idx = str2bool(CONFIG.get('rhodecode_show_revision_number', True)) |
|
768 | 768 | |
|
769 | 769 | raw_id = _shorten_commit_id(commit.raw_id) |
|
770 | 770 | if show_idx: |
|
771 | 771 | return 'r%s:%s' % (commit.idx, raw_id) |
|
772 | 772 | else: |
|
773 | 773 | return '%s' % (raw_id, ) |
|
774 | 774 | |
|
775 | 775 | |
|
776 | 776 | def format_date(date): |
|
777 | 777 | """ |
|
778 | 778 | use a standardized formatting for dates used in RhodeCode |
|
779 | 779 | |
|
780 | 780 | :param date: date/datetime object |
|
781 | 781 | :return: formatted date |
|
782 | 782 | """ |
|
783 | 783 | |
|
784 | 784 | if date: |
|
785 | 785 | _fmt = "%a, %d %b %Y %H:%M:%S" |
|
786 | 786 | return safe_unicode(date.strftime(_fmt)) |
|
787 | 787 | |
|
788 | 788 | return u"" |
|
789 | 789 | |
|
790 | 790 | |
|
791 | 791 | class _RepoChecker(object): |
|
792 | 792 | |
|
793 | 793 | def __init__(self, backend_alias): |
|
794 | 794 | self._backend_alias = backend_alias |
|
795 | 795 | |
|
796 | 796 | def __call__(self, repository): |
|
797 | 797 | if hasattr(repository, 'alias'): |
|
798 | 798 | _type = repository.alias |
|
799 | 799 | elif hasattr(repository, 'repo_type'): |
|
800 | 800 | _type = repository.repo_type |
|
801 | 801 | else: |
|
802 | 802 | _type = repository |
|
803 | 803 | return _type == self._backend_alias |
|
804 | 804 | |
|
805 | 805 | is_git = _RepoChecker('git') |
|
806 | 806 | is_hg = _RepoChecker('hg') |
|
807 | 807 | is_svn = _RepoChecker('svn') |
|
808 | 808 | |
|
809 | 809 | |
|
810 | 810 | def get_repo_type_by_name(repo_name): |
|
811 | 811 | repo = Repository.get_by_repo_name(repo_name) |
|
812 | 812 | return repo.repo_type |
|
813 | 813 | |
|
814 | 814 | |
|
815 | 815 | def is_svn_without_proxy(repository): |
|
816 | 816 | if is_svn(repository): |
|
817 | 817 | from rhodecode.model.settings import VcsSettingsModel |
|
818 | 818 | conf = VcsSettingsModel().get_ui_settings_as_config_obj() |
|
819 | 819 | return not str2bool(conf.get('vcs_svn_proxy', 'http_requests_enabled')) |
|
820 | 820 | return False |
|
821 | 821 | |
|
822 | 822 | |
|
823 | 823 | def discover_user(author): |
|
824 | 824 | """ |
|
825 | 825 | Tries to discover a RhodeCode User based on the author string. The author string |
|
826 | 826 | is typically `FirstName LastName <email@address.com>` |
|
827 | 827 | """ |
|
828 | 828 | |
|
829 | 829 | # if author is already an instance use it for extraction |
|
830 | 830 | if isinstance(author, User): |
|
831 | 831 | return author |
|
832 | 832 | |
|
833 | 833 | # If a valid email is in the passed attribute, see if that user is in the system |
|
834 | 834 | _email = author_email(author) |
|
835 | 835 | if _email != '': |
|
836 | 836 | user = User.get_by_email(_email, case_insensitive=True, cache=True) |
|
837 | 837 | if user is not None: |
|
838 | 838 | return user |
|
839 | 839 | |
|
840 | 840 | # Maybe it's a username? We try to extract it and fetch by username. |
|
841 | 841 | _author = author_name(author) |
|
842 | 842 | user = User.get_by_username(_author, case_insensitive=True, cache=True) |
|
843 | 843 | if user is not None: |
|
844 | 844 | return user |
|
845 | 845 | |
|
846 | 846 | return None |
|
847 | 847 | |
|
848 | 848 | |
|
849 | 849 | def email_or_none(author): |
|
850 | 850 | # extract email from the commit string |
|
851 | 851 | _email = author_email(author) |
|
852 | 852 | |
|
853 | 853 | # If we have an email, use it, otherwise |
|
854 | 854 | # see if it contains a username we can get an email from |
|
855 | 855 | if _email != '': |
|
856 | 856 | return _email |
|
857 | 857 | else: |
|
858 | 858 | user = User.get_by_username( |
|
859 | 859 | author_name(author), case_insensitive=True, cache=True) |
|
860 | 860 | |
|
861 | 861 | if user is not None: |
|
862 | 862 | return user.email |
|
863 | 863 | |
|
864 | 864 | # No valid email, not a valid user in the system, none! |
|
865 | 865 | return None |
|
866 | 866 | |
|
867 | 867 | |
|
868 | 868 | def link_to_user(author, length=0, **kwargs): |
|
869 | 869 | user = discover_user(author) |
|
870 | 870 | # user can be None, but if we have it already it means we can re-use it |
|
871 | 871 | # in the person() function, so we save 1 intensive query |
|
872 | 872 | if user: |
|
873 | 873 | author = user |
|
874 | 874 | |
|
875 | 875 | display_person = person(author, 'username_or_name_or_email') |
|
876 | 876 | if length: |
|
877 | 877 | display_person = shorter(display_person, length) |
|
878 | 878 | |
|
879 | 879 | if user: |
|
880 | 880 | return link_to( |
|
881 | 881 | escape(display_person), |
|
882 | 882 | route_path('user_profile', username=user.username), |
|
883 | 883 | **kwargs) |
|
884 | 884 | else: |
|
885 | 885 | return escape(display_person) |
|
886 | 886 | |
|
887 | 887 | |
|
888 | 888 | def person(author, show_attr="username_and_name"): |
|
889 | 889 | user = discover_user(author) |
|
890 | 890 | if user: |
|
891 | 891 | return getattr(user, show_attr) |
|
892 | 892 | else: |
|
893 | 893 | _author = author_name(author) |
|
894 | 894 | _email = email(author) |
|
895 | 895 | return _author or _email |
|
896 | 896 | |
|
897 | 897 | |
|
898 | 898 | def author_string(email): |
|
899 | 899 | if email: |
|
900 | 900 | user = User.get_by_email(email, case_insensitive=True, cache=True) |
|
901 | 901 | if user: |
|
902 | 902 | if user.first_name or user.last_name: |
|
903 | 903 | return '%s %s <%s>' % ( |
|
904 | 904 | user.first_name, user.last_name, email) |
|
905 | 905 | else: |
|
906 | 906 | return email |
|
907 | 907 | else: |
|
908 | 908 | return email |
|
909 | 909 | else: |
|
910 | 910 | return None |
|
911 | 911 | |
|
912 | 912 | |
|
913 | 913 | def person_by_id(id_, show_attr="username_and_name"): |
|
914 | 914 | # attr to return from fetched user |
|
915 | 915 | person_getter = lambda usr: getattr(usr, show_attr) |
|
916 | 916 | |
|
917 | 917 | #maybe it's an ID ? |
|
918 | 918 | if str(id_).isdigit() or isinstance(id_, int): |
|
919 | 919 | id_ = int(id_) |
|
920 | 920 | user = User.get(id_) |
|
921 | 921 | if user is not None: |
|
922 | 922 | return person_getter(user) |
|
923 | 923 | return id_ |
|
924 | 924 | |
|
925 | 925 | |
|
926 | 926 | def gravatar_with_user(request, author, show_disabled=False): |
|
927 | 927 | _render = request.get_partial_renderer('base/base.mako') |
|
928 | 928 | return _render('gravatar_with_user', author, show_disabled=show_disabled) |
|
929 | 929 | |
|
930 | 930 | |
|
931 | 931 | def desc_stylize(value): |
|
932 | 932 | """ |
|
933 | 933 | converts tags from value into html equivalent |
|
934 | 934 | |
|
935 | 935 | :param value: |
|
936 | 936 | """ |
|
937 | 937 | if not value: |
|
938 | 938 | return '' |
|
939 | 939 | |
|
940 | 940 | value = re.sub(r'\[see\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]', |
|
941 | 941 | '<div class="metatag" tag="see">see => \\1 </div>', value) |
|
942 | 942 | value = re.sub(r'\[license\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]', |
|
943 | 943 | '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/\\1">\\1</a></div>', value) |
|
944 | 944 | value = re.sub(r'\[(requires|recommends|conflicts|base)\ \=\>\ *([a-zA-Z0-9\-\/]*)\]', |
|
945 | 945 | '<div class="metatag" tag="\\1">\\1 => <a href="/\\2">\\2</a></div>', value) |
|
946 | 946 | value = re.sub(r'\[(lang|language)\ \=\>\ *([a-zA-Z\-\/\#\+]*)\]', |
|
947 | 947 | '<div class="metatag" tag="lang">\\2</div>', value) |
|
948 | 948 | value = re.sub(r'\[([a-z]+)\]', |
|
949 | 949 | '<div class="metatag" tag="\\1">\\1</div>', value) |
|
950 | 950 | |
|
951 | 951 | return value |
|
952 | 952 | |
|
953 | 953 | |
|
954 | 954 | def escaped_stylize(value): |
|
955 | 955 | """ |
|
956 | 956 | converts tags from value into html equivalent, but escaping its value first |
|
957 | 957 | """ |
|
958 | 958 | if not value: |
|
959 | 959 | return '' |
|
960 | 960 | |
|
961 | 961 | # Use the default webhelpers escape method, but force the result to |
|
962 | 962 | # plain unicode instead of a markup tag so it can be used in regex expressions |
|
963 | 963 | value = unicode(escape(safe_unicode(value))) |
|
964 | 964 | |
|
965 | 965 | value = re.sub(r'\[see\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]', |
|
966 | 966 | '<div class="metatag" tag="see">see => \\1 </div>', value) |
|
967 | 967 | value = re.sub(r'\[license\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]', |
|
968 | 968 | '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/\\1">\\1</a></div>', value) |
|
969 | 969 | value = re.sub(r'\[(requires|recommends|conflicts|base)\ \=\>\ *([a-zA-Z0-9\-\/]*)\]', |
|
970 | 970 | '<div class="metatag" tag="\\1">\\1 => <a href="/\\2">\\2</a></div>', value) |
|
971 | 971 | value = re.sub(r'\[(lang|language)\ \=\>\ *([a-zA-Z\-\/\#\+]*)\]', |
|
972 | 972 | '<div class="metatag" tag="lang">\\2</div>', value) |
|
973 | 973 | value = re.sub(r'\[([a-z]+)\]', |
|
974 | 974 | '<div class="metatag" tag="\\1">\\1</div>', value) |
|
975 | 975 | |
|
976 | 976 | return value |
|
977 | 977 | |
|
978 | 978 | |
|
979 | 979 | def bool2icon(value): |
|
980 | 980 | """ |
|
981 | 981 | Returns the boolean value of a given value, represented as an html element with |
|
982 | 982 | classes that represent icons |
|
983 | 983 | |
|
984 | 984 | :param value: given value to convert to html node |
|
985 | 985 | """ |
|
986 | 986 | |
|
987 | 987 | if value: # does bool conversion |
|
988 | 988 | return HTML.tag('i', class_="icon-true") |
|
989 | 989 | else: # not true as bool |
|
990 | 990 | return HTML.tag('i', class_="icon-false") |
|
991 | 991 | |
|
992 | 992 | |
|
993 | 993 | #============================================================================== |
|
994 | 994 | # PERMS |
|
995 | 995 | #============================================================================== |
|
996 | 996 | from rhodecode.lib.auth import HasPermissionAny, HasPermissionAll, \ |
|
997 | 997 | HasRepoPermissionAny, HasRepoPermissionAll, HasRepoGroupPermissionAll, \ |
|
998 | 998 | HasRepoGroupPermissionAny, HasRepoPermissionAnyApi, get_csrf_token, \ |
|
999 | 999 | csrf_token_key |
|
1000 | 1000 | |
|
1001 | 1001 | |
|
1002 | 1002 | #============================================================================== |
|
1003 | 1003 | # GRAVATAR URL |
|
1004 | 1004 | #============================================================================== |
|
1005 | 1005 | class InitialsGravatar(object): |
|
1006 | 1006 | def __init__(self, email_address, first_name, last_name, size=30, |
|
1007 | 1007 | background=None, text_color='#fff'): |
|
1008 | 1008 | self.size = size |
|
1009 | 1009 | self.first_name = first_name |
|
1010 | 1010 | self.last_name = last_name |
|
1011 | 1011 | self.email_address = email_address |
|
1012 | 1012 | self.background = background or self.str2color(email_address) |
|
1013 | 1013 | self.text_color = text_color |
|
1014 | 1014 | |
|
1015 | 1015 | def get_color_bank(self): |
|
1016 | 1016 | """ |
|
1017 | 1017 | returns a predefined list of colors that gravatars can use. |
|
1018 | 1018 | Those are randomized distinct colors that guarantee readability and |
|
1019 | 1019 | uniqueness. |
|
1020 | 1020 | |
|
1021 | 1021 | generated with: http://phrogz.net/css/distinct-colors.html |
|
1022 | 1022 | """ |
|
1023 | 1023 | return [ |
|
1024 | 1024 | '#bf3030', '#a67f53', '#00ff00', '#5989b3', '#392040', '#d90000', |
|
1025 | 1025 | '#402910', '#204020', '#79baf2', '#a700b3', '#bf6060', '#7f5320', |
|
1026 | 1026 | '#008000', '#003059', '#ee00ff', '#ff0000', '#8c4b00', '#007300', |
|
1027 | 1027 | '#005fb3', '#de73e6', '#ff4040', '#ffaa00', '#3df255', '#203140', |
|
1028 | 1028 | '#47004d', '#591616', '#664400', '#59b365', '#0d2133', '#83008c', |
|
1029 | 1029 | '#592d2d', '#bf9f60', '#73e682', '#1d3f73', '#73006b', '#402020', |
|
1030 | 1030 | '#b2862d', '#397341', '#597db3', '#e600d6', '#a60000', '#736039', |
|
1031 | 1031 | '#00b318', '#79aaf2', '#330d30', '#ff8080', '#403010', '#16591f', |
|
1032 | 1032 | '#002459', '#8c4688', '#e50000', '#ffbf40', '#00732e', '#102340', |
|
1033 | 1033 | '#bf60ac', '#8c4646', '#cc8800', '#00a642', '#1d3473', '#b32d98', |
|
1034 | 1034 | '#660e00', '#ffd580', '#80ffb2', '#7391e6', '#733967', '#d97b6c', |
|
1035 | 1035 | '#8c5e00', '#59b389', '#3967e6', '#590047', '#73281d', '#665200', |
|
1036 | 1036 | '#00e67a', '#2d50b3', '#8c2377', '#734139', '#b2982d', '#16593a', |
|
1037 | 1037 | '#001859', '#ff00aa', '#a65e53', '#ffcc00', '#0d3321', '#2d3959', |
|
1038 | 1038 | '#731d56', '#401610', '#4c3d00', '#468c6c', '#002ca6', '#d936a3', |
|
1039 | 1039 | '#d94c36', '#403920', '#36d9a3', '#0d1733', '#592d4a', '#993626', |
|
1040 | 1040 | '#cca300', '#00734d', '#46598c', '#8c005e', '#7f1100', '#8c7000', |
|
1041 | 1041 | '#00a66f', '#7382e6', '#b32d74', '#d9896c', '#ffe680', '#1d7362', |
|
1042 | 1042 | '#364cd9', '#73003d', '#d93a00', '#998a4d', '#59b3a1', '#5965b3', |
|
1043 | 1043 | '#e5007a', '#73341d', '#665f00', '#00b38f', '#0018b3', '#59163a', |
|
1044 | 1044 | '#b2502d', '#bfb960', '#00ffcc', '#23318c', '#a6537f', '#734939', |
|
1045 | 1045 | '#b2a700', '#104036', '#3d3df2', '#402031', '#e56739', '#736f39', |
|
1046 | 1046 | '#79f2ea', '#000059', '#401029', '#4c1400', '#ffee00', '#005953', |
|
1047 | 1047 | '#101040', '#990052', '#402820', '#403d10', '#00ffee', '#0000d9', |
|
1048 | 1048 | '#ff80c4', '#a66953', '#eeff00', '#00ccbe', '#8080ff', '#e673a1', |
|
1049 | 1049 | '#a62c00', '#474d00', '#1a3331', '#46468c', '#733950', '#662900', |
|
1050 | 1050 | '#858c23', '#238c85', '#0f0073', '#b20047', '#d9986c', '#becc00', |
|
1051 | 1051 | '#396f73', '#281d73', '#ff0066', '#ff6600', '#dee673', '#59adb3', |
|
1052 | 1052 | '#6559b3', '#590024', '#b2622d', '#98b32d', '#36ced9', '#332d59', |
|
1053 | 1053 | '#40001a', '#733f1d', '#526600', '#005359', '#242040', '#bf6079', |
|
1054 | 1054 | '#735039', '#cef23d', '#007780', '#5630bf', '#66001b', '#b24700', |
|
1055 | 1055 | '#acbf60', '#1d6273', '#25008c', '#731d34', '#a67453', '#50592d', |
|
1056 | 1056 | '#00ccff', '#6600ff', '#ff0044', '#4c1f00', '#8a994d', '#79daf2', |
|
1057 | 1057 | '#a173e6', '#d93662', '#402310', '#aaff00', '#2d98b3', '#8c40ff', |
|
1058 | 1058 | '#592d39', '#ff8c40', '#354020', '#103640', '#1a0040', '#331a20', |
|
1059 | 1059 | '#331400', '#334d00', '#1d5673', '#583973', '#7f0022', '#4c3626', |
|
1060 | 1060 | '#88cc00', '#36a3d9', '#3d0073', '#d9364c', '#33241a', '#698c23', |
|
1061 | 1061 | '#5995b3', '#300059', '#e57382', '#7f3300', '#366600', '#00aaff', |
|
1062 | 1062 | '#3a1659', '#733941', '#663600', '#74b32d', '#003c59', '#7f53a6', |
|
1063 | 1063 | '#73000f', '#ff8800', '#baf279', '#79caf2', '#291040', '#a6293a', |
|
1064 | 1064 | '#b2742d', '#587339', '#0077b3', '#632699', '#400009', '#d9a66c', |
|
1065 | 1065 | '#294010', '#2d4a59', '#aa00ff', '#4c131b', '#b25f00', '#5ce600', |
|
1066 | 1066 | '#267399', '#a336d9', '#990014', '#664e33', '#86bf60', '#0088ff', |
|
1067 | 1067 | '#7700b3', '#593a16', '#073300', '#1d4b73', '#ac60bf', '#e59539', |
|
1068 | 1068 | '#4f8c46', '#368dd9', '#5c0073' |
|
1069 | 1069 | ] |
|
1070 | 1070 | |
|
1071 | 1071 | def rgb_to_hex_color(self, rgb_tuple): |
|
1072 | 1072 | """ |
|
1073 | 1073 | Converts a passed rgb_tuple to a hex color string. |
|
1074 | 1074 | |
|
1075 | 1075 | :param rgb_tuple: tuple of 3 ints representing an rgb color |
|
1076 | 1076 | """ |
|
1077 | 1077 | return '#' + ("".join(map(chr, rgb_tuple)).encode('hex')) |
|
1078 | 1078 | |
|
1079 | 1079 | def email_to_int_list(self, email_str): |
|
1080 | 1080 | """ |
|
1081 | 1081 | Get every byte of the email's md5 hex digest and turn it into an integer. |
|
1082 | 1082 | Each value is always between 0-255 |
|
1083 | 1083 | """ |
|
1084 | 1084 | digest = md5_safe(email_str.lower()) |
|
1085 | 1085 | return [int(digest[i * 2:i * 2 + 2], 16) for i in range(16)] |
|
1086 | 1086 | |
|
1087 | 1087 | def pick_color_bank_index(self, email_str, color_bank): |
|
1088 | 1088 | return self.email_to_int_list(email_str)[0] % len(color_bank) |
|
1089 | 1089 | |
|
1090 | 1090 | def str2color(self, email_str): |
|
1091 | 1091 | """ |
|
1092 | 1092 | Tries to map in a stable algorithm an email to color |
|
1093 | 1093 | |
|
1094 | 1094 | :param email_str: |
|
1095 | 1095 | """ |
|
1096 | 1096 | color_bank = self.get_color_bank() |
|
1097 | 1097 | # pick a position (modulo its length so we always find it in the |
|
1098 | 1098 | # bank even if it's smaller than 256 values) |
|
1099 | 1099 | pos = self.pick_color_bank_index(email_str, color_bank) |
|
1100 | 1100 | return color_bank[pos] |
|
1101 | 1101 | |
|
1102 | 1102 | def normalize_email(self, email_address): |
|
1103 | 1103 | import unicodedata |
|
1104 | 1104 | # default host used to fill in the fake/missing email |
|
1105 | 1105 | default_host = u'localhost' |
|
1106 | 1106 | |
|
1107 | 1107 | if not email_address: |
|
1108 | 1108 | email_address = u'%s@%s' % (User.DEFAULT_USER, default_host) |
|
1109 | 1109 | |
|
1110 | 1110 | email_address = safe_unicode(email_address) |
|
1111 | 1111 | |
|
1112 | 1112 | if u'@' not in email_address: |
|
1113 | 1113 | email_address = u'%s@%s' % (email_address, default_host) |
|
1114 | 1114 | |
|
1115 | 1115 | if email_address.endswith(u'@'): |
|
1116 | 1116 | email_address = u'%s%s' % (email_address, default_host) |
|
1117 | 1117 | |
|
1118 | 1118 | email_address = unicodedata.normalize('NFKD', email_address)\ |
|
1119 | 1119 | .encode('ascii', 'ignore') |
|
1120 | 1120 | return email_address |
|
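# Editor's sketch of the normalization above (DEFAULT_USER is typically
# 'default'):
#
#     normalize_email(None)              # -> 'default@localhost'
#     normalize_email(u'john')           # -> 'john@localhost'
#     normalize_email(u'józef@x.com')    # -> 'jozef@x.com' (ascii-folded)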
1121 | 1121 | |
|
1122 | 1122 | def get_initials(self): |
|
1123 | 1123 | """ |
|
1124 | 1124 | Returns 2 letter initials calculated based on the input. |
|
1125 | 1125 | The algorithm picks the given email address, and takes the first letter |
|
1126 | 1126 | of the part before @, and then the first letter of the server name. In case |
|
1127 | 1127 | the part before @ is in the format `somestring.somestring2`, it replaces |
|
1128 | 1128 | the server letter with the first letter of somestring2 |
|
1129 | 1129 | |
|
1130 | 1130 | In case the function was initialized with both first and last name, this |
|
1131 | 1131 | overrides the extraction from email with the first letters of the first and |
|
1132 | 1132 | last name. We add special logic to that functionality: in case the full name |
|
1133 | 1133 | is compound, like Guido Von Rossum, we use the last part of the last name |
|
1134 | 1134 | (Von Rossum), picking `R`. |
|
1135 | 1135 | |
|
1136 | 1136 | The function also normalizes non-ascii characters to their ascii |
|
1137 | 1137 | representation, eg Ą => A |
|
1138 | 1138 | """ |
|
1139 | 1139 | import unicodedata |
|
1140 | 1140 | # replace non-ascii to ascii |
|
1141 | 1141 | first_name = unicodedata.normalize( |
|
1142 | 1142 | 'NFKD', safe_unicode(self.first_name)).encode('ascii', 'ignore') |
|
1143 | 1143 | last_name = unicodedata.normalize( |
|
1144 | 1144 | 'NFKD', safe_unicode(self.last_name)).encode('ascii', 'ignore') |
|
1145 | 1145 | |
|
1146 | 1146 | # do NFKD encoding, and also make sure email has proper format |
|
1147 | 1147 | email_address = self.normalize_email(self.email_address) |
|
1148 | 1148 | |
|
1149 | 1149 | # first push the email initials |
|
1150 | 1150 | prefix, server = email_address.split('@', 1) |
|
1151 | 1151 | |
|
1152 | 1152 | # check if prefix is maybe a 'first_name.last_name' syntax |
|
1153 | 1153 | _dot_split = prefix.rsplit('.', 1) |
|
1154 | 1154 | if len(_dot_split) == 2: |
|
1155 | 1155 | initials = [_dot_split[0][0], _dot_split[1][0]] |
|
1156 | 1156 | else: |
|
1157 | 1157 | initials = [prefix[0], server[0]] |
|
1158 | 1158 | |
|
1159 | 1159 | # then try to replace either first_name or last_name |
|
1160 | 1160 | fn_letter = (first_name or " ")[0].strip() |
|
1161 | 1161 | ln_letter = (last_name.split(' ', 1)[-1] or " ")[0].strip() |
|
1162 | 1162 | |
|
1163 | 1163 | if fn_letter: |
|
1164 | 1164 | initials[0] = fn_letter |
|
1165 | 1165 | |
|
1166 | 1166 | if ln_letter: |
|
1167 | 1167 | initials[1] = ln_letter |
|
1168 | 1168 | |
|
1169 | 1169 | return ''.join(initials).upper() |
|
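# Editor's sketch of the rules above:
#
#     InitialsGravatar('marcin.kuzminski@example.com', '', '').get_initials()
#     # -> 'MK'  (dotted email prefix wins when no name is set)
#     InitialsGravatar('lisa@example.com', 'Jane', 'Doe').get_initials()
#     # -> 'JD'  (explicit first/last name overrides the email)
#     InitialsGravatar('x@y.org', 'Guido', 'Von Rossum').get_initials()
#     # -> 'GR'  (compound last name: its last part supplies the letter)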
1170 | 1170 | |
|
1171 | 1171 | def get_img_data_by_type(self, font_family, img_type): |
|
1172 | 1172 | default_user = """ |
|
1173 | 1173 | <svg xmlns="http://www.w3.org/2000/svg" |
|
1174 | 1174 | version="1.1" x="0px" y="0px" width="{size}" height="{size}" |
|
1175 | 1175 | viewBox="-15 -10 439.165 429.164" |
|
1176 | 1176 | |
|
1177 | 1177 | xml:space="preserve" |
|
1178 | 1178 | style="background:{background};" > |
|
1179 | 1179 | |
|
1180 | 1180 | <path d="M204.583,216.671c50.664,0,91.74-48.075, |
|
1181 | 1181 | 91.74-107.378c0-82.237-41.074-107.377-91.74-107.377 |
|
1182 | 1182 | c-50.668,0-91.74,25.14-91.74,107.377C112.844, |
|
1183 | 1183 | 168.596,153.916,216.671, |
|
1184 | 1184 | 204.583,216.671z" fill="{text_color}"/> |
|
1185 | 1185 | <path d="M407.164,374.717L360.88, |
|
1186 | 1186 | 270.454c-2.117-4.771-5.836-8.728-10.465-11.138l-71.83-37.392 |
|
1187 | 1187 | c-1.584-0.823-3.502-0.663-4.926,0.415c-20.316, |
|
1188 | 1188 | 15.366-44.203,23.488-69.076,23.488c-24.877, |
|
1189 | 1189 | 0-48.762-8.122-69.078-23.488 |
|
1190 | 1190 | c-1.428-1.078-3.346-1.238-4.93-0.415L58.75, |
|
1191 | 1191 | 259.316c-4.631,2.41-8.346,6.365-10.465,11.138L2.001,374.717 |
|
1192 | 1192 | c-3.191,7.188-2.537,15.412,1.75,22.005c4.285, |
|
1193 | 1193 | 6.592,11.537,10.526,19.4,10.526h362.861c7.863,0,15.117-3.936, |
|
1194 | 1194 | 19.402-10.527 C409.699,390.129, |
|
1195 | 1195 | 410.355,381.902,407.164,374.717z" fill="{text_color}"/> |
|
1196 | 1196 | </svg>""".format( |
|
1197 | 1197 | size=self.size, |
|
1198 | 1198 | background='#979797', # @grey4 |
|
1199 | 1199 | text_color=self.text_color, |
|
1200 | 1200 | font_family=font_family) |
|
1201 | 1201 | |
|
1202 | 1202 | return { |
|
1203 | 1203 | "default_user": default_user |
|
1204 | 1204 | }[img_type] |
|
1205 | 1205 | |
|
1206 | 1206 | def get_img_data(self, svg_type=None): |
|
1207 | 1207 | """ |
|
1208 | 1208 | generates the svg metadata for image |
|
1209 | 1209 | """ |
|
1210 | 1210 | |
|
1211 | 1211 | font_family = ','.join([ |
|
1212 | 1212 | 'proximanovaregular', |
|
1213 | 1213 | 'Proxima Nova Regular', |
|
1214 | 1214 | 'Proxima Nova', |
|
1215 | 1215 | 'Arial', |
|
1216 | 1216 | 'Lucida Grande', |
|
1217 | 1217 | 'sans-serif' |
|
1218 | 1218 | ]) |
|
1219 | 1219 | if svg_type: |
|
1220 | 1220 | return self.get_img_data_by_type(font_family, svg_type) |
|
1221 | 1221 | |
|
1222 | 1222 | initials = self.get_initials() |
|
1223 | 1223 | img_data = """ |
|
1224 | 1224 | <svg xmlns="http://www.w3.org/2000/svg" pointer-events="none" |
|
1225 | 1225 | width="{size}" height="{size}" |
|
1226 | 1226 | style="width: 100%; height: 100%; background-color: {background}" |
|
1227 | 1227 | viewBox="0 0 {size} {size}"> |
|
1228 | 1228 | <text text-anchor="middle" y="50%" x="50%" dy="0.35em" |
|
1229 | 1229 | pointer-events="auto" fill="{text_color}" |
|
1230 | 1230 | font-family="{font_family}" |
|
1231 | 1231 | style="font-weight: 400; font-size: {f_size}px;">{text} |
|
1232 | 1232 | </text> |
|
1233 | 1233 | </svg>""".format( |
|
1234 | 1234 | size=self.size, |
|
1235 | 1235 | f_size=self.size/1.85, # scale the text inside the box nicely |
|
1236 | 1236 | background=self.background, |
|
1237 | 1237 | text_color=self.text_color, |
|
1238 | 1238 | text=initials.upper(), |
|
1239 | 1239 | font_family=font_family) |
|
1240 | 1240 | |
|
1241 | 1241 | return img_data |
|
1242 | 1242 | |
|
1243 | 1243 | def generate_svg(self, svg_type=None): |
|
1244 | 1244 | img_data = self.get_img_data(svg_type) |
|
1245 | 1245 | return "data:image/svg+xml;base64,%s" % img_data.encode('base64') |
|
1246 | 1246 | |
|
1247 | 1247 | |
|
1248 | 1248 | def initials_gravatar(email_address, first_name, last_name, size=30): |
|
1249 | 1249 | svg_type = None |
|
1250 | 1250 | if email_address == User.DEFAULT_USER_EMAIL: |
|
1251 | 1251 | svg_type = 'default_user' |
|
1252 | 1252 | klass = InitialsGravatar(email_address, first_name, last_name, size) |
|
1253 | 1253 | return klass.generate_svg(svg_type=svg_type) |
|
1254 | 1254 | |
|
1255 | 1255 | |
|
1256 | 1256 | def gravatar_url(email_address, size=30, request=None): |
|
1257 | 1257 | request = request or get_current_request()  # don't discard an explicitly passed request |
|
1258 | 1258 | if request and hasattr(request, 'call_context'): |
|
1259 | 1259 | _use_gravatar = request.call_context.visual.use_gravatar |
|
1260 | 1260 | _gravatar_url = request.call_context.visual.gravatar_url |
|
1261 | 1261 | else: |
|
1262 | 1262 | # doh, we need to re-import those to mock it later |
|
1263 | 1263 | from pylons import tmpl_context as c |
|
1264 | 1264 | |
|
1265 | 1265 | _use_gravatar = c.visual.use_gravatar |
|
1266 | 1266 | _gravatar_url = c.visual.gravatar_url |
|
1267 | 1267 | |
|
1268 | 1268 | _gravatar_url = _gravatar_url or User.DEFAULT_GRAVATAR_URL |
|
1269 | 1269 | |
|
1270 | 1270 | email_address = email_address or User.DEFAULT_USER_EMAIL |
|
1271 | 1271 | if isinstance(email_address, unicode): |
|
1272 | 1272 | # hashlib crashes on unicode items |
|
1273 | 1273 | email_address = safe_str(email_address) |
|
1274 | 1274 | |
|
1275 | 1275 | # empty email or default user |
|
1276 | 1276 | if not email_address or email_address == User.DEFAULT_USER_EMAIL: |
|
1277 | 1277 | return initials_gravatar(User.DEFAULT_USER_EMAIL, '', '', size=size) |
|
1278 | 1278 | |
|
1279 | 1279 | if _use_gravatar: |
|
1280 | 1280 | # TODO: Disuse pyramid thread locals. Think about another solution to |
|
1281 | 1281 | # get the host and schema here. |
|
1282 | 1282 | request = get_current_request() |
|
1283 | 1283 | tmpl = safe_str(_gravatar_url) |
|
1284 | 1284 | tmpl = tmpl.replace('{email}', email_address)\ |
|
1285 | 1285 | .replace('{md5email}', md5_safe(email_address.lower())) \ |
|
1286 | 1286 | .replace('{netloc}', request.host)\ |
|
1287 | 1287 | .replace('{scheme}', request.scheme)\ |
|
1288 | 1288 | .replace('{size}', safe_str(size)) |
|
1289 | 1289 | return tmpl |
|
1290 | 1290 | else: |
|
1291 | 1291 | return initials_gravatar(email_address, '', '', size=size) |
|
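# Editor's sketch: with use_gravatar enabled and a template such as
#     'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
# (illustrative), gravatar_url('john@example.com', 20) substitutes
# {md5email} with the md5 of the lower-cased address, {size} with '20',
# and {scheme}/{netloc} from the current request; with gravatar disabled
# it falls back to the inline SVG initials avatar above.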
1292 | 1292 | |
|
1293 | 1293 | |
|
1294 | 1294 | class Page(_Page): |
|
1295 | 1295 | """ |
|
1296 | 1296 | Custom pager to match rendering style with paginator |
|
1297 | 1297 | """ |
|
1298 | 1298 | |
|
1299 | 1299 | def _get_pos(self, cur_page, max_page, items): |
|
1300 | 1300 | edge = (items / 2) + 1 |
|
1301 | 1301 | if (cur_page <= edge): |
|
1302 | 1302 | radius = max(items / 2, items - cur_page) |
|
1303 | 1303 | elif (max_page - cur_page) < edge: |
|
1304 | 1304 | radius = (items - 1) - (max_page - cur_page) |
|
1305 | 1305 | else: |
|
1306 | 1306 | radius = items / 2 |
|
1307 | 1307 | |
|
1308 | 1308 | left = max(1, (cur_page - (radius))) |
|
1309 | 1309 | right = min(max_page, cur_page + (radius)) |
|
1310 | 1310 | return left, cur_page, right |
|
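# Editor's sketch, matching the '1 .. 5 6 [7] 8 9 .. 12' example in
# _range() below: _get_pos(cur_page=7, max_page=12, items=5) returns
# (5, 7, 9), i.e. a window of radius 2 around the current page.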
1311 | 1311 | |
|
1312 | 1312 | def _range(self, regexp_match): |
|
1313 | 1313 | """ |
|
1314 | 1314 | Return range of linked pages (e.g. '1 2 [3] 4 5 6 7 8'). |
|
1315 | 1315 | |
|
1316 | 1316 | Arguments: |
|
1317 | 1317 | |
|
1318 | 1318 | regexp_match |
|
1319 | 1319 | A "re" (regular expressions) match object containing the |
|
1320 | 1320 | radius of linked pages around the current page in |
|
1321 | 1321 | regexp_match.group(1) as a string |
|
1322 | 1322 | |
|
1323 | 1323 | This function is supposed to be called as a callable in |
|
1324 | 1324 | re.sub. |
|
1325 | 1325 | |
|
1326 | 1326 | """ |
|
1327 | 1327 | radius = int(regexp_match.group(1)) |
|
1328 | 1328 | |
|
1329 | 1329 | # Compute the first and last page number within the radius |
|
1330 | 1330 | # e.g. '1 .. 5 6 [7] 8 9 .. 12' |
|
1331 | 1331 | # -> leftmost_page = 5 |
|
1332 | 1332 | # -> rightmost_page = 9 |
|
1333 | 1333 | leftmost_page, _cur, rightmost_page = self._get_pos(self.page, |
|
1334 | 1334 | self.last_page, |
|
1335 | 1335 | (radius * 2) + 1) |
|
1336 | 1336 | nav_items = [] |
|
1337 | 1337 | |
|
1338 | 1338 | # Create a link to the first page (unless we are on the first page |
|
1339 | 1339 | # or there would be no need to insert '..' spacers) |
|
1340 | 1340 | if self.page != self.first_page and self.first_page < leftmost_page: |
|
1341 | 1341 | nav_items.append(self._pagerlink(self.first_page, self.first_page)) |
|
1342 | 1342 | |
|
1343 | 1343 | # Insert dots if there are pages between the first page |
|
1344 | 1344 | # and the currently displayed page range |
|
1345 | 1345 | if leftmost_page - self.first_page > 1: |
|
1346 | 1346 | # Wrap in a SPAN tag if dotdot_attr is set |
|
1347 | 1347 | text = '..' |
|
1348 | 1348 | if self.dotdot_attr: |
|
1349 | 1349 | text = HTML.span(c=text, **self.dotdot_attr) |
|
1350 | 1350 | nav_items.append(text) |
|
1351 | 1351 | |
|
1352 | 1352 | for thispage in xrange(leftmost_page, rightmost_page + 1): |
|
1353 | 1353 | # Highlight the current page number and do not use a link |
|
1354 | 1354 | if thispage == self.page: |
|
1355 | 1355 | text = '%s' % (thispage,) |
|
1356 | 1356 | # Wrap in a SPAN tag if curpage_attr is set |
|
1357 | 1357 | if self.curpage_attr: |
|
1358 | 1358 | text = HTML.span(c=text, **self.curpage_attr) |
|
1359 | 1359 | nav_items.append(text) |
|
1360 | 1360 | # Otherwise create just a link to that page |
|
1361 | 1361 | else: |
|
1362 | 1362 | text = '%s' % (thispage,) |
|
1363 | 1363 | nav_items.append(self._pagerlink(thispage, text)) |
|
1364 | 1364 | |
|
1365 | 1365 | # Insert dots if there are pages between the displayed |
|
1366 | 1366 | # page numbers and the end of the page range |
|
1367 | 1367 | if self.last_page - rightmost_page > 1: |
|
1368 | 1368 | text = '..' |
|
1369 | 1369 | # Wrap in a SPAN tag if dotdot_attr is set |
|
1370 | 1370 | if self.dotdot_attr: |
|
1371 | 1371 | text = HTML.span(c=text, **self.dotdot_attr) |
|
1372 | 1372 | nav_items.append(text) |
|
1373 | 1373 | |
|
1374 | 1374 | # Create a link to the very last page (unless we are on the last |
|
1375 | 1375 | # page or there would be no need to insert '..' spacers) |
|
1376 | 1376 | if self.page != self.last_page and rightmost_page < self.last_page: |
|
1377 | 1377 | nav_items.append(self._pagerlink(self.last_page, self.last_page)) |
|
1378 | 1378 | |
|
1379 | 1379 | ## prerender links |
|
1380 | 1380 | #_page_link = url.current() |
|
1381 | 1381 | #nav_items.append(literal('<link rel="prerender" href="%s?page=%s">' % (_page_link, str(int(self.page)+1)))) |
|
1382 | 1382 | #nav_items.append(literal('<link rel="prefetch" href="%s?page=%s">' % (_page_link, str(int(self.page)+1)))) |
|
1383 | 1383 | return self.separator.join(nav_items) |
|
1384 | 1384 | |
|
1385 | 1385 | def pager(self, format='~2~', page_param='page', partial_param='partial', |
|
1386 | 1386 | show_if_single_page=False, separator=' ', onclick=None, |
|
1387 | 1387 | symbol_first='<<', symbol_last='>>', |
|
1388 | 1388 | symbol_previous='<', symbol_next='>', |
|
1389 | 1389 | link_attr={'class': 'pager_link', 'rel': 'prerender'}, |
|
1390 | 1390 | curpage_attr={'class': 'pager_curpage'}, |
|
1391 | 1391 | dotdot_attr={'class': 'pager_dotdot'}, **kwargs): |
|
1392 | 1392 | |
|
1393 | 1393 | self.curpage_attr = curpage_attr |
|
1394 | 1394 | self.separator = separator |
|
1395 | 1395 | self.pager_kwargs = kwargs |
|
1396 | 1396 | self.page_param = page_param |
|
1397 | 1397 | self.partial_param = partial_param |
|
1398 | 1398 | self.onclick = onclick |
|
1399 | 1399 | self.link_attr = link_attr |
|
1400 | 1400 | self.dotdot_attr = dotdot_attr |
|
1401 | 1401 | |
|
1402 | 1402 | # Don't show navigator if there is no more than one page |
|
1403 | 1403 | if self.page_count == 0 or (self.page_count == 1 and not show_if_single_page): |
|
1404 | 1404 | return '' |
|
1405 | 1405 | |
|
1406 | 1406 | from string import Template |
|
1407 | 1407 | # Replace ~...~ in token format by range of pages |
|
1408 | 1408 | result = re.sub(r'~(\d+)~', self._range, format) |
|
1409 | 1409 | |
|
1410 | 1410 | # Interpolate '%' variables |
|
1411 | 1411 | result = Template(result).safe_substitute({ |
|
1412 | 1412 | 'first_page': self.first_page, |
|
1413 | 1413 | 'last_page': self.last_page, |
|
1414 | 1414 | 'page': self.page, |
|
1415 | 1415 | 'page_count': self.page_count, |
|
1416 | 1416 | 'items_per_page': self.items_per_page, |
|
1417 | 1417 | 'first_item': self.first_item, |
|
1418 | 1418 | 'last_item': self.last_item, |
|
1419 | 1419 | 'item_count': self.item_count, |
|
1420 | 1420 | 'link_first': self.page > self.first_page and \ |
|
1421 | 1421 | self._pagerlink(self.first_page, symbol_first) or '', |
|
1422 | 1422 | 'link_last': self.page < self.last_page and \ |
|
1423 | 1423 | self._pagerlink(self.last_page, symbol_last) or '', |
|
1424 | 1424 | 'link_previous': self.previous_page and \ |
|
1425 | 1425 | self._pagerlink(self.previous_page, symbol_previous) \ |
|
1426 | 1426 | or HTML.span(symbol_previous, class_="pg-previous disabled"), |
|
1427 | 1427 | 'link_next': self.next_page and \ |
|
1428 | 1428 | self._pagerlink(self.next_page, symbol_next) \ |
|
1429 | 1429 | or HTML.span(symbol_next, class_="pg-next disabled") |
|
1430 | 1430 | }) |
|
1431 | 1431 | |
|
1432 | 1432 | return literal(result) |
|
1433 | 1433 | |
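A note on the format string handling above: `~2~`-style tokens expand first into the page-range navigation built by `_range()`, and the remaining `$`-tokens are filled through `string.Template.safe_substitute`. A minimal sketch of that substitution step, with the link markup assumed for illustration:

    from string import Template

    # after re.sub(r'~(\d+)~', ...) has expanded '~2~' into a page range,
    # a format like '$link_previous ~2~ $link_next' might read:
    expanded = '$link_previous [1] 2 [3] $link_next'

    # safe_substitute fills the known '$' tokens and leaves unknown ones intact
    result = Template(expanded).safe_substitute(
        link_previous='<a href="?page=1">&lt;</a>',  # assumed markup
        link_next='<a href="?page=3">&gt;</a>')      # assumed markup
    print(result)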
|
1434 | 1434 | |
|
1435 | 1435 | #============================================================================== |
|
1436 | 1436 | # REPO PAGER, PAGER FOR REPOSITORY |
|
1437 | 1437 | #============================================================================== |
|
1438 | 1438 | class RepoPage(Page): |
|
1439 | 1439 | |
|
1440 | 1440 | def __init__(self, collection, page=1, items_per_page=20, |
|
1441 | 1441 | item_count=None, url=None, **kwargs): |
|
1442 | 1442 | |
|
1443 | 1443 |         """Create a "RepoPage" instance, a special pager for paging |

1444 | 1444 |         repository commits. |

1445 | 1445 |         """ |
|
1446 | 1446 | self._url_generator = url |
|
1447 | 1447 | |
|
1448 | 1448 |         # Save the kwargs class-wide so they can be used in the pager() method |
|
1449 | 1449 | self.kwargs = kwargs |
|
1450 | 1450 | |
|
1451 | 1451 | # Save a reference to the collection |
|
1452 | 1452 | self.original_collection = collection |
|
1453 | 1453 | |
|
1454 | 1454 | self.collection = collection |
|
1455 | 1455 | |
|
1456 | 1456 | # The self.page is the number of the current page. |
|
1457 | 1457 | # The first page has the number 1! |
|
1458 | 1458 | try: |
|
1459 | 1459 | self.page = int(page) # make it int() if we get it as a string |
|
1460 | 1460 | except (ValueError, TypeError): |
|
1461 | 1461 | self.page = 1 |
|
1462 | 1462 | |
|
1463 | 1463 | self.items_per_page = items_per_page |
|
1464 | 1464 | |
|
1465 | 1465 |         # Unless the user tells us how many items the collection has |
|
1466 | 1466 | # we calculate that ourselves. |
|
1467 | 1467 | if item_count is not None: |
|
1468 | 1468 | self.item_count = item_count |
|
1469 | 1469 | else: |
|
1470 | 1470 | self.item_count = len(self.collection) |
|
1471 | 1471 | |
|
1472 | 1472 | # Compute the number of the first and last available page |
|
1473 | 1473 | if self.item_count > 0: |
|
1474 | 1474 | self.first_page = 1 |
|
1475 | 1475 | self.page_count = int(math.ceil(float(self.item_count) / |
|
1476 | 1476 | self.items_per_page)) |
|
1477 | 1477 | self.last_page = self.first_page + self.page_count - 1 |
|
1478 | 1478 | |
|
1479 | 1479 |             # Make sure that the requested page number is in the range of |
|
1480 | 1480 | # valid pages |
|
1481 | 1481 | if self.page > self.last_page: |
|
1482 | 1482 | self.page = self.last_page |
|
1483 | 1483 | elif self.page < self.first_page: |
|
1484 | 1484 | self.page = self.first_page |
|
1485 | 1485 | |
|
1486 | 1486 | # Note: the number of items on this page can be less than |
|
1487 | 1487 | # items_per_page if the last page is not full |
|
1488 | 1488 | self.first_item = max(0, (self.item_count) - (self.page * |
|
1489 | 1489 | items_per_page)) |
|
1490 | 1490 | self.last_item = ((self.item_count - 1) - items_per_page * |
|
1491 | 1491 | (self.page - 1)) |
|
1492 | 1492 | |
|
1493 | 1493 | self.items = list(self.collection[self.first_item:self.last_item + 1]) |
|
1494 | 1494 | |
|
1495 | 1495 | # Links to previous and next page |
|
1496 | 1496 | if self.page > self.first_page: |
|
1497 | 1497 | self.previous_page = self.page - 1 |
|
1498 | 1498 | else: |
|
1499 | 1499 | self.previous_page = None |
|
1500 | 1500 | |
|
1501 | 1501 | if self.page < self.last_page: |
|
1502 | 1502 | self.next_page = self.page + 1 |
|
1503 | 1503 | else: |
|
1504 | 1504 | self.next_page = None |
|
1505 | 1505 | |
|
1506 | 1506 | # No items available |
|
1507 | 1507 | else: |
|
1508 | 1508 | self.first_page = None |
|
1509 | 1509 | self.page_count = 0 |
|
1510 | 1510 | self.last_page = None |
|
1511 | 1511 | self.first_item = None |
|
1512 | 1512 | self.last_item = None |
|
1513 | 1513 | self.previous_page = None |
|
1514 | 1514 | self.next_page = None |
|
1515 | 1515 | self.items = [] |
|
1516 | 1516 | |
|
1517 | 1517 | # This is a subclass of the 'list' type. Initialise the list now. |
|
1518 | 1518 | list.__init__(self, reversed(self.items)) |
|
1519 | 1519 | |
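The index arithmetic above slices from the end of the collection so that, once reversed, page 1 shows the newest entries first. A worked example with assumed numbers:

    import math

    item_count, items_per_page, page = 95, 20, 1
    page_count = int(math.ceil(float(item_count) / items_per_page))  # 5
    first_item = max(0, item_count - page * items_per_page)          # 75
    last_item = (item_count - 1) - items_per_page * (page - 1)       # 94
    # collection[75:95] holds the 20 newest items; reversed() then
    # presents them newest-first on page 1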
|
1520 | 1520 | |
|
1521 | 1521 | def breadcrumb_repo_link(repo): |
|
1522 | 1522 | """ |
|
1523 | 1523 |     Makes a breadcrumb path link to a repo |
|
1524 | 1524 | |
|
1525 | 1525 | ex:: |
|
1526 | 1526 | group >> subgroup >> repo |
|
1527 | 1527 | |
|
1528 | 1528 | :param repo: a Repository instance |
|
1529 | 1529 | """ |
|
1530 | 1530 | |
|
1531 | 1531 | path = [ |
|
1532 | 1532 | link_to(group.name, route_path('repo_group_home', repo_group_name=group.group_name)) |
|
1533 | 1533 | for group in repo.groups_with_parents |
|
1534 | 1534 | ] + [ |
|
1535 | 1535 | link_to(repo.just_name, route_path('repo_summary', repo_name=repo.repo_name)) |
|
1536 | 1536 | ] |
|
1537 | 1537 | |
|
1538 | 1538 | return literal(' » '.join(path)) |
|
1539 | 1539 | |
|
1540 | 1540 | |
|
1541 | 1541 | def format_byte_size_binary(file_size): |
|
1542 | 1542 | """ |
|
1543 | 1543 |     Formats file/folder sizes to a standard human-readable form. |
|
1544 | 1544 | """ |
|
1545 | 1545 | if file_size is None: |
|
1546 | 1546 | file_size = 0 |
|
1547 | 1547 | |
|
1548 | 1548 | formatted_size = format_byte_size(file_size, binary=True) |
|
1549 | 1549 | return formatted_size |
|
1550 | 1550 | |
|
1551 | 1551 | |
|
1552 | 1552 | def urlify_text(text_, safe=True): |
|
1553 | 1553 | """ |
|
1554 | 1554 |     Extract URLs from text and make HTML links out of them |
|
1555 | 1555 | |
|
1556 | 1556 | :param text_: |
|
1557 | 1557 | """ |
|
1558 | 1558 | |
|
1559 | 1559 | url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]''' |
|
1560 | 1560 | '''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''') |
|
1561 | 1561 | |
|
1562 | 1562 | def url_func(match_obj): |
|
1563 | 1563 | url_full = match_obj.groups()[0] |
|
1564 | 1564 | return '<a href="%(url)s">%(url)s</a>' % ({'url': url_full}) |
|
1565 | 1565 | _newtext = url_pat.sub(url_func, text_) |
|
1566 | 1566 | if safe: |
|
1567 | 1567 | return literal(_newtext) |
|
1568 | 1568 | return _newtext |
|
1569 | 1569 | |
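The pattern above recognizes only http/https URLs; everything else passes through untouched. A standalone check of the substitution, reusing the same regex (the output is shown in the final comment):

    import re

    url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]'''
                         '''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''')
    text = 'docs at https://example.com/page and plain words'
    print(url_pat.sub(
        lambda m: '<a href="%(url)s">%(url)s</a>' % {'url': m.group(1)}, text))
    # docs at <a href="https://example.com/page">https://example.com/page</a> and plain words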
|
1570 | 1570 | |
|
1571 | 1571 | def urlify_commits(text_, repository): |
|
1572 | 1572 | """ |
|
1573 | 1573 | Extract commit ids from text and make link from them |
|
1574 | 1574 | |
|
1575 | 1575 | :param text_: |
|
1576 | 1576 | :param repository: repo name to build the URL with |
|
1577 | 1577 | """ |
|
1578 | from pylons import url # doh, we need to re-import url to mock it later | |
|
1578 | ||
|
1579 | 1579 | URL_PAT = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)') |
|
1580 | 1580 | |
|
1581 | 1581 | def url_func(match_obj): |
|
1582 | 1582 | commit_id = match_obj.groups()[1] |
|
1583 | 1583 | pref = match_obj.groups()[0] |
|
1584 | 1584 | suf = match_obj.groups()[2] |
|
1585 | 1585 | |
|
1586 | 1586 | tmpl = ( |
|
1587 | 1587 | '%(pref)s<a class="%(cls)s" href="%(url)s">' |
|
1588 | 1588 | '%(commit_id)s</a>%(suf)s' |
|
1589 | 1589 | ) |
|
1590 | 1590 | return tmpl % { |
|
1591 | 1591 | 'pref': pref, |
|
1592 | 1592 | 'cls': 'revision-link', |
|
1593 |             'url': url('changeset_home', repo_name=repository, |

1594 |                        revision=commit_id), |

1593 | 'url': route_url('repo_commit', repo_name=repository, | |
|
1594 | commit_id=commit_id), | |
|
1595 | 1595 | 'commit_id': commit_id, |
|
1596 | 1596 | 'suf': suf |
|
1597 | 1597 | } |
|
1598 | 1598 | |
|
1599 | 1599 | newtext = URL_PAT.sub(url_func, text_) |
|
1600 | 1600 | |
|
1601 | 1601 | return newtext |
|
1602 | 1602 | |
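`URL_PAT` links only stand-alone hex runs of 12 to 40 characters bounded by whitespace or string edges, so short numbers and ordinary words survive intact. A standalone check, assuming the same pattern:

    import re

    URL_PAT = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)')
    assert URL_PAT.search('pushed deadbeefcafe1 today')   # 13 hex chars: linked
    assert not URL_PAT.search('bug 1234 fixed')           # too short: ignored
    assert not URL_PAT.search('see x-deadbeefcafe1')      # not whitespace-bounded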
|
1603 | 1603 | |
|
1604 | 1604 | def _process_url_func(match_obj, repo_name, uid, entry, |
|
1605 | 1605 | return_raw_data=False, link_format='html'): |
|
1606 | 1606 | pref = '' |
|
1607 | 1607 | if match_obj.group().startswith(' '): |
|
1608 | 1608 | pref = ' ' |
|
1609 | 1609 | |
|
1610 | 1610 | issue_id = ''.join(match_obj.groups()) |
|
1611 | 1611 | |
|
1612 | 1612 | if link_format == 'html': |
|
1613 | 1613 | tmpl = ( |
|
1614 | 1614 | '%(pref)s<a class="%(cls)s" href="%(url)s">' |
|
1615 | 1615 | '%(issue-prefix)s%(id-repr)s' |
|
1616 | 1616 | '</a>') |
|
1617 | 1617 | elif link_format == 'rst': |
|
1618 | 1618 | tmpl = '`%(issue-prefix)s%(id-repr)s <%(url)s>`_' |
|
1619 | 1619 | elif link_format == 'markdown': |
|
1620 | 1620 | tmpl = '[%(issue-prefix)s%(id-repr)s](%(url)s)' |
|
1621 | 1621 | else: |
|
1622 | 1622 | raise ValueError('Bad link_format:{}'.format(link_format)) |
|
1623 | 1623 | |
|
1624 | 1624 | (repo_name_cleaned, |
|
1625 | 1625 | parent_group_name) = RepoGroupModel().\ |
|
1626 | 1626 | _get_group_name_and_parent(repo_name) |
|
1627 | 1627 | |
|
1628 | 1628 | # variables replacement |
|
1629 | 1629 | named_vars = { |
|
1630 | 1630 | 'id': issue_id, |
|
1631 | 1631 | 'repo': repo_name, |
|
1632 | 1632 | 'repo_name': repo_name_cleaned, |
|
1633 | 1633 | 'group_name': parent_group_name |
|
1634 | 1634 | } |
|
1635 | 1635 | # named regex variables |
|
1636 | 1636 | named_vars.update(match_obj.groupdict()) |
|
1637 | 1637 | _url = string.Template(entry['url']).safe_substitute(**named_vars) |
|
1638 | 1638 | |
|
1639 | 1639 | data = { |
|
1640 | 1640 | 'pref': pref, |
|
1641 | 1641 | 'cls': 'issue-tracker-link', |
|
1642 | 1642 | 'url': _url, |
|
1643 | 1643 | 'id-repr': issue_id, |
|
1644 | 1644 | 'issue-prefix': entry['pref'], |
|
1645 | 1645 | 'serv': entry['url'], |
|
1646 | 1646 | } |
|
1647 | 1647 | if return_raw_data: |
|
1648 | 1648 | return { |
|
1649 | 1649 | 'id': issue_id, |
|
1650 | 1650 | 'url': _url |
|
1651 | 1651 | } |
|
1652 | 1652 | return tmpl % data |
|
1653 | 1653 | |
|
1654 | 1654 | |
|
1655 | 1655 | def process_patterns(text_string, repo_name, link_format='html'): |
|
1656 | 1656 | allowed_formats = ['html', 'rst', 'markdown'] |
|
1657 | 1657 | if link_format not in allowed_formats: |
|
1658 | 1658 | raise ValueError('Link format can be only one of:{} got {}'.format( |
|
1659 | 1659 | allowed_formats, link_format)) |
|
1660 | 1660 | |
|
1661 | 1661 | repo = None |
|
1662 | 1662 | if repo_name: |
|
1663 | 1663 |         # Retrieve the repo to avoid an invalid repo_name exploding on |

1664 | 1664 |         # IssueTrackerSettingsModel, while still passing the invalid name further down |
|
1665 | 1665 | repo = Repository.get_by_repo_name(repo_name, cache=True) |
|
1666 | 1666 | |
|
1667 | 1667 | settings_model = IssueTrackerSettingsModel(repo=repo) |
|
1668 | 1668 | active_entries = settings_model.get_settings(cache=True) |
|
1669 | 1669 | |
|
1670 | 1670 | issues_data = [] |
|
1671 | 1671 | newtext = text_string |
|
1672 | 1672 | |
|
1673 | 1673 | for uid, entry in active_entries.items(): |
|
1674 | 1674 | log.debug('found issue tracker entry with uid %s' % (uid,)) |
|
1675 | 1675 | |
|
1676 | 1676 | if not (entry['pat'] and entry['url']): |
|
1677 | 1677 | log.debug('skipping due to missing data') |
|
1678 | 1678 | continue |
|
1679 | 1679 | |
|
1680 | 1680 | log.debug('issue tracker entry: uid: `%s` PAT:%s URL:%s PREFIX:%s' |
|
1681 | 1681 | % (uid, entry['pat'], entry['url'], entry['pref'])) |
|
1682 | 1682 | |
|
1683 | 1683 | try: |
|
1684 | 1684 | pattern = re.compile(r'%s' % entry['pat']) |
|
1685 | 1685 | except re.error: |
|
1686 | 1686 | log.exception( |
|
1687 | 1687 | 'issue tracker pattern: `%s` failed to compile', |
|
1688 | 1688 | entry['pat']) |
|
1689 | 1689 | continue |
|
1690 | 1690 | |
|
1691 | 1691 | data_func = partial( |
|
1692 | 1692 | _process_url_func, repo_name=repo_name, entry=entry, uid=uid, |
|
1693 | 1693 | return_raw_data=True) |
|
1694 | 1694 | |
|
1695 | 1695 | for match_obj in pattern.finditer(text_string): |
|
1696 | 1696 | issues_data.append(data_func(match_obj)) |
|
1697 | 1697 | |
|
1698 | 1698 | url_func = partial( |
|
1699 | 1699 | _process_url_func, repo_name=repo_name, entry=entry, uid=uid, |
|
1700 | 1700 | link_format=link_format) |
|
1701 | 1701 | |
|
1702 | 1702 | newtext = pattern.sub(url_func, newtext) |
|
1703 | 1703 | log.debug('processed prefix:uid `%s`' % (uid,)) |
|
1704 | 1704 | |
|
1705 | 1705 | return newtext, issues_data |
|
1706 | 1706 | |
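Each active entry pairs a regex (`pat`) with a URL template whose `${...}` variables come from the matched groups plus the repo context. A hypothetical entry, showing how `_process_url_func` expands it (the tracker URL and pattern here are made up):

    import re
    import string

    entry = {'pat': r'#(\d+)', 'pref': '#',
             'url': 'https://tracker.example.com/issue/${id}'}  # illustrative only
    match = re.search(entry['pat'], 'fixes #42')
    issue_id = ''.join(match.groups())                     # '42'
    named_vars = {'id': issue_id, 'repo': 'vcs',
                  'repo_name': 'vcs', 'group_name': None}  # repo context assumed
    named_vars.update(match.groupdict())                   # named regex groups, if any
    print(string.Template(entry['url']).safe_substitute(**named_vars))
    # https://tracker.example.com/issue/42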
|
1707 | 1707 | |
|
1708 | 1708 | def urlify_commit_message(commit_text, repository=None): |
|
1709 | 1709 | """ |
|
1710 | 1710 |     Parses the given text message and makes proper links. |

1711 | 1711 |     Issues are linked to the given issue server; the rest become commit links. |
|
1712 | 1712 | |
|
1713 | 1713 | :param commit_text: |
|
1714 | 1714 | :param repository: |
|
1715 | 1715 | """ |
|
1716 | 1716 | from pylons import url # doh, we need to re-import url to mock it later |
|
1717 | 1717 | |
|
1718 | 1718 | def escaper(string): |
|
1719 | 1719 |         return string.replace('<', '&lt;').replace('>', '&gt;') |
|
1720 | 1720 | |
|
1721 | 1721 | newtext = escaper(commit_text) |
|
1722 | 1722 | |
|
1723 | 1723 | # extract http/https links and make them real urls |
|
1724 | 1724 | newtext = urlify_text(newtext, safe=False) |
|
1725 | 1725 | |
|
1726 | 1726 | # urlify commits - extract commit ids and make link out of them, if we have |
|
1727 | 1727 | # the scope of repository present. |
|
1728 | 1728 | if repository: |
|
1729 | 1729 | newtext = urlify_commits(newtext, repository) |
|
1730 | 1730 | |
|
1731 | 1731 | # process issue tracker patterns |
|
1732 | 1732 | newtext, issues = process_patterns(newtext, repository or '') |
|
1733 | 1733 | |
|
1734 | 1734 | return literal(newtext) |
|
1735 | 1735 | |
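Order matters in the pipeline above: angle brackets are escaped first so user text cannot inject markup, and only then are links layered on top. The escaping step in isolation, mirroring the nested helper:

    def escaper(s):
        return s.replace('<', '&lt;').replace('>', '&gt;')

    print(escaper('<b>fix</b> deadbeefcafe1'))
    # &lt;b&gt;fix&lt;/b&gt; deadbeefcafe1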
|
1736 | 1736 | |
|
1737 | 1737 | def render_binary(repo_name, file_obj): |
|
1738 | 1738 | """ |
|
1739 | 1739 | Choose how to render a binary file |
|
1740 | 1740 | """ |
|
1741 | 1741 | filename = file_obj.name |
|
1742 | 1742 | |
|
1743 | 1743 | # images |
|
1744 | 1744 | for ext in ['*.png', '*.jpg', '*.ico', '*.gif']: |
|
1745 | 1745 | if fnmatch.fnmatch(filename, pat=ext): |
|
1746 | 1746 | alt = filename |
|
1747 | 1747 | src = route_path( |
|
1748 | 1748 | 'repo_file_raw', repo_name=repo_name, |
|
1749 | 1749 | commit_id=file_obj.commit.raw_id, f_path=file_obj.path) |
|
1750 | 1750 | return literal('<img class="rendered-binary" alt="{}" src="{}">'.format(alt, src)) |
|
1751 | 1751 | |
|
1752 | 1752 | |
|
1753 | 1753 | def renderer_from_filename(filename, exclude=None): |
|
1754 | 1754 | """ |
|
1755 | 1755 |     Choose a renderer based on filename; this works only for text-based files. |
|
1756 | 1756 | """ |
|
1757 | 1757 | |
|
1758 | 1758 | # ipython |
|
1759 | 1759 | for ext in ['*.ipynb']: |
|
1760 | 1760 | if fnmatch.fnmatch(filename, pat=ext): |
|
1761 | 1761 | return 'jupyter' |
|
1762 | 1762 | |
|
1763 | 1763 | is_markup = MarkupRenderer.renderer_from_filename(filename, exclude=exclude) |
|
1764 | 1764 | if is_markup: |
|
1765 | 1765 | return is_markup |
|
1766 | 1766 | return None |
|
1767 | 1767 | |
|
1768 | 1768 | |
|
1769 | 1769 | def render(source, renderer='rst', mentions=False, relative_url=None, |
|
1770 | 1770 | repo_name=None): |
|
1771 | 1771 | |
|
1772 | 1772 | def maybe_convert_relative_links(html_source): |
|
1773 | 1773 | if relative_url: |
|
1774 | 1774 | return relative_links(html_source, relative_url) |
|
1775 | 1775 | return html_source |
|
1776 | 1776 | |
|
1777 | 1777 | if renderer == 'rst': |
|
1778 | 1778 | if repo_name: |
|
1779 | 1779 | # process patterns on comments if we pass in repo name |
|
1780 | 1780 | source, issues = process_patterns( |
|
1781 | 1781 | source, repo_name, link_format='rst') |
|
1782 | 1782 | |
|
1783 | 1783 | return literal( |
|
1784 | 1784 | '<div class="rst-block">%s</div>' % |
|
1785 | 1785 | maybe_convert_relative_links( |
|
1786 | 1786 | MarkupRenderer.rst(source, mentions=mentions))) |
|
1787 | 1787 | elif renderer == 'markdown': |
|
1788 | 1788 | if repo_name: |
|
1789 | 1789 | # process patterns on comments if we pass in repo name |
|
1790 | 1790 | source, issues = process_patterns( |
|
1791 | 1791 | source, repo_name, link_format='markdown') |
|
1792 | 1792 | |
|
1793 | 1793 | return literal( |
|
1794 | 1794 | '<div class="markdown-block">%s</div>' % |
|
1795 | 1795 | maybe_convert_relative_links( |
|
1796 | 1796 | MarkupRenderer.markdown(source, flavored=True, |
|
1797 | 1797 | mentions=mentions))) |
|
1798 | 1798 | elif renderer == 'jupyter': |
|
1799 | 1799 | return literal( |
|
1800 | 1800 | '<div class="ipynb">%s</div>' % |
|
1801 | 1801 | maybe_convert_relative_links( |
|
1802 | 1802 | MarkupRenderer.jupyter(source))) |
|
1803 | 1803 | |
|
1804 | 1804 | # None means just show the file-source |
|
1805 | 1805 | return None |
|
1806 | 1806 | |
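A usage sketch for the dispatcher above; the repo name is assumed and the rendered markup elided:

    import rhodecode.lib.helpers as h

    html = h.render('# Title\n\nfixes #42', renderer='markdown', repo_name='vcs')
    # -> literal('<div class="markdown-block">...</div>'), with '#42' linked
    #    when an issue-tracker pattern matches for the assumed 'vcs' repo
    assert h.render('text', renderer='unknown') is None  # None: caller shows raw source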
|
1807 | 1807 | |
|
1808 | 1808 | def commit_status(repo, commit_id): |
|
1809 | 1809 | return ChangesetStatusModel().get_status(repo, commit_id) |
|
1810 | 1810 | |
|
1811 | 1811 | |
|
1812 | 1812 | def commit_status_lbl(commit_status): |
|
1813 | 1813 | return dict(ChangesetStatus.STATUSES).get(commit_status) |
|
1814 | 1814 | |
|
1815 | 1815 | |
|
1816 | 1816 | def commit_time(repo_name, commit_id): |
|
1817 | 1817 | repo = Repository.get_by_repo_name(repo_name) |
|
1818 | 1818 | commit = repo.get_commit(commit_id=commit_id) |
|
1819 | 1819 | return commit.date |
|
1820 | 1820 | |
|
1821 | 1821 | |
|
1822 | 1822 | def get_permission_name(key): |
|
1823 | 1823 | return dict(Permission.PERMS).get(key) |
|
1824 | 1824 | |
|
1825 | 1825 | |
|
1826 | 1826 | def journal_filter_help(request): |
|
1827 | 1827 | _ = request.translate |
|
1828 | 1828 | |
|
1829 | 1829 | return _( |
|
1830 | 1830 | 'Example filter terms:\n' + |
|
1831 | 1831 | ' repository:vcs\n' + |
|
1832 | 1832 | ' username:marcin\n' + |
|
1833 | 1833 | ' username:(NOT marcin)\n' + |
|
1834 | 1834 | ' action:*push*\n' + |
|
1835 | 1835 | ' ip:127.0.0.1\n' + |
|
1836 | 1836 | ' date:20120101\n' + |
|
1837 | 1837 | ' date:[20120101100000 TO 20120102]\n' + |
|
1838 | 1838 | '\n' + |
|
1839 | 1839 | 'Generate wildcards using \'*\' character:\n' + |
|
1840 | 1840 | ' "repository:vcs*" - search everything starting with \'vcs\'\n' + |
|
1841 | 1841 | ' "repository:*vcs*" - search for repository containing \'vcs\'\n' + |
|
1842 | 1842 | '\n' + |
|
1843 | 1843 | 'Optional AND / OR operators in queries\n' + |
|
1844 | 1844 | ' "repository:vcs OR repository:test"\n' + |
|
1845 | 1845 | ' "username:test AND repository:test*"\n' |
|
1846 | 1846 | ) |
|
1847 | 1847 | |
|
1848 | 1848 | |
|
1849 | 1849 | def search_filter_help(searcher, request): |
|
1850 | 1850 | _ = request.translate |
|
1851 | 1851 | |
|
1852 | 1852 | terms = '' |
|
1853 | 1853 | return _( |
|
1854 | 1854 | 'Example filter terms for `{searcher}` search:\n' + |
|
1855 | 1855 | '{terms}\n' + |
|
1856 | 1856 | 'Generate wildcards using \'*\' character:\n' + |
|
1857 | 1857 | ' "repo_name:vcs*" - search everything starting with \'vcs\'\n' + |
|
1858 | 1858 | ' "repo_name:*vcs*" - search for repository containing \'vcs\'\n' + |
|
1859 | 1859 | '\n' + |
|
1860 | 1860 | 'Optional AND / OR operators in queries\n' + |
|
1861 | 1861 | ' "repo_name:vcs OR repo_name:test"\n' + |
|
1862 | 1862 | ' "owner:test AND repo_name:test*"\n' + |
|
1863 | 1863 | 'More: {search_doc}' |
|
1864 | 1864 | ).format(searcher=searcher.name, |
|
1865 | 1865 | terms=terms, search_doc=searcher.query_lang_doc) |
|
1866 | 1866 | |
|
1867 | 1867 | |
|
1868 | 1868 | def not_mapped_error(repo_name): |
|
1869 | 1869 | from rhodecode.translation import _ |
|
1870 | 1870 |     flash(_('%s repository is not mapped to db; perhaps' |

1871 | 1871 |             ' it was created or renamed from the filesystem.' |

1872 | 1872 |             ' Please run the application again' |

1873 | 1873 |             ' in order to rescan repositories.') % repo_name, category='error') |
|
1874 | 1874 | |
|
1875 | 1875 | |
|
1876 | 1876 | def ip_range(ip_addr): |
|
1877 | 1877 | from rhodecode.model.db import UserIpMap |
|
1878 | 1878 | s, e = UserIpMap._get_ip_range(ip_addr) |
|
1879 | 1879 | return '%s - %s' % (s, e) |
|
1880 | 1880 | |
|
1881 | 1881 | |
|
1882 | 1882 | def form(url, method='post', needs_csrf_token=True, **attrs): |
|
1883 | 1883 | """Wrapper around webhelpers.tags.form to prevent CSRF attacks.""" |
|
1884 | 1884 | if method.lower() != 'get' and needs_csrf_token: |
|
1885 | 1885 | raise Exception( |
|
1886 | 1886 | 'Forms to POST/PUT/DELETE endpoints should have (in general) a ' + |
|
1887 | 1887 |             'CSRF token. If the endpoint does not require such a token you can ' + |
|
1888 | 1888 | 'explicitly set the parameter needs_csrf_token to false.') |
|
1889 | 1889 | |
|
1890 | 1890 | return wh_form(url, method=method, **attrs) |
|
1891 | 1891 | |
|
1892 | 1892 | |
|
1893 | 1893 | def secure_form(url, method="POST", multipart=False, **attrs): |
|
1894 | 1894 |     """Start a form tag that points the action to a URL. This |
|
1895 | 1895 | form tag will also include the hidden field containing |
|
1896 | 1896 | the auth token. |
|
1897 | 1897 | |
|
1898 | 1898 | The url options should be given either as a string, or as a |
|
1899 | 1899 | ``url()`` function. The method for the form defaults to POST. |
|
1900 | 1900 | |
|
1901 | 1901 | Options: |
|
1902 | 1902 | |
|
1903 | 1903 | ``multipart`` |
|
1904 | 1904 | If set to True, the enctype is set to "multipart/form-data". |
|
1905 | 1905 | ``method`` |
|
1906 | 1906 | The method to use when submitting the form, usually either |
|
1907 | 1907 | "GET" or "POST". If "PUT", "DELETE", or another verb is used, a |
|
1908 | 1908 | hidden input with name _method is added to simulate the verb |
|
1909 | 1909 | over POST. |
|
1910 | 1910 | |
|
1911 | 1911 | """ |
|
1912 | 1912 | from webhelpers.pylonslib.secure_form import insecure_form |
|
1913 | 1913 | form = insecure_form(url, method, multipart, **attrs) |
|
1914 | 1914 | |
|
1915 | 1915 | session = None |
|
1916 | 1916 | # TODO(marcink): after pyramid migration require request variable ALWAYS |
|
1917 | 1917 | if 'request' in attrs: |
|
1918 | 1918 | session = attrs['request'].session |
|
1919 | 1919 | |
|
1920 | 1920 | token = literal( |
|
1921 | 1921 | '<input type="hidden" id="{}" name="{}" value="{}">'.format( |
|
1922 | 1922 | csrf_token_key, csrf_token_key, get_csrf_token(session))) |
|
1923 | 1923 | |
|
1924 | 1924 | return literal("%s\n%s" % (form, token)) |
|
1925 | 1925 | |
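The assembled output is simply the plain form tag followed by one hidden CSRF input. A sketch of the shape, with the key name and token value illustrative only:

    csrf_token_key = 'csrf_token'  # assumed; the real key is imported module-wide
    form_tag = '<form action="/vcs/settings" method="POST">'  # illustrative form tag
    token = '<input type="hidden" id="{0}" name="{0}" value="{1}">'.format(
        csrf_token_key, 'a3f9...')  # per-session value, truncated here
    print("%s\n%s" % (form_tag, token))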
|
1926 | 1926 | |
|
1927 | 1927 | def dropdownmenu(name, selected, options, enable_filter=False, **attrs): |
|
1928 | 1928 | select_html = select(name, selected, options, **attrs) |
|
1929 | 1929 | select2 = """ |
|
1930 | 1930 | <script> |
|
1931 | 1931 | $(document).ready(function() { |
|
1932 | 1932 | $('#%s').select2({ |
|
1933 | 1933 | containerCssClass: 'drop-menu', |
|
1934 | 1934 | dropdownCssClass: 'drop-menu-dropdown', |
|
1935 | 1935 | dropdownAutoWidth: true%s |
|
1936 | 1936 | }); |
|
1937 | 1937 | }); |
|
1938 | 1938 | </script> |
|
1939 | 1939 | """ |
|
1940 | 1940 | filter_option = """, |
|
1941 | 1941 | minimumResultsForSearch: -1 |
|
1942 | 1942 | """ |
|
1943 | 1943 | input_id = attrs.get('id') or name |
|
1944 | 1944 | filter_enabled = "" if enable_filter else filter_option |
|
1945 | 1945 | select_script = literal(select2 % (input_id, filter_enabled)) |
|
1946 | 1946 | |
|
1947 | 1947 | return literal(select_html+select_script) |
|
1948 | 1948 | |
|
1949 | 1949 | |
|
1950 | 1950 | def get_visual_attr(tmpl_context_var, attr_name): |
|
1951 | 1951 | """ |
|
1952 | 1952 |     A safe way to get an attribute from the visual variable of the template context |
|
1953 | 1953 | |
|
1954 | 1954 | :param tmpl_context_var: instance of tmpl_context, usually present as `c` |
|
1955 | 1955 | :param attr_name: name of the attribute we fetch from the c.visual |
|
1956 | 1956 | """ |
|
1957 | 1957 | visual = getattr(tmpl_context_var, 'visual', None) |
|
1958 | 1958 | if not visual: |
|
1959 | 1959 | return |
|
1960 | 1960 | else: |
|
1961 | 1961 | return getattr(visual, attr_name, None) |
|
1962 | 1962 | |
|
1963 | 1963 | |
|
1964 | 1964 | def get_last_path_part(file_node): |
|
1965 | 1965 | if not file_node.path: |
|
1966 | 1966 | return u'' |
|
1967 | 1967 | |
|
1968 | 1968 | path = safe_unicode(file_node.path.split('/')[-1]) |
|
1969 | 1969 | return u'../' + path |
|
1970 | 1970 | |
|
1971 | 1971 | |
|
1972 | 1972 | def route_url(*args, **kwargs): |
|
1973 | 1973 | """ |
|
1974 | 1974 |     Wrapper around Pyramid's `route_url` (fully qualified URL) function. |

1975 | 1975 |     It is used to generate URLs from within pylons views or templates. |

1976 | 1976 |     This will be removed when the pyramid migration is finished. |
|
1977 | 1977 | """ |
|
1978 | 1978 | req = get_current_request() |
|
1979 | 1979 | return req.route_url(*args, **kwargs) |
|
1980 | 1980 | |
|
1981 | 1981 | |
|
1982 | 1982 | def route_path(*args, **kwargs): |
|
1983 | 1983 | """ |
|
1984 | 1984 |     Wrapper around Pyramid's `route_path` function. It is used to generate |

1985 | 1985 |     URLs from within pylons views or templates. This will be removed when |

1986 | 1986 |     the pyramid migration is finished. |
|
1987 | 1987 | """ |
|
1988 | 1988 | req = get_current_request() |
|
1989 | 1989 | return req.route_path(*args, **kwargs) |
|
1990 | 1990 | |
|
1991 | 1991 | |
|
1992 | 1992 | def route_path_or_none(*args, **kwargs): |
|
1993 | 1993 | try: |
|
1994 | 1994 | return route_path(*args, **kwargs) |
|
1995 | 1995 | except KeyError: |
|
1996 | 1996 | return None |
|
1997 | 1997 | |
|
1998 | 1998 | |
|
1999 | 1999 | def static_url(*args, **kwds): |
|
2000 | 2000 | """ |
|
2001 | 2001 |     Wrapper around Pyramid's `static_url` function. It is used to generate |

2002 | 2002 |     URLs from within pylons views or templates. This will be removed when |

2003 | 2003 |     the pyramid migration is finished. |
|
2004 | 2004 | """ |
|
2005 | 2005 | req = get_current_request() |
|
2006 | 2006 | return req.static_url(*args, **kwds) |
|
2007 | 2007 | |
|
2008 | 2008 | |
|
2009 | 2009 | def resource_path(*args, **kwds): |
|
2010 | 2010 | """ |
|
2011 | 2011 |     Wrapper around Pyramid's `resource_path` function. It is used to generate |

2012 | 2012 |     URLs from within pylons views or templates. This will be removed when |

2013 | 2013 |     the pyramid migration is finished. |
|
2014 | 2014 | """ |
|
2015 | 2015 | req = get_current_request() |
|
2016 | 2016 | return req.resource_path(*args, **kwds) |
|
2017 | 2017 | |
|
2018 | 2018 | |
|
2019 | 2019 | def api_call_example(method, args): |
|
2020 | 2020 | """ |
|
2021 | 2021 | Generates an API call example via CURL |
|
2022 | 2022 | """ |
|
2023 | 2023 | args_json = json.dumps(OrderedDict([ |
|
2024 | 2024 | ('id', 1), |
|
2025 | 2025 | ('auth_token', 'SECRET'), |
|
2026 | 2026 | ('method', method), |
|
2027 | 2027 | ('args', args) |
|
2028 | 2028 | ])) |
|
2029 | 2029 | return literal( |
|
2030 | 2030 | "curl {api_url} -X POST -H 'content-type:text/plain' --data-binary '{data}'" |
|
2031 | 2031 | "<br/><br/>SECRET can be found in <a href=\"{token_url}\">auth-tokens</a> page, " |
|
2032 | 2032 | "and needs to be of `api calls` role." |
|
2033 | 2033 | .format( |
|
2034 | 2034 | api_url=route_url('apiv2'), |
|
2035 | 2035 | token_url=route_url('my_account_auth_tokens'), |
|
2036 | 2036 | data=args_json)) |
|
2037 | 2037 | |
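For a hypothetical method and arguments the helper renders a command like the one below (the API URL is assumed):

    import json
    from collections import OrderedDict

    args_json = json.dumps(OrderedDict([
        ('id', 1), ('auth_token', 'SECRET'),
        ('method', 'get_repo'), ('args', {'repoid': 'vcs'})]))  # hypothetical call
    print("curl {api_url} -X POST -H 'content-type:text/plain' "
          "--data-binary '{data}'".format(
              api_url='https://code.example.com/_admin/api', data=args_json))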
|
2038 | 2038 | |
|
2039 | 2039 | def notification_description(notification, request): |
|
2040 | 2040 | """ |
|
2041 | 2041 |     Generate a human-readable notification description based on the notification type |
|
2042 | 2042 | """ |
|
2043 | 2043 | from rhodecode.model.notification import NotificationModel |
|
2044 | 2044 | return NotificationModel().make_description( |
|
2045 | 2045 | notification, translate=request.translate) |
@@ -1,214 +1,216 b'' | |||
|
1 | 1 | |
|
2 | 2 | /****************************************************************************** |
|
3 | 3 | * * |
|
4 | 4 | * DO NOT CHANGE THIS FILE MANUALLY * |
|
5 | 5 | * * |
|
6 | 6 | * * |
|
7 | 7 | * This file is automatically generated when the app starts up with * |
|
8 | 8 | * generate_js_files = true * |
|
9 | 9 | * * |
|
10 | 10 | * To add a route here pass jsroute=True to the route definition in the app * |
|
11 | 11 | * * |
|
12 | 12 | ******************************************************************************/ |
|
13 | 13 | function registerRCRoutes() { |
|
14 | 14 | // routes registration |
|
15 | 15 | pyroutes.register('new_repo', '/_admin/create_repository', []); |
|
16 | 16 | pyroutes.register('edit_user', '/_admin/users/%(user_id)s/edit', ['user_id']); |
|
17 | 17 | pyroutes.register('edit_user_group_members', '/_admin/user_groups/%(user_group_id)s/edit/members', ['user_group_id']); |
|
18 | pyroutes.register('changeset_home', '/%(repo_name)s/changeset/%(revision)s', ['repo_name', 'revision']); | |
|
19 | pyroutes.register('changeset_comment', '/%(repo_name)s/changeset/%(revision)s/comment', ['repo_name', 'revision']); | |
|
20 | pyroutes.register('changeset_comment_preview', '/%(repo_name)s/changeset/comment/preview', ['repo_name']); | |
|
21 | pyroutes.register('changeset_comment_delete', '/%(repo_name)s/changeset/comment/%(comment_id)s/delete', ['repo_name', 'comment_id']); | |
|
22 | pyroutes.register('changeset_info', '/%(repo_name)s/changeset_info/%(revision)s', ['repo_name', 'revision']); | |
|
23 | 18 | pyroutes.register('compare_url', '/%(repo_name)s/compare/%(source_ref_type)s@%(source_ref)s...%(target_ref_type)s@%(target_ref)s', ['repo_name', 'source_ref_type', 'source_ref', 'target_ref_type', 'target_ref']); |
|
24 | 19 | pyroutes.register('pullrequest_home', '/%(repo_name)s/pull-request/new', ['repo_name']); |
|
25 | 20 | pyroutes.register('pullrequest', '/%(repo_name)s/pull-request/new', ['repo_name']); |
|
26 | 21 | pyroutes.register('pullrequest_repo_refs', '/%(repo_name)s/pull-request/refs/%(target_repo_name)s', ['repo_name', 'target_repo_name']); |
|
27 | 22 | pyroutes.register('pullrequest_repo_destinations', '/%(repo_name)s/pull-request/repo-destinations', ['repo_name']); |
|
28 | 23 | pyroutes.register('pullrequest_show', '/%(repo_name)s/pull-request/%(pull_request_id)s', ['repo_name', 'pull_request_id']); |
|
29 | 24 | pyroutes.register('pullrequest_update', '/%(repo_name)s/pull-request/%(pull_request_id)s', ['repo_name', 'pull_request_id']); |
|
30 | 25 | pyroutes.register('pullrequest_comment', '/%(repo_name)s/pull-request-comment/%(pull_request_id)s', ['repo_name', 'pull_request_id']); |
|
31 | 26 | pyroutes.register('pullrequest_comment_delete', '/%(repo_name)s/pull-request-comment/%(comment_id)s/delete', ['repo_name', 'comment_id']); |
|
32 | 27 | pyroutes.register('favicon', '/favicon.ico', []); |
|
33 | 28 | pyroutes.register('robots', '/robots.txt', []); |
|
34 | 29 | pyroutes.register('auth_home', '/_admin/auth*traverse', []); |
|
35 | 30 | pyroutes.register('global_integrations_new', '/_admin/integrations/new', []); |
|
36 | 31 | pyroutes.register('global_integrations_home', '/_admin/integrations', []); |
|
37 | 32 | pyroutes.register('global_integrations_list', '/_admin/integrations/%(integration)s', ['integration']); |
|
38 | 33 | pyroutes.register('global_integrations_create', '/_admin/integrations/%(integration)s/new', ['integration']); |
|
39 | 34 | pyroutes.register('global_integrations_edit', '/_admin/integrations/%(integration)s/%(integration_id)s', ['integration', 'integration_id']); |
|
40 | 35 | pyroutes.register('repo_group_integrations_home', '/%(repo_group_name)s/settings/integrations', ['repo_group_name']); |
|
41 | 36 | pyroutes.register('repo_group_integrations_list', '/%(repo_group_name)s/settings/integrations/%(integration)s', ['repo_group_name', 'integration']); |
|
42 | 37 | pyroutes.register('repo_group_integrations_new', '/%(repo_group_name)s/settings/integrations/new', ['repo_group_name']); |
|
43 | 38 | pyroutes.register('repo_group_integrations_create', '/%(repo_group_name)s/settings/integrations/%(integration)s/new', ['repo_group_name', 'integration']); |
|
44 | 39 | pyroutes.register('repo_group_integrations_edit', '/%(repo_group_name)s/settings/integrations/%(integration)s/%(integration_id)s', ['repo_group_name', 'integration', 'integration_id']); |
|
45 | 40 | pyroutes.register('repo_integrations_home', '/%(repo_name)s/settings/integrations', ['repo_name']); |
|
46 | 41 | pyroutes.register('repo_integrations_list', '/%(repo_name)s/settings/integrations/%(integration)s', ['repo_name', 'integration']); |
|
47 | 42 | pyroutes.register('repo_integrations_new', '/%(repo_name)s/settings/integrations/new', ['repo_name']); |
|
48 | 43 | pyroutes.register('repo_integrations_create', '/%(repo_name)s/settings/integrations/%(integration)s/new', ['repo_name', 'integration']); |
|
49 | 44 | pyroutes.register('repo_integrations_edit', '/%(repo_name)s/settings/integrations/%(integration)s/%(integration_id)s', ['repo_name', 'integration', 'integration_id']); |
|
50 | 45 | pyroutes.register('ops_ping', '/_admin/ops/ping', []); |
|
51 | 46 | pyroutes.register('ops_error_test', '/_admin/ops/error', []); |
|
52 | 47 | pyroutes.register('ops_redirect_test', '/_admin/ops/redirect', []); |
|
53 | 48 | pyroutes.register('admin_home', '/_admin', []); |
|
54 | 49 | pyroutes.register('admin_audit_logs', '/_admin/audit_logs', []); |
|
55 | 50 | pyroutes.register('pull_requests_global_0', '/_admin/pull_requests/%(pull_request_id)s', ['pull_request_id']); |
|
56 | 51 | pyroutes.register('pull_requests_global_1', '/_admin/pull-requests/%(pull_request_id)s', ['pull_request_id']); |
|
57 | 52 | pyroutes.register('pull_requests_global', '/_admin/pull-request/%(pull_request_id)s', ['pull_request_id']); |
|
58 | 53 | pyroutes.register('admin_settings_open_source', '/_admin/settings/open_source', []); |
|
59 | 54 | pyroutes.register('admin_settings_vcs_svn_generate_cfg', '/_admin/settings/vcs/svn_generate_cfg', []); |
|
60 | 55 | pyroutes.register('admin_settings_system', '/_admin/settings/system', []); |
|
61 | 56 | pyroutes.register('admin_settings_system_update', '/_admin/settings/system/updates', []); |
|
62 | 57 | pyroutes.register('admin_settings_sessions', '/_admin/settings/sessions', []); |
|
63 | 58 | pyroutes.register('admin_settings_sessions_cleanup', '/_admin/settings/sessions/cleanup', []); |
|
64 | 59 | pyroutes.register('admin_settings_process_management', '/_admin/settings/process_management', []); |
|
65 | 60 | pyroutes.register('admin_settings_process_management_signal', '/_admin/settings/process_management/signal', []); |
|
66 | 61 | pyroutes.register('admin_permissions_application', '/_admin/permissions/application', []); |
|
67 | 62 | pyroutes.register('admin_permissions_application_update', '/_admin/permissions/application/update', []); |
|
68 | 63 | pyroutes.register('admin_permissions_global', '/_admin/permissions/global', []); |
|
69 | 64 | pyroutes.register('admin_permissions_global_update', '/_admin/permissions/global/update', []); |
|
70 | 65 | pyroutes.register('admin_permissions_object', '/_admin/permissions/object', []); |
|
71 | 66 | pyroutes.register('admin_permissions_object_update', '/_admin/permissions/object/update', []); |
|
72 | 67 | pyroutes.register('admin_permissions_ips', '/_admin/permissions/ips', []); |
|
73 | 68 | pyroutes.register('admin_permissions_overview', '/_admin/permissions/overview', []); |
|
74 | 69 | pyroutes.register('admin_permissions_auth_token_access', '/_admin/permissions/auth_token_access', []); |
|
75 | 70 | pyroutes.register('users', '/_admin/users', []); |
|
76 | 71 | pyroutes.register('users_data', '/_admin/users_data', []); |
|
77 | 72 | pyroutes.register('edit_user_auth_tokens', '/_admin/users/%(user_id)s/edit/auth_tokens', ['user_id']); |
|
78 | 73 | pyroutes.register('edit_user_auth_tokens_add', '/_admin/users/%(user_id)s/edit/auth_tokens/new', ['user_id']); |
|
79 | 74 | pyroutes.register('edit_user_auth_tokens_delete', '/_admin/users/%(user_id)s/edit/auth_tokens/delete', ['user_id']); |
|
80 | 75 | pyroutes.register('edit_user_emails', '/_admin/users/%(user_id)s/edit/emails', ['user_id']); |
|
81 | 76 | pyroutes.register('edit_user_emails_add', '/_admin/users/%(user_id)s/edit/emails/new', ['user_id']); |
|
82 | 77 | pyroutes.register('edit_user_emails_delete', '/_admin/users/%(user_id)s/edit/emails/delete', ['user_id']); |
|
83 | 78 | pyroutes.register('edit_user_ips', '/_admin/users/%(user_id)s/edit/ips', ['user_id']); |
|
84 | 79 | pyroutes.register('edit_user_ips_add', '/_admin/users/%(user_id)s/edit/ips/new', ['user_id']); |
|
85 | 80 | pyroutes.register('edit_user_ips_delete', '/_admin/users/%(user_id)s/edit/ips/delete', ['user_id']); |
|
86 | 81 | pyroutes.register('edit_user_groups_management', '/_admin/users/%(user_id)s/edit/groups_management', ['user_id']); |
|
87 | 82 | pyroutes.register('edit_user_groups_management_updates', '/_admin/users/%(user_id)s/edit/edit_user_groups_management/updates', ['user_id']); |
|
88 | 83 | pyroutes.register('edit_user_audit_logs', '/_admin/users/%(user_id)s/edit/audit', ['user_id']); |
|
89 | 84 | pyroutes.register('channelstream_connect', '/_admin/channelstream/connect', []); |
|
90 | 85 | pyroutes.register('channelstream_subscribe', '/_admin/channelstream/subscribe', []); |
|
91 | 86 | pyroutes.register('channelstream_proxy', '/_channelstream', []); |
|
92 | 87 | pyroutes.register('login', '/_admin/login', []); |
|
93 | 88 | pyroutes.register('logout', '/_admin/logout', []); |
|
94 | 89 | pyroutes.register('register', '/_admin/register', []); |
|
95 | 90 | pyroutes.register('reset_password', '/_admin/password_reset', []); |
|
96 | 91 | pyroutes.register('reset_password_confirmation', '/_admin/password_reset_confirmation', []); |
|
97 | 92 | pyroutes.register('home', '/', []); |
|
98 | 93 | pyroutes.register('user_autocomplete_data', '/_users', []); |
|
99 | 94 | pyroutes.register('user_group_autocomplete_data', '/_user_groups', []); |
|
100 | 95 | pyroutes.register('repo_list_data', '/_repos', []); |
|
101 | 96 | pyroutes.register('goto_switcher_data', '/_goto_data', []); |
|
102 | 97 | pyroutes.register('journal', '/_admin/journal', []); |
|
103 | 98 | pyroutes.register('journal_rss', '/_admin/journal/rss', []); |
|
104 | 99 | pyroutes.register('journal_atom', '/_admin/journal/atom', []); |
|
105 | 100 | pyroutes.register('journal_public', '/_admin/public_journal', []); |
|
106 | 101 | pyroutes.register('journal_public_atom', '/_admin/public_journal/atom', []); |
|
107 | 102 | pyroutes.register('journal_public_atom_old', '/_admin/public_journal_atom', []); |
|
108 | 103 | pyroutes.register('journal_public_rss', '/_admin/public_journal/rss', []); |
|
109 | 104 | pyroutes.register('journal_public_rss_old', '/_admin/public_journal_rss', []); |
|
110 | 105 | pyroutes.register('toggle_following', '/_admin/toggle_following', []); |
|
111 | 106 | pyroutes.register('repo_summary_explicit', '/%(repo_name)s/summary', ['repo_name']); |
|
112 | 107 | pyroutes.register('repo_summary_commits', '/%(repo_name)s/summary-commits', ['repo_name']); |
|
113 | 108 | pyroutes.register('repo_commit', '/%(repo_name)s/changeset/%(commit_id)s', ['repo_name', 'commit_id']); |
|
109 | pyroutes.register('repo_commit_children', '/%(repo_name)s/changeset_children/%(commit_id)s', ['repo_name', 'commit_id']); | |
|
110 | pyroutes.register('repo_commit_parents', '/%(repo_name)s/changeset_parents/%(commit_id)s', ['repo_name', 'commit_id']); | |
|
111 | pyroutes.register('repo_commit_raw_deprecated', '/%(repo_name)s/raw-changeset/%(commit_id)s', ['repo_name', 'commit_id']); | |
|
112 | pyroutes.register('repo_commit_raw', '/%(repo_name)s/changeset-diff/%(commit_id)s', ['repo_name', 'commit_id']); | |
|
113 | pyroutes.register('repo_commit_patch', '/%(repo_name)s/changeset-patch/%(commit_id)s', ['repo_name', 'commit_id']); | |
|
114 | pyroutes.register('repo_commit_download', '/%(repo_name)s/changeset-download/%(commit_id)s', ['repo_name', 'commit_id']); | |
|
115 | pyroutes.register('repo_commit_data', '/%(repo_name)s/changeset-data/%(commit_id)s', ['repo_name', 'commit_id']); | |
|
116 | pyroutes.register('repo_commit_comment_create', '/%(repo_name)s/changeset/%(commit_id)s/comment/create', ['repo_name', 'commit_id']); | |
|
117 | pyroutes.register('repo_commit_comment_preview', '/%(repo_name)s/changeset/%(commit_id)s/comment/preview', ['repo_name', 'commit_id']); | |
|
118 | pyroutes.register('repo_commit_comment_delete', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_id)s/delete', ['repo_name', 'commit_id', 'comment_id']); | |
|
114 | 119 | pyroutes.register('repo_archivefile', '/%(repo_name)s/archive/%(fname)s', ['repo_name', 'fname']); |
|
115 | 120 | pyroutes.register('repo_files_diff', '/%(repo_name)s/diff/%(f_path)s', ['repo_name', 'f_path']); |
|
116 | 121 | pyroutes.register('repo_files_diff_2way_redirect', '/%(repo_name)s/diff-2way/%(f_path)s', ['repo_name', 'f_path']); |
|
117 | 122 | pyroutes.register('repo_files', '/%(repo_name)s/files/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
118 | 123 | pyroutes.register('repo_files:default_path', '/%(repo_name)s/files/%(commit_id)s/', ['repo_name', 'commit_id']); |
|
119 | 124 | pyroutes.register('repo_files:default_commit', '/%(repo_name)s/files', ['repo_name']); |
|
120 | 125 | pyroutes.register('repo_files:rendered', '/%(repo_name)s/render/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
121 | 126 | pyroutes.register('repo_files:annotated', '/%(repo_name)s/annotate/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
122 | 127 | pyroutes.register('repo_files:annotated_previous', '/%(repo_name)s/annotate-previous/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
123 | 128 | pyroutes.register('repo_nodetree_full', '/%(repo_name)s/nodetree_full/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
124 | 129 | pyroutes.register('repo_nodetree_full:default_path', '/%(repo_name)s/nodetree_full/%(commit_id)s/', ['repo_name', 'commit_id']); |
|
125 | 130 | pyroutes.register('repo_files_nodelist', '/%(repo_name)s/nodelist/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
126 | 131 | pyroutes.register('repo_file_raw', '/%(repo_name)s/raw/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
127 | 132 | pyroutes.register('repo_file_download', '/%(repo_name)s/download/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
128 | 133 | pyroutes.register('repo_file_download:legacy', '/%(repo_name)s/rawfile/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
129 | 134 | pyroutes.register('repo_file_history', '/%(repo_name)s/history/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
130 | 135 | pyroutes.register('repo_file_authors', '/%(repo_name)s/authors/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
131 | 136 | pyroutes.register('repo_files_remove_file', '/%(repo_name)s/remove_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
132 | 137 | pyroutes.register('repo_files_delete_file', '/%(repo_name)s/delete_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
133 | 138 | pyroutes.register('repo_files_edit_file', '/%(repo_name)s/edit_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
134 | 139 | pyroutes.register('repo_files_update_file', '/%(repo_name)s/update_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
135 | 140 | pyroutes.register('repo_files_add_file', '/%(repo_name)s/add_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
136 | 141 | pyroutes.register('repo_files_create_file', '/%(repo_name)s/create_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
137 | 142 | pyroutes.register('repo_refs_data', '/%(repo_name)s/refs-data', ['repo_name']); |
|
138 | 143 | pyroutes.register('repo_refs_changelog_data', '/%(repo_name)s/refs-data-changelog', ['repo_name']); |
|
139 | 144 | pyroutes.register('repo_stats', '/%(repo_name)s/repo_stats/%(commit_id)s', ['repo_name', 'commit_id']); |
|
140 | 145 | pyroutes.register('repo_changelog', '/%(repo_name)s/changelog', ['repo_name']); |
|
141 | 146 | pyroutes.register('repo_changelog_file', '/%(repo_name)s/changelog/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
142 | 147 | pyroutes.register('repo_changelog_elements', '/%(repo_name)s/changelog_elements', ['repo_name']); |
|
143 | 148 | pyroutes.register('tags_home', '/%(repo_name)s/tags', ['repo_name']); |
|
144 | 149 | pyroutes.register('branches_home', '/%(repo_name)s/branches', ['repo_name']); |
|
145 | 150 | pyroutes.register('bookmarks_home', '/%(repo_name)s/bookmarks', ['repo_name']); |
|
146 | 151 | pyroutes.register('pullrequest_show', '/%(repo_name)s/pull-request/%(pull_request_id)s', ['repo_name', 'pull_request_id']); |
|
147 | 152 | pyroutes.register('pullrequest_show_all', '/%(repo_name)s/pull-request', ['repo_name']); |
|
148 | 153 | pyroutes.register('pullrequest_show_all_data', '/%(repo_name)s/pull-request-data', ['repo_name']); |
|
149 | pyroutes.register('changeset_home', '/%(repo_name)s/changeset/%(revision)s', ['repo_name', 'revision']); | |
|
150 | pyroutes.register('changeset_children', '/%(repo_name)s/changeset_children/%(revision)s', ['repo_name', 'revision']); | |
|
151 | pyroutes.register('changeset_parents', '/%(repo_name)s/changeset_parents/%(revision)s', ['repo_name', 'revision']); | |
|
152 | 154 | pyroutes.register('edit_repo', '/%(repo_name)s/settings', ['repo_name']); |
|
153 | 155 | pyroutes.register('edit_repo_advanced', '/%(repo_name)s/settings/advanced', ['repo_name']); |
|
154 | 156 | pyroutes.register('edit_repo_advanced_delete', '/%(repo_name)s/settings/advanced/delete', ['repo_name']); |
|
155 | 157 | pyroutes.register('edit_repo_advanced_locking', '/%(repo_name)s/settings/advanced/locking', ['repo_name']); |
|
156 | 158 | pyroutes.register('edit_repo_advanced_journal', '/%(repo_name)s/settings/advanced/journal', ['repo_name']); |
|
157 | 159 | pyroutes.register('edit_repo_advanced_fork', '/%(repo_name)s/settings/advanced/fork', ['repo_name']); |
|
158 | 160 | pyroutes.register('edit_repo_caches', '/%(repo_name)s/settings/caches', ['repo_name']); |
|
159 | 161 | pyroutes.register('edit_repo_perms', '/%(repo_name)s/settings/permissions', ['repo_name']); |
|
160 | 162 | pyroutes.register('repo_reviewers', '/%(repo_name)s/settings/review/rules', ['repo_name']); |
|
161 | 163 | pyroutes.register('repo_default_reviewers_data', '/%(repo_name)s/settings/review/default-reviewers', ['repo_name']); |
|
162 | 164 | pyroutes.register('repo_maintenance', '/%(repo_name)s/settings/maintenance', ['repo_name']); |
|
163 | 165 | pyroutes.register('repo_maintenance_execute', '/%(repo_name)s/settings/maintenance/execute', ['repo_name']); |
|
164 | 166 | pyroutes.register('strip', '/%(repo_name)s/settings/strip', ['repo_name']); |
|
165 | 167 | pyroutes.register('strip_check', '/%(repo_name)s/settings/strip_check', ['repo_name']); |
|
166 | 168 | pyroutes.register('strip_execute', '/%(repo_name)s/settings/strip_execute', ['repo_name']); |
|
167 | 169 | pyroutes.register('rss_feed_home', '/%(repo_name)s/feed/rss', ['repo_name']); |
|
168 | 170 | pyroutes.register('atom_feed_home', '/%(repo_name)s/feed/atom', ['repo_name']); |
|
169 | 171 | pyroutes.register('repo_summary', '/%(repo_name)s', ['repo_name']); |
|
170 | 172 | pyroutes.register('repo_summary_slash', '/%(repo_name)s/', ['repo_name']); |
|
171 | 173 | pyroutes.register('repo_group_home', '/%(repo_group_name)s', ['repo_group_name']); |
|
172 | 174 | pyroutes.register('repo_group_home_slash', '/%(repo_group_name)s/', ['repo_group_name']); |
|
173 | 175 | pyroutes.register('search', '/_admin/search', []); |
|
174 | 176 | pyroutes.register('search_repo', '/%(repo_name)s/search', ['repo_name']); |
|
175 | 177 | pyroutes.register('user_profile', '/_profiles/%(username)s', ['username']); |
|
176 | 178 | pyroutes.register('my_account_profile', '/_admin/my_account/profile', []); |
|
177 | 179 | pyroutes.register('my_account_edit', '/_admin/my_account/edit', []); |
|
178 | 180 | pyroutes.register('my_account_update', '/_admin/my_account/update', []); |
|
179 | 181 | pyroutes.register('my_account_password', '/_admin/my_account/password', []); |
|
180 | 182 | pyroutes.register('my_account_password_update', '/_admin/my_account/password/update', []); |
|
181 | 183 | pyroutes.register('my_account_auth_tokens', '/_admin/my_account/auth_tokens', []); |
|
182 | 184 | pyroutes.register('my_account_auth_tokens_add', '/_admin/my_account/auth_tokens/new', []); |
|
183 | 185 | pyroutes.register('my_account_auth_tokens_delete', '/_admin/my_account/auth_tokens/delete', []); |
|
184 | 186 | pyroutes.register('my_account_emails', '/_admin/my_account/emails', []); |
|
185 | 187 | pyroutes.register('my_account_emails_add', '/_admin/my_account/emails/new', []); |
|
186 | 188 | pyroutes.register('my_account_emails_delete', '/_admin/my_account/emails/delete', []); |
|
187 | 189 | pyroutes.register('my_account_repos', '/_admin/my_account/repos', []); |
|
188 | 190 | pyroutes.register('my_account_watched', '/_admin/my_account/watched', []); |
|
189 | 191 | pyroutes.register('my_account_perms', '/_admin/my_account/perms', []); |
|
190 | 192 | pyroutes.register('my_account_notifications', '/_admin/my_account/notifications', []); |
|
191 | 193 | pyroutes.register('my_account_notifications_toggle_visibility', '/_admin/my_account/toggle_visibility', []); |
|
192 | 194 | pyroutes.register('my_account_pullrequests', '/_admin/my_account/pull_requests', []); |
|
193 | 195 | pyroutes.register('my_account_pullrequests_data', '/_admin/my_account/pull_requests/data', []); |
|
194 | 196 | pyroutes.register('notifications_show_all', '/_admin/notifications', []); |
|
195 | 197 | pyroutes.register('notifications_mark_all_read', '/_admin/notifications/mark_all_read', []); |
|
196 | 198 | pyroutes.register('notifications_show', '/_admin/notifications/%(notification_id)s', ['notification_id']); |
|
197 | 199 | pyroutes.register('notifications_update', '/_admin/notifications/%(notification_id)s/update', ['notification_id']); |
|
198 | 200 | pyroutes.register('notifications_delete', '/_admin/notifications/%(notification_id)s/delete', ['notification_id']); |
|
199 | 201 | pyroutes.register('my_account_notifications_test_channelstream', '/_admin/my_account/test_channelstream', []); |
|
200 | 202 | pyroutes.register('gists_show', '/_admin/gists', []); |
|
201 | 203 | pyroutes.register('gists_new', '/_admin/gists/new', []); |
|
202 | 204 | pyroutes.register('gists_create', '/_admin/gists/create', []); |
|
203 | 205 | pyroutes.register('gist_show', '/_admin/gists/%(gist_id)s', ['gist_id']); |
|
204 | 206 | pyroutes.register('gist_delete', '/_admin/gists/%(gist_id)s/delete', ['gist_id']); |
|
205 | 207 | pyroutes.register('gist_edit', '/_admin/gists/%(gist_id)s/edit', ['gist_id']); |
|
206 | 208 | pyroutes.register('gist_edit_check_revision', '/_admin/gists/%(gist_id)s/edit/check_revision', ['gist_id']); |
|
207 | 209 | pyroutes.register('gist_update', '/_admin/gists/%(gist_id)s/update', ['gist_id']); |
|
208 | 210 | pyroutes.register('gist_show_rev', '/_admin/gists/%(gist_id)s/%(revision)s', ['gist_id', 'revision']); |
|
209 | 211 | pyroutes.register('gist_show_formatted', '/_admin/gists/%(gist_id)s/%(revision)s/%(format)s', ['gist_id', 'revision', 'format']); |
|
210 | 212 | pyroutes.register('gist_show_formatted_path', '/_admin/gists/%(gist_id)s/%(revision)s/%(format)s/%(f_path)s', ['gist_id', 'revision', 'format', 'f_path']); |
|
211 | 213 | pyroutes.register('debug_style_home', '/_admin/debug_style', []); |
|
212 | 214 | pyroutes.register('debug_style_template', '/_admin/debug_style/t/%(t_path)s', ['t_path']); |
|
213 | 215 | pyroutes.register('apiv2', '/_admin/api', []); |
|
214 | 216 | } |
@@ -1,830 +1,831 b'' | |||
|
1 | 1 | // # Copyright (C) 2010-2017 RhodeCode GmbH |
|
2 | 2 | // # |
|
3 | 3 | // # This program is free software: you can redistribute it and/or modify |
|
4 | 4 | // # it under the terms of the GNU Affero General Public License, version 3 |
|
5 | 5 | // # (only), as published by the Free Software Foundation. |
|
6 | 6 | // # |
|
7 | 7 | // # This program is distributed in the hope that it will be useful, |
|
8 | 8 | // # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | // # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | // # GNU General Public License for more details. |
|
11 | 11 | // # |
|
12 | 12 | // # You should have received a copy of the GNU Affero General Public License |
|
13 | 13 | // # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | // # |
|
15 | 15 | // # This program is dual-licensed. If you wish to learn more about the |
|
16 | 16 | // # RhodeCode Enterprise Edition, including its added features, Support services, |
|
17 | 17 | // # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
18 | 18 | |
|
19 | 19 | var firefoxAnchorFix = function() { |
|
20 | 20 | // hack to make anchor links behave properly on Firefox; in our inline |

21 | 21 | // comment generation, when comments are injected Firefox misbehaves |

22 | 22 | // when jumping to anchor links |
|
23 | 23 | if (location.href.indexOf('#') > -1) { |
|
24 | 24 | location.href += ''; |
|
25 | 25 | } |
|
26 | 26 | }; |
|
27 | 27 | |
|
28 | 28 | var linkifyComments = function(comments) { |
|
29 | 29 | var firstCommentId = null; |
|
30 | 30 | if (comments) { |
|
31 | 31 | firstCommentId = $(comments[0]).data('comment-id'); |
|
32 | 32 | } |
|
33 | 33 | |
|
34 | 34 | if (firstCommentId){ |
|
35 | 35 | $('#inline-comments-counter').attr('href', '#comment-' + firstCommentId); |
|
36 | 36 | } |
|
37 | 37 | }; |
|
38 | 38 | |
|
39 | 39 | var bindToggleButtons = function() { |
|
40 | 40 | $('.comment-toggle').on('click', function() { |
|
41 | 41 | $(this).parent().nextUntil('tr.line').toggle('inline-comments'); |
|
42 | 42 | }); |
|
43 | 43 | }; |
|
44 | 44 | |
|
45 | 45 | /* Comment form for main and inline comments */ |
|
46 | 46 | (function(mod) { |
|
47 | 47 | |
|
48 | 48 | if (typeof exports == "object" && typeof module == "object") { |
|
49 | 49 | // CommonJS |
|
50 | 50 | module.exports = mod(); |
|
51 | 51 | } |
|
52 | 52 | else { |
|
53 | 53 | // Plain browser env |
|
54 | 54 | (this || window).CommentForm = mod(); |
|
55 | 55 | } |
|
56 | 56 | |
|
57 | 57 | })(function() { |
|
58 | 58 | "use strict"; |
|
59 | 59 | |
|
60 | 60 | function CommentForm(formElement, commitId, pullRequestId, lineNo, initAutocompleteActions, resolvesCommentId) { |
|
61 | 61 | if (!(this instanceof CommentForm)) { |
|
62 | 62 | return new CommentForm(formElement, commitId, pullRequestId, lineNo, initAutocompleteActions, resolvesCommentId); |
|
63 | 63 | } |
|
64 | 64 | |
|
65 | 65 | // bind the element instance to our Form |
|
66 | 66 | $(formElement).get(0).CommentForm = this; |
|
67 | 67 | |
|
68 | 68 | this.withLineNo = function(selector) { |
|
69 | 69 | var lineNo = this.lineNo; |
|
70 | 70 | if (lineNo === undefined) { |
|
71 | 71 | return selector |
|
72 | 72 | } else { |
|
73 | 73 | return selector + '_' + lineNo; |
|
74 | 74 | } |
|
75 | 75 | }; |
|
76 | 76 | |
|
77 | 77 | this.commitId = commitId; |
|
78 | 78 | this.pullRequestId = pullRequestId; |
|
79 | 79 | this.lineNo = lineNo; |
|
80 | 80 | this.initAutocompleteActions = initAutocompleteActions; |
|
81 | 81 | |
|
82 | 82 | this.previewButton = this.withLineNo('#preview-btn'); |
|
83 | 83 | this.previewContainer = this.withLineNo('#preview-container'); |
|
84 | 84 | |
|
85 | 85 | this.previewBoxSelector = this.withLineNo('#preview-box'); |
|
86 | 86 | |
|
87 | 87 | this.editButton = this.withLineNo('#edit-btn'); |
|
88 | 88 | this.editContainer = this.withLineNo('#edit-container'); |
|
89 | 89 | this.cancelButton = this.withLineNo('#cancel-btn'); |
|
90 | 90 | this.commentType = this.withLineNo('#comment_type'); |
|
91 | 91 | |
|
92 | 92 | this.resolvesId = null; |
|
93 | 93 | this.resolvesActionId = null; |
|
94 | 94 | |
|
95 | 95 | this.closesPr = '#close_pull_request'; |
|
96 | 96 | |
|
97 | 97 | this.cmBox = this.withLineNo('#text'); |
|
98 | 98 | this.cm = initCommentBoxCodeMirror(this, this.cmBox, this.initAutocompleteActions); |
|
99 | 99 | |
|
100 | 100 | this.statusChange = this.withLineNo('#change_status'); |
|
101 | 101 | |
|
102 | 102 | this.submitForm = formElement; |
|
103 | 103 | this.submitButton = $(this.submitForm).find('input[type="submit"]'); |
|
104 | 104 | this.submitButtonText = this.submitButton.val(); |
|
105 | 105 | |
|
106 |     this.previewUrl = pyroutes.url('changeset_comment_preview', |

107 |         {'repo_name': templateContext.repo_name}); |

106 | this.previewUrl = pyroutes.url('repo_commit_comment_preview', | |
|
107 | {'repo_name': templateContext.repo_name, | |
|
108 | 'commit_id': templateContext.commit_data.commit_id}); | |
|
108 | 109 | |
|
109 | 110 | if (resolvesCommentId){ |
|
110 | 111 | this.resolvesId = '#resolve_comment_{0}'.format(resolvesCommentId); |
|
111 | 112 | this.resolvesActionId = '#resolve_comment_action_{0}'.format(resolvesCommentId); |
|
112 | 113 | $(this.commentType).prop('disabled', true); |
|
113 | 114 | $(this.commentType).addClass('disabled'); |
|
114 | 115 | |
|
115 | 116 | // disable select |
|
116 | 117 | setTimeout(function() { |
|
117 | 118 | $(self.statusChange).select2('readonly', true); |
|
118 | 119 | }, 10); |
|
119 | 120 | |
|
120 | 121 | var resolvedInfo = ( |
|
121 | 122 | '<li class="resolve-action">' + |
|
122 | 123 | '<input type="hidden" id="resolve_comment_{0}" name="resolve_comment_{0}" value="{0}">' + |
|
123 | 124 | '<button id="resolve_comment_action_{0}" class="resolve-text btn btn-sm" onclick="return Rhodecode.comments.submitResolution({0})">{1} #{0}</button>' + |
|
124 | 125 | '</li>' |
|
125 | 126 | ).format(resolvesCommentId, _gettext('resolve comment')); |
|
126 | 127 | $(resolvedInfo).insertAfter($(this.commentType).parent()); |
|
127 | 128 | } |
|
128 | 129 | |
|
129 | 130 | // based on commitId or pullRequestId, decide where we submit

130 | 131 | // our data
|
131 | 132 | if (this.commitId){ |
|
132 | this.submitUrl = pyroutes.url('
|
133 | this.submitUrl = pyroutes.url('repo_commit_comment_create', | |
|
133 | 134 | {'repo_name': templateContext.repo_name, |
|
134 | '
|
135 | this.selfUrl = pyroutes.url('
|
135 | 'commit_id': this.commitId}); | |
|
136 | this.selfUrl = pyroutes.url('repo_commit', | |
|
136 | 137 | {'repo_name': templateContext.repo_name, |
|
137 | '
|
138 | 'commit_id': this.commitId}); | |
|
138 | 139 | |
|
139 | 140 | } else if (this.pullRequestId) { |
|
140 | 141 | this.submitUrl = pyroutes.url('pullrequest_comment', |
|
141 | 142 | {'repo_name': templateContext.repo_name, |
|
142 | 143 | 'pull_request_id': this.pullRequestId}); |
|
143 | 144 | this.selfUrl = pyroutes.url('pullrequest_show', |
|
144 | 145 | {'repo_name': templateContext.repo_name, |
|
145 | 146 | 'pull_request_id': this.pullRequestId}); |
|
146 | 147 | |
|
147 | 148 | } else { |
|
148 | 149 | throw new Error( |
|
149 | 150 | 'CommentForm requires pullRequestId, or commitId to be specified.') |
|
150 | 151 | } |
|
151 | 152 | |
|
152 | 153 | // FUNCTIONS and helpers |
|
153 | 154 | var self = this; |
|
154 | 155 | |
|
155 | 156 | this.isInline = function(){ |
|
156 | 157 | return this.lineNo && this.lineNo != 'general'; |
|
157 | 158 | }; |
|
158 | 159 | |
|
159 | 160 | this.getCmInstance = function(){ |
|
160 | 161 | return this.cm |
|
161 | 162 | }; |
|
162 | 163 | |
|
163 | 164 | this.setPlaceholder = function(placeholder) { |
|
164 | 165 | var cm = this.getCmInstance(); |
|
165 | 166 | if (cm){ |
|
166 | 167 | cm.setOption('placeholder', placeholder); |
|
167 | 168 | } |
|
168 | 169 | }; |
|
169 | 170 | |
|
170 | 171 | this.getCommentStatus = function() { |
|
171 | 172 | return $(this.submitForm).find(this.statusChange).val(); |
|
172 | 173 | }; |
|
173 | 174 | this.getCommentType = function() { |
|
174 | 175 | return $(this.submitForm).find(this.commentType).val(); |
|
175 | 176 | }; |
|
176 | 177 | |
|
177 | 178 | this.getResolvesId = function() { |
|
178 | 179 | return $(this.submitForm).find(this.resolvesId).val() || null; |
|
179 | 180 | }; |
|
180 | 181 | |
|
181 | 182 | this.getClosePr = function() { |
|
182 | 183 | return $(this.submitForm).find(this.closesPr).val() || null; |
|
183 | 184 | }; |
|
184 | 185 | |
|
185 | 186 | this.markCommentResolved = function(resolvedCommentId){ |
|
186 | 187 | $('#comment-label-{0}'.format(resolvedCommentId)).find('.resolved').show(); |
|
187 | 188 | $('#comment-label-{0}'.format(resolvedCommentId)).find('.resolve').hide(); |
|
188 | 189 | }; |
|
189 | 190 | |
|
190 | 191 | this.isAllowedToSubmit = function() { |
|
191 | 192 | return !$(this.submitButton).prop('disabled'); |
|
192 | 193 | }; |
|
193 | 194 | |
|
194 | 195 | this.initStatusChangeSelector = function(){ |
|
195 | 196 | var formatChangeStatus = function(state, escapeMarkup) { |
|
196 | 197 | var originalOption = state.element; |
|
197 | 198 | return '<div class="flag_status ' + $(originalOption).data('status') + ' pull-left"></div>' + |
|
198 | 199 | '<span>' + escapeMarkup(state.text) + '</span>'; |
|
199 | 200 | }; |
|
200 | 201 | var formatResult = function(result, container, query, escapeMarkup) { |
|
201 | 202 | return formatChangeStatus(result, escapeMarkup); |
|
202 | 203 | }; |
|
203 | 204 | |
|
204 | 205 | var formatSelection = function(data, container, escapeMarkup) { |
|
205 | 206 | return formatChangeStatus(data, escapeMarkup); |
|
206 | 207 | }; |
|
207 | 208 | |
|
208 | 209 | $(this.submitForm).find(this.statusChange).select2({ |
|
209 | 210 | placeholder: _gettext('Status Review'), |
|
210 | 211 | formatResult: formatResult, |
|
211 | 212 | formatSelection: formatSelection, |
|
212 | 213 | containerCssClass: "drop-menu status_box_menu", |
|
213 | 214 | dropdownCssClass: "drop-menu-dropdown", |
|
214 | 215 | dropdownAutoWidth: true, |
|
215 | 216 | minimumResultsForSearch: -1 |
|
216 | 217 | }); |
|
217 | 218 | $(this.submitForm).find(this.statusChange).on('change', function() { |
|
218 | 219 | var status = self.getCommentStatus(); |
|
219 | 220 | |
|
220 | 221 | if (status && !self.isInline()) { |
|
221 | 222 | $(self.submitButton).prop('disabled', false); |
|
222 | 223 | } |
|
223 | 224 | |
|
224 | 225 | var placeholderText = _gettext('Comment text will be set automatically based on currently selected status ({0}) ...').format(status); |
|
225 | 226 | self.setPlaceholder(placeholderText) |
|
226 | 227 | }) |
|
227 | 228 | }; |
|
228 | 229 | |
|
229 | 230 | // reset the comment form to its original state
|
230 | 231 | this.resetCommentFormState = function(content) { |
|
231 | 232 | content = content || ''; |
|
232 | 233 | |
|
233 | 234 | $(this.editContainer).show(); |
|
234 | 235 | $(this.editButton).parent().addClass('active'); |
|
235 | 236 | |
|
236 | 237 | $(this.previewContainer).hide(); |
|
237 | 238 | $(this.previewButton).parent().removeClass('active'); |
|
238 | 239 | |
|
239 | 240 | this.setActionButtonsDisabled(true); |
|
240 | 241 | self.cm.setValue(content); |
|
241 | 242 | self.cm.setOption("readOnly", false); |
|
242 | 243 | |
|
243 | 244 | if (this.resolvesId) { |
|
244 | 245 | // destroy the resolve action |
|
245 | 246 | $(this.resolvesId).parent().remove(); |
|
246 | 247 | } |
|
247 | 248 | // reset closingPR flag |
|
248 | 249 | $('.close-pr-input').remove(); |
|
249 | 250 | |
|
250 | 251 | $(this.statusChange).select2('readonly', false); |
|
251 | 252 | }; |
|
252 | 253 | |
|
253 | 254 | this.globalSubmitSuccessCallback = function(){ |
|
254 | 255 | // default behaviour is to call GLOBAL hook, if it's registered. |
|
255 | 256 | if (window.commentFormGlobalSubmitSuccessCallback !== undefined){ |
|
256 | 257 | commentFormGlobalSubmitSuccessCallback() |
|
257 | 258 | } |
|
258 | 259 | }; |
|
259 | 260 | |
|
260 | 261 | this.submitAjaxPOST = function(url, postData, successHandler, failHandler) { |
|
261 | 262 | failHandler = failHandler || function() {}; |
|
262 | 263 | var postData = toQueryString(postData); |
|
263 | 264 | var request = $.ajax({ |
|
264 | 265 | url: url, |
|
265 | 266 | type: 'POST', |
|
266 | 267 | data: postData, |
|
267 | 268 | headers: {'X-PARTIAL-XHR': true} |
|
268 | 269 | }) |
|
269 | 270 | .done(function(data) { |
|
270 | 271 | successHandler(data); |
|
271 | 272 | }) |
|
272 | 273 | .fail(function(data, textStatus, errorThrown){ |
|
273 | 274 | alert( |
|
274 | 275 | "Error while submitting comment.\n" + |
|
275 | 276 | "Error code {0} ({1}).".format(data.status, data.statusText)); |
|
276 | 277 | failHandler() |
|
277 | 278 | }); |
|
278 | 279 | return request; |
|
279 | 280 | }; |
|
280 | 281 | |
|
281 | 282 | // overwrite the submitHandler; we need to do this for inline comments
|
282 | 283 | this.setHandleFormSubmit = function(callback) { |
|
283 | 284 | this.handleFormSubmit = callback; |
|
284 | 285 | }; |
|
285 | 286 | |
|
286 | 287 | // overwrite the submitSuccessHandler
|
287 | 288 | this.setGlobalSubmitSuccessCallback = function(callback) { |
|
288 | 289 | this.globalSubmitSuccessCallback = callback; |
|
289 | 290 | }; |
|
290 | 291 | |
|
291 | 292 | // default handler for submit of main comments
|
292 | 293 | this.handleFormSubmit = function() { |
|
293 | 294 | var text = self.cm.getValue(); |
|
294 | 295 | var status = self.getCommentStatus(); |
|
295 | 296 | var commentType = self.getCommentType(); |
|
296 | 297 | var resolvesCommentId = self.getResolvesId(); |
|
297 | 298 | var closePullRequest = self.getClosePr(); |
|
298 | 299 | |
|
299 | 300 | if (text === "" && !status) { |
|
300 | 301 | return; |
|
301 | 302 | } |
|
302 | 303 | |
|
303 | 304 | var excludeCancelBtn = false; |
|
304 | 305 | var submitEvent = true; |
|
305 | 306 | self.setActionButtonsDisabled(true, excludeCancelBtn, submitEvent); |
|
306 | 307 | self.cm.setOption("readOnly", true); |
|
307 | 308 | |
|
308 | 309 | var postData = { |
|
309 | 310 | 'text': text, |
|
310 | 311 | 'changeset_status': status, |
|
311 | 312 | 'comment_type': commentType, |
|
312 | 313 | 'csrf_token': CSRF_TOKEN |
|
313 | 314 | }; |
|
314 | 315 | |
|
315 | 316 | if (resolvesCommentId) { |
|
316 | 317 | postData['resolves_comment_id'] = resolvesCommentId; |
|
317 | 318 | } |
|
318 | 319 | |
|
319 | 320 | if (closePullRequest) { |
|
320 | 321 | postData['close_pull_request'] = true; |
|
321 | 322 | } |
|
322 | 323 | |
|
323 | 324 | var submitSuccessCallback = function(o) { |
|
324 | 325 | // reload page if we change status for single commit. |
|
325 | 326 | if (status && self.commitId) { |
|
326 | 327 | location.reload(true); |
|
327 | 328 | } else { |
|
328 | 329 | $('#injected_page_comments').append(o.rendered_text); |
|
329 | 330 | self.resetCommentFormState(); |
|
330 | 331 | timeagoActivate(); |
|
331 | 332 | |
|
332 | 333 | // mark visually which comment was resolved |
|
333 | 334 | if (resolvesCommentId) { |
|
334 | 335 | self.markCommentResolved(resolvesCommentId); |
|
335 | 336 | } |
|
336 | 337 | } |
|
337 | 338 | |
|
338 | 339 | // run global callback on submit |
|
339 | 340 | self.globalSubmitSuccessCallback(); |
|
340 | 341 | |
|
341 | 342 | }; |
|
342 | 343 | var submitFailCallback = function(){ |
|
343 | 344 | self.resetCommentFormState(text); |
|
344 | 345 | }; |
|
345 | 346 | self.submitAjaxPOST( |
|
346 | 347 | self.submitUrl, postData, submitSuccessCallback, submitFailCallback); |
|
347 | 348 | }; |
|
348 | 349 | |
|
349 | 350 | this.previewSuccessCallback = function(o) { |
|
350 | 351 | $(self.previewBoxSelector).html(o); |
|
351 | 352 | $(self.previewBoxSelector).removeClass('unloaded'); |
|
352 | 353 | |
|
353 | 354 | // swap buttons, making preview active |
|
354 | 355 | $(self.previewButton).parent().addClass('active'); |
|
355 | 356 | $(self.editButton).parent().removeClass('active'); |
|
356 | 357 | |
|
357 | 358 | // unlock buttons |
|
358 | 359 | self.setActionButtonsDisabled(false); |
|
359 | 360 | }; |
|
360 | 361 | |
|
361 | 362 | this.setActionButtonsDisabled = function(state, excludeCancelBtn, submitEvent) { |
|
362 | 363 | excludeCancelBtn = excludeCancelBtn || false; |
|
363 | 364 | submitEvent = submitEvent || false; |
|
364 | 365 | |
|
365 | 366 | $(this.editButton).prop('disabled', state); |
|
366 | 367 | $(this.previewButton).prop('disabled', state); |
|
367 | 368 | |
|
368 | 369 | if (!excludeCancelBtn) { |
|
369 | 370 | $(this.cancelButton).prop('disabled', state); |
|
370 | 371 | } |
|
371 | 372 | |
|
372 | 373 | var submitState = state; |
|
373 | 374 | if (!submitEvent && this.getCommentStatus() && !self.isInline()) { |
|
374 | 375 | // if the value of the commit review status is set, we enable the

375 | 376 | // submit button, but only on the main form; isInline means inline
|
376 | 377 | submitState = false |
|
377 | 378 | } |
|
378 | 379 | |
|
379 | 380 | $(this.submitButton).prop('disabled', submitState); |
|
380 | 381 | if (submitEvent) { |
|
381 | 382 | $(this.submitButton).val(_gettext('Submitting...')); |
|
382 | 383 | } else { |
|
383 | 384 | $(this.submitButton).val(this.submitButtonText); |
|
384 | 385 | } |
|
385 | 386 | |
|
386 | 387 | }; |
|
387 | 388 | |
|
388 | 389 | // lock preview/edit/submit buttons on load, but exclude cancel button |
|
389 | 390 | var excludeCancelBtn = true; |
|
390 | 391 | this.setActionButtonsDisabled(true, excludeCancelBtn); |
|
391 | 392 | |
|
392 | 393 | // anonymous users don't have access to initialized CM instance |
|
393 | 394 | if (this.cm !== undefined){ |
|
394 | 395 | this.cm.on('change', function(cMirror) { |
|
395 | 396 | if (cMirror.getValue() === "") { |
|
396 | 397 | self.setActionButtonsDisabled(true, excludeCancelBtn) |
|
397 | 398 | } else { |
|
398 | 399 | self.setActionButtonsDisabled(false, excludeCancelBtn) |
|
399 | 400 | } |
|
400 | 401 | }); |
|
401 | 402 | } |
|
402 | 403 | |
|
403 | 404 | $(this.editButton).on('click', function(e) { |
|
404 | 405 | e.preventDefault(); |
|
405 | 406 | |
|
406 | 407 | $(self.previewButton).parent().removeClass('active'); |
|
407 | 408 | $(self.previewContainer).hide(); |
|
408 | 409 | |
|
409 | 410 | $(self.editButton).parent().addClass('active'); |
|
410 | 411 | $(self.editContainer).show(); |
|
411 | 412 | |
|
412 | 413 | }); |
|
413 | 414 | |
|
414 | 415 | $(this.previewButton).on('click', function(e) { |
|
415 | 416 | e.preventDefault(); |
|
416 | 417 | var text = self.cm.getValue(); |
|
417 | 418 | |
|
418 | 419 | if (text === "") { |
|
419 | 420 | return; |
|
420 | 421 | } |
|
421 | 422 | |
|
422 | 423 | var postData = { |
|
423 | 424 | 'text': text, |
|
424 | 425 | 'renderer': templateContext.visual.default_renderer, |
|
425 | 426 | 'csrf_token': CSRF_TOKEN |
|
426 | 427 | }; |
|
427 | 428 | |
|
428 | 429 | // lock ALL buttons on preview |
|
429 | 430 | self.setActionButtonsDisabled(true); |
|
430 | 431 | |
|
431 | 432 | $(self.previewBoxSelector).addClass('unloaded'); |
|
432 | 433 | $(self.previewBoxSelector).html(_gettext('Loading ...')); |
|
433 | 434 | |
|
434 | 435 | $(self.editContainer).hide(); |
|
435 | 436 | $(self.previewContainer).show(); |
|
436 | 437 | |
|
437 | 438 | // by default we reset the comment state, preserving the text
|
438 | 439 | var previewFailCallback = function(){ |
|
439 | 440 | self.resetCommentFormState(text) |
|
440 | 441 | }; |
|
441 | 442 | self.submitAjaxPOST( |
|
442 | 443 | self.previewUrl, postData, self.previewSuccessCallback, |
|
443 | 444 | previewFailCallback); |
|
444 | 445 | |
|
445 | 446 | $(self.previewButton).parent().addClass('active'); |
|
446 | 447 | $(self.editButton).parent().removeClass('active'); |
|
447 | 448 | }); |
|
448 | 449 | |
|
449 | 450 | $(this.submitForm).submit(function(e) { |
|
450 | 451 | e.preventDefault(); |
|
451 | 452 | var allowedToSubmit = self.isAllowedToSubmit(); |
|
452 | 453 | if (!allowedToSubmit){ |
|
453 | 454 | return false; |
|
454 | 455 | } |
|
455 | 456 | self.handleFormSubmit(); |
|
456 | 457 | }); |
|
457 | 458 | |
|
458 | 459 | } |
|
459 | 460 | |
|
460 | 461 | return CommentForm; |
|
461 | 462 | }); |
|
462 | 463 | |
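
Note: the (function(mod) {...}) wrapper at the top of this file is a slimmed-down UMD-style pattern: under CommonJS it exports the factory's return value, otherwise it attaches it to the global object. A minimal sketch of the same shape, with MyWidget as an illustrative stand-in name:

    (function(mod) {
        if (typeof exports == "object" && typeof module == "object") {
            module.exports = mod();             // CommonJS (bundlers, tests)
        } else {
            (this || window).MyWidget = mod();  // plain browser global
        }
    })(function() {
        "use strict";
        function MyWidget() {}                  // illustrative constructor
        return MyWidget;
    });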
|
463 | 464 | /* comments controller */ |
|
464 | 465 | var CommentsController = function() { |
|
465 | 466 | var mainComment = '#text'; |
|
466 | 467 | var self = this; |
|
467 | 468 | |
|
468 | 469 | this.cancelComment = function(node) { |
|
469 | 470 | var $node = $(node); |
|
470 | 471 | var $td = $node.closest('td'); |
|
471 | 472 | $node.closest('.comment-inline-form').remove(); |
|
472 | 473 | return false; |
|
473 | 474 | }; |
|
474 | 475 | |
|
475 | 476 | this.getLineNumber = function(node) { |
|
476 | 477 | var $node = $(node); |
|
477 | 478 | return $node.closest('td').attr('data-line-number'); |
|
478 | 479 | }; |
|
479 | 480 | |
|
480 | 481 | this.scrollToComment = function(node, offset, outdated) { |
|
481 | 482 | if (offset === undefined) { |
|
482 | 483 | offset = 0; |
|
483 | 484 | } |
|
484 | 485 | var outdated = outdated || false; |
|
485 | 486 | var klass = outdated ? 'div.comment-outdated' : 'div.comment-current'; |
|
486 | 487 | |
|
487 | 488 | if (!node) { |
|
488 | 489 | node = $('.comment-selected'); |
|
489 | 490 | if (!node.length) { |
|
490 | 491 | node = $('.comment-current')
|
491 | 492 | } |
|
492 | 493 | } |
|
493 | 494 | $wrapper = $(node).closest('div.comment'); |
|
494 | 495 | $comment = $(node).closest(klass); |
|
495 | 496 | $comments = $(klass); |
|
496 | 497 | |
|
497 | 498 | // show hidden comment when referenced. |
|
498 | 499 | if (!$wrapper.is(':visible')){ |
|
499 | 500 | $wrapper.show(); |
|
500 | 501 | } |
|
501 | 502 | |
|
502 | 503 | $('.comment-selected').removeClass('comment-selected'); |
|
503 | 504 | |
|
504 | 505 | var nextIdx = $(klass).index($comment) + offset; |
|
505 | 506 | if (nextIdx >= $comments.length) { |
|
506 | 507 | nextIdx = 0; |
|
507 | 508 | } |
|
508 | 509 | var $next = $(klass).eq(nextIdx); |
|
509 | 510 | |
|
510 | 511 | var $cb = $next.closest('.cb'); |
|
511 | 512 | $cb.removeClass('cb-collapsed'); |
|
512 | 513 | |
|
513 | 514 | var $filediffCollapseState = $cb.closest('.filediff').prev(); |
|
514 | 515 | $filediffCollapseState.prop('checked', false); |
|
515 | 516 | $next.addClass('comment-selected'); |
|
516 | 517 | scrollToElement($next); |
|
517 | 518 | return false; |
|
518 | 519 | }; |
|
519 | 520 | |
|
520 | 521 | this.nextComment = function(node) { |
|
521 | 522 | return self.scrollToComment(node, 1); |
|
522 | 523 | }; |
|
523 | 524 | |
|
524 | 525 | this.prevComment = function(node) { |
|
525 | 526 | return self.scrollToComment(node, -1); |
|
526 | 527 | }; |
|
527 | 528 | |
|
528 | 529 | this.nextOutdatedComment = function(node) { |
|
529 | 530 | return self.scrollToComment(node, 1, true); |
|
530 | 531 | }; |
|
531 | 532 | |
|
532 | 533 | this.prevOutdatedComment = function(node) { |
|
533 | 534 | return self.scrollToComment(node, -1, true); |
|
534 | 535 | }; |
|
535 | 536 | |
|
536 | 537 | this.deleteComment = function(node) { |
|
537 | 538 | if (!confirm(_gettext('Delete this comment?'))) { |
|
538 | 539 | return false; |
|
539 | 540 | } |
|
540 | 541 | var $node = $(node); |
|
541 | 542 | var $td = $node.closest('td'); |
|
542 | 543 | var $comment = $node.closest('.comment'); |
|
543 | 544 | var comment_id = $comment.attr('data-comment-id'); |
|
544 | 545 | var url = AJAX_COMMENT_DELETE_URL.replace('__COMMENT_ID__', comment_id); |
|
545 | 546 | var postData = { |
|
546 | 547 | '_method': 'delete', |
|
547 | 548 | 'csrf_token': CSRF_TOKEN |
|
548 | 549 | }; |
|
549 | 550 | |
|
550 | 551 | $comment.addClass('comment-deleting'); |
|
551 | 552 | $comment.hide('fast'); |
|
552 | 553 | |
|
553 | 554 | var success = function(response) { |
|
554 | 555 | $comment.remove(); |
|
555 | 556 | return false; |
|
556 | 557 | }; |
|
557 | 558 | var failure = function(data, textStatus, xhr) { |
|
558 | 559 | alert("error processing request: " + textStatus); |
|
559 | 560 | $comment.show('fast'); |
|
560 | 561 | $comment.removeClass('comment-deleting'); |
|
561 | 562 | return false; |
|
562 | 563 | }; |
|
563 | 564 | ajaxPOST(url, postData, success, failure); |
|
564 | 565 | }; |
|
565 | 566 | |
|
566 | 567 | this.toggleWideMode = function (node) { |
|
567 | 568 | if ($('#content').hasClass('wrapper')) { |
|
568 | 569 | $('#content').removeClass("wrapper"); |
|
569 | 570 | $('#content').addClass("wide-mode-wrapper"); |
|
570 | 571 | $(node).addClass('btn-success'); |
|
571 | 572 | } else { |
|
572 | 573 | $('#content').removeClass("wide-mode-wrapper"); |
|
573 | 574 | $('#content').addClass("wrapper"); |
|
574 | 575 | $(node).removeClass('btn-success'); |
|
575 | 576 | } |
|
576 | 577 | return false; |
|
577 | 578 | }; |
|
578 | 579 | |
|
579 | 580 | this.toggleComments = function(node, show) { |
|
580 | 581 | var $filediff = $(node).closest('.filediff'); |
|
581 | 582 | if (show === true) { |
|
582 | 583 | $filediff.removeClass('hide-comments'); |
|
583 | 584 | } else if (show === false) { |
|
584 | 585 | $filediff.find('.hide-line-comments').removeClass('hide-line-comments'); |
|
585 | 586 | $filediff.addClass('hide-comments'); |
|
586 | 587 | } else { |
|
587 | 588 | $filediff.find('.hide-line-comments').removeClass('hide-line-comments'); |
|
588 | 589 | $filediff.toggleClass('hide-comments'); |
|
589 | 590 | } |
|
590 | 591 | return false; |
|
591 | 592 | }; |
|
592 | 593 | |
|
593 | 594 | this.toggleLineComments = function(node) { |
|
594 | 595 | self.toggleComments(node, true); |
|
595 | 596 | var $node = $(node); |
|
596 | 597 | $node.closest('tr').toggleClass('hide-line-comments'); |
|
597 | 598 | }; |
|
598 | 599 | |
|
599 | 600 | this.createCommentForm = function(formElement, lineno, placeholderText, initAutocompleteActions, resolvesCommentId){ |
|
600 | 601 | var pullRequestId = templateContext.pull_request_data.pull_request_id; |
|
601 | 602 | var commitId = templateContext.commit_data.commit_id; |
|
602 | 603 | |
|
603 | 604 | var commentForm = new CommentForm( |
|
604 | 605 | formElement, commitId, pullRequestId, lineno, initAutocompleteActions, resolvesCommentId); |
|
605 | 606 | var cm = commentForm.getCmInstance(); |
|
606 | 607 | |
|
607 | 608 | if (resolvesCommentId){ |
|
608 | 609 | var placeholderText = _gettext('Leave a comment, or click resolve button to resolve TODO comment #{0}').format(resolvesCommentId); |
|
609 | 610 | } |
|
610 | 611 | |
|
611 | 612 | setTimeout(function() { |
|
612 | 613 | // callbacks |
|
613 | 614 | if (cm !== undefined) { |
|
614 | 615 | commentForm.setPlaceholder(placeholderText); |
|
615 | 616 | if (commentForm.isInline()) { |
|
616 | 617 | cm.focus(); |
|
617 | 618 | cm.refresh(); |
|
618 | 619 | } |
|
619 | 620 | } |
|
620 | 621 | }, 10); |
|
621 | 622 | |
|
622 | 623 | // trigger scroll-down to the resolved comment, since it might be far

623 | 624 | // away from the clicked element
|
624 | 625 | if (resolvesCommentId){ |
|
625 | 626 | var actionNode = $(commentForm.resolvesActionId).offset(); |
|
626 | 627 | |
|
627 | 628 | setTimeout(function() { |
|
628 | 629 | if (actionNode) { |
|
629 | 630 | $('body, html').animate({scrollTop: actionNode.top}, 10); |
|
630 | 631 | } |
|
631 | 632 | }, 100); |
|
632 | 633 | } |
|
633 | 634 | |
|
634 | 635 | return commentForm; |
|
635 | 636 | }; |
|
636 | 637 | |
|
637 | 638 | this.createGeneralComment = function (lineNo, placeholderText, resolvesCommentId) { |
|
638 | 639 | |
|
639 | 640 | var tmpl = $('#cb-comment-general-form-template').html(); |
|
640 | 641 | tmpl = tmpl.format(null, 'general'); |
|
641 | 642 | var $form = $(tmpl); |
|
642 | 643 | |
|
643 | 644 | var $formPlaceholder = $('#cb-comment-general-form-placeholder'); |
|
644 | 645 | var curForm = $formPlaceholder.find('form'); |
|
645 | 646 | if (curForm){ |
|
646 | 647 | curForm.remove(); |
|
647 | 648 | } |
|
648 | 649 | $formPlaceholder.append($form); |
|
649 | 650 | |
|
650 | 651 | var _form = $($form[0]); |
|
651 | 652 | var autocompleteActions = ['approve', 'reject', 'as_note', 'as_todo']; |
|
652 | 653 | var commentForm = this.createCommentForm( |
|
653 | 654 | _form, lineNo, placeholderText, autocompleteActions, resolvesCommentId); |
|
654 | 655 | commentForm.initStatusChangeSelector(); |
|
655 | 656 | |
|
656 | 657 | return commentForm; |
|
657 | 658 | }; |
|
658 | 659 | |
|
659 | 660 | this.createComment = function(node, resolutionComment) { |
|
660 | 661 | var resolvesCommentId = resolutionComment || null; |
|
661 | 662 | var $node = $(node); |
|
662 | 663 | var $td = $node.closest('td'); |
|
663 | 664 | var $form = $td.find('.comment-inline-form'); |
|
664 | 665 | |
|
665 | 666 | if (!$form.length) { |
|
666 | 667 | |
|
667 | 668 | var $filediff = $node.closest('.filediff'); |
|
668 | 669 | $filediff.removeClass('hide-comments'); |
|
669 | 670 | var f_path = $filediff.attr('data-f-path'); |
|
670 | 671 | var lineno = self.getLineNumber(node); |
|
671 | 672 | // create new HTML from the template
|
672 | 673 | var tmpl = $('#cb-comment-inline-form-template').html(); |
|
673 | 674 | tmpl = tmpl.format(f_path, lineno); |
|
674 | 675 | $form = $(tmpl); |
|
675 | 676 | |
|
676 | 677 | var $comments = $td.find('.inline-comments'); |
|
677 | 678 | if (!$comments.length) { |
|
678 | 679 | $comments = $( |
|
679 | 680 | $('#cb-comments-inline-container-template').html()); |
|
680 | 681 | $td.append($comments); |
|
681 | 682 | } |
|
682 | 683 | |
|
683 | 684 | $td.find('.cb-comment-add-button').before($form); |
|
684 | 685 | |
|
685 | 686 | var placeholderText = _gettext('Leave a comment on line {0}.').format(lineno); |
|
686 | 687 | var _form = $($form[0]).find('form'); |
|
687 | 688 | var autocompleteActions = ['as_note', 'as_todo']; |
|
688 | 689 | var commentForm = this.createCommentForm( |
|
689 | 690 | _form, lineno, placeholderText, autocompleteActions, resolvesCommentId); |
|
690 | 691 | |
|
691 | 692 | $.Topic('/ui/plugins/code/comment_form_built').prepareOrPublish({ |
|
692 | 693 | form: _form, |
|
693 | 694 | parent: $td[0], |
|
694 | 695 | lineno: lineno, |
|
695 | 696 | f_path: f_path} |
|
696 | 697 | ); |
|
697 | 698 | |
|
698 | 699 | // set a CUSTOM submit handler for inline comments. |
|
699 | 700 | commentForm.setHandleFormSubmit(function(o) { |
|
700 | 701 | var text = commentForm.cm.getValue(); |
|
701 | 702 | var commentType = commentForm.getCommentType(); |
|
702 | 703 | var resolvesCommentId = commentForm.getResolvesId(); |
|
703 | 704 | |
|
704 | 705 | if (text === "") { |
|
705 | 706 | return; |
|
706 | 707 | } |
|
707 | 708 | |
|
708 | 709 | if (lineno === undefined) { |
|
709 | 710 | alert('missing line !'); |
|
710 | 711 | return; |
|
711 | 712 | } |
|
712 | 713 | if (f_path === undefined) { |
|
713 | 714 | alert('missing file path !'); |
|
714 | 715 | return; |
|
715 | 716 | } |
|
716 | 717 | |
|
717 | 718 | var excludeCancelBtn = false; |
|
718 | 719 | var submitEvent = true; |
|
719 | 720 | commentForm.setActionButtonsDisabled(true, excludeCancelBtn, submitEvent); |
|
720 | 721 | commentForm.cm.setOption("readOnly", true); |
|
721 | 722 | var postData = { |
|
722 | 723 | 'text': text, |
|
723 | 724 | 'f_path': f_path, |
|
724 | 725 | 'line': lineno, |
|
725 | 726 | 'comment_type': commentType, |
|
726 | 727 | 'csrf_token': CSRF_TOKEN |
|
727 | 728 | }; |
|
728 | 729 | if (resolvesCommentId){ |
|
729 | 730 | postData['resolves_comment_id'] = resolvesCommentId; |
|
730 | 731 | } |
|
731 | 732 | |
|
732 | 733 | var submitSuccessCallback = function(json_data) { |
|
733 | 734 | $form.remove(); |
|
734 | 735 | try { |
|
735 | 736 | var html = json_data.rendered_text; |
|
736 | 737 | var lineno = json_data.line_no; |
|
737 | 738 | var target_id = json_data.target_id; |
|
738 | 739 | |
|
739 | 740 | $comments.find('.cb-comment-add-button').before(html); |
|
740 | 741 | |
|
741 | 742 | // mark visually which comment was resolved
|
742 | 743 | if (resolvesCommentId) { |
|
743 | 744 | commentForm.markCommentResolved(resolvesCommentId); |
|
744 | 745 | } |
|
745 | 746 | |
|
746 | 747 | // run global callback on submit |
|
747 | 748 | commentForm.globalSubmitSuccessCallback(); |
|
748 | 749 | |
|
749 | 750 | } catch (e) { |
|
750 | 751 | console.error(e); |
|
751 | 752 | } |
|
752 | 753 | |
|
753 | 754 | // re-trigger the linkification of next/prev navigation
|
754 | 755 | linkifyComments($('.inline-comment-injected')); |
|
755 | 756 | timeagoActivate(); |
|
756 | 757 | commentForm.setActionButtonsDisabled(false); |
|
757 | 758 | |
|
758 | 759 | }; |
|
759 | 760 | var submitFailCallback = function(){ |
|
760 | 761 | commentForm.resetCommentFormState(text) |
|
761 | 762 | }; |
|
762 | 763 | commentForm.submitAjaxPOST( |
|
763 | 764 | commentForm.submitUrl, postData, submitSuccessCallback, submitFailCallback); |
|
764 | 765 | }); |
|
765 | 766 | } |
|
766 | 767 | |
|
767 | 768 | $form.addClass('comment-inline-form-open'); |
|
768 | 769 | }; |
|
769 | 770 | |
|
770 | 771 | this.createResolutionComment = function(commentId){ |
|
771 | 772 | // hide the trigger text |
|
772 | 773 | $('#resolve-comment-{0}'.format(commentId)).hide(); |
|
773 | 774 | |
|
774 | 775 | var comment = $('#comment-'+commentId); |
|
775 | 776 | var commentData = comment.data(); |
|
776 | 777 | if (commentData.commentInline) { |
|
777 | 778 | this.createComment(comment, commentId) |
|
778 | 779 | } else { |
|
779 | 780 | Rhodecode.comments.createGeneralComment('general', "$placeholder", commentId) |
|
780 | 781 | } |
|
781 | 782 | |
|
782 | 783 | return false; |
|
783 | 784 | }; |
|
784 | 785 | |
|
785 | 786 | this.submitResolution = function(commentId){ |
|
786 | 787 | var form = $('#resolve_comment_{0}'.format(commentId)).closest('form'); |
|
787 | 788 | var commentForm = form.get(0).CommentForm; |
|
788 | 789 | |
|
789 | 790 | var cm = commentForm.getCmInstance(); |
|
790 | 791 | var renderer = templateContext.visual.default_renderer; |
|
791 | 792 | if (renderer == 'rst'){ |
|
792 | 793 | var commentUrl = '`#{0} <{1}#comment-{0}>`_'.format(commentId, commentForm.selfUrl); |
|
793 | 794 | } else if (renderer == 'markdown') { |
|
794 | 795 | var commentUrl = '[#{0}]({1}#comment-{0})'.format(commentId, commentForm.selfUrl); |
|
795 | 796 | } else { |
|
796 | 797 | var commentUrl = '{1}#comment-{0}'.format(commentId, commentForm.selfUrl); |
|
797 | 798 | } |
|
798 | 799 | |
|
799 | 800 | cm.setValue(_gettext('TODO from comment {0} was fixed.').format(commentUrl)); |
|
800 | 801 | form.submit(); |
|
801 | 802 | return false; |
|
802 | 803 | }; |
|
803 | 804 | |
|
804 | 805 | this.renderInlineComments = function(file_comments) { |
|
805 | 806 | show_add_button = typeof show_add_button !== 'undefined' ? show_add_button : true; |
|
806 | 807 | |
|
807 | 808 | for (var i = 0; i < file_comments.length; i++) { |
|
808 | 809 | var box = file_comments[i]; |
|
809 | 810 | |
|
810 | 811 | var target_id = $(box).attr('target_id'); |
|
811 | 812 | |
|
812 | 813 | // the actual comments, each carrying a line number
|
813 | 814 | var comments = box.children; |
|
814 | 815 | |
|
815 | 816 | for (var j = 0; j < comments.length; j++) { |
|
816 | 817 | var data = { |
|
817 | 818 | 'rendered_text': comments[j].outerHTML, |
|
818 | 819 | 'line_no': $(comments[j]).attr('line'), |
|
819 | 820 | 'target_id': target_id |
|
820 | 821 | }; |
|
821 | 822 | } |
|
822 | 823 | } |
|
823 | 824 | |
|
824 | 825 | // since the order of injection is random, we now re-iterate

825 | 826 | // in the correct order and fill in the links
|
826 | 827 | linkifyComments($('.inline-comment-injected')); |
|
827 | 828 | firefoxAnchorFix(); |
|
828 | 829 | }; |
|
829 | 830 | |
|
830 | 831 | }; |
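
Note: every endpoint in CommentForm and CommentsController above is built through pyroutes.url(name, params) rather than a hard-coded path. As a rough sketch of what that call does — the route map below is a hypothetical stand-in, not the one RhodeCode generates from its Pyramid registry — the resolution is a template lookup plus placeholder substitution:

    // hypothetical route map; the real one is generated server-side
    var routes = {
        'repo_commit_comment_create':
            '/%(repo_name)s/changeset/%(commit_id)s/comment/create'
    };

    function routeUrl(name, params) {
        var tmpl = routes[name];
        if (tmpl === undefined) {
            throw new Error('unknown route: ' + name);
        }
        // substitute each %(key)s placeholder with its url-encoded value
        return tmpl.replace(/%\(([^)]+)\)s/g, function(match, key) {
            return encodeURIComponent(params[key]);
        });
    }

    // routeUrl('repo_commit_comment_create',
    //          {'repo_name': 'myrepo', 'commit_id': 'abc123'})
    // -> '/myrepo/changeset/abc123/comment/create'
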
@@ -1,210 +1,210 b'' | |||
|
1 | 1 | <%namespace name="base" file="/base/base.mako"/> |
|
2 | 2 | |
|
3 | 3 | <% |
|
4 | 4 | elems = [ |
|
5 | 5 | (_('Owner'), lambda:base.gravatar_with_user(c.repo_info.user.email), '', ''), |
|
6 | 6 | (_('Created on'), h.format_date(c.repo_info.created_on), '', ''), |
|
7 | 7 | (_('Updated on'), h.format_date(c.repo_info.updated_on), '', ''), |
|
8 | (_('Cached Commit id'), lambda: h.link_to(c.repo_info.changeset_cache.get('short_id'), h.
|
8 | (_('Cached Commit id'), lambda: h.link_to(c.repo_info.changeset_cache.get('short_id'), h.route_path('repo_commit',repo_name=c.repo_name,commit_id=c.repo_info.changeset_cache.get('raw_id'))), '', ''), | |
|
9 | 9 | ] |
|
10 | 10 | %> |
|
11 | 11 | |
|
12 | 12 | <div class="panel panel-default"> |
|
13 | 13 | <div class="panel-heading" id="advanced-info" > |
|
14 | 14 | <h3 class="panel-title">${_('Repository: %s') % c.repo_info.repo_name} <a class="permalink" href="#advanced-info"> ¶</a></h3> |
|
15 | 15 | </div> |
|
16 | 16 | <div class="panel-body"> |
|
17 | 17 | ${base.dt_info_panel(elems)} |
|
18 | 18 | </div> |
|
19 | 19 | </div> |
|
20 | 20 | |
|
21 | 21 | |
|
22 | 22 | <div class="panel panel-default"> |
|
23 | 23 | <div class="panel-heading" id="advanced-fork"> |
|
24 | 24 | <h3 class="panel-title">${_('Fork Reference')} <a class="permalink" href="#advanced-fork"> ¶</a></h3> |
|
25 | 25 | </div> |
|
26 | 26 | <div class="panel-body"> |
|
27 | 27 | ${h.secure_form(h.route_path('edit_repo_advanced_fork', repo_name=c.repo_info.repo_name), method='POST', request=request)} |
|
28 | 28 | |
|
29 | 29 | % if c.repo_info.fork: |
|
30 | 30 | <div class="panel-body-title-text">${h.literal(_('This repository is a fork of %(repo_link)s') % {'repo_link': h.link_to_if(c.has_origin_repo_read_perm,c.repo_info.fork.repo_name, h.route_path('repo_summary', repo_name=c.repo_info.fork.repo_name))})} |
|
31 | 31 | | <button class="btn btn-link btn-danger" type="submit">Remove fork reference</button></div> |
|
32 | 32 | % endif |
|
33 | 33 | |
|
34 | 34 | <div class="field"> |
|
35 | 35 | ${h.hidden('id_fork_of')} |
|
36 | 36 | ${h.submit('set_as_fork_%s' % c.repo_info.repo_name,_('Set'),class_="btn btn-small",)} |
|
37 | 37 | </div> |
|
38 | 38 | <div class="field"> |
|
39 | 39 | <span class="help-block">${_('Manually set this repository as a fork of another from the list')}</span> |
|
40 | 40 | </div> |
|
41 | 41 | ${h.end_form()} |
|
42 | 42 | </div> |
|
43 | 43 | </div> |
|
44 | 44 | |
|
45 | 45 | |
|
46 | 46 | <div class="panel panel-default"> |
|
47 | 47 | <div class="panel-heading" id="advanced-journal"> |
|
48 | 48 | <h3 class="panel-title">${_('Public Journal Visibility')} <a class="permalink" href="#advanced-journal"> ¶</a></h3> |
|
49 | 49 | </div> |
|
50 | 50 | <div class="panel-body"> |
|
51 | 51 | ${h.secure_form(h.route_path('edit_repo_advanced_journal', repo_name=c.repo_info.repo_name), method='POST', request=request)} |
|
52 | 52 | <div class="field"> |
|
53 | 53 | %if c.in_public_journal: |
|
54 | 54 | <button class="btn btn-small" type="submit"> |
|
55 | 55 | ${_('Remove from Public Journal')} |
|
56 | 56 | </button> |
|
57 | 57 | %else: |
|
58 | 58 | <button class="btn btn-small" type="submit"> |
|
59 | 59 | ${_('Add to Public Journal')} |
|
60 | 60 | </button> |
|
61 | 61 | %endif |
|
62 | 62 | </div> |
|
63 | 63 | <div class="field" > |
|
64 | 64 | <span class="help-block">${_('All actions made on this repository will be visible to everyone following the public journal.')}</span> |
|
65 | 65 | </div> |
|
66 | 66 | ${h.end_form()} |
|
67 | 67 | </div> |
|
68 | 68 | </div> |
|
69 | 69 | |
|
70 | 70 | |
|
71 | 71 | <div class="panel panel-default"> |
|
72 | 72 | <div class="panel-heading" id="advanced-locking"> |
|
73 | 73 | <h3 class="panel-title">${_('Locking state')} <a class="permalink" href="#advanced-locking"> ¶</a></h3> |
|
74 | 74 | </div> |
|
75 | 75 | <div class="panel-body"> |
|
76 | 76 | ${h.secure_form(h.route_path('edit_repo_advanced_locking', repo_name=c.repo_info.repo_name), method='POST', request=request)} |
|
77 | 77 | |
|
78 | 78 | %if c.repo_info.locked[0]: |
|
79 | 79 | <div class="panel-body-title-text">${'Locked by %s on %s. Lock reason: %s' % (h.person_by_id(c.repo_info.locked[0]), |
|
80 | 80 | h.format_date(h.time_to_datetime(c.repo_info.locked[1])), c.repo_info.locked[2])}</div>
|
81 | 81 | %else: |
|
82 | 82 | <div class="panel-body-title-text">${_('This Repository is not currently locked.')}</div> |
|
83 | 83 | %endif |
|
84 | 84 | |
|
85 | 85 | <div class="field" > |
|
86 | 86 | %if c.repo_info.locked[0]: |
|
87 | 87 | ${h.hidden('set_unlock', '1')} |
|
88 | 88 | <button class="btn btn-small" type="submit" |
|
89 | 89 | onclick="return confirm('${_('Confirm to unlock repository.')}');"> |
|
90 | 90 | <i class="icon-unlock"></i> |
|
91 | 91 | ${_('Unlock repository')} |
|
92 | 92 | </button> |
|
93 | 93 | %else: |
|
94 | 94 | ${h.hidden('set_lock', '1')} |
|
95 | 95 | <button class="btn btn-small" type="submit" |
|
96 | 96 | onclick="return confirm('${_('Confirm to lock repository.')}');"> |
|
97 | 97 | <i class="icon-lock"></i> |
|
98 | 98 | ${_('Lock Repository')} |
|
99 | 99 | </button> |
|
100 | 100 | %endif |
|
101 | 101 | </div> |
|
102 | 102 | <div class="field" > |
|
103 | 103 | <span class="help-block"> |
|
104 | 104 | ${_('Force repository locking. This only works when anonymous access is disabled. Pulling from the repository locks the repository to that user until the same user pushes to that repository again.')} |
|
105 | 105 | </span> |
|
106 | 106 | </div> |
|
107 | 107 | ${h.end_form()} |
|
108 | 108 | </div> |
|
109 | 109 | </div> |
|
110 | 110 | |
|
111 | 111 | <div class="panel panel-danger"> |
|
112 | 112 | <div class="panel-heading" id="advanced-delete"> |
|
113 | 113 | <h3 class="panel-title">${_('Delete repository')} <a class="permalink" href="#advanced-delete"> ¶</a></h3> |
|
114 | 114 | </div> |
|
115 | 115 | <div class="panel-body"> |
|
116 | 116 | ${h.secure_form(h.route_path('edit_repo_advanced_delete', repo_name=c.repo_name), method='POST', request=request)} |
|
117 | 117 | <table class="display"> |
|
118 | 118 | <tr> |
|
119 | 119 | <td> |
|
120 | 120 | ${_ungettext('This repository has %s fork.', 'This repository has %s forks.', c.repo_info.forks.count()) % c.repo_info.forks.count()} |
|
121 | 121 | </td> |
|
122 | 122 | <td> |
|
123 | 123 | %if c.repo_info.forks.count(): |
|
124 | 124 | <input type="radio" name="forks" value="detach_forks" checked="checked"/> <label for="forks">${_('Detach forks')}</label> |
|
125 | 125 | %endif |
|
126 | 126 | </td> |
|
127 | 127 | <td> |
|
128 | 128 | %if c.repo_info.forks.count(): |
|
129 | 129 | <input type="radio" name="forks" value="delete_forks"/> <label for="forks">${_('Delete forks')}</label> |
|
130 | 130 | %endif |
|
131 | 131 | </td> |
|
132 | 132 | </tr> |
|
133 | 133 | </table> |
|
134 | 134 | <div style="margin: 0 0 20px 0" class="fake-space"></div> |
|
135 | 135 | |
|
136 | 136 | <div class="field"> |
|
137 | 137 | <button class="btn btn-small btn-danger" type="submit" |
|
138 | 138 | onclick="return confirm('${_('Confirm to delete this repository: %s') % c.repo_name}');"> |
|
139 | 139 | <i class="icon-remove-sign"></i> |
|
140 | 140 | ${_('Delete This Repository')} |
|
141 | 141 | </button> |
|
142 | 142 | </div> |
|
143 | 143 | <div class="field"> |
|
144 | 144 | <span class="help-block"> |
|
145 | 145 | ${_('This repository will be renamed in a special way in order to make it inaccessible to RhodeCode Enterprise and its VCS systems. If you need to fully delete it from the file system, please do it manually, or with rhodecode-cleanup-repos command available in rhodecode-tools.')} |
|
146 | 146 | </span> |
|
147 | 147 | </div> |
|
148 | 148 | |
|
149 | 149 | ${h.end_form()} |
|
150 | 150 | </div> |
|
151 | 151 | </div> |
|
152 | 152 | |
|
153 | 153 | |
|
154 | 154 | <script> |
|
155 | 155 | |
|
156 | 156 | var currentRepoId = ${c.repo_info.repo_id}; |
|
157 | 157 | |
|
158 | 158 | var repoTypeFilter = function(data) { |
|
159 | 159 | var results = []; |
|
160 | 160 | |
|
161 | 161 | if (!data.results[0]) { |
|
162 | 162 | return data |
|
163 | 163 | } |
|
164 | 164 | |
|
165 | 165 | $.each(data.results[0].children, function() { |
|
166 | 166 | // filter out the SAME repo; it cannot be used as a fork of itself
|
167 | 167 | if (this.obj.repo_id != currentRepoId) { |
|
168 | 168 | this.id = this.obj.repo_id; |
|
169 | 169 | results.push(this) |
|
170 | 170 | } |
|
171 | 171 | }); |
|
172 | 172 | data.results[0].children = results; |
|
173 | 173 | return data; |
|
174 | 174 | }; |
|
175 | 175 | |
|
176 | 176 | $("#id_fork_of").select2({ |
|
177 | 177 | cachedDataSource: {}, |
|
178 | 178 | minimumInputLength: 2, |
|
179 | 179 | placeholder: "${_('Change repository') if c.repo_info.fork else _('Pick repository')}", |
|
180 | 180 | dropdownAutoWidth: true, |
|
181 | 181 | containerCssClass: "drop-menu", |
|
182 | 182 | dropdownCssClass: "drop-menu-dropdown", |
|
183 | 183 | formatResult: formatResult, |
|
184 | 184 | query: $.debounce(250, function(query){ |
|
185 | 185 | self = this; |
|
186 | 186 | var cacheKey = query.term; |
|
187 | 187 | var cachedData = self.cachedDataSource[cacheKey]; |
|
188 | 188 | |
|
189 | 189 | if (cachedData) { |
|
190 | 190 | query.callback({results: cachedData.results}); |
|
191 | 191 | } else { |
|
192 | 192 | $.ajax({ |
|
193 | 193 | url: pyroutes.url('repo_list_data'), |
|
194 | 194 | data: {'query': query.term, repo_type: '${c.repo_info.repo_type}'}, |
|
195 | 195 | dataType: 'json', |
|
196 | 196 | type: 'GET', |
|
197 | 197 | success: function(data) { |
|
198 | 198 | data = repoTypeFilter(data); |
|
199 | 199 | self.cachedDataSource[cacheKey] = data; |
|
200 | 200 | query.callback({results: data.results}); |
|
201 | 201 | }, |
|
202 | 202 | error: function(data, textStatus, errorThrown) { |
|
203 | 203 | alert("Error while fetching entries.\nError code {0} ({1}).".format(data.status, data.statusText)); |
|
204 | 204 | } |
|
205 | 205 | }) |
|
206 | 206 | } |
|
207 | 207 | }) |
|
208 | 208 | }); |
|
209 | 209 | </script> |
|
210 | 210 |
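
Note: the fork selector above throttles its lookups with $.debounce (the jQuery throttle/debounce plugin this page already uses) and memoizes results per search term, so retyping the same query never re-hits the server. A minimal sketch of the same pattern, assuming onResults is whatever callback consumes the data:

    var termCache = {};

    var queryRepos = $.debounce(250, function(term, onResults) {
        if (termCache[term]) {
            // repeated query: serve from memory, no request made
            onResults(termCache[term]);
            return;
        }
        $.ajax({
            url: pyroutes.url('repo_list_data'),
            data: {'query': term},
            dataType: 'json',
            type: 'GET'
        }).done(function(data) {
            termCache[term] = data;  // memoize per search term
            onResults(data);
        });
    });
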
@@ -1,299 +1,299 b'' | |||
|
1 | 1 | ## -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | <%inherit file="/base/base.mako"/> |
|
4 | 4 | |
|
5 | 5 | <%def name="title()"> |
|
6 | 6 | ${_('%s Changelog') % c.repo_name} |
|
7 | 7 | %if c.changelog_for_path: |
|
8 | 8 | /${c.changelog_for_path} |
|
9 | 9 | %endif |
|
10 | 10 | %if c.rhodecode_name: |
|
11 | 11 | · ${h.branding(c.rhodecode_name)} |
|
12 | 12 | %endif |
|
13 | 13 | </%def> |
|
14 | 14 | |
|
15 | 15 | <%def name="breadcrumbs_links()"> |
|
16 | 16 | %if c.changelog_for_path: |
|
17 | 17 | /${c.changelog_for_path} |
|
18 | 18 | %endif |
|
19 | 19 | </%def> |
|
20 | 20 | |
|
21 | 21 | <%def name="menu_bar_nav()"> |
|
22 | 22 | ${self.menu_items(active='repositories')} |
|
23 | 23 | </%def> |
|
24 | 24 | |
|
25 | 25 | <%def name="menu_bar_subnav()"> |
|
26 | 26 | ${self.repo_menu(active='changelog')} |
|
27 | 27 | </%def> |
|
28 | 28 | |
|
29 | 29 | <%def name="main()"> |
|
30 | 30 | |
|
31 | 31 | <div class="box"> |
|
32 | 32 | <div class="title"> |
|
33 | 33 | ${self.repo_page_title(c.rhodecode_db_repo)} |
|
34 | 34 | <ul class="links"> |
|
35 | 35 | <li> |
|
36 | 36 | <a href="#" class="btn btn-small" id="rev_range_container" style="display:none;"></a> |
|
37 | 37 | %if c.rhodecode_db_repo.fork: |
|
38 | 38 | <span> |
|
39 | 39 | <a id="compare_fork_button" |
|
40 | 40 | title="${h.tooltip(_('Compare fork with %s' % c.rhodecode_db_repo.fork.repo_name))}" |
|
41 | 41 | class="btn btn-small" |
|
42 | 42 | href="${h.url('compare_url', |
|
43 | 43 | repo_name=c.rhodecode_db_repo.fork.repo_name, |
|
44 | 44 | source_ref_type=c.rhodecode_db_repo.landing_rev[0], |
|
45 | 45 | source_ref=c.rhodecode_db_repo.landing_rev[1], |
|
46 | 46 | target_repo=c.repo_name, |
|
47 | 47 | target_ref_type='branch' if request.GET.get('branch') else c.rhodecode_db_repo.landing_rev[0], |
|
48 | 48 | target_ref=request.GET.get('branch') or c.rhodecode_db_repo.landing_rev[1], |
|
49 | 49 | merge=1)}" |
|
50 | 50 | > |
|
51 | 51 | <i class="icon-loop"></i> |
|
52 | 52 | ${_('Compare fork with Parent (%s)' % c.rhodecode_db_repo.fork.repo_name)} |
|
53 | 53 | </a> |
|
54 | 54 | </span> |
|
55 | 55 | %endif |
|
56 | 56 | |
|
57 | 57 | ## pr open link |
|
58 | 58 | %if h.is_hg(c.rhodecode_repo) or h.is_git(c.rhodecode_repo): |
|
59 | 59 | <span> |
|
60 | 60 | <a id="open_new_pull_request" class="btn btn-small btn-success" href="${h.url('pullrequest_home',repo_name=c.repo_name)}"> |
|
61 | 61 | ${_('Open new pull request')} |
|
62 | 62 | </a> |
|
63 | 63 | </span> |
|
64 | 64 | %endif |
|
65 | 65 | |
|
66 | 66 | ## clear selection |
|
67 | 67 | <div title="${_('Clear selection')}" class="btn" id="rev_range_clear" style="display:none"> |
|
68 | 68 | ${_('Clear selection')} |
|
69 | 69 | </div> |
|
70 | 70 | |
|
71 | 71 | </li> |
|
72 | 72 | </ul> |
|
73 | 73 | </div> |
|
74 | 74 | |
|
75 | 75 | % if c.pagination: |
|
76 | 76 | <script type="text/javascript" src="${h.asset('js/jquery.commits-graph.js')}"></script> |
|
77 | 77 | |
|
78 | 78 | <div class="graph-header"> |
|
79 | 79 | <div id="filter_changelog"> |
|
80 | 80 | ${h.hidden('branch_filter')} |
|
81 | 81 | %if c.selected_name: |
|
82 | 82 | <div class="btn btn-default" id="clear_filter" > |
|
83 | 83 | ${_('Clear filter')} |
|
84 | 84 | </div> |
|
85 | 85 | %endif |
|
86 | 86 | </div> |
|
87 | 87 | ${self.breadcrumbs('breadcrumbs_light')} |
|
88 | 88 | <div id="commit-counter" data-total=${c.total_cs} class="pull-right"> |
|
89 | 89 | ${_ungettext('showing %d out of %d commit', 'showing %d out of %d commits', c.showing_commits) % (c.showing_commits, c.total_cs)} |
|
90 | 90 | </div> |
|
91 | 91 | </div> |
|
92 | 92 | |
|
93 | 93 | <div id="graph"> |
|
94 | 94 | <div class="graph-col-wrapper"> |
|
95 | 95 | <div id="graph_nodes"> |
|
96 | 96 | <div id="graph_canvas"></div> |
|
97 | 97 | </div> |
|
98 | 98 | <div id="graph_content" class="main-content graph_full_width"> |
|
99 | 99 | |
|
100 | 100 | <div class="table"> |
|
101 | 101 | <table id="changesets" class="rctable"> |
|
102 | 102 | <tr> |
|
103 | 103 | ## checkbox |
|
104 | 104 | <th></th> |
|
105 | 105 | <th colspan="2"></th> |
|
106 | 106 | |
|
107 | 107 | <th>${_('Commit')}</th> |
|
108 | 108 | ## commit message expand arrow |
|
109 | 109 | <th></th> |
|
110 | 110 | <th>${_('Commit Message')}</th> |
|
111 | 111 | |
|
112 | 112 | <th>${_('Age')}</th> |
|
113 | 113 | <th>${_('Author')}</th> |
|
114 | 114 | |
|
115 | 115 | <th>${_('Refs')}</th> |
|
116 | 116 | </tr> |
|
117 | 117 | |
|
118 | 118 | <tbody class="commits-range"> |
|
119 | 119 | <%include file='changelog_elements.mako'/> |
|
120 | 120 | </tbody> |
|
121 | 121 | </table> |
|
122 | 122 | </div> |
|
123 | 123 | </div> |
|
124 | 124 | <div class="pagination-wh pagination-left"> |
|
125 | 125 | ${c.pagination.pager('$link_previous ~2~ $link_next')} |
|
126 | 126 | </div> |
|
127 | 127 | </div> |
|
128 | 128 | |
|
129 | 129 | <script type="text/javascript"> |
|
130 | 130 | var cache = {}; |
|
131 | 131 | $(function(){ |
|
132 | 132 | |
|
133 | 133 | // Create links to commit ranges when range checkboxes are selected |
|
134 | 134 | var $commitCheckboxes = $('.commit-range'); |
|
135 | 135 | // cache elements |
|
136 | 136 | var $commitRangeContainer = $('#rev_range_container'); |
|
137 | 137 | var $commitRangeClear = $('#rev_range_clear'); |
|
138 | 138 | |
|
139 | 139 | var checkboxRangeSelector = function(e){ |
|
140 | 140 | var selectedCheckboxes = []; |
|
141 | 141 | for (pos in $commitCheckboxes){ |
|
142 | 142 | if($commitCheckboxes[pos].checked){ |
|
143 | 143 | selectedCheckboxes.push($commitCheckboxes[pos]); |
|
144 | 144 | } |
|
145 | 145 | } |
|
146 | 146 | var open_new_pull_request = $('#open_new_pull_request'); |
|
147 | 147 | if(open_new_pull_request){ |
|
148 | 148 | var selected_changes = selectedCheckboxes.length; |
|
149 | 149 | if (selected_changes > 1 || selected_changes == 1 && templateContext.repo_type != 'hg') { |
|
150 | 150 | open_new_pull_request.hide(); |
|
151 | 151 | } else { |
|
152 | 152 | if (selected_changes == 1) { |
|
153 | 153 | open_new_pull_request.html(_gettext('Open new pull request for selected commit')); |
|
154 | 154 | } else if (selected_changes == 0) { |
|
155 | 155 | open_new_pull_request.html(_gettext('Open new pull request')); |
|
156 | 156 | } |
|
157 | 157 | open_new_pull_request.show(); |
|
158 | 158 | } |
|
159 | 159 | } |
|
160 | 160 | |
|
161 | 161 | if (selectedCheckboxes.length>0){ |
|
162 | 162 | var revEnd = selectedCheckboxes[0].name; |
|
163 | 163 | var revStart = selectedCheckboxes[selectedCheckboxes.length-1].name; |
|
164 | var url = pyroutes.url('
|
164 | var url = pyroutes.url('repo_commit', | |
|
165 | 165 | {'repo_name': '${c.repo_name}', |
|
166 | '
|
166 | 'commit_id': revStart+'...'+revEnd}); | |
|
167 | 167 | |
|
168 | 168 | var link = (revStart == revEnd) |
|
169 | 169 | ? _gettext('Show selected commit __S') |
|
170 | 170 | : _gettext('Show selected commits __S ... __E'); |
|
171 | 171 | |
|
172 | 172 | link = link.replace('__S', revStart.substr(0,6)); |
|
173 | 173 | link = link.replace('__E', revEnd.substr(0,6)); |
|
174 | 174 | |
|
175 | 175 | $commitRangeContainer |
|
176 | 176 | .attr('href',url) |
|
177 | 177 | .html(link) |
|
178 | 178 | .show(); |
|
179 | 179 | |
|
180 | 180 | $commitRangeClear.show(); |
|
181 | 181 | var _url = pyroutes.url('pullrequest_home', |
|
182 | 182 | {'repo_name': '${c.repo_name}', |
|
183 | 183 | 'commit': revEnd}); |
|
184 | 184 | open_new_pull_request.attr('href', _url); |
|
185 | 185 | $('#compare_fork_button').hide(); |
|
186 | 186 | } else { |
|
187 | 187 | $commitRangeContainer.hide(); |
|
188 | 188 | $commitRangeClear.hide(); |
|
189 | 189 | |
|
190 | 190 | %if c.branch_name: |
|
191 | 191 | var _url = pyroutes.url('pullrequest_home', |
|
192 | 192 | {'repo_name': '${c.repo_name}', |
|
193 | 193 | 'branch':'${c.branch_name}'}); |
|
194 | 194 | open_new_pull_request.attr('href', _url); |
|
195 | 195 | %else: |
|
196 | 196 | var _url = pyroutes.url('pullrequest_home', |
|
197 | 197 | {'repo_name': '${c.repo_name}'}); |
|
198 | 198 | open_new_pull_request.attr('href', _url); |
|
199 | 199 | %endif |
|
200 | 200 | $('#compare_fork_button').show(); |
|
201 | 201 | } |
|
202 | 202 | }; |
|
203 | 203 | |
|
204 | 204 | $commitCheckboxes.on('click', checkboxRangeSelector); |
|
205 | 205 | |
|
206 | 206 | $commitRangeClear.on('click',function(e) { |
|
207 | 207 | $commitCheckboxes.attr('checked', false); |
|
208 | 208 | checkboxRangeSelector(); |
|
209 | 209 | e.preventDefault(); |
|
210 | 210 | }); |
|
211 | 211 | |
|
212 | 212 | // make sure the buttons are consistent when navigating back and forth
|
213 | 213 | checkboxRangeSelector(); |
|
214 | 214 | |
|
215 | 215 | var msgs = $('.message'); |
|
216 | 216 | // get first element height |
|
217 | 217 | var el = $('#graph_content .container')[0]; |
|
218 | 218 | var row_h = el.clientHeight; |
|
219 | 219 | for (var i=0; i < msgs.length; i++) { |
|
220 | 220 | var m = msgs[i]; |
|
221 | 221 | |
|
222 | 222 | var h = m.clientHeight; |
|
223 | 223 | var pad = $(m).css('padding'); |
|
224 | 224 | if (h > row_h) { |
|
225 | 225 | var offset = row_h - (h+12); |
|
226 | 226 | $(m.nextElementSibling).css('display','block'); |
|
227 | 227 | $(m.nextElementSibling).css('margin-top',offset+'px'); |
|
228 | 228 | } |
|
229 | 229 | } |
|
230 | 230 | |
|
231 | 231 | $("#clear_filter").on("click", function() { |
|
232 | 232 | var filter = {'repo_name': '${c.repo_name}'}; |
|
233 | 233 | window.location = pyroutes.url('repo_changelog', filter); |
|
234 | 234 | }); |
|
235 | 235 | |
|
236 | 236 | $("#branch_filter").select2({ |
|
237 | 237 | 'dropdownAutoWidth': true, |
|
238 | 238 | 'width': 'resolve', |
|
239 | 239 | 'placeholder': "${c.selected_name or _('Filter changelog')}", |
|
240 | 240 | containerCssClass: "drop-menu", |
|
241 | 241 | dropdownCssClass: "drop-menu-dropdown", |
|
242 | 242 | query: function(query){ |
|
243 | 243 | var key = 'cache'; |
|
244 | 244 | var cached = cache[key] ; |
|
245 | 245 | if(cached) { |
|
246 | 246 | var data = {results: []}; |
|
247 | 247 | //filter results |
|
248 | 248 | $.each(cached.results, function(){ |
|
249 | 249 | var section = this.text; |
|
250 | 250 | var children = []; |
|
251 | 251 | $.each(this.children, function(){ |
|
252 | 252 | if(query.term.length == 0 || this.text.toUpperCase().indexOf(query.term.toUpperCase()) >= 0 ){ |
|
253 | 253 | children.push({'id': this.id, 'text': this.text, 'type': this.type}) |
|
254 | 254 | } |
|
255 | 255 | }); |
|
256 | 256 | data.results.push({'text': section, 'children': children}); |
|
257 | 257 | query.callback({results: data.results}); |
|
258 | 258 | }); |
|
259 | 259 | }else{ |
|
260 | 260 | $.ajax({ |
|
261 | 261 | url: pyroutes.url('repo_refs_changelog_data', {'repo_name': '${c.repo_name}'}), |
|
262 | 262 | data: {}, |
|
263 | 263 | dataType: 'json', |
|
264 | 264 | type: 'GET', |
|
265 | 265 | success: function(data) { |
|
266 | 266 | cache[key] = data; |
|
267 | 267 | query.callback({results: data.results}); |
|
268 | 268 | } |
|
269 | 269 | }) |
|
270 | 270 | } |
|
271 | 271 | } |
|
272 | 272 | }); |
|
273 | 273 | $('#branch_filter').on('change', function(e){ |
|
274 | 274 | var data = $('#branch_filter').select2('data'); |
|
275 | 275 | var selected = data.text; |
|
276 | 276 | var filter = {'repo_name': '${c.repo_name}'}; |
|
277 | 277 | if(data.type == 'branch' || data.type == 'branch_closed'){ |
|
278 | 278 | filter["branch"] = selected; |
|
279 | 279 | } |
|
280 | 280 | else if (data.type == 'book'){ |
|
281 | 281 | filter["bookmark"] = selected; |
|
282 | 282 | } |
|
283 | 283 | window.location = pyroutes.url('repo_changelog', filter); |
|
284 | 284 | }); |
|
285 | 285 | |
|
286 | 286 | commitsController = new CommitsController(); |
|
287 | 287 | % if not c.changelog_for_path: |
|
288 | 288 | commitsController.reloadGraph(); |
|
289 | 289 | % endif |
|
290 | 290 | |
|
291 | 291 | }); |
|
292 | 292 | |
|
293 | 293 | </script> |
|
294 | 294 | </div> |
|
295 | 295 | % else: |
|
296 | 296 | ${_('There are no changes yet')} |
|
297 | 297 | % endif |
|
298 | 298 | </div> |
|
299 | 299 | </%def> |
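
Note: checkboxRangeSelector above relies on the changelog being rendered newest-first, so the first checked box is the end of the range and the last checked box is its start. A minimal sketch of that derivation, assuming checkedNames holds the checked commit ids in page order:

    function commitRange(checkedNames) {
        if (checkedNames.length === 0) {
            return null;
        }
        var revEnd = checkedNames[0];                          // newest selected
        var revStart = checkedNames[checkedNames.length - 1];  // oldest selected
        return revStart + '...' + revEnd;  // e.g. 'abc123...def456'
    }
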
@@ -1,144 +1,144 b'' | |||
|
1 | 1 | ## small box that displays changed/added/removed details fetched by AJAX |
|
2 | 2 | <%namespace name="base" file="/base/base.mako"/> |
|
3 | 3 | |
|
4 | 4 | |
|
5 | 5 | % if c.prev_page: |
|
6 | 6 | <tr> |
|
7 | 7 | <td colspan="9" class="load-more-commits"> |
|
8 | 8 | <a class="prev-commits" href="#loadPrevCommits" onclick="commitsController.loadPrev(this, ${c.prev_page}, '${c.branch_name}');return false"> |
|
9 | 9 | ${_('load previous')} |
|
10 | 10 | </a> |
|
11 | 11 | </td> |
|
12 | 12 | </tr> |
|
13 | 13 | % endif |
|
14 | 14 | |
|
15 | 15 | % for cnt,commit in enumerate(c.pagination): |
|
16 | 16 | <tr id="sha_${commit.raw_id}" class="changelogRow container ${'tablerow%s' % (cnt%2)}"> |
|
17 | 17 | |
|
18 | 18 | <td class="td-checkbox"> |
|
19 | 19 | ${h.checkbox(commit.raw_id,class_="commit-range")} |
|
20 | 20 | </td> |
|
21 | 21 | <td class="td-status"> |
|
22 | 22 | |
|
23 | 23 | %if c.statuses.get(commit.raw_id): |
|
24 | 24 | <div class="changeset-status-ico"> |
|
25 | 25 | %if c.statuses.get(commit.raw_id)[2]: |
|
26 | 26 | <a class="tooltip" title="${_('Commit status: %s\nClick to open associated pull request #%s') % (h.commit_status_lbl(c.statuses.get(commit.raw_id)[0]), c.statuses.get(commit.raw_id)[2])}" href="${h.route_path('pullrequest_show',repo_name=c.statuses.get(commit.raw_id)[3],pull_request_id=c.statuses.get(commit.raw_id)[2])}"> |
|
27 | <div class="${'flag_status
|
27 | <div class="${'flag_status {}'.format(c.statuses.get(commit.raw_id)[0])}"></div> | |
|
28 | 28 | </a> |
|
29 | 29 | %else: |
|
30 |
<a class="tooltip" title="${_('Commit status: |
|
|
31 |
<div class="${'flag_status |
|
|
30 | <a class="tooltip" title="${_('Commit status: {}').format(h.commit_status_lbl(c.statuses.get(commit.raw_id)[0]))}" href="${h.route_path('repo_commit',repo_name=c.repo_name,commit_id=commit.raw_id,_anchor='comment-%s' % c.comments[commit.raw_id][0].comment_id)}"> | |
|
31 | <div class="${'flag_status {}'.format(c.statuses.get(commit.raw_id)[0])}"></div> | |
|
32 | 32 | </a> |
|
33 | 33 | %endif |
|
34 | 34 | </div> |
|
35 | 35 | %else: |
|
36 | 36 | <div class="tooltip flag_status not_reviewed" title="${_('Commit status: Not Reviewed')}"></div> |
|
37 | 37 | %endif |
|
38 | 38 | </td> |
|
39 | 39 | <td class="td-comments comments-col"> |
|
40 | 40 | %if c.comments.get(commit.raw_id): |
|
41 | <a title="${_('Commit has comments')}" href="${h.
|
41 | <a title="${_('Commit has comments')}" href="${h.route_path('repo_commit',repo_name=c.repo_name,commit_id=commit.raw_id,_anchor='comment-%s' % c.comments[commit.raw_id][0].comment_id)}"> | |
|
42 | 42 | <i class="icon-comment"></i> ${len(c.comments[commit.raw_id])} |
|
43 | 43 | </a> |
|
44 | 44 | %endif |
|
45 | 45 | </td> |
|
46 | 46 | <td class="td-hash"> |
|
47 | 47 | <code> |
|
48 | 48 | |
|
49 | <a href="${h.
|
49 | <a href="${h.route_path('repo_commit',repo_name=c.repo_name,commit_id=commit.raw_id)}"> | |
|
50 | 50 | <span class="${'commit_hash obsolete' if getattr(commit, 'obsolete', None) else 'commit_hash'}">${h.show_id(commit)}</span> |
|
51 | 51 | </a> |
|
52 | 52 | <i class="tooltip icon-clipboard clipboard-action" data-clipboard-text="${commit.raw_id}" title="${_('Copy the full commit id')}"></i> |
|
53 | 53 | % if hasattr(commit, 'phase'): |
|
54 | 54 | % if commit.phase != 'public': |
|
55 | 55 | <span class="tag phase-${commit.phase} tooltip" title="${_('Commit phase')}">${commit.phase}</span> |
|
56 | 56 | % endif |
|
57 | 57 | % endif |
|
58 | 58 | |
|
59 | 59 | ## obsolete commits |
|
60 | 60 | % if hasattr(commit, 'obsolete'): |
|
61 | 61 | % if commit.obsolete: |
|
62 | 62 | <span class="tag obsolete-${commit.obsolete} tooltip" title="${_('Evolve State')}">${_('obsolete')}</span> |
|
63 | 63 | % endif |
|
64 | 64 | % endif |
|
65 | 65 | |
|
66 | 66 | ## hidden commits |
|
67 | 67 | % if hasattr(commit, 'hidden'): |
|
68 | 68 | % if commit.hidden: |
|
69 | 69 | <span class="tag obsolete-${commit.hidden} tooltip" title="${_('Evolve State')}">${_('hidden')}</span> |
|
70 | 70 | % endif |
|
71 | 71 | % endif |
|
72 | 72 | |
|
73 | 73 | </code> |
|
74 | 74 | </td> |
|
75 | 75 | <td class="td-message expand_commit" data-commit-id="${commit.raw_id}" title="${_('Expand commit message')}" onclick="commitsController.expandCommit(this); return false"> |
|
76 | 76 | <div class="show_more_col"> |
|
77 | 77 | <i class="show_more"></i> |
|
78 | 78 | </div> |
|
79 | 79 | </td> |
|
80 | 80 | <td class="td-description mid"> |
|
81 | 81 | <div class="log-container truncate-wrap"> |
|
82 | 82 | <div class="message truncate" id="c-${commit.raw_id}">${h.urlify_commit_message(commit.message, c.repo_name)}</div> |
|
83 | 83 | </div> |
|
84 | 84 | </td> |
|
85 | 85 | |
|
86 | 86 | <td class="td-time"> |
|
87 | 87 | ${h.age_component(commit.date)} |
|
88 | 88 | </td> |
|
89 | 89 | <td class="td-user"> |
|
90 | 90 | ${base.gravatar_with_user(commit.author)} |
|
91 | 91 | </td> |
|
92 | 92 | |
|
93 | 93 | <td class="td-tags tags-col"> |
|
94 | 94 | <div id="t-${commit.raw_id}"> |
|
95 | 95 | |
|
96 | 96 | ## merge |
|
97 | 97 | %if commit.merge: |
|
98 | 98 | <span class="tag mergetag"> |
|
99 | 99 | <i class="icon-merge"></i>${_('merge')} |
|
100 | 100 | </span> |
|
101 | 101 | %endif |
|
102 | 102 | |
|
103 | 103 | ## branch |
|
104 | 104 | %if commit.branch: |
|
105 | 105 | <span class="tag branchtag" title="${h.tooltip(_('Branch %s') % commit.branch)}"> |
|
106 | 106 | <a href="${h.route_path('repo_changelog',repo_name=c.repo_name,_query=dict(branch=commit.branch))}"><i class="icon-code-fork"></i>${h.shorter(commit.branch)}</a> |
|
107 | 107 | </span> |
|
108 | 108 | %endif |
|
109 | 109 | |
|
110 | 110 | ## bookmarks |
|
111 | 111 | %if h.is_hg(c.rhodecode_repo): |
|
112 | 112 | %for book in commit.bookmarks: |
|
113 | 113 | <span class="tag booktag" title="${h.tooltip(_('Bookmark %s') % book)}"> |
|
114 | 114 | <a href="${h.route_path('repo_files:default_path',repo_name=c.repo_name,commit_id=commit.raw_id, _query=dict(at=book))}"><i class="icon-bookmark"></i>${h.shorter(book)}</a> |
|
115 | 115 | </span> |
|
116 | 116 | %endfor |
|
117 | 117 | %endif |
|
118 | 118 | |
|
119 | 119 | ## tags |
|
120 | 120 | %for tag in commit.tags: |
|
121 | 121 | <span class="tag tagtag" title="${h.tooltip(_('Tag %s') % tag)}"> |
|
122 | 122 | <a href="${h.route_path('repo_files:default_path',repo_name=c.repo_name,commit_id=commit.raw_id, _query=dict(at=tag))}"><i class="icon-tag"></i>${h.shorter(tag)}</a> |
|
123 | 123 | </span> |
|
124 | 124 | %endfor |
|
125 | 125 | |
|
126 | 126 | </div> |
|
127 | 127 | </td> |
|
128 | 128 | </tr> |
|
129 | 129 | % endfor |
|
130 | 130 | |
|
131 | 131 | % if c.next_page: |
|
132 | 132 | <tr> |
|
133 | 133 | <td colspan="9" class="load-more-commits"> |
|
134 | 134 | <a class="next-commits" href="#loadNextCommits" onclick="commitsController.loadNext(this, ${c.next_page}, '${c.branch_name}');return false"> |
|
135 | 135 | ${_('load next')} |
|
136 | 136 | </a> |
|
137 | 137 | </td> |
|
138 | 138 | </tr> |
|
139 | 139 | % endif |
|
140 | 140 | <tr class="chunk-graph-data" style="display:none" |
|
141 | 141 | data-graph='${c.graph_data|n}' |
|
142 | 142 | data-node='${c.prev_page}:${c.next_page}' |
|
143 | 143 | data-commits='${c.graph_commits|n}'> |
|
144 | 144 | </tr>
\ No newline at end of file
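
The substantive edits in the template above are mechanical: the commit-status CSS class switches from old-style % interpolation to str.format() (new lines 27 and 31), and commit links are built with h.route_path('repo_commit', ...). The removed lines are truncated in this view, so the "before" shape in this minimal sketch is an assumption:

# Sketch of the formatting change on new lines 27 and 31; the "before"
# form is an assumption, since the removed lines are truncated above.
status = 'approved'

before = 'flag_status %s' % status         # assumed old-style interpolation
after = 'flag_status {}'.format(status)    # the form used on the new lines

assert before == after == 'flag_status approved'
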
@@ -1,39 +1,39 b'' | |||
|
1 | 1 | <%namespace name="base" file="/base/base.mako"/> |
|
2 | 2 | <div class="table"> |
|
3 | 3 | |
|
4 | 4 | <table class="table rctable file_history"> |
|
5 | 5 | %for cnt,cs in enumerate(c.pagination): |
|
6 | 6 | <tr id="chg_${cnt+1}" class="${'tablerow%s' % (cnt%2)}"> |
|
7 | 7 | <td class="td-user"> |
|
8 | 8 | ${base.gravatar_with_user(cs.author, 16)} |
|
9 | 9 | </td> |
|
10 | 10 | <td class="td-time"> |
|
11 | 11 | <div class="date"> |
|
12 | 12 | ${h.age_component(cs.date)} |
|
13 | 13 | </div> |
|
14 | 14 | </td> |
|
15 | 15 | <td class="td-message"> |
|
16 | 16 | <div class="log-container"> |
|
17 | 17 | <div class="message_history" title="${h.tooltip(cs.message)}"> |
|
18 |    | <a href="${h.

   | 18 | <a href="${h.route_path('repo_commit',repo_name=c.repo_name,commit_id=cs.raw_id)}">
|
19 | 19 | ${h.shorter(cs.message, 75)} |
|
20 | 20 | </a> |
|
21 | 21 | </div> |
|
22 | 22 | </div> |
|
23 | 23 | </td> |
|
24 | 24 | <td class="td-hash"> |
|
25 | 25 | <code> |
|
26 |    | <a href="${h.

   | 26 | <a href="${h.route_path('repo_commit',repo_name=c.repo_name,commit_id=cs.raw_id)}">
|
27 | 27 | <span>${h.show_id(cs)}</span> |
|
28 | 28 | </a> |
|
29 | 29 | </code> |
|
30 | 30 | </td> |
|
31 | 31 | <td class="td-actions"> |
|
32 | 32 | <a href="${h.route_path('repo_files',repo_name=c.repo_name,commit_id=cs.raw_id,f_path=c.changelog_for_path)}"> |
|
33 | 33 | ${_('Show File')} |
|
34 | 34 | </a> |
|
35 | 35 | </td> |
|
36 | 36 | </tr> |
|
37 | 37 | %endfor |
|
38 | 38 | </table> |
|
39 | 39 | </div> |
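
Both templates now generate commit links through h.route_path('repo_commit', ...) rather than the older helper call (truncated above at "${h."). A hand-rolled sketch of the behaviour those call sites assume follows; the route pattern and helper internals are assumptions, only the call shapes come from the diff:

try:
    from urllib import urlencode            # Python 2, matching this codebase
except ImportError:
    from urllib.parse import urlencode      # Python 3 fallback

_ROUTES = {
    # Assumed pattern; the real mapping lives in RhodeCode's route config.
    'repo_commit': '/{repo_name}/changeset/{commit_id}',
}

def route_path(route_name, _query=None, _anchor=None, **parts):
    # Fill the path pattern, then append the optional query string and
    # anchor, mirroring the _query=... / _anchor=... keywords used above.
    path = _ROUTES[route_name].format(**parts)
    if _query:
        path += '?' + urlencode(_query)
    if _anchor:
        path += '#' + _anchor
    return path

# Mirrors new lines 18 and 26 above; the _anchor variant matches the
# comment links in the first template ('comment-1' is a made-up id).
print(route_path('repo_commit', repo_name='my-repo', commit_id='deadbeef'))
# -> /my-repo/changeset/deadbeef
print(route_path('repo_commit', repo_name='my-repo', commit_id='deadbeef',
                 _anchor='comment-1'))
# -> /my-repo/changeset/deadbeef#comment-1
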
|
1 | NO CONTENT: modified file |
The requested commit or file is too big and content was truncated.
|
1 | NO CONTENT: file was removed |