repo-commits: ported changeset code into pyramid views....
marcink - r1951:965019b0 default

The requested changes are too big and content was truncated.

This diff has been collapsed as it changes many lines (557 lines changed).
@@ -0,0 +1,557 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21
22 import logging
23 import collections
24
25 from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest, HTTPFound
26 from pyramid.view import view_config
27 from pyramid.renderers import render
28 from pyramid.response import Response
29
30 from rhodecode.apps._base import RepoAppView
31
32 from rhodecode.lib import diffs, codeblocks
33 from rhodecode.lib.auth import (
34 LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous, CSRFRequired)
35
36 from rhodecode.lib.compat import OrderedDict
37 from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError
38 import rhodecode.lib.helpers as h
39 from rhodecode.lib.utils2 import safe_unicode, safe_int
40 from rhodecode.lib.vcs.backends.base import EmptyCommit
41 from rhodecode.lib.vcs.exceptions import (
42 RepositoryError, CommitDoesNotExistError, NodeDoesNotExistError)
43 from rhodecode.model.db import ChangesetComment, ChangesetStatus
44 from rhodecode.model.changeset_status import ChangesetStatusModel
45 from rhodecode.model.comment import CommentsModel
46 from rhodecode.model.meta import Session
47
48
49 log = logging.getLogger(__name__)
50
51
52 def _update_with_GET(params, request):
53 for k in ['diff1', 'diff2', 'diff']:
54 params[k] += request.GET.getall(k)
55
56
57 def get_ignore_ws(fid, request):
58 ig_ws_global = request.GET.get('ignorews')
59 ig_ws = filter(lambda k: k.startswith('WS'), request.GET.getall(fid))
60 if ig_ws:
61 try:
62 return int(ig_ws[0].split(':')[-1])
63 except Exception:
64 pass
65 return ig_ws_global
66
67
68 def _ignorews_url(request, fileid=None):
69 _ = request.translate
70 fileid = str(fileid) if fileid else None
71 params = collections.defaultdict(list)
72 _update_with_GET(params, request)
73 label = _('Show whitespace')
74 tooltiplbl = _('Show whitespace for all diffs')
75 ig_ws = get_ignore_ws(fileid, request)
76 ln_ctx = get_line_ctx(fileid, request)
77
78 if ig_ws is None:
79 params['ignorews'] += [1]
80 label = _('Ignore whitespace')
81 tooltiplbl = _('Ignore whitespace for all diffs')
82 ctx_key = 'context'
83 ctx_val = ln_ctx
84
85 # if we have passed in ln_ctx pass it along to our params
86 if ln_ctx:
87 params[ctx_key] += [ctx_val]
88
89 if fileid:
90 params['anchor'] = 'a_' + fileid
91 return h.link_to(label, request.current_route_path(_query=params),
92 title=tooltiplbl, class_='tooltip')
93
94
95 def get_line_ctx(fid, request):
96 ln_ctx_global = request.GET.get('context')
97 if fid:
98 ln_ctx = filter(lambda k: k.startswith('C'), request.GET.getall(fid))
99 else:
100 _ln_ctx = filter(lambda k: k.startswith('C'), request.GET)
101 ln_ctx = request.GET.get(_ln_ctx[0]) if _ln_ctx else ln_ctx_global
102 if ln_ctx:
103 ln_ctx = [ln_ctx]
104
105 if ln_ctx:
106 retval = ln_ctx[0].split(':')[-1]
107 else:
108 retval = ln_ctx_global
109
110 try:
111 return int(retval)
112 except Exception:
113 return 3
114
115
116 def _context_url(request, fileid=None):
117 """
118 Generates a url for context lines.
119
120 :param fileid:
121 """
122
123 _ = request.translate
124 fileid = str(fileid) if fileid else None
125 ig_ws = get_ignore_ws(fileid, request)
126 ln_ctx = (get_line_ctx(fileid, request) or 3) * 2
127
128 params = collections.defaultdict(list)
129 _update_with_GET(params, request)
130
131 if ln_ctx > 0:
132 params['context'] += [ln_ctx]
133
134 if ig_ws:
135 ig_ws_key = 'ignorews'
136 ig_ws_val = 1
137 params[ig_ws_key] += [ig_ws_val]
138
139 lbl = _('Increase context')
140 tooltiplbl = _('Increase context for all diffs')
141
142 if fileid:
143 params['anchor'] = 'a_' + fileid
144 return h.link_to(lbl, request.current_route_path(_query=params),
145 title=tooltiplbl, class_='tooltip')
146
147
148 class RepoCommitsView(RepoAppView):
149 def load_default_context(self):
150 c = self._get_local_tmpl_context(include_app_defaults=True)
151
152 # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead
153 c.repo_info = self.db_repo
154 c.rhodecode_repo = self.rhodecode_vcs_repo
155
156 self._register_global_c(c)
157 return c
158
159 def _commit(self, commit_id_range, method):
160 _ = self.request.translate
161 c = self.load_default_context()
162 c.ignorews_url = _ignorews_url
163 c.context_url = _context_url
164 c.fulldiff = self.request.GET.get('fulldiff')
165
166 # fetch global flags of ignore ws or context lines
167 context_lcl = get_line_ctx('', self.request)
168 ign_whitespace_lcl = get_ignore_ws('', self.request)
169
170 # diff_limit will cut off the whole diff if the limit is applied
171 # otherwise it will just hide the big files from the front-end
172 diff_limit = c.visual.cut_off_limit_diff
173 file_limit = c.visual.cut_off_limit_file
174
175 # get ranges of commit ids if preset
176 commit_range = commit_id_range.split('...')[:2]
177
178 try:
179 pre_load = ['affected_files', 'author', 'branch', 'date',
180 'message', 'parents']
181
182 if len(commit_range) == 2:
183 commits = self.rhodecode_vcs_repo.get_commits(
184 start_id=commit_range[0], end_id=commit_range[1],
185 pre_load=pre_load)
186 commits = list(commits)
187 else:
188 commits = [self.rhodecode_vcs_repo.get_commit(
189 commit_id=commit_id_range, pre_load=pre_load)]
190
191 c.commit_ranges = commits
192 if not c.commit_ranges:
193 raise RepositoryError(
194 'The commit range returned an empty result')
195 except CommitDoesNotExistError:
196 msg = _('No such commit exists for this repository')
197 h.flash(msg, category='error')
198 raise HTTPNotFound()
199 except Exception:
200 log.exception("General failure")
201 raise HTTPNotFound()
202
203 c.changes = OrderedDict()
204 c.lines_added = 0
205 c.lines_deleted = 0
206
207 # auto collapse if we have more than limit
208 collapse_limit = diffs.DiffProcessor._collapse_commits_over
209 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
210
211 c.commit_statuses = ChangesetStatus.STATUSES
212 c.inline_comments = []
213 c.files = []
214
215 c.statuses = []
216 c.comments = []
217 c.unresolved_comments = []
218 if len(c.commit_ranges) == 1:
219 commit = c.commit_ranges[0]
220 c.comments = CommentsModel().get_comments(
221 self.db_repo.repo_id,
222 revision=commit.raw_id)
223 c.statuses.append(ChangesetStatusModel().get_status(
224 self.db_repo.repo_id, commit.raw_id))
225 # comments from PR
226 statuses = ChangesetStatusModel().get_statuses(
227 self.db_repo.repo_id, commit.raw_id,
228 with_revisions=True)
229 prs = set(st.pull_request for st in statuses
230 if st.pull_request is not None)
231 # from associated statuses, check the pull requests, and
232 # show comments from them
233 for pr in prs:
234 c.comments.extend(pr.comments)
235
236 c.unresolved_comments = CommentsModel()\
237 .get_commit_unresolved_todos(commit.raw_id)
238
239 diff = None
240 # Iterate over ranges (default commit view is always one commit)
241 for commit in c.commit_ranges:
242 c.changes[commit.raw_id] = []
243
244 commit2 = commit
245 commit1 = commit.parents[0] if commit.parents else EmptyCommit()
246
247 _diff = self.rhodecode_vcs_repo.get_diff(
248 commit1, commit2,
249 ignore_whitespace=ign_whitespace_lcl, context=context_lcl)
250 diff_processor = diffs.DiffProcessor(
251 _diff, format='newdiff', diff_limit=diff_limit,
252 file_limit=file_limit, show_full_diff=c.fulldiff)
253
254 commit_changes = OrderedDict()
255 if method == 'show':
256 _parsed = diff_processor.prepare()
257 c.limited_diff = isinstance(_parsed, diffs.LimitedDiffContainer)
258
259 _parsed = diff_processor.prepare()
260
261 def _node_getter(commit):
262 def get_node(fname):
263 try:
264 return commit.get_node(fname)
265 except NodeDoesNotExistError:
266 return None
267 return get_node
268
269 inline_comments = CommentsModel().get_inline_comments(
270 self.db_repo.repo_id, revision=commit.raw_id)
271 c.inline_cnt = CommentsModel().get_inline_comments_count(
272 inline_comments)
273
274 diffset = codeblocks.DiffSet(
275 repo_name=self.db_repo_name,
276 source_node_getter=_node_getter(commit1),
277 target_node_getter=_node_getter(commit2),
278 comments=inline_comments)
279 diffset = diffset.render_patchset(
280 _parsed, commit1.raw_id, commit2.raw_id)
281
282 c.changes[commit.raw_id] = diffset
283 else:
284 # downloads/raw we only need RAW diff nothing else
285 diff = diff_processor.as_raw()
286 c.changes[commit.raw_id] = [None, None, None, None, diff, None, None]
287
288 # sort comments by how they were generated
289 c.comments = sorted(c.comments, key=lambda x: x.comment_id)
290
291 if len(c.commit_ranges) == 1:
292 c.commit = c.commit_ranges[0]
293 c.parent_tmpl = ''.join(
294 '# Parent %s\n' % x.raw_id for x in c.commit.parents)
295
296 if method == 'download':
297 response = Response(diff)
298 response.content_type = 'text/plain'
299 response.content_disposition = (
300 'attachment; filename=%s.diff' % commit_id_range[:12])
301 return response
302 elif method == 'patch':
303 c.diff = safe_unicode(diff)
304 patch = render(
305 'rhodecode:templates/changeset/patch_changeset.mako',
306 self._get_template_context(c), self.request)
307 response = Response(patch)
308 response.content_type = 'text/plain'
309 return response
310 elif method == 'raw':
311 response = Response(diff)
312 response.content_type = 'text/plain'
313 return response
314 elif method == 'show':
315 if len(c.commit_ranges) == 1:
316 html = render(
317 'rhodecode:templates/changeset/changeset.mako',
318 self._get_template_context(c), self.request)
319 return Response(html)
320 else:
321 c.ancestor = None
322 c.target_repo = self.db_repo
323 html = render(
324 'rhodecode:templates/changeset/changeset_range.mako',
325 self._get_template_context(c), self.request)
326 return Response(html)
327
328 raise HTTPBadRequest()
329
330 @LoginRequired()
331 @HasRepoPermissionAnyDecorator(
332 'repository.read', 'repository.write', 'repository.admin')
333 @view_config(
334 route_name='repo_commit', request_method='GET',
335 renderer=None)
336 def repo_commit_show(self):
337 commit_id = self.request.matchdict['commit_id']
338 return self._commit(commit_id, method='show')
339
340 @LoginRequired()
341 @HasRepoPermissionAnyDecorator(
342 'repository.read', 'repository.write', 'repository.admin')
343 @view_config(
344 route_name='repo_commit_raw', request_method='GET',
345 renderer=None)
346 @view_config(
347 route_name='repo_commit_raw_deprecated', request_method='GET',
348 renderer=None)
349 def repo_commit_raw(self):
350 commit_id = self.request.matchdict['commit_id']
351 return self._commit(commit_id, method='raw')
352
353 @LoginRequired()
354 @HasRepoPermissionAnyDecorator(
355 'repository.read', 'repository.write', 'repository.admin')
356 @view_config(
357 route_name='repo_commit_patch', request_method='GET',
358 renderer=None)
359 def repo_commit_patch(self):
360 commit_id = self.request.matchdict['commit_id']
361 return self._commit(commit_id, method='patch')
362
363 @LoginRequired()
364 @HasRepoPermissionAnyDecorator(
365 'repository.read', 'repository.write', 'repository.admin')
366 @view_config(
367 route_name='repo_commit_download', request_method='GET',
368 renderer=None)
369 def repo_commit_download(self):
370 commit_id = self.request.matchdict['commit_id']
371 return self._commit(commit_id, method='download')
372
373 @LoginRequired()
374 @NotAnonymous()
375 @HasRepoPermissionAnyDecorator(
376 'repository.read', 'repository.write', 'repository.admin')
377 @CSRFRequired()
378 @view_config(
379 route_name='repo_commit_comment_create', request_method='POST',
380 renderer='json_ext')
381 def repo_commit_comment_create(self):
382 _ = self.request.translate
383 commit_id = self.request.matchdict['commit_id']
384
385 c = self.load_default_context()
386 status = self.request.POST.get('changeset_status', None)
387 text = self.request.POST.get('text')
388 comment_type = self.request.POST.get('comment_type')
389 resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
390
391 if status:
392 text = text or (_('Status change %(transition_icon)s %(status)s')
393 % {'transition_icon': '>',
394 'status': ChangesetStatus.get_status_lbl(status)})
395
396 multi_commit_ids = []
397 for _commit_id in self.request.POST.get('commit_ids', '').split(','):
398 if _commit_id not in ['', None, EmptyCommit.raw_id]:
399 if _commit_id not in multi_commit_ids:
400 multi_commit_ids.append(_commit_id)
401
402 commit_ids = multi_commit_ids or [commit_id]
403
404 comment = None
405 for current_id in filter(None, commit_ids):
406 comment = CommentsModel().create(
407 text=text,
408 repo=self.db_repo.repo_id,
409 user=self._rhodecode_db_user.user_id,
410 commit_id=current_id,
411 f_path=self.request.POST.get('f_path'),
412 line_no=self.request.POST.get('line'),
413 status_change=(ChangesetStatus.get_status_lbl(status)
414 if status else None),
415 status_change_type=status,
416 comment_type=comment_type,
417 resolves_comment_id=resolves_comment_id
418 )
419
420 # get status if set !
421 if status:
422 # if latest status was from pull request and it's closed
423 # disallow changing status !
424 # dont_allow_on_closed_pull_request = True !
425
426 try:
427 ChangesetStatusModel().set_status(
428 self.db_repo.repo_id,
429 status,
430 self._rhodecode_db_user.user_id,
431 comment,
432 revision=current_id,
433 dont_allow_on_closed_pull_request=True
434 )
435 except StatusChangeOnClosedPullRequestError:
436 msg = _('Changing the status of a commit associated with '
437 'a closed pull request is not allowed')
438 log.exception(msg)
439 h.flash(msg, category='warning')
440 raise HTTPFound(h.route_path(
441 'repo_commit', repo_name=self.db_repo_name,
442 commit_id=current_id))
443
444 # finalize, commit and redirect
445 Session().commit()
446
447 data = {
448 'target_id': h.safeid(h.safe_unicode(
449 self.request.POST.get('f_path'))),
450 }
451 if comment:
452 c.co = comment
453 rendered_comment = render(
454 'rhodecode:templates/changeset/changeset_comment_block.mako',
455 self._get_template_context(c), self.request)
456
457 data.update(comment.get_dict())
458 data.update({'rendered_text': rendered_comment})
459
460 return data
461
462 @LoginRequired()
463 @NotAnonymous()
464 @HasRepoPermissionAnyDecorator(
465 'repository.read', 'repository.write', 'repository.admin')
466 @CSRFRequired()
467 @view_config(
468 route_name='repo_commit_comment_preview', request_method='POST',
469 renderer='string', xhr=True)
470 def repo_commit_comment_preview(self):
471 # Technically a CSRF token is not needed as no state changes with this
472 # call. However, as this is a POST is better to have it, so automated
473 # tools don't flag it as potential CSRF.
474 # Post is required because the payload could be bigger than the maximum
475 # allowed by GET.
476
477 text = self.request.POST.get('text')
478 renderer = self.request.POST.get('renderer') or 'rst'
479 if text:
480 return h.render(text, renderer=renderer, mentions=True)
481 return ''
482
483 @LoginRequired()
484 @NotAnonymous()
485 @HasRepoPermissionAnyDecorator(
486 'repository.read', 'repository.write', 'repository.admin')
487 @CSRFRequired()
488 @view_config(
489 route_name='repo_commit_comment_delete', request_method='POST',
490 renderer='json_ext')
491 def repo_commit_comment_delete(self):
492 commit_id = self.request.matchdict['commit_id']
493 comment_id = self.request.matchdict['comment_id']
494
495 comment = ChangesetComment.get_or_404(safe_int(comment_id))
496 if not comment:
497 log.debug('Comment with id:%s not found, skipping', comment_id)
498 # comment already deleted in another call probably
499 return True
500
501 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
502 super_admin = h.HasPermissionAny('hg.admin')()
503 comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id)
504 is_repo_comment = comment.repo.repo_name == self.db_repo_name
505 comment_repo_admin = is_repo_admin and is_repo_comment
506
507 if super_admin or comment_owner or comment_repo_admin:
508 CommentsModel().delete(comment=comment, user=self._rhodecode_db_user)
509 Session().commit()
510 return True
511 else:
512 log.warning('No permissions for user %s to delete comment_id: %s',
513 self._rhodecode_db_user, comment_id)
514 raise HTTPNotFound()
515
516 @LoginRequired()
517 @HasRepoPermissionAnyDecorator(
518 'repository.read', 'repository.write', 'repository.admin')
519 @view_config(
520 route_name='repo_commit_data', request_method='GET',
521 renderer='json_ext', xhr=True)
522 def repo_commit_data(self):
523 commit_id = self.request.matchdict['commit_id']
524 self.load_default_context()
525
526 try:
527 return self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
528 except CommitDoesNotExistError as e:
529 return EmptyCommit(message=str(e))
530
531 @LoginRequired()
532 @HasRepoPermissionAnyDecorator(
533 'repository.read', 'repository.write', 'repository.admin')
534 @view_config(
535 route_name='repo_commit_children', request_method='GET',
536 renderer='json_ext', xhr=True)
537 def repo_commit_children(self):
538 commit_id = self.request.matchdict['commit_id']
539 self.load_default_context()
540
541 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
542 result = {"results": commit.children}
543 return result
544
545 @LoginRequired()
546 @HasRepoPermissionAnyDecorator(
547 'repository.read', 'repository.write', 'repository.admin')
548 @view_config(
549 route_name='repo_commit_parents', request_method='GET',
550 renderer='json_ext')
551 def repo_commit_parents(self):
552 commit_id = self.request.matchdict['commit_id']
553 self.load_default_context()
554
555 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
556 result = {"results": commit.parents}
557 return result
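
Note on how the new view class above is wired up: the @view_config decorators only take effect once a route with the matching route_name has been registered and the package containing the class has been scanned. That routes module is not part of this diff, so the following is a minimal sketch under that assumption; the URL patterns shown are illustrative placeholders, not the actual RhodeCode route layout.

    # Hypothetical sketch of the Pyramid route registration backing the
    # @view_config decorators in RepoCommitsView. The real routes module is
    # not shown in this (truncated) diff, so patterns here are illustrative.
    from pyramid.config import Configurator

    def includeme(config):
        # one named route per route_name referenced by @view_config above
        config.add_route('repo_commit',
                         '/{repo_name:.*?[^/]}/changeset/{commit_id}')
        config.add_route('repo_commit_raw',
                         '/{repo_name:.*?[^/]}/changeset-raw/{commit_id}')
        config.add_route('repo_commit_patch',
                         '/{repo_name:.*?[^/]}/changeset-patch/{commit_id}')
        config.add_route('repo_commit_download',
                         '/{repo_name:.*?[^/]}/changeset-download/{commit_id}')
        config.add_route('repo_commit_comment_create',
                         '/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/create')
        # scanning registers every @view_config found in the package
        config.scan()

    if __name__ == '__main__':
        config = Configurator()
        config.include(includeme)
        app = config.make_wsgi_app()

With routes registered under these names, helper calls already present in the view, such as h.route_path('repo_commit', repo_name=..., commit_id=...), resolve to the new Pyramid views rather than the legacy ChangesetController endpoints.
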
@@ -1,677 +1,678 b''
1
2
3 ################################################################################
4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
5 # The %(here)s variable will be replaced with the parent directory of this file#
6 ################################################################################
7
8 [DEFAULT]
9 debug = true
10
11 ################################################################################
12 ## EMAIL CONFIGURATION ##
13 ## Uncomment and replace with the email address which should receive ##
14 ## any error reports after an application crash ##
15 ## Additionally these settings will be used by the RhodeCode mailing system ##
16 ################################################################################
17
18 ## prefix all emails subjects with given prefix, helps filtering out emails
19 #email_prefix = [RhodeCode]
20
21 ## email FROM address all mails will be sent
22 #app_email_from = rhodecode-noreply@localhost
23
24 ## Uncomment and replace with the address which should receive any error report
25 ## note: using appenlight for error handling doesn't need this to be uncommented
26 #email_to = admin@localhost
27
28 ## in case of Application errors, sent an error email form
29 #error_email_from = rhodecode_error@localhost
30
31 ## additional error message to be send in case of server crash
32 #error_message =
33
34
35 #smtp_server = mail.server.com
36 #smtp_username =
37 #smtp_password =
38 #smtp_port =
39 #smtp_use_tls = false
40 #smtp_use_ssl = true
41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
42 #smtp_auth =
43
44 [server:main]
45 ## COMMON ##
46 host = 127.0.0.1
47 port = 5000
48
49 ##################################
50 ## WAITRESS WSGI SERVER ##
51 ## Recommended for Development ##
52 ##################################
53
54 use = egg:waitress#main
55 ## number of worker threads
56 threads = 5
57 ## MAX BODY SIZE 100GB
58 max_request_body_size = 107374182400
59 ## Use poll instead of select, fixes file descriptors limits problems.
60 ## May not work on old windows systems.
61 asyncore_use_poll = true
62
63
64 ##########################
65 ## GUNICORN WSGI SERVER ##
66 ##########################
67 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
68
69 #use = egg:gunicorn#main
70 ## Sets the number of process workers. You must set `instance_id = *`
71 ## when this option is set to more than one worker, recommended
72 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
73 ## The `instance_id = *` must be set in the [app:main] section below
74 #workers = 2
75 ## number of threads for each of the worker, must be set to 1 for gevent
76 ## generally recommened to be at 1
77 #threads = 1
78 ## process name
79 #proc_name = rhodecode
80 ## type of worker class, one of sync, gevent
81 ## recommended for bigger setup is using of of other than sync one
82 #worker_class = sync
83 ## The maximum number of simultaneous clients. Valid only for Gevent
84 #worker_connections = 10
85 ## max number of requests that worker will handle before being gracefully
86 ## restarted, could prevent memory leaks
87 #max_requests = 1000
88 #max_requests_jitter = 30
89 ## amount of time a worker can spend with handling a request before it
90 ## gets killed and restarted. Set to 6hrs
91 #timeout = 21600
92
93
94 ## prefix middleware for RhodeCode.
95 ## recommended when using proxy setup.
96 ## allows to set RhodeCode under a prefix in server.
97 ## eg https://server.com/custom_prefix. Enable `filter-with =` option below as well.
98 ## And set your prefix like: `prefix = /custom_prefix`
99 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
100 ## to make your cookies only work on prefix url
101 [filter:proxy-prefix]
102 use = egg:PasteDeploy#prefix
103 prefix = /
104
105 [app:main]
106 use = egg:rhodecode-enterprise-ce
107
108 ## enable proxy prefix middleware, defined above
109 #filter-with = proxy-prefix
110
111 # During development the we want to have the debug toolbar enabled
112 pyramid.includes =
113 pyramid_debugtoolbar
114 rhodecode.utils.debugtoolbar
115 rhodecode.lib.middleware.request_wrapper
116
117 pyramid.reload_templates = true
118
119 debugtoolbar.hosts = 0.0.0.0/0
120 debugtoolbar.exclude_prefixes =
121 /css
122 /fonts
123 /images
124 /js
125
126 ## RHODECODE PLUGINS ##
127 rhodecode.includes =
128 rhodecode.api
129
130
131 # api prefix url
132 rhodecode.api.url = /_admin/api
133
134
135 ## END RHODECODE PLUGINS ##
136
137 ## encryption key used to encrypt social plugin tokens,
138 ## remote_urls with credentials etc, if not set it defaults to
139 ## `beaker.session.secret`
140 #rhodecode.encrypted_values.secret =
141
142 ## decryption strict mode (enabled by default). It controls if decryption raises
143 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
144 #rhodecode.encrypted_values.strict = false
145
146 ## return gzipped responses from Rhodecode (static files/application)
147 gzip_responses = false
148
149 ## autogenerate javascript routes file on startup
150 generate_js_files = false
151
152 ## Optional Languages
153 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
154 lang = en
155
156 ## perform a full repository scan on each server start, this should be
157 ## set to false after first startup, to allow faster server restarts.
158 startup.import_repos = false
159
160 ## Uncomment and set this path to use archive download cache.
161 ## Once enabled, generated archives will be cached at this location
162 ## and served from the cache during subsequent requests for the same archive of
163 ## the repository.
164 #archive_cache_dir = /tmp/tarballcache
165
166 ## change this to unique ID for security
167 app_instance_uuid = rc-production
168
169 ## cut off limit for large diffs (size in bytes)
170 cut_off_limit_diff = 1024000
171 cut_off_limit_file = 256000
172
173 ## use cache version of scm repo everywhere
174 vcs_full_cache = true
175
176 ## force https in RhodeCode, fixes https redirects, assumes it's always https
177 ## Normally this is controlled by proper http flags sent from http server
178 force_https = false
179
180 ## use Strict-Transport-Security headers
181 use_htsts = false
182
183 ## number of commits stats will parse on each iteration
184 commit_parse_limit = 25
185
186 ## git rev filter option, --all is the default filter, if you need to
187 ## hide all refs in changelog switch this to --branches --tags
188 git_rev_filter = --branches --tags
189
190 # Set to true if your repos are exposed using the dumb protocol
191 git_update_server_info = false
192
193 ## RSS/ATOM feed options
194 rss_cut_off_limit = 256000
195 rss_items_per_page = 10
196 rss_include_diff = false
197
198 ## gist URL alias, used to create nicer urls for gist. This should be an
199 ## url that does rewrites to _admin/gists/{gistid}.
200 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
201 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
202 gist_alias_url =
203
204 ## List of views (using glob pattern syntax) that AUTH TOKENS could be
205 ## used for access.
206 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
207 ## came from the the logged in user who own this authentication token.
208 ##
209 ## list of all views can be found under `_admin/permissions/auth_token_access`
210 ## The list should be "," separated and on a single line.
211 ##
212 ## Most common views to enable:
213 # ChangesetController:changeset_patch
213 # RepoCommitsView:repo_commit_download
214 # ChangesetController:changeset_raw
214 # RepoCommitsView:repo_commit_patch
215 # RepoFilesView.repo_files_diff
215 # RepoCommitsView:repo_commit_raw
216 # RepoFilesView.repo_archivefile
216 # RepoFilesView:repo_files_diff
217 # RepoFilesView.repo_file_raw
217 # RepoFilesView:repo_archivefile
218 # RepoFilesView:repo_file_raw
218 # GistView:*
219 # GistView:*
219 api_access_controllers_whitelist =
220 api_access_controllers_whitelist =
220
221
221 ## default encoding used to convert from and to unicode
222 ## default encoding used to convert from and to unicode
222 ## can be also a comma separated list of encoding in case of mixed encodings
223 ## can be also a comma separated list of encoding in case of mixed encodings
223 default_encoding = UTF-8
224 default_encoding = UTF-8
224
225
225 ## instance-id prefix
226 ## instance-id prefix
226 ## a prefix key for this instance used for cache invalidation when running
227 ## a prefix key for this instance used for cache invalidation when running
227 ## multiple instances of rhodecode, make sure it's globally unique for
228 ## multiple instances of rhodecode, make sure it's globally unique for
228 ## all running rhodecode instances. Leave empty if you don't use it
229 ## all running rhodecode instances. Leave empty if you don't use it
229 instance_id =
230 instance_id =
230
231
231 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
232 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
232 ## of an authentication plugin also if it is disabled by it's settings.
233 ## of an authentication plugin also if it is disabled by it's settings.
233 ## This could be useful if you are unable to log in to the system due to broken
234 ## This could be useful if you are unable to log in to the system due to broken
234 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
235 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
235 ## module to log in again and fix the settings.
236 ## module to log in again and fix the settings.
236 ##
237 ##
237 ## Available builtin plugin IDs (hash is part of the ID):
238 ## Available builtin plugin IDs (hash is part of the ID):
238 ## egg:rhodecode-enterprise-ce#rhodecode
239 ## egg:rhodecode-enterprise-ce#rhodecode
239 ## egg:rhodecode-enterprise-ce#pam
240 ## egg:rhodecode-enterprise-ce#pam
240 ## egg:rhodecode-enterprise-ce#ldap
241 ## egg:rhodecode-enterprise-ce#ldap
241 ## egg:rhodecode-enterprise-ce#jasig_cas
242 ## egg:rhodecode-enterprise-ce#jasig_cas
242 ## egg:rhodecode-enterprise-ce#headers
243 ## egg:rhodecode-enterprise-ce#headers
243 ## egg:rhodecode-enterprise-ce#crowd
244 ## egg:rhodecode-enterprise-ce#crowd
244 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
245 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
245
246
246 ## alternative return HTTP header for failed authentication. Default HTTP
247 ## alternative return HTTP header for failed authentication. Default HTTP
247 ## response is 401 HTTPUnauthorized. Currently HG clients have troubles with
248 ## response is 401 HTTPUnauthorized. Currently HG clients have troubles with
248 ## handling that causing a series of failed authentication calls.
249 ## handling that causing a series of failed authentication calls.
249 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
250 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code
250 ## This will be served instead of default 401 on bad authnetication
251 ## This will be served instead of default 401 on bad authnetication
251 auth_ret_code =
252 auth_ret_code =
252
253
253 ## use special detection method when serving auth_ret_code, instead of serving
254 ## use special detection method when serving auth_ret_code, instead of serving
254 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
255 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
255 ## and then serve auth_ret_code to clients
256 ## and then serve auth_ret_code to clients
256 auth_ret_code_detection = false
257 auth_ret_code_detection = false
257
258
258 ## locking return code. When repository is locked return this HTTP code. 2XX
259 ## locking return code. When repository is locked return this HTTP code. 2XX
259 ## codes don't break the transactions while 4XX codes do
260 ## codes don't break the transactions while 4XX codes do
260 lock_ret_code = 423
261 lock_ret_code = 423
261
262
262 ## allows to change the repository location in settings page
263 ## allows to change the repository location in settings page
263 allow_repo_location_change = true
264 allow_repo_location_change = true
264
265
265 ## allows to setup custom hooks in settings page
266 ## allows to setup custom hooks in settings page
266 allow_custom_hooks_settings = true
267 allow_custom_hooks_settings = true
267
268
268 ## generated license token, goto license page in RhodeCode settings to obtain
269 ## generated license token, goto license page in RhodeCode settings to obtain
269 ## new token
270 ## new token
270 license_token =
271 license_token =
271
272
272 ## supervisor connection uri, for managing supervisor and logs.
273 ## supervisor connection uri, for managing supervisor and logs.
273 supervisor.uri =
274 supervisor.uri =
274 ## supervisord group name/id we only want this RC instance to handle
275 ## supervisord group name/id we only want this RC instance to handle
275 supervisor.group_id = dev
276 supervisor.group_id = dev
276
277
277 ## Display extended labs settings
278 ## Display extended labs settings
278 labs_settings_active = true
279 labs_settings_active = true
279
280
280 ####################################
281 ####################################
281 ### CELERY CONFIG ####
282 ### CELERY CONFIG ####
282 ####################################
283 ####################################
283 use_celery = false
284 use_celery = false
284 broker.host = localhost
285 broker.host = localhost
285 broker.vhost = rabbitmqhost
286 broker.vhost = rabbitmqhost
286 broker.port = 5672
287 broker.port = 5672
287 broker.user = rabbitmq
288 broker.user = rabbitmq
288 broker.password = qweqwe
289 broker.password = qweqwe
289
290
290 celery.imports = rhodecode.lib.celerylib.tasks
291 celery.imports = rhodecode.lib.celerylib.tasks
291
292
292 celery.result.backend = amqp
293 celery.result.backend = amqp
293 celery.result.dburi = amqp://
294 celery.result.dburi = amqp://
294 celery.result.serialier = json
295 celery.result.serialier = json
295
296
296 #celery.send.task.error.emails = true
297 #celery.send.task.error.emails = true
297 #celery.amqp.task.result.expires = 18000
298 #celery.amqp.task.result.expires = 18000
298
299
299 celeryd.concurrency = 2
300 celeryd.concurrency = 2
300 #celeryd.log.file = celeryd.log
301 #celeryd.log.file = celeryd.log
301 celeryd.log.level = debug
302 celeryd.log.level = debug
302 celeryd.max.tasks.per.child = 1
303 celeryd.max.tasks.per.child = 1
303
304
304 ## tasks will never be sent to the queue, but executed locally instead.
305 ## tasks will never be sent to the queue, but executed locally instead.
305 celery.always.eager = false
306 celery.always.eager = false
306
307
307 ####################################
308 ####################################
308 ### BEAKER CACHE ####
309 ### BEAKER CACHE ####
309 ####################################
310 ####################################
310 # default cache dir for templates. Putting this into a ramdisk
311 # default cache dir for templates. Putting this into a ramdisk
311 ## can boost performance, eg. %(here)s/data_ramdisk
312 ## can boost performance, eg. %(here)s/data_ramdisk
312 cache_dir = %(here)s/data
313 cache_dir = %(here)s/data
313
314
314 ## locking and default file storage for Beaker. Putting this into a ramdisk
315 ## locking and default file storage for Beaker. Putting this into a ramdisk
315 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
316 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
316 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
317 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
317 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
318 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
318
319
319 beaker.cache.regions = super_short_term, short_term, long_term, sql_cache_short, auth_plugins, repo_cache_long
320 beaker.cache.regions = super_short_term, short_term, long_term, sql_cache_short, auth_plugins, repo_cache_long
320
321
321 beaker.cache.super_short_term.type = memory
322 beaker.cache.super_short_term.type = memory
322 beaker.cache.super_short_term.expire = 10
323 beaker.cache.super_short_term.expire = 10
323 beaker.cache.super_short_term.key_length = 256
324 beaker.cache.super_short_term.key_length = 256
324
325
325 beaker.cache.short_term.type = memory
326 beaker.cache.short_term.type = memory
326 beaker.cache.short_term.expire = 60
327 beaker.cache.short_term.expire = 60
327 beaker.cache.short_term.key_length = 256
328 beaker.cache.short_term.key_length = 256
328
329
329 beaker.cache.long_term.type = memory
330 beaker.cache.long_term.type = memory
330 beaker.cache.long_term.expire = 36000
331 beaker.cache.long_term.expire = 36000
331 beaker.cache.long_term.key_length = 256
332 beaker.cache.long_term.key_length = 256
332
333
333 beaker.cache.sql_cache_short.type = memory
334 beaker.cache.sql_cache_short.type = memory
334 beaker.cache.sql_cache_short.expire = 10
335 beaker.cache.sql_cache_short.expire = 10
335 beaker.cache.sql_cache_short.key_length = 256
336 beaker.cache.sql_cache_short.key_length = 256
336
337
337 ## default is memory cache, configure only if required
338 ## default is memory cache, configure only if required
338 ## using multi-node or multi-worker setup
339 ## using multi-node or multi-worker setup
339 #beaker.cache.auth_plugins.type = ext:database
340 #beaker.cache.auth_plugins.type = ext:database
340 #beaker.cache.auth_plugins.lock_dir = %(here)s/data/cache/auth_plugin_lock
341 #beaker.cache.auth_plugins.lock_dir = %(here)s/data/cache/auth_plugin_lock
341 #beaker.cache.auth_plugins.url = postgresql://postgres:secret@localhost/rhodecode
342 #beaker.cache.auth_plugins.url = postgresql://postgres:secret@localhost/rhodecode
342 #beaker.cache.auth_plugins.url = mysql://root:secret@127.0.0.1/rhodecode
343 #beaker.cache.auth_plugins.url = mysql://root:secret@127.0.0.1/rhodecode
343 #beaker.cache.auth_plugins.sa.pool_recycle = 3600
344 #beaker.cache.auth_plugins.sa.pool_recycle = 3600
344 #beaker.cache.auth_plugins.sa.pool_size = 10
345 #beaker.cache.auth_plugins.sa.pool_size = 10
345 #beaker.cache.auth_plugins.sa.max_overflow = 0
346 #beaker.cache.auth_plugins.sa.max_overflow = 0
346
347
347 beaker.cache.repo_cache_long.type = memorylru_base
348 beaker.cache.repo_cache_long.type = memorylru_base
348 beaker.cache.repo_cache_long.max_items = 4096
349 beaker.cache.repo_cache_long.max_items = 4096
349 beaker.cache.repo_cache_long.expire = 2592000
350 beaker.cache.repo_cache_long.expire = 2592000
350
351
351 ## default is memorylru_base cache, configure only if required
352 ## default is memorylru_base cache, configure only if required
352 ## using multi-node or multi-worker setup
353 ## using multi-node or multi-worker setup
353 #beaker.cache.repo_cache_long.type = ext:memcached
354 #beaker.cache.repo_cache_long.type = ext:memcached
354 #beaker.cache.repo_cache_long.url = localhost:11211
355 #beaker.cache.repo_cache_long.url = localhost:11211
355 #beaker.cache.repo_cache_long.expire = 1209600
356 #beaker.cache.repo_cache_long.expire = 1209600
356 #beaker.cache.repo_cache_long.key_length = 256
357 #beaker.cache.repo_cache_long.key_length = 256
357
358
358 ####################################
359 ####################################
359 ### BEAKER SESSION ####
360 ### BEAKER SESSION ####
360 ####################################
361 ####################################
361
362
362 ## .session.type is type of storage options for the session, current allowed
363 ## .session.type is type of storage options for the session, current allowed
363 ## types are file, ext:memcached, ext:database, and memory (default).
364 ## types are file, ext:memcached, ext:database, and memory (default).
364 beaker.session.type = file
365 beaker.session.type = file
365 beaker.session.data_dir = %(here)s/data/sessions/data
366 beaker.session.data_dir = %(here)s/data/sessions/data
366
367
367 ## db based session, fast, and allows easy management over logged in users
368 ## db based session, fast, and allows easy management over logged in users
368 #beaker.session.type = ext:database
369 #beaker.session.type = ext:database
369 #beaker.session.table_name = db_session
370 #beaker.session.table_name = db_session
370 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
371 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
371 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
372 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
372 #beaker.session.sa.pool_recycle = 3600
373 #beaker.session.sa.pool_recycle = 3600
373 #beaker.session.sa.echo = false
374 #beaker.session.sa.echo = false
374
375
375 beaker.session.key = rhodecode
376 beaker.session.key = rhodecode
376 beaker.session.secret = develop-rc-uytcxaz
377 beaker.session.secret = develop-rc-uytcxaz
377 beaker.session.lock_dir = %(here)s/data/sessions/lock
378 beaker.session.lock_dir = %(here)s/data/sessions/lock
378
379
379 ## Secure encrypted cookie. Requires AES and AES python libraries
380 ## Secure encrypted cookie. Requires AES and AES python libraries
380 ## you must disable beaker.session.secret to use this
381 ## you must disable beaker.session.secret to use this
381 #beaker.session.encrypt_key = key_for_encryption
382 #beaker.session.encrypt_key = key_for_encryption
382 #beaker.session.validate_key = validation_key
383 #beaker.session.validate_key = validation_key
383
384
384 ## sets session as invalid(also logging out user) if it haven not been
385 ## sets session as invalid(also logging out user) if it haven not been
385 ## accessed for given amount of time in seconds
386 ## accessed for given amount of time in seconds
386 beaker.session.timeout = 2592000
387 beaker.session.timeout = 2592000
387 beaker.session.httponly = true
388 beaker.session.httponly = true
388 ## Path to use for the cookie. Set to prefix if you use prefix middleware
389 ## Path to use for the cookie. Set to prefix if you use prefix middleware
389 #beaker.session.cookie_path = /custom_prefix
390 #beaker.session.cookie_path = /custom_prefix
390
391
391 ## uncomment for https secure cookie
392 ## uncomment for https secure cookie
392 beaker.session.secure = false
393 beaker.session.secure = false
393
394
394 ## auto save the session to not to use .save()
395 ## auto save the session to not to use .save()
395 beaker.session.auto = false
396 beaker.session.auto = false
396
397
397 ## default cookie expiration time in seconds, set to `true` to set expire
398 ## default cookie expiration time in seconds, set to `true` to set expire
398 ## at browser close
399 ## at browser close
399 #beaker.session.cookie_expires = 3600
400 #beaker.session.cookie_expires = 3600
400
401
401 ###################################
402 ###################################
402 ## SEARCH INDEXING CONFIGURATION ##
403 ## SEARCH INDEXING CONFIGURATION ##
403 ###################################
404 ###################################
404 ## Full text search indexer is available in rhodecode-tools under
405 ## Full text search indexer is available in rhodecode-tools under
405 ## `rhodecode-tools index` command
406 ## `rhodecode-tools index` command
406
407
407 ## WHOOSH Backend, doesn't require additional services to run
408 ## WHOOSH Backend, doesn't require additional services to run
408 ## it works good with few dozen repos
409 ## it works good with few dozen repos
409 search.module = rhodecode.lib.index.whoosh
410 search.module = rhodecode.lib.index.whoosh
410 search.location = %(here)s/data/index
411 search.location = %(here)s/data/index
411
412
412 ########################################
413 ########################################
413 ### CHANNELSTREAM CONFIG ####
414 ### CHANNELSTREAM CONFIG ####
414 ########################################
415 ########################################
415 ## channelstream enables persistent connections and live notification
416 ## channelstream enables persistent connections and live notification
416 ## in the system. It's also used by the chat system
417 ## in the system. It's also used by the chat system
417 channelstream.enabled = false
418 channelstream.enabled = false
418
419
419 ## server address for channelstream server on the backend
420 ## server address for channelstream server on the backend
420 channelstream.server = 127.0.0.1:9800
421 channelstream.server = 127.0.0.1:9800
421
422
422 ## location of the channelstream server from outside world
423 ## location of the channelstream server from outside world
423 ## use ws:// for http or wss:// for https. This address needs to be handled
424 ## use ws:// for http or wss:// for https. This address needs to be handled
424 ## by external HTTP server such as Nginx or Apache
425 ## by external HTTP server such as Nginx or Apache
425 ## see nginx/apache configuration examples in our docs
426 ## see nginx/apache configuration examples in our docs
426 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
427 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
427 channelstream.secret = secret
428 channelstream.secret = secret
428 channelstream.history.location = %(here)s/channelstream_history
429 channelstream.history.location = %(here)s/channelstream_history
429
430
430 ## Internal application path that Javascript uses to connect into.
431 ## Internal application path that Javascript uses to connect into.
431 ## If you use proxy-prefix the prefix should be added before /_channelstream
432 ## If you use proxy-prefix the prefix should be added before /_channelstream
432 channelstream.proxy_path = /_channelstream
433 channelstream.proxy_path = /_channelstream
433
434
434
435
435 ###################################
436 ###################################
436 ## APPENLIGHT CONFIG ##
437 ## APPENLIGHT CONFIG ##
437 ###################################
438 ###################################
438
439
439 ## Appenlight is tailored to work with RhodeCode, see
440 ## Appenlight is tailored to work with RhodeCode, see
440 ## http://appenlight.com for details how to obtain an account
441 ## http://appenlight.com for details how to obtain an account
441
442
442 ## appenlight integration enabled
443 ## appenlight integration enabled
443 appenlight = false
444 appenlight = false
444
445
445 appenlight.server_url = https://api.appenlight.com
446 appenlight.server_url = https://api.appenlight.com
446 appenlight.api_key = YOUR_API_KEY
447 appenlight.api_key = YOUR_API_KEY
447 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
448 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
448
449
449 # used for JS client
450 # used for JS client
450 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
451 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
451
452
452 ## TWEAK AMOUNT OF INFO SENT HERE
453 ## TWEAK AMOUNT OF INFO SENT HERE
453
454
454 ## enables 404 error logging (default False)
455 ## enables 404 error logging (default False)
455 appenlight.report_404 = false
456 appenlight.report_404 = false
456
457
457 ## time in seconds after request is considered being slow (default 1)
458 ## time in seconds after request is considered being slow (default 1)
458 appenlight.slow_request_time = 1
459 appenlight.slow_request_time = 1
459
460
460 ## record slow requests in application
461 ## record slow requests in application
461 ## (needs to be enabled for slow datastore recording and time tracking)
462 ## (needs to be enabled for slow datastore recording and time tracking)
462 appenlight.slow_requests = true
463 appenlight.slow_requests = true
463
464
464 ## enable hooking to application loggers
465 ## enable hooking to application loggers
465 appenlight.logging = true
466 appenlight.logging = true
466
467
467 ## minimum log level for log capture
468 ## minimum log level for log capture
468 appenlight.logging.level = WARNING
469 appenlight.logging.level = WARNING
469
470
470 ## send logs only from erroneous/slow requests
471 ## send logs only from erroneous/slow requests
471 ## (saves API quota for intensive logging)
472 ## (saves API quota for intensive logging)
472 appenlight.logging_on_error = false
473 appenlight.logging_on_error = false
473
474
474 ## list of additonal keywords that should be grabbed from environ object
475 ## list of additonal keywords that should be grabbed from environ object
475 ## can be string with comma separated list of words in lowercase
476 ## can be string with comma separated list of words in lowercase
476 ## (by default client will always send following info:
477 ## (by default client will always send following info:
477 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
478 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
478 ## start with HTTP* this list be extended with additional keywords here
479 ## start with HTTP* this list be extended with additional keywords here
479 appenlight.environ_keys_whitelist =
480 appenlight.environ_keys_whitelist =
480
481
481 ## list of keywords that should be blanked from request object
482 ## list of keywords that should be blanked from request object
482 ## can be string with comma separated list of words in lowercase
483 ## can be string with comma separated list of words in lowercase
483 ## (by default client will always blank keys that contain following words
484 ## (by default client will always blank keys that contain following words
484 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
485 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
485 ## this list be extended with additional keywords set here
486 ## this list be extended with additional keywords set here
486 appenlight.request_keys_blacklist =
487 appenlight.request_keys_blacklist =
487
488
488 ## list of namespaces that should be ignores when gathering log entries
489 ## list of namespaces that should be ignores when gathering log entries
489 ## can be string with comma separated list of namespaces
490 ## can be string with comma separated list of namespaces
490 ## (by default the client ignores own entries: appenlight_client.client)
491 ## (by default the client ignores own entries: appenlight_client.client)
491 appenlight.log_namespace_blacklist =
492 appenlight.log_namespace_blacklist =
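For illustration only, the three list settings above take plain comma separated values; the specific keys and namespaces below are hypothetical examples, not defaults:
#appenlight.environ_keys_whitelist = wsgi.url_scheme, http_host
#appenlight.request_keys_blacklist = session_id, private_token
#appenlight.log_namespace_blacklist = requests.packages.urllib3, waitress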
492
493
493
494
494 ################################################################################
495 ################################################################################
495 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
496 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
496 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
497 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
497 ## execute malicious code after an exception is raised. ##
498 ## execute malicious code after an exception is raised. ##
498 ################################################################################
499 ################################################################################
499 #set debug = false
500 #set debug = false
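On a production system the line above should be uncommented, i.e. it should read:
set debug = false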
500
501
501
502
502 ##############
503 ##############
503 ## STYLING ##
504 ## STYLING ##
504 ##############
505 ##############
505 debug_style = true
506 debug_style = true
506
507
507 ###########################################
508 ###########################################
508 ### MAIN RHODECODE DATABASE CONFIG ###
509 ### MAIN RHODECODE DATABASE CONFIG ###
509 ###########################################
510 ###########################################
510 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
511 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
511 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
512 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
512 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode
513 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode
513 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
514 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
514
515
515 # see sqlalchemy docs for other advanced settings
516 # see sqlalchemy docs for other advanced settings
516
517
517 ## print the sql statements to output
518 ## print the sql statements to output
518 sqlalchemy.db1.echo = false
519 sqlalchemy.db1.echo = false
519 ## recycle the connections after this number of seconds
520 ## recycle the connections after this number of seconds
520 sqlalchemy.db1.pool_recycle = 3600
521 sqlalchemy.db1.pool_recycle = 3600
521 sqlalchemy.db1.convert_unicode = true
522 sqlalchemy.db1.convert_unicode = true
522
523
523 ## the number of connections to keep open inside the connection pool.
524 ## the number of connections to keep open inside the connection pool.
524 ## 0 indicates no limit
525 ## 0 indicates no limit
525 #sqlalchemy.db1.pool_size = 5
526 #sqlalchemy.db1.pool_size = 5
526
527
527 ## the number of connections to allow in connection pool "overflow", that is
528 ## the number of connections to allow in connection pool "overflow", that is
528 ## connections that can be opened above and beyond the pool_size setting,
529 ## connections that can be opened above and beyond the pool_size setting,
529 ## which defaults to five.
530 ## which defaults to five.
530 #sqlalchemy.db1.max_overflow = 10
531 #sqlalchemy.db1.max_overflow = 10
531
532
532
533
533 ##################
534 ##################
534 ### VCS CONFIG ###
535 ### VCS CONFIG ###
535 ##################
536 ##################
536 vcs.server.enable = true
537 vcs.server.enable = true
537 vcs.server = localhost:9900
538 vcs.server = localhost:9900
538
539
539 ## Web server connectivity protocol, responsible for web based VCS operations
540 ## Web server connectivity protocol, responsible for web based VCS operations
540 ## Available protocols are:
541 ## Available protocols are:
541 ## `http` - use http-rpc backend (default)
542 ## `http` - use http-rpc backend (default)
542 vcs.server.protocol = http
543 vcs.server.protocol = http
543
544
544 ## Push/Pull operations protocol, available options are:
545 ## Push/Pull operations protocol, available options are:
545 ## `http` - use http-rpc backend (default)
546 ## `http` - use http-rpc backend (default)
546 ##
547 ##
547 vcs.scm_app_implementation = http
548 vcs.scm_app_implementation = http
548
549
549 ## Push/Pull operations hooks protocol, available options are:
550 ## Push/Pull operations hooks protocol, available options are:
550 ## `http` - use http-rpc backend (default)
551 ## `http` - use http-rpc backend (default)
551 vcs.hooks.protocol = http
552 vcs.hooks.protocol = http
552
553
553 vcs.server.log_level = debug
554 vcs.server.log_level = debug
554 ## Start VCSServer with this instance as a subprocess, useful for development
555 ## Start VCSServer with this instance as a subprocess, useful for development
555 vcs.start_server = true
556 vcs.start_server = true
556
557
557 ## List of enabled VCS backends, available options are:
558 ## List of enabled VCS backends, available options are:
558 ## `hg` - mercurial
559 ## `hg` - mercurial
559 ## `git` - git
560 ## `git` - git
560 ## `svn` - subversion
561 ## `svn` - subversion
561 vcs.backends = hg, git, svn
562 vcs.backends = hg, git, svn
562
563
563 vcs.connection_timeout = 3600
564 vcs.connection_timeout = 3600
564 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
565 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
565 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible
566 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible
566 #vcs.svn.compatible_version = pre-1.8-compatible
567 #vcs.svn.compatible_version = pre-1.8-compatible
567
568
568
569
569 ############################################################
570 ############################################################
570 ### Subversion proxy support (mod_dav_svn) ###
571 ### Subversion proxy support (mod_dav_svn) ###
571 ### Maps RhodeCode repo groups into SVN paths for Apache ###
572 ### Maps RhodeCode repo groups into SVN paths for Apache ###
572 ############################################################
573 ############################################################
573 ## Enable or disable the config file generation.
574 ## Enable or disable the config file generation.
574 svn.proxy.generate_config = false
575 svn.proxy.generate_config = false
575 ## Generate config file with `SVNListParentPath` set to `On`.
576 ## Generate config file with `SVNListParentPath` set to `On`.
576 svn.proxy.list_parent_path = true
577 svn.proxy.list_parent_path = true
577 ## Set location and file name of generated config file.
578 ## Set location and file name of generated config file.
578 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
579 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
579 ## Used as a prefix to the `Location` block in the generated config file.
580 ## Used as a prefix to the `Location` block in the generated config file.
580 ## In most cases it should be set to `/`.
581 ## In most cases it should be set to `/`.
581 svn.proxy.location_root = /
582 svn.proxy.location_root = /
582 ## Command to reload the mod dav svn configuration on change.
583 ## Command to reload the mod dav svn configuration on change.
583 ## Example: `/etc/init.d/apache2 reload`
584 ## Example: `/etc/init.d/apache2 reload`
584 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
585 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
585 ## If the timeout expires before the reload command finishes, the command will
586 ## If the timeout expires before the reload command finishes, the command will
586 ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
587 ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
587 #svn.proxy.reload_timeout = 10
588 #svn.proxy.reload_timeout = 10
588
589
589 ## Dummy marker to add new entries after.
590 ## Dummy marker to add new entries after.
590 ## Add any custom entries below. Please don't remove.
591 ## Add any custom entries below. Please don't remove.
591 custom.conf = 1
592 custom.conf = 1
592
593
593
594
594 ################################
595 ################################
595 ### LOGGING CONFIGURATION ####
596 ### LOGGING CONFIGURATION ####
596 ################################
597 ################################
597 [loggers]
598 [loggers]
598 keys = root, routes, rhodecode, sqlalchemy, beaker, templates
599 keys = root, routes, rhodecode, sqlalchemy, beaker, templates
599
600
600 [handlers]
601 [handlers]
601 keys = console, console_sql
602 keys = console, console_sql
602
603
603 [formatters]
604 [formatters]
604 keys = generic, color_formatter, color_formatter_sql
605 keys = generic, color_formatter, color_formatter_sql
605
606
606 #############
607 #############
607 ## LOGGERS ##
608 ## LOGGERS ##
608 #############
609 #############
609 [logger_root]
610 [logger_root]
610 level = NOTSET
611 level = NOTSET
611 handlers = console
612 handlers = console
612
613
613 [logger_routes]
614 [logger_routes]
614 level = DEBUG
615 level = DEBUG
615 handlers =
616 handlers =
616 qualname = routes.middleware
617 qualname = routes.middleware
617 ## "level = DEBUG" logs the route matched and routing variables.
618 ## "level = DEBUG" logs the route matched and routing variables.
618 propagate = 1
619 propagate = 1
619
620
620 [logger_beaker]
621 [logger_beaker]
621 level = DEBUG
622 level = DEBUG
622 handlers =
623 handlers =
623 qualname = beaker.container
624 qualname = beaker.container
624 propagate = 1
625 propagate = 1
625
626
626 [logger_templates]
627 [logger_templates]
627 level = INFO
628 level = INFO
628 handlers =
629 handlers =
629 qualname = pylons.templating
630 qualname = pylons.templating
630 propagate = 1
631 propagate = 1
631
632
632 [logger_rhodecode]
633 [logger_rhodecode]
633 level = DEBUG
634 level = DEBUG
634 handlers =
635 handlers =
635 qualname = rhodecode
636 qualname = rhodecode
636 propagate = 1
637 propagate = 1
637
638
638 [logger_sqlalchemy]
639 [logger_sqlalchemy]
639 level = INFO
640 level = INFO
640 handlers = console_sql
641 handlers = console_sql
641 qualname = sqlalchemy.engine
642 qualname = sqlalchemy.engine
642 propagate = 0
643 propagate = 0
643
644
644 ##############
645 ##############
645 ## HANDLERS ##
646 ## HANDLERS ##
646 ##############
647 ##############
647
648
648 [handler_console]
649 [handler_console]
649 class = StreamHandler
650 class = StreamHandler
650 args = (sys.stderr, )
651 args = (sys.stderr, )
651 level = DEBUG
652 level = DEBUG
652 formatter = color_formatter
653 formatter = color_formatter
653
654
654 [handler_console_sql]
655 [handler_console_sql]
655 class = StreamHandler
656 class = StreamHandler
656 args = (sys.stderr, )
657 args = (sys.stderr, )
657 level = DEBUG
658 level = DEBUG
658 formatter = color_formatter_sql
659 formatter = color_formatter_sql
659
660
660 ################
661 ################
661 ## FORMATTERS ##
662 ## FORMATTERS ##
662 ################
663 ################
663
664
664 [formatter_generic]
665 [formatter_generic]
665 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
666 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
666 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
667 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
667 datefmt = %Y-%m-%d %H:%M:%S
668 datefmt = %Y-%m-%d %H:%M:%S
668
669
669 [formatter_color_formatter]
670 [formatter_color_formatter]
670 class = rhodecode.lib.logging_formatter.ColorFormatter
671 class = rhodecode.lib.logging_formatter.ColorFormatter
671 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
672 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
672 datefmt = %Y-%m-%d %H:%M:%S
673 datefmt = %Y-%m-%d %H:%M:%S
673
674
674 [formatter_color_formatter_sql]
675 [formatter_color_formatter_sql]
675 class = rhodecode.lib.logging_formatter.ColorFormatterSql
676 class = rhodecode.lib.logging_formatter.ColorFormatterSql
676 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
677 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
677 datefmt = %Y-%m-%d %H:%M:%S
678 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,646 +1,647 b''
1
1
2
2
3 ################################################################################
3 ################################################################################
4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
4 ## RHODECODE COMMUNITY EDITION CONFIGURATION ##
5 # The %(here)s variable will be replaced with the parent directory of this file#
5 # The %(here)s variable will be replaced with the parent directory of this file#
6 ################################################################################
6 ################################################################################
7
7
8 [DEFAULT]
8 [DEFAULT]
9 debug = true
9 debug = true
10
10
11 ################################################################################
11 ################################################################################
12 ## EMAIL CONFIGURATION ##
12 ## EMAIL CONFIGURATION ##
13 ## Uncomment and replace with the email address which should receive ##
13 ## Uncomment and replace with the email address which should receive ##
14 ## any error reports after an application crash ##
14 ## any error reports after an application crash ##
15 ## Additionally these settings will be used by the RhodeCode mailing system ##
15 ## Additionally these settings will be used by the RhodeCode mailing system ##
16 ################################################################################
16 ################################################################################
17
17
18 ## prefix all email subjects with the given prefix, helps filtering out emails
18 ## prefix all email subjects with the given prefix, helps filtering out emails
19 #email_prefix = [RhodeCode]
19 #email_prefix = [RhodeCode]
20
20
21 ## email FROM address from which all mails will be sent
21 ## email FROM address from which all mails will be sent
22 #app_email_from = rhodecode-noreply@localhost
22 #app_email_from = rhodecode-noreply@localhost
23
23
24 ## Uncomment and replace with the address which should receive any error report
24 ## Uncomment and replace with the address which should receive any error report
25 ## note: if using appenlight for error handling, this does not need to be uncommented
25 ## note: if using appenlight for error handling, this does not need to be uncommented
26 #email_to = admin@localhost
26 #email_to = admin@localhost
27
27
28 ## in case of Application errors, send the error email from this address
28 ## in case of Application errors, send the error email from this address
29 #error_email_from = rhodecode_error@localhost
29 #error_email_from = rhodecode_error@localhost
30
30
31 ## additional error message to be sent in case of server crash
31 ## additional error message to be sent in case of server crash
32 #error_message =
32 #error_message =
33
33
34
34
35 #smtp_server = mail.server.com
35 #smtp_server = mail.server.com
36 #smtp_username =
36 #smtp_username =
37 #smtp_password =
37 #smtp_password =
38 #smtp_port =
38 #smtp_port =
39 #smtp_use_tls = false
39 #smtp_use_tls = false
40 #smtp_use_ssl = true
40 #smtp_use_ssl = true
41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
41 ## Specify available auth parameters here (e.g. LOGIN PLAIN CRAM-MD5, etc.)
42 #smtp_auth =
42 #smtp_auth =
43
43
44 [server:main]
44 [server:main]
45 ## COMMON ##
45 ## COMMON ##
46 host = 127.0.0.1
46 host = 127.0.0.1
47 port = 5000
47 port = 5000
48
48
49 ##################################
49 ##################################
50 ## WAITRESS WSGI SERVER ##
50 ## WAITRESS WSGI SERVER ##
51 ## Recommended for Development ##
51 ## Recommended for Development ##
52 ##################################
52 ##################################
53
53
54 #use = egg:waitress#main
54 #use = egg:waitress#main
55 ## number of worker threads
55 ## number of worker threads
56 #threads = 5
56 #threads = 5
57 ## MAX BODY SIZE 100GB
57 ## MAX BODY SIZE 100GB
58 #max_request_body_size = 107374182400
58 #max_request_body_size = 107374182400
59 ## Use poll instead of select, fixes file descriptor limit problems.
59 ## Use poll instead of select, fixes file descriptor limit problems.
60 ## May not work on old Windows systems.
60 ## May not work on old Windows systems.
61 #asyncore_use_poll = true
61 #asyncore_use_poll = true
62
62
63
63
64 ##########################
64 ##########################
65 ## GUNICORN WSGI SERVER ##
65 ## GUNICORN WSGI SERVER ##
66 ##########################
66 ##########################
67 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
67 ## run with gunicorn --log-config rhodecode.ini --paste rhodecode.ini
68
68
69 use = egg:gunicorn#main
69 use = egg:gunicorn#main
70 ## Sets the number of process workers. You must set `instance_id = *`
70 ## Sets the number of process workers. You must set `instance_id = *`
71 ## when this option is set to more than one worker; the recommended
71 ## when this option is set to more than one worker; the recommended
72 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2 CPUs = 5 workers
72 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2 CPUs = 5 workers
73 ## The `instance_id = *` must be set in the [app:main] section below
73 ## The `instance_id = *` must be set in the [app:main] section below
74 workers = 2
74 workers = 2
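As a worked example of the formula above (illustrative, not a shipped default): a host with 4 CPUs would use 2 * 4 + 1 = 9 workers, together with `instance_id = *` in the [app:main] section:
#workers = 9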
75 ## number of threads for each worker, must be set to 1 for gevent
75 ## number of threads for each worker, must be set to 1 for gevent
76 ## generally recommended to be 1
76 ## generally recommended to be 1
77 #threads = 1
77 #threads = 1
78 ## process name
78 ## process name
79 proc_name = rhodecode
79 proc_name = rhodecode
80 ## type of worker class, one of sync, gevent
80 ## type of worker class, one of sync, gevent
81 ## for bigger setups it is recommended to use a worker class other than sync
81 ## for bigger setups it is recommended to use a worker class other than sync
82 worker_class = sync
82 worker_class = sync
83 ## The maximum number of simultaneous clients. Valid only for Gevent
83 ## The maximum number of simultaneous clients. Valid only for Gevent
84 #worker_connections = 10
84 #worker_connections = 10
85 ## max number of requests that a worker will handle before being gracefully
85 ## max number of requests that a worker will handle before being gracefully
86 ## restarted; this can help prevent memory leaks
86 ## restarted; this can help prevent memory leaks
87 max_requests = 1000
87 max_requests = 1000
88 max_requests_jitter = 30
88 max_requests_jitter = 30
89 ## amount of time a worker can spend handling a request before it
89 ## amount of time a worker can spend handling a request before it
90 ## gets killed and restarted. Set to 6 hours
90 ## gets killed and restarted. Set to 6 hours
91 timeout = 21600
91 timeout = 21600
92
92
93
93
94 ## prefix middleware for RhodeCode.
94 ## prefix middleware for RhodeCode.
95 ## recommended when using a proxy setup.
95 ## recommended when using a proxy setup.
96 ## allows serving RhodeCode under a URL prefix on the server,
96 ## allows serving RhodeCode under a URL prefix on the server,
97 ## eg https://server.com/custom_prefix. Enable the `filter-with =` option below as well
97 ## eg https://server.com/custom_prefix. Enable the `filter-with =` option below as well
98 ## and set your prefix like: `prefix = /custom_prefix`.
98 ## and set your prefix like: `prefix = /custom_prefix`.
99 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
99 ## be sure to also set beaker.session.cookie_path = /custom_prefix if you need
100 ## to make your cookies work only on the prefix url (see the illustrative example below)
100 ## to make your cookies work only on the prefix url (see the illustrative example below)
101 [filter:proxy-prefix]
101 [filter:proxy-prefix]
102 use = egg:PasteDeploy#prefix
102 use = egg:PasteDeploy#prefix
103 prefix = /
103 prefix = /
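An illustrative set of values for serving RhodeCode under https://server.com/custom_prefix (hypothetical prefix; shown commented out):
#prefix = /custom_prefix
## and in [app:main]:
#filter-with = proxy-prefix
#beaker.session.cookie_path = /custom_prefix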
104
104
105 [app:main]
105 [app:main]
106 use = egg:rhodecode-enterprise-ce
106 use = egg:rhodecode-enterprise-ce
107
107
108 ## enable proxy prefix middleware, defined above
108 ## enable proxy prefix middleware, defined above
109 #filter-with = proxy-prefix
109 #filter-with = proxy-prefix
110
110
111 ## encryption key used to encrypt social plugin tokens,
111 ## encryption key used to encrypt social plugin tokens,
112 ## remote_urls with credentials etc, if not set it defaults to
112 ## remote_urls with credentials etc, if not set it defaults to
113 ## `beaker.session.secret`
113 ## `beaker.session.secret`
114 #rhodecode.encrypted_values.secret =
114 #rhodecode.encrypted_values.secret =
115
115
116 ## decryption strict mode (enabled by default). It controls if decryption raises
116 ## decryption strict mode (enabled by default). It controls if decryption raises
117 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
117 ## `SignatureVerificationError` in case of wrong key, or damaged encryption data.
118 #rhodecode.encrypted_values.strict = false
118 #rhodecode.encrypted_values.strict = false
119
119
120 ## return gzipped responses from Rhodecode (static files/application)
120 ## return gzipped responses from Rhodecode (static files/application)
121 gzip_responses = false
121 gzip_responses = false
122
122
123 ## autogenerate javascript routes file on startup
123 ## autogenerate javascript routes file on startup
124 generate_js_files = false
124 generate_js_files = false
125
125
126 ## Optional Languages
126 ## Optional Languages
127 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
127 ## en(default), be, de, es, fr, it, ja, pl, pt, ru, zh
128 lang = en
128 lang = en
129
129
130 ## perform a full repository scan on each server start, this should be
130 ## perform a full repository scan on each server start, this should be
131 ## set to false after first startup, to allow faster server restarts.
131 ## set to false after first startup, to allow faster server restarts.
132 startup.import_repos = false
132 startup.import_repos = false
133
133
134 ## Uncomment and set this path to use archive download cache.
134 ## Uncomment and set this path to use archive download cache.
135 ## Once enabled, generated archives will be cached at this location
135 ## Once enabled, generated archives will be cached at this location
136 ## and served from the cache during subsequent requests for the same archive of
136 ## and served from the cache during subsequent requests for the same archive of
137 ## the repository.
137 ## the repository.
138 #archive_cache_dir = /tmp/tarballcache
138 #archive_cache_dir = /tmp/tarballcache
139
139
140 ## change this to unique ID for security
140 ## change this to unique ID for security
141 app_instance_uuid = rc-production
141 app_instance_uuid = rc-production
142
142
143 ## cut off limit for large diffs (size in bytes)
143 ## cut off limit for large diffs (size in bytes)
144 cut_off_limit_diff = 1024000
144 cut_off_limit_diff = 1024000
145 cut_off_limit_file = 256000
145 cut_off_limit_file = 256000
146
146
147 ## use cache version of scm repo everywhere
147 ## use cache version of scm repo everywhere
148 vcs_full_cache = true
148 vcs_full_cache = true
149
149
150 ## force https in RhodeCode, fixes https redirects, assumes it's always https
150 ## force https in RhodeCode, fixes https redirects, assumes it's always https
151 ## Normally this is controlled by proper http flags sent from http server
151 ## Normally this is controlled by proper http flags sent from http server
152 force_https = false
152 force_https = false
153
153
154 ## use Strict-Transport-Security headers
154 ## use Strict-Transport-Security headers
155 use_htsts = false
155 use_htsts = false
156
156
157 ## number of commits stats will parse on each iteration
157 ## number of commits stats will parse on each iteration
158 commit_parse_limit = 25
158 commit_parse_limit = 25
159
159
160 ## git rev filter option, --all is the default filter, if you need to
160 ## git rev filter option, --all is the default filter, if you need to
161 ## hide all refs in changelog switch this to --branches --tags
161 ## hide all refs in changelog switch this to --branches --tags
162 git_rev_filter = --branches --tags
162 git_rev_filter = --branches --tags
163
163
164 # Set to true if your repos are exposed using the dumb protocol
164 # Set to true if your repos are exposed using the dumb protocol
165 git_update_server_info = false
165 git_update_server_info = false
166
166
167 ## RSS/ATOM feed options
167 ## RSS/ATOM feed options
168 rss_cut_off_limit = 256000
168 rss_cut_off_limit = 256000
169 rss_items_per_page = 10
169 rss_items_per_page = 10
170 rss_include_diff = false
170 rss_include_diff = false
171
171
172 ## gist URL alias, used to create nicer urls for gists. This should be a
172 ## gist URL alias, used to create nicer urls for gists. This should be a
173 ## url that rewrites to _admin/gists/{gistid}.
173 ## url that rewrites to _admin/gists/{gistid}.
174 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
174 ## example: http://gist.rhodecode.org/{gistid}. Empty means use the internal
175 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
175 ## RhodeCode url, ie. http[s]://rhodecode.server/_admin/gists/{gistid}
176 gist_alias_url =
176 gist_alias_url =
177
177
178 ## List of views (using glob pattern syntax) that AUTH TOKENS could be
178 ## List of views (using glob pattern syntax) that AUTH TOKENS could be
179 ## used to access.
179 ## used to access.
180 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
180 ## Adding ?auth_token=TOKEN_HASH to the url authenticates this request as if it
181 ## came from the logged in user who owns this authentication token.
181 ## came from the logged in user who owns this authentication token.
182 ##
182 ##
183 ## a list of all views can be found under `_admin/permissions/auth_token_access`
183 ## a list of all views can be found under `_admin/permissions/auth_token_access`
184 ## The list should be "," separated and on a single line.
184 ## The list should be "," separated and on a single line.
185 ##
185 ##
186 ## Most common views to enable:
186 ## Most common views to enable:
187 # ChangesetController:changeset_patch
187 # RepoCommitsView:repo_commit_download
188 # ChangesetController:changeset_raw
188 # RepoCommitsView:repo_commit_patch
189 # RepoFilesView.repo_files_diff
189 # RepoCommitsView:repo_commit_raw
190 # RepoFilesView.repo_archivefile
190 # RepoFilesView:repo_files_diff
191 # RepoFilesView.repo_file_raw
191 # RepoFilesView:repo_archivefile
192 # RepoFilesView:repo_file_raw
192 # GistView:*
193 # GistView:*
193 api_access_controllers_whitelist =
194 api_access_controllers_whitelist =
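As an illustration of the single-line, comma separated format, a whitelist built from the views listed above might look like this (example values, not defaults):
#api_access_controllers_whitelist = RepoCommitsView:repo_commit_raw, RepoFilesView:repo_archivefile, GistView:*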
194
195
195 ## default encoding used to convert from and to unicode
196 ## default encoding used to convert from and to unicode
196 ## can also be a comma separated list of encodings in case of mixed encodings
197 ## can also be a comma separated list of encodings in case of mixed encodings
197 default_encoding = UTF-8
198 default_encoding = UTF-8
198
199
199 ## instance-id prefix
200 ## instance-id prefix
200 ## a prefix key for this instance used for cache invalidation when running
201 ## a prefix key for this instance used for cache invalidation when running
201 ## multiple instances of rhodecode, make sure it's globally unique for
202 ## multiple instances of rhodecode, make sure it's globally unique for
202 ## all running rhodecode instances. Leave empty if you don't use it
203 ## all running rhodecode instances. Leave empty if you don't use it
203 instance_id =
204 instance_id =
204
205
205 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
206 ## Fallback authentication plugin. Set this to a plugin ID to force the usage
206 ## of an authentication plugin even if it is disabled by its settings.
207 ## of an authentication plugin even if it is disabled by its settings.
207 ## This could be useful if you are unable to log in to the system due to broken
208 ## This could be useful if you are unable to log in to the system due to broken
208 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
209 ## authentication settings. Then you can enable e.g. the internal rhodecode auth
209 ## module to log in again and fix the settings.
210 ## module to log in again and fix the settings.
210 ##
211 ##
211 ## Available builtin plugin IDs (hash is part of the ID):
212 ## Available builtin plugin IDs (hash is part of the ID):
212 ## egg:rhodecode-enterprise-ce#rhodecode
213 ## egg:rhodecode-enterprise-ce#rhodecode
213 ## egg:rhodecode-enterprise-ce#pam
214 ## egg:rhodecode-enterprise-ce#pam
214 ## egg:rhodecode-enterprise-ce#ldap
215 ## egg:rhodecode-enterprise-ce#ldap
215 ## egg:rhodecode-enterprise-ce#jasig_cas
216 ## egg:rhodecode-enterprise-ce#jasig_cas
216 ## egg:rhodecode-enterprise-ce#headers
217 ## egg:rhodecode-enterprise-ce#headers
217 ## egg:rhodecode-enterprise-ce#crowd
218 ## egg:rhodecode-enterprise-ce#crowd
218 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
219 #rhodecode.auth_plugin_fallback = egg:rhodecode-enterprise-ce#rhodecode
219
220
220 ## alternative return HTTP header for failed authentication. Default HTTP
221 ## alternative return HTTP header for failed authentication. Default HTTP
221 ## response is 401 HTTPUnauthorized. Currently HG clients have trouble
222 ## response is 401 HTTPUnauthorized. Currently HG clients have trouble
222 ## handling that, causing a series of failed authentication calls.
223 ## handling that, causing a series of failed authentication calls.
223 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code.
224 ## Set this variable to 403 to return HTTPForbidden, or any other HTTP code.
224 ## This will be served instead of the default 401 on bad authentication
225 ## This will be served instead of the default 401 on bad authentication
225 auth_ret_code =
226 auth_ret_code =
226
227
227 ## use special detection method when serving auth_ret_code, instead of serving
228 ## use special detection method when serving auth_ret_code, instead of serving
228 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
229 ## ret_code directly, use 401 initially (Which triggers credentials prompt)
229 ## and then serve auth_ret_code to clients
230 ## and then serve auth_ret_code to clients
230 auth_ret_code_detection = false
231 auth_ret_code_detection = false
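An illustrative combination (not a default) for clients that struggle with repeated 401 responses, returning 403 after the initial credentials prompt:
#auth_ret_code = 403
#auth_ret_code_detection = true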
231
232
232 ## locking return code. When repository is locked return this HTTP code. 2XX
233 ## locking return code. When repository is locked return this HTTP code. 2XX
233 ## codes don't break the transactions while 4XX codes do
234 ## codes don't break the transactions while 4XX codes do
234 lock_ret_code = 423
235 lock_ret_code = 423
235
236
236 ## allows to change the repository location in settings page
237 ## allows to change the repository location in settings page
237 allow_repo_location_change = true
238 allow_repo_location_change = true
238
239
239 ## allows setting up custom hooks in the settings page
240 ## allows setting up custom hooks in the settings page
240 allow_custom_hooks_settings = true
241 allow_custom_hooks_settings = true
241
242
242 ## generated license token, go to the license page in RhodeCode settings to obtain
243 ## generated license token, go to the license page in RhodeCode settings to obtain
243 ## a new token
244 ## a new token
244 license_token =
245 license_token =
245
246
246 ## supervisor connection uri, for managing supervisor and logs.
247 ## supervisor connection uri, for managing supervisor and logs.
247 supervisor.uri =
248 supervisor.uri =
248 ## supervisord group name/id we only want this RC instance to handle
249 ## supervisord group name/id we only want this RC instance to handle
249 supervisor.group_id = prod
250 supervisor.group_id = prod
250
251
251 ## Display extended labs settings
252 ## Display extended labs settings
252 labs_settings_active = true
253 labs_settings_active = true
253
254
254 ####################################
255 ####################################
255 ### CELERY CONFIG ####
256 ### CELERY CONFIG ####
256 ####################################
257 ####################################
257 use_celery = false
258 use_celery = false
258 broker.host = localhost
259 broker.host = localhost
259 broker.vhost = rabbitmqhost
260 broker.vhost = rabbitmqhost
260 broker.port = 5672
261 broker.port = 5672
261 broker.user = rabbitmq
262 broker.user = rabbitmq
262 broker.password = qweqwe
263 broker.password = qweqwe
263
264
264 celery.imports = rhodecode.lib.celerylib.tasks
265 celery.imports = rhodecode.lib.celerylib.tasks
265
266
266 celery.result.backend = amqp
267 celery.result.backend = amqp
267 celery.result.dburi = amqp://
268 celery.result.dburi = amqp://
268 celery.result.serialier = json
269 celery.result.serialier = json
269
270
270 #celery.send.task.error.emails = true
271 #celery.send.task.error.emails = true
271 #celery.amqp.task.result.expires = 18000
272 #celery.amqp.task.result.expires = 18000
272
273
273 celeryd.concurrency = 2
274 celeryd.concurrency = 2
274 #celeryd.log.file = celeryd.log
275 #celeryd.log.file = celeryd.log
275 celeryd.log.level = debug
276 celeryd.log.level = debug
276 celeryd.max.tasks.per.child = 1
277 celeryd.max.tasks.per.child = 1
277
278
278 ## tasks will never be sent to the queue, but executed locally instead.
279 ## tasks will never be sent to the queue, but executed locally instead.
279 celery.always.eager = false
280 celery.always.eager = false
280
281
281 ####################################
282 ####################################
282 ### BEAKER CACHE ####
283 ### BEAKER CACHE ####
283 ####################################
284 ####################################
284 # default cache dir for templates. Putting this into a ramdisk
285 # default cache dir for templates. Putting this into a ramdisk
285 ## can boost performance, eg. %(here)s/data_ramdisk
286 ## can boost performance, eg. %(here)s/data_ramdisk
286 cache_dir = %(here)s/data
287 cache_dir = %(here)s/data
287
288
288 ## locking and default file storage for Beaker. Putting this into a ramdisk
289 ## locking and default file storage for Beaker. Putting this into a ramdisk
289 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
290 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
290 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
291 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
291 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
292 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
292
293
293 beaker.cache.regions = super_short_term, short_term, long_term, sql_cache_short, auth_plugins, repo_cache_long
294 beaker.cache.regions = super_short_term, short_term, long_term, sql_cache_short, auth_plugins, repo_cache_long
294
295
295 beaker.cache.super_short_term.type = memory
296 beaker.cache.super_short_term.type = memory
296 beaker.cache.super_short_term.expire = 10
297 beaker.cache.super_short_term.expire = 10
297 beaker.cache.super_short_term.key_length = 256
298 beaker.cache.super_short_term.key_length = 256
298
299
299 beaker.cache.short_term.type = memory
300 beaker.cache.short_term.type = memory
300 beaker.cache.short_term.expire = 60
301 beaker.cache.short_term.expire = 60
301 beaker.cache.short_term.key_length = 256
302 beaker.cache.short_term.key_length = 256
302
303
303 beaker.cache.long_term.type = memory
304 beaker.cache.long_term.type = memory
304 beaker.cache.long_term.expire = 36000
305 beaker.cache.long_term.expire = 36000
305 beaker.cache.long_term.key_length = 256
306 beaker.cache.long_term.key_length = 256
306
307
307 beaker.cache.sql_cache_short.type = memory
308 beaker.cache.sql_cache_short.type = memory
308 beaker.cache.sql_cache_short.expire = 10
309 beaker.cache.sql_cache_short.expire = 10
309 beaker.cache.sql_cache_short.key_length = 256
310 beaker.cache.sql_cache_short.key_length = 256
310
311
311 ## default is memory cache, configure only if required
312 ## default is memory cache, configure only if required
312 ## using multi-node or multi-worker setup
313 ## using multi-node or multi-worker setup
313 #beaker.cache.auth_plugins.type = ext:database
314 #beaker.cache.auth_plugins.type = ext:database
314 #beaker.cache.auth_plugins.lock_dir = %(here)s/data/cache/auth_plugin_lock
315 #beaker.cache.auth_plugins.lock_dir = %(here)s/data/cache/auth_plugin_lock
315 #beaker.cache.auth_plugins.url = postgresql://postgres:secret@localhost/rhodecode
316 #beaker.cache.auth_plugins.url = postgresql://postgres:secret@localhost/rhodecode
316 #beaker.cache.auth_plugins.url = mysql://root:secret@127.0.0.1/rhodecode
317 #beaker.cache.auth_plugins.url = mysql://root:secret@127.0.0.1/rhodecode
317 #beaker.cache.auth_plugins.sa.pool_recycle = 3600
318 #beaker.cache.auth_plugins.sa.pool_recycle = 3600
318 #beaker.cache.auth_plugins.sa.pool_size = 10
319 #beaker.cache.auth_plugins.sa.pool_size = 10
319 #beaker.cache.auth_plugins.sa.max_overflow = 0
320 #beaker.cache.auth_plugins.sa.max_overflow = 0
320
321
321 beaker.cache.repo_cache_long.type = memorylru_base
322 beaker.cache.repo_cache_long.type = memorylru_base
322 beaker.cache.repo_cache_long.max_items = 4096
323 beaker.cache.repo_cache_long.max_items = 4096
323 beaker.cache.repo_cache_long.expire = 2592000
324 beaker.cache.repo_cache_long.expire = 2592000
324
325
325 ## default is memorylru_base cache, configure only if required
326 ## default is memorylru_base cache, configure only if required
326 ## using multi-node or multi-worker setup
327 ## using multi-node or multi-worker setup
327 #beaker.cache.repo_cache_long.type = ext:memcached
328 #beaker.cache.repo_cache_long.type = ext:memcached
328 #beaker.cache.repo_cache_long.url = localhost:11211
329 #beaker.cache.repo_cache_long.url = localhost:11211
329 #beaker.cache.repo_cache_long.expire = 1209600
330 #beaker.cache.repo_cache_long.expire = 1209600
330 #beaker.cache.repo_cache_long.key_length = 256
331 #beaker.cache.repo_cache_long.key_length = 256
331
332
332 ####################################
333 ####################################
333 ### BEAKER SESSION ####
334 ### BEAKER SESSION ####
334 ####################################
335 ####################################
335
336
336 ## .session.type is the type of storage used for the session; currently allowed
337 ## .session.type is the type of storage used for the session; currently allowed
337 ## types are file, ext:memcached, ext:database, and memory (default).
338 ## types are file, ext:memcached, ext:database, and memory (default).
338 beaker.session.type = file
339 beaker.session.type = file
339 beaker.session.data_dir = %(here)s/data/sessions/data
340 beaker.session.data_dir = %(here)s/data/sessions/data
340
341
341 ## db based session, fast, and allows easy management of logged in users
342 ## db based session, fast, and allows easy management of logged in users
342 #beaker.session.type = ext:database
343 #beaker.session.type = ext:database
343 #beaker.session.table_name = db_session
344 #beaker.session.table_name = db_session
344 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
345 #beaker.session.sa.url = postgresql://postgres:secret@localhost/rhodecode
345 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
346 #beaker.session.sa.url = mysql://root:secret@127.0.0.1/rhodecode
346 #beaker.session.sa.pool_recycle = 3600
347 #beaker.session.sa.pool_recycle = 3600
347 #beaker.session.sa.echo = false
348 #beaker.session.sa.echo = false
348
349
349 beaker.session.key = rhodecode
350 beaker.session.key = rhodecode
350 beaker.session.secret = production-rc-uytcxaz
351 beaker.session.secret = production-rc-uytcxaz
351 beaker.session.lock_dir = %(here)s/data/sessions/lock
352 beaker.session.lock_dir = %(here)s/data/sessions/lock
352
353
353 ## Secure encrypted cookie. Requires AES and AES python libraries
354 ## Secure encrypted cookie. Requires AES and AES python libraries
354 ## you must disable beaker.session.secret to use this
355 ## you must disable beaker.session.secret to use this
355 #beaker.session.encrypt_key = key_for_encryption
356 #beaker.session.encrypt_key = key_for_encryption
356 #beaker.session.validate_key = validation_key
357 #beaker.session.validate_key = validation_key
357
358
358 ## sets session as invalid (also logging out the user) if it has not been
359 ## sets session as invalid (also logging out the user) if it has not been
359 ## accessed for the given amount of time in seconds
360 ## accessed for the given amount of time in seconds
360 beaker.session.timeout = 2592000
361 beaker.session.timeout = 2592000
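(For reference, 2592000 seconds corresponds to 30 days.)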
361 beaker.session.httponly = true
362 beaker.session.httponly = true
362 ## Path to use for the cookie. Set to prefix if you use prefix middleware
363 ## Path to use for the cookie. Set to prefix if you use prefix middleware
363 #beaker.session.cookie_path = /custom_prefix
364 #beaker.session.cookie_path = /custom_prefix
364
365
365 ## uncomment for https secure cookie
366 ## uncomment for https secure cookie
366 beaker.session.secure = false
367 beaker.session.secure = false
367
368
368 ## auto save the session so that calling .save() is not needed
369 ## auto save the session so that calling .save() is not needed
369 beaker.session.auto = false
370 beaker.session.auto = false
370
371
371 ## default cookie expiration time in seconds, set to `true` to set expire
372 ## default cookie expiration time in seconds, set to `true` to set expire
372 ## at browser close
373 ## at browser close
373 #beaker.session.cookie_expires = 3600
374 #beaker.session.cookie_expires = 3600
374
375
375 ###################################
376 ###################################
376 ## SEARCH INDEXING CONFIGURATION ##
377 ## SEARCH INDEXING CONFIGURATION ##
377 ###################################
378 ###################################
378 ## Full text search indexer is available in rhodecode-tools under
379 ## Full text search indexer is available in rhodecode-tools under
379 ## `rhodecode-tools index` command
380 ## `rhodecode-tools index` command
380
381
381 ## WHOOSH Backend, doesn't require additional services to run
382 ## WHOOSH Backend, doesn't require additional services to run
382 ## it works well with a few dozen repos
383 ## it works well with a few dozen repos
383 search.module = rhodecode.lib.index.whoosh
384 search.module = rhodecode.lib.index.whoosh
384 search.location = %(here)s/data/index
385 search.location = %(here)s/data/index
385
386
386 ########################################
387 ########################################
387 ### CHANNELSTREAM CONFIG ####
388 ### CHANNELSTREAM CONFIG ####
388 ########################################
389 ########################################
389 ## channelstream enables persistent connections and live notifications
390 ## channelstream enables persistent connections and live notifications
390 ## in the system. It's also used by the chat system
391 ## in the system. It's also used by the chat system
391 channelstream.enabled = false
392 channelstream.enabled = false
392
393
393 ## server address for channelstream server on the backend
394 ## server address for channelstream server on the backend
394 channelstream.server = 127.0.0.1:9800
395 channelstream.server = 127.0.0.1:9800
395
396
396 ## location of the channelstream server from the outside world
397 ## location of the channelstream server from the outside world
397 ## use ws:// for http or wss:// for https. This address needs to be handled
398 ## use ws:// for http or wss:// for https. This address needs to be handled
398 ## by an external HTTP server such as Nginx or Apache
399 ## by an external HTTP server such as Nginx or Apache
399 ## see nginx/apache configuration examples in our docs
400 ## see nginx/apache configuration examples in our docs
400 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
401 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
401 channelstream.secret = secret
402 channelstream.secret = secret
402 channelstream.history.location = %(here)s/channelstream_history
403 channelstream.history.location = %(here)s/channelstream_history
403
404
404 ## Internal application path that Javascript uses to connect to.
405 ## Internal application path that Javascript uses to connect to.
405 ## If you use proxy-prefix the prefix should be added before /_channelstream
406 ## If you use proxy-prefix the prefix should be added before /_channelstream
406 channelstream.proxy_path = /_channelstream
407 channelstream.proxy_path = /_channelstream
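For example, with the proxy-prefix filter enabled for a hypothetical /custom_prefix, the path would become:
#channelstream.proxy_path = /custom_prefix/_channelstream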
407
408
408
409
409 ###################################
410 ###################################
410 ## APPENLIGHT CONFIG ##
411 ## APPENLIGHT CONFIG ##
411 ###################################
412 ###################################
412
413
413 ## Appenlight is tailored to work with RhodeCode, see
414 ## Appenlight is tailored to work with RhodeCode, see
414 ## http://appenlight.com for details on how to obtain an account
415 ## http://appenlight.com for details on how to obtain an account
415
416
416 ## appenlight integration enabled
417 ## appenlight integration enabled
417 appenlight = false
418 appenlight = false
418
419
419 appenlight.server_url = https://api.appenlight.com
420 appenlight.server_url = https://api.appenlight.com
420 appenlight.api_key = YOUR_API_KEY
421 appenlight.api_key = YOUR_API_KEY
421 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
422 #appenlight.transport_config = https://api.appenlight.com?threaded=1&timeout=5
422
423
423 # used for JS client
424 # used for JS client
424 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
425 appenlight.api_public_key = YOUR_API_PUBLIC_KEY
425
426
426 ## TWEAK AMOUNT OF INFO SENT HERE
427 ## TWEAK AMOUNT OF INFO SENT HERE
427
428
428 ## enables 404 error logging (default False)
429 ## enables 404 error logging (default False)
429 appenlight.report_404 = false
430 appenlight.report_404 = false
430
431
431 ## time in seconds after which a request is considered slow (default 1)
432 ## time in seconds after which a request is considered slow (default 1)
432 appenlight.slow_request_time = 1
433 appenlight.slow_request_time = 1
433
434
434 ## record slow requests in application
435 ## record slow requests in application
435 ## (needs to be enabled for slow datastore recording and time tracking)
436 ## (needs to be enabled for slow datastore recording and time tracking)
436 appenlight.slow_requests = true
437 appenlight.slow_requests = true
437
438
438 ## enable hooking to application loggers
439 ## enable hooking to application loggers
439 appenlight.logging = true
440 appenlight.logging = true
440
441
441 ## minimum log level for log capture
442 ## minimum log level for log capture
442 appenlight.logging.level = WARNING
443 appenlight.logging.level = WARNING
443
444
444 ## send logs only from erroneous/slow requests
445 ## send logs only from erroneous/slow requests
445 ## (saves API quota for intensive logging)
446 ## (saves API quota for intensive logging)
446 appenlight.logging_on_error = false
447 appenlight.logging_on_error = false
447
448
448 ## list of additional keywords that should be grabbed from environ object
449 ## list of additional keywords that should be grabbed from environ object
449 ## can be a string with a comma separated list of words in lowercase
450 ## can be a string with a comma separated list of words in lowercase
450 ## (by default the client will always send the following info:
451 ## (by default the client will always send the following info:
451 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
452 ## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
452 ## start with HTTP*); this list can be extended with additional keywords here
453 ## start with HTTP*); this list can be extended with additional keywords here
453 appenlight.environ_keys_whitelist =
454 appenlight.environ_keys_whitelist =
454
455
455 ## list of keywords that should be blanked from request object
456 ## list of keywords that should be blanked from request object
456 ## can be a string with a comma separated list of words in lowercase
457 ## can be a string with a comma separated list of words in lowercase
457 ## (by default the client will always blank keys that contain the following words:
458 ## (by default the client will always blank keys that contain the following words:
458 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf')
459 ## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf')
459 ## this list can be extended with additional keywords set here
460 ## this list can be extended with additional keywords set here
460 appenlight.request_keys_blacklist =
461 appenlight.request_keys_blacklist =
461
462
462 ## list of namespaces that should be ignored when gathering log entries
463 ## list of namespaces that should be ignored when gathering log entries
463 ## can be string with comma separated list of namespaces
464 ## can be string with comma separated list of namespaces
464 ## (by default the client ignores own entries: appenlight_client.client)
465 ## (by default the client ignores own entries: appenlight_client.client)
465 appenlight.log_namespace_blacklist =
466 appenlight.log_namespace_blacklist =
466
467
467
468
468 ################################################################################
469 ################################################################################
469 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
470 ## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT* ##
470 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
471 ## Debug mode will enable the interactive debugging tool, allowing ANYONE to ##
471 ## execute malicious code after an exception is raised. ##
472 ## execute malicious code after an exception is raised. ##
472 ################################################################################
473 ################################################################################
473 set debug = false
474 set debug = false
474
475
475
476
476 ###########################################
477 ###########################################
477 ### MAIN RHODECODE DATABASE CONFIG ###
478 ### MAIN RHODECODE DATABASE CONFIG ###
478 ###########################################
479 ###########################################
479 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
480 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
480 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
481 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
481 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode
482 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode
482 sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
483 sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
483
484
484 # see sqlalchemy docs for other advanced settings
485 # see sqlalchemy docs for other advanced settings
485
486
486 ## print the sql statements to output
487 ## print the sql statements to output
487 sqlalchemy.db1.echo = false
488 sqlalchemy.db1.echo = false
489 ## recycle the connections after this number of seconds
490 ## recycle the connections after this number of seconds
489 sqlalchemy.db1.pool_recycle = 3600
490 sqlalchemy.db1.pool_recycle = 3600
490 sqlalchemy.db1.convert_unicode = true
491 sqlalchemy.db1.convert_unicode = true
491
492
492 ## the number of connections to keep open inside the connection pool.
493 ## the number of connections to keep open inside the connection pool.
493 ## 0 indicates no limit
494 ## 0 indicates no limit
494 #sqlalchemy.db1.pool_size = 5
495 #sqlalchemy.db1.pool_size = 5
495
496
496 ## the number of connections to allow in connection pool "overflow", that is
497 ## the number of connections to allow in connection pool "overflow", that is
497 ## connections that can be opened above and beyond the pool_size setting,
498 ## connections that can be opened above and beyond the pool_size setting,
498 ## which defaults to five.
499 ## which defaults to five.
499 #sqlalchemy.db1.max_overflow = 10
500 #sqlalchemy.db1.max_overflow = 10
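A hypothetical sizing for a busier instance, raising both pool settings together (illustrative values only):
#sqlalchemy.db1.pool_size = 10
#sqlalchemy.db1.max_overflow = 20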
500
501
501
502
502 ##################
503 ##################
503 ### VCS CONFIG ###
504 ### VCS CONFIG ###
504 ##################
505 ##################
505 vcs.server.enable = true
506 vcs.server.enable = true
506 vcs.server = localhost:9900
507 vcs.server = localhost:9900
507
508
508 ## Web server connectivity protocol, responsible for web based VCS operations
509 ## Web server connectivity protocol, responsible for web based VCS operations
509 ## Available protocols are:
510 ## Available protocols are:
510 ## `http` - use http-rpc backend (default)
511 ## `http` - use http-rpc backend (default)
511 vcs.server.protocol = http
512 vcs.server.protocol = http
512
513
513 ## Push/Pull operations protocol, available options are:
514 ## Push/Pull operations protocol, available options are:
514 ## `http` - use http-rpc backend (default)
515 ## `http` - use http-rpc backend (default)
515 ##
516 ##
516 vcs.scm_app_implementation = http
517 vcs.scm_app_implementation = http
517
518
518 ## Push/Pull operations hooks protocol, available options are:
519 ## Push/Pull operations hooks protocol, available options are:
519 ## `http` - use http-rpc backend (default)
520 ## `http` - use http-rpc backend (default)
520 vcs.hooks.protocol = http
521 vcs.hooks.protocol = http
521
522
522 vcs.server.log_level = info
523 vcs.server.log_level = info
523 ## Start VCSServer with this instance as a subprocess, useful for development
524 ## Start VCSServer with this instance as a subprocess, useful for development
524 vcs.start_server = false
525 vcs.start_server = false
525
526
526 ## List of enabled VCS backends, available options are:
527 ## List of enabled VCS backends, available options are:
527 ## `hg` - mercurial
528 ## `hg` - mercurial
528 ## `git` - git
529 ## `git` - git
529 ## `svn` - subversion
530 ## `svn` - subversion
530 vcs.backends = hg, git, svn
531 vcs.backends = hg, git, svn
531
532
532 vcs.connection_timeout = 3600
533 vcs.connection_timeout = 3600
533 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
534 ## Compatibility version when creating SVN repositories. Defaults to newest version when commented out.
534 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible
535 ## Available options are: pre-1.4-compatible, pre-1.5-compatible, pre-1.6-compatible, pre-1.8-compatible
535 #vcs.svn.compatible_version = pre-1.8-compatible
536 #vcs.svn.compatible_version = pre-1.8-compatible
536
537
537
538
538 ############################################################
539 ############################################################
539 ### Subversion proxy support (mod_dav_svn) ###
540 ### Subversion proxy support (mod_dav_svn) ###
540 ### Maps RhodeCode repo groups into SVN paths for Apache ###
541 ### Maps RhodeCode repo groups into SVN paths for Apache ###
541 ############################################################
542 ############################################################
542 ## Enable or disable the config file generation.
543 ## Enable or disable the config file generation.
543 svn.proxy.generate_config = false
544 svn.proxy.generate_config = false
544 ## Generate config file with `SVNListParentPath` set to `On`.
545 ## Generate config file with `SVNListParentPath` set to `On`.
545 svn.proxy.list_parent_path = true
546 svn.proxy.list_parent_path = true
546 ## Set location and file name of generated config file.
547 ## Set location and file name of generated config file.
547 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
548 svn.proxy.config_file_path = %(here)s/mod_dav_svn.conf
548 ## Used as a prefix to the `Location` block in the generated config file.
549 ## Used as a prefix to the `Location` block in the generated config file.
549 ## In most cases it should be set to `/`.
550 ## In most cases it should be set to `/`.
550 svn.proxy.location_root = /
551 svn.proxy.location_root = /
551 ## Command to reload the mod dav svn configuration on change.
552 ## Command to reload the mod dav svn configuration on change.
552 ## Example: `/etc/init.d/apache2 reload`
553 ## Example: `/etc/init.d/apache2 reload`
553 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
554 #svn.proxy.reload_cmd = /etc/init.d/apache2 reload
554 ## If the timeout expires before the reload command finishes, the command will
555 ## If the timeout expires before the reload command finishes, the command will
555 ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
556 ## be killed. Setting it to zero means no timeout. Defaults to 10 seconds.
556 #svn.proxy.reload_timeout = 10
557 #svn.proxy.reload_timeout = 10
557
558
558 ## Dummy marker to add new entries after.
559 ## Dummy marker to add new entries after.
559 ## Add any custom entries below. Please don't remove.
560 ## Add any custom entries below. Please don't remove.
560 custom.conf = 1
561 custom.conf = 1
561
562
562
563
563 ################################
564 ################################
564 ### LOGGING CONFIGURATION ####
565 ### LOGGING CONFIGURATION ####
565 ################################
566 ################################
566 [loggers]
567 [loggers]
567 keys = root, routes, rhodecode, sqlalchemy, beaker, templates
568 keys = root, routes, rhodecode, sqlalchemy, beaker, templates
568
569
569 [handlers]
570 [handlers]
570 keys = console, console_sql
571 keys = console, console_sql
571
572
572 [formatters]
573 [formatters]
573 keys = generic, color_formatter, color_formatter_sql
574 keys = generic, color_formatter, color_formatter_sql
574
575
575 #############
576 #############
576 ## LOGGERS ##
577 ## LOGGERS ##
577 #############
578 #############
578 [logger_root]
579 [logger_root]
579 level = NOTSET
580 level = NOTSET
580 handlers = console
581 handlers = console
581
582
582 [logger_routes]
583 [logger_routes]
583 level = DEBUG
584 level = DEBUG
584 handlers =
585 handlers =
585 qualname = routes.middleware
586 qualname = routes.middleware
586 ## "level = DEBUG" logs the route matched and routing variables.
587 ## "level = DEBUG" logs the route matched and routing variables.
587 propagate = 1
588 propagate = 1
588
589
589 [logger_beaker]
590 [logger_beaker]
590 level = DEBUG
591 level = DEBUG
591 handlers =
592 handlers =
592 qualname = beaker.container
593 qualname = beaker.container
593 propagate = 1
594 propagate = 1
594
595
595 [logger_templates]
596 [logger_templates]
596 level = INFO
597 level = INFO
597 handlers =
598 handlers =
598 qualname = pylons.templating
599 qualname = pylons.templating
599 propagate = 1
600 propagate = 1
600
601
601 [logger_rhodecode]
602 [logger_rhodecode]
602 level = DEBUG
603 level = DEBUG
603 handlers =
604 handlers =
604 qualname = rhodecode
605 qualname = rhodecode
605 propagate = 1
606 propagate = 1
606
607
607 [logger_sqlalchemy]
608 [logger_sqlalchemy]
608 level = INFO
609 level = INFO
609 handlers = console_sql
610 handlers = console_sql
610 qualname = sqlalchemy.engine
611 qualname = sqlalchemy.engine
611 propagate = 0
612 propagate = 0
612
613
613 ##############
614 ##############
614 ## HANDLERS ##
615 ## HANDLERS ##
615 ##############
616 ##############
616
617
617 [handler_console]
618 [handler_console]
618 class = StreamHandler
619 class = StreamHandler
619 args = (sys.stderr, )
620 args = (sys.stderr, )
620 level = INFO
621 level = INFO
621 formatter = generic
622 formatter = generic
622
623
623 [handler_console_sql]
624 [handler_console_sql]
624 class = StreamHandler
625 class = StreamHandler
625 args = (sys.stderr, )
626 args = (sys.stderr, )
626 level = WARN
627 level = WARN
627 formatter = generic
628 formatter = generic
628
629
629 ################
630 ################
630 ## FORMATTERS ##
631 ## FORMATTERS ##
631 ################
632 ################
632
633
633 [formatter_generic]
634 [formatter_generic]
634 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
635 class = rhodecode.lib.logging_formatter.ExceptionAwareFormatter
635 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
636 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
636 datefmt = %Y-%m-%d %H:%M:%S
637 datefmt = %Y-%m-%d %H:%M:%S
637
638
638 [formatter_color_formatter]
639 [formatter_color_formatter]
639 class = rhodecode.lib.logging_formatter.ColorFormatter
640 class = rhodecode.lib.logging_formatter.ColorFormatter
640 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
641 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
641 datefmt = %Y-%m-%d %H:%M:%S
642 datefmt = %Y-%m-%d %H:%M:%S
642
643
643 [formatter_color_formatter_sql]
644 [formatter_color_formatter_sql]
644 class = rhodecode.lib.logging_formatter.ColorFormatterSql
645 class = rhodecode.lib.logging_formatter.ColorFormatterSql
645 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
646 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
646 datefmt = %Y-%m-%d %H:%M:%S
647 datefmt = %Y-%m-%d %H:%M:%S
@@ -1,207 +1,207 b''
1 .. _api:
1 .. _api:
2
2
3 API Documentation
3 API Documentation
4 =================
4 =================
5
5
6 The |RCE| API uses a single scheme for calling all API methods. The API is
6 The |RCE| API uses a single scheme for calling all API methods. The API is
7 implemented as a JSON protocol in both directions. To send API requests to
7 implemented as a JSON protocol in both directions. To send API requests to
8 your instance of |RCE|, use the following URL format:
8 your instance of |RCE|, use the following URL format:
9 ``<your_server>/_admin``
9 ``<your_server>/_admin``
10
10
11 .. note::
11 .. note::
12
12
13 To use the API, you should configure the :file:`~/.rhoderc` file with
13 To use the API, you should configure the :file:`~/.rhoderc` file with
14 access details per instance. For more information, see
14 access details per instance. For more information, see
15 :ref:`config-rhoderc`.
15 :ref:`config-rhoderc`.
16
16
17
17
18 API ACCESS FOR WEB VIEWS
18 API ACCESS FOR WEB VIEWS
19 ------------------------
19 ------------------------
20
20
21 API access can also be turned on for each web view in |RCE| that is
21 API access can also be turned on for each web view in |RCE| that is
22 decorated with a `@LoginRequired` decorator. To enable API access, change
22 decorated with a `@LoginRequired` decorator. To enable API access, change
23 the standard login decorator to `@LoginRequired(api_access=True)`.
23 the standard login decorator to `@LoginRequired(api_access=True)`.
24
24
25 From |RCM| version 1.7.0 you can configure a whitelist
25 From |RCM| version 1.7.0 you can configure a whitelist
26 of views that have API access enabled by default. To enable these,
26 of views that have API access enabled by default. To enable these,
27 edit the |RCM| configuration ``.ini`` file. The default location is:
27 edit the |RCM| configuration ``.ini`` file. The default location is:
28
28
29 * |RCM| Pre-2.2.7 :file:`root/rhodecode/data/production.ini`
29 * |RCM| Pre-2.2.7 :file:`root/rhodecode/data/production.ini`
30 * |RCM| 3.0 :file:`/home/{user}/.rccontrol/{instance-id}/rhodecode.ini`
30 * |RCM| 3.0 :file:`/home/{user}/.rccontrol/{instance-id}/rhodecode.ini`
31
31
32 To configure the whitelist, edit this section of the file. In this
32 To configure the whitelist, edit this section of the file. In this
33 configuration example, API access is granted to the raw patch/diff views and
33 configuration example, API access is granted to the raw patch/diff views and
34 the archive download.
34 the archive download.
35
35
36 .. code-block:: ini
36 .. code-block:: ini
37
37
38 ## List of controllers (using glob syntax) that AUTH TOKENS can be used to access.
38 ## List of controllers (using glob syntax) that AUTH TOKENS can be used to access.
39 ## Adding ?auth_token=<token> to the URL authenticates this request as if it
39 ## Adding ?auth_token=<token> to the URL authenticates this request as if it
40 ## came from the logged-in user who owns this authentication token.
40 ## came from the logged-in user who owns this authentication token.
41 ##
41 ##
42 ## Syntax is <ControllerClass>:<function_pattern>.
42 ## Syntax is <ControllerClass>:<function_pattern>.
43 ## The list should be "," separated and on a single line.
43 ## The list should be "," separated and on a single line.
44 ##
44 ##
45 api_access_controllers_whitelist = ChangesetController:changeset_patch,ChangesetController:changeset_raw,FilesController:raw,FilesController:archivefile,
45 api_access_controllers_whitelist = RepoCommitsView:repo_commit_raw,RepoCommitsView:repo_commit_patch,RepoCommitsView:repo_commit_download
46
46
47 After this change, a |RCE| view can be accessed without login by adding a
47 After this change, a |RCE| view can be accessed without login by adding a
48 GET parameter ``?auth_token=<auth_token>`` to a URL. For example, to
48 GET parameter ``?auth_token=<auth_token>`` to a URL. For example, to
49 access the raw diff:
49 access the raw diff:
50
50
51 .. code-block:: html
51 .. code-block:: html
52
52
53 http://<server>/<repo>/changeset-diff/<sha>?auth_token=<auth_token>
53 http://<server>/<repo>/changeset-diff/<sha>?auth_token=<auth_token>
54
54
55 By default this is only enabled on RSS/ATOM feed views. Exposing raw diffs is a
55 By default this is only enabled on RSS/ATOM feed views. Exposing raw diffs is a
56 good way to integrate with third-party services such as code review tools, or
56 good way to integrate with third-party services such as code review tools, or
57 build farms that download archives.
57 build farms that download archives.
58
58
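For example, a build job could fetch a raw diff from a whitelisted view with a
short script. The sketch below is illustrative only; ``<server>``, ``<repo>``,
``<sha>`` and ``<auth_token>`` are placeholders that must be substituted with
real values from your instance.

.. code-block:: python

    # Minimal sketch: fetch a raw diff through a whitelisted view using an
    # auth token passed as a GET parameter. All angle-bracket values are
    # placeholders, not working defaults.
    import urllib2

    url = ('http://<server>/<repo>/changeset-diff/<sha>'
           '?auth_token=<auth_token>')
    raw_diff = urllib2.urlopen(url).read()
    print raw_diff
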
59 API ACCESS
59 API ACCESS
60 ----------
60 ----------
61
61
62 All clients are required to send JSON data that conforms to the JSON-RPC specification.
62 All clients are required to send JSON data that conforms to the JSON-RPC specification.
63
63
64 .. code-block:: bash
64 .. code-block:: bash
65
65
66 {
66 {
67 "id":"<id>",
67 "id":"<id>",
68 "auth_token":"<auth_token>",
68 "auth_token":"<auth_token>",
69 "method":"<method_name>",
69 "method":"<method_name>",
70 "args":{"<arg_key>":"<arg_val>"}
70 "args":{"<arg_key>":"<arg_val>"}
71 }
71 }
72
72
73 Example call for auto pulling from remote repositories using curl:
73 Example call for auto pulling from remote repositories using curl:
74
74
75 .. code-block:: bash
75 .. code-block:: bash
76
76
77 curl https://server.com/_admin/api -X POST -H 'content-type:text/plain' --data-binary '{"id":1,
77 curl https://server.com/_admin/api -X POST -H 'content-type:text/plain' --data-binary '{"id":1,
78 "auth_token":"xe7cdb2v278e4evbdf5vs04v832v0efvcbcve4a3","method":"pull", "args":{"repoid":"CPython"}}'
78 "auth_token":"xe7cdb2v278e4evbdf5vs04v832v0efvcbcve4a3","method":"pull", "args":{"repoid":"CPython"}}'
79
79
80 Provide these parameters:
80 Provide these parameters:
81 - **id** A value of any type, which is used to match the response with the
81 - **id** A value of any type, which is used to match the response with the
82 request that it is replying to.
82 request that it is replying to.
83 - **auth_token** for access and permission validation.
83 - **auth_token** for access and permission validation.
84 - **method** is the name of the method to call.
84 - **method** is the name of the method to call.
85 - **args** is a ``key:value`` dictionary of arguments to pass to the method.
85 - **args** is a ``key:value`` dictionary of arguments to pass to the method.
86
86
87 .. note::
87 .. note::
88
88
89 To get your |authtoken| from the |RCE| interface,
89 To get your |authtoken| from the |RCE| interface,
90 go to:
90 go to:
91 :menuselection:`username --> My account --> Auth tokens`
91 :menuselection:`username --> My account --> Auth tokens`
92
92
93 For security reasons you should always create a dedicated |authtoken| for
93 For security reasons you should always create a dedicated |authtoken| for
94 API use only.
94 API use only.
95
95
96
96
97 The |RCE| API will always return a JSON-RPC response:
97 The |RCE| API will always return a JSON-RPC response:
98
98
99 .. code-block:: bash
99 .. code-block:: bash
100
100
101 {
101 {
102 "id": <id>, # matching id sent by request
102 "id": <id>, # matching id sent by request
103 "result": "<result>"|null, # JSON formatted result, null if any errors
103 "result": "<result>"|null, # JSON formatted result, null if any errors
104 "error": null|"<error_message>" # JSON formatted error (if any)
104 "error": null|"<error_message>" # JSON formatted error (if any)
105 }
105 }
106
106
107 All responses from the API are returned with an `HTTP/1.0 200 OK` status code.
107 All responses from the API are returned with an `HTTP/1.0 200 OK` status code.
108 If there is an error when calling the API, the *error* key will contain a
108 If there is an error when calling the API, the *error* key will contain a
109 failure description and the *result* will be `null`.
109 failure description and the *result* will be `null`.
110
110
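Because errors are reported inside the JSON body rather than through the HTTP
status code, clients should inspect the ``error`` key themselves. The snippet
below is a minimal sketch of such a check from Python; the server URL, auth
token and method arguments are placeholders, not values from a real instance.

.. code-block:: python

    # Minimal sketch of a JSON-RPC call with basic error handling.
    # Replace the placeholder URL, token and arguments before use.
    import json
    import urllib2

    payload = {
        "id": 1,
        "auth_token": "<auth_token>",
        "method": "get_repo",
        "args": {"repoid": "rhodecode"},
    }
    request = urllib2.Request(
        "https://<your_server>/_admin/api",
        data=json.dumps(payload),
        headers={'content-type': 'text/plain'})
    response = json.loads(urllib2.urlopen(request).read())

    if response['error'] is not None:
        print 'API error: %s' % response['error']
    else:
        print response['result']
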
111 API CLIENT
111 API CLIENT
112 ----------
112 ----------
113
113
114 To install the |RCE| API, see :ref:`install-tools`. To configure the API per
114 To install the |RCE| API, see :ref:`install-tools`. To configure the API per
115 instance, see the :ref:`rc-tools` section as you need to configure a
115 instance, see the :ref:`rc-tools` section as you need to configure a
116 :file:`~/.rhoderc` file with your |authtokens|.
116 :file:`~/.rhoderc` file with your |authtokens|.
117
117
118 Once you have set up your instance API access, use the following examples to
118 Once you have set up your instance API access, use the following examples to
119 get started.
119 get started.
120
120
121 .. code-block:: bash
121 .. code-block:: bash
122
122
123 # Getting the 'rhodecode' repository
123 # Getting the 'rhodecode' repository
124 # from a RhodeCode Enterprise instance
124 # from a RhodeCode Enterprise instance
125 rhodecode-api --instance-name=enterprise-1 get_repo repoid:rhodecode
125 rhodecode-api --instance-name=enterprise-1 get_repo repoid:rhodecode
126
126
127 Calling method get_repo => http://127.0.0.1:5000
127 Calling method get_repo => http://127.0.0.1:5000
128 Server response
128 Server response
129 {
129 {
130 <json data>
130 <json data>
131 }
131 }
132
132
133 # Creating a new mercurial repository called 'brand-new'
133 # Creating a new mercurial repository called 'brand-new'
134 # with a description 'Repo-description'
134 # with a description 'Repo-description'
135 rhodecode-api --instance-name=enterprise-1 create_repo repo_name:brand-new repo_type:hg description:Repo-description
135 rhodecode-api --instance-name=enterprise-1 create_repo repo_name:brand-new repo_type:hg description:Repo-description
136 {
136 {
137 "error": null,
137 "error": null,
138 "id": 1110,
138 "id": 1110,
139 "result": {
139 "result": {
140 "msg": "Created new repository `brand-new`",
140 "msg": "Created new repository `brand-new`",
141 "success": true,
141 "success": true,
142 "task": null
142 "task": null
143 }
143 }
144 }
144 }
145
145
146 A broken example, showing what not to do:
146 A broken example, showing what not to do:
147
147
148 .. code-block:: bash
148 .. code-block:: bash
149
149
150 # A call missing the required arguments
150 # A call missing the required arguments
151 # and not specifying the instance
151 # and not specifying the instance
152 rhodecode-api get_repo
152 rhodecode-api get_repo
153
153
154 Calling method get_repo => http://127.0.0.1:5000
154 Calling method get_repo => http://127.0.0.1:5000
155 Server response
155 Server response
156 "Missing non optional `repoid` arg in JSON DATA"
156 "Missing non optional `repoid` arg in JSON DATA"
157
157
158 You can specify pure JSON using the ``--format`` parameter.
158 You can specify pure JSON using the ``--format`` parameter.
159
159
160 .. code-block:: bash
160 .. code-block:: bash
161
161
162 rhodecode-api --format=json get_repo repoid:rhodecode
162 rhodecode-api --format=json get_repo repoid:rhodecode
163
163
164 In that case the only output this command prints is pure JSON, which makes it
164 In that case the only output this command prints is pure JSON, which makes it
165 easy to consume from scripts.
165 easy to consume from scripts.
166
166
167 Since the output is pure JSON, you can pipe it to a JSON formatter.
167 Since the output is pure JSON, you can pipe it to a JSON formatter.
168
168
169 .. code-block:: bash
169 .. code-block:: bash
170
170
171 rhodecode-api --instance-name=enterprise-1 --format=json get_repo repoid:rhodecode | python -m json.tool
171 rhodecode-api --instance-name=enterprise-1 --format=json get_repo repoid:rhodecode | python -m json.tool
172
172
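The JSON output also makes it straightforward to drive the CLI from a script.
The following is a minimal sketch that shells out to ``rhodecode-api`` and
parses whatever JSON it prints; it assumes the ``enterprise-1`` instance from
the examples above is already configured in your :file:`~/.rhoderc`.

.. code-block:: python

    # Minimal sketch: call the rhodecode-api CLI and parse its JSON output.
    # Assumes the 'enterprise-1' instance is configured in ~/.rhoderc.
    import json
    import subprocess

    output = subprocess.check_output([
        'rhodecode-api', '--instance-name=enterprise-1', '--format=json',
        'get_repo', 'repoid:rhodecode'])
    data = json.loads(output)
    print json.dumps(data, indent=2)
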
173 API METHODS
173 API METHODS
174 -----------
174 -----------
175
175
176 Each method requires the following arguments by default.
176 Each method requires the following arguments by default.
177
177
178 .. code-block:: bash
178 .. code-block:: bash
179
179
180 id : "<id_for_response>"
180 id : "<id_for_response>"
181 auth_token : "<auth_token>"
181 auth_token : "<auth_token>"
182 method : "<method name>"
182 method : "<method name>"
183 args : {}
183 args : {}
184
184
185 Take each **param** from the method documentation and put it in ``args``; optional
185 Take each **param** from the method documentation and put it in ``args``; optional
186 parameters may be omitted from ``args``.
186 parameters may be omitted from ``args``.
187
187
188 .. code-block:: bash
188 .. code-block:: bash
189
189
190 args: {"repoid": "rhodecode"}
190 args: {"repoid": "rhodecode"}
191
191
192 .. Note: From this point on things are generated by the script in
192 .. Note: From this point on things are generated by the script in
193 `scripts/fabfile.py`. To change things below, update the docstrings in the
193 `scripts/fabfile.py`. To change things below, update the docstrings in the
194 ApiController.
194 ApiController.
195
195
196 .. --- API DEFS MARKER ---
196 .. --- API DEFS MARKER ---
197 .. toctree::
197 .. toctree::
198
198
199 methods/license-methods
199 methods/license-methods
200 methods/deprecated-methods
200 methods/deprecated-methods
201 methods/gist-methods
201 methods/gist-methods
202 methods/pull-request-methods
202 methods/pull-request-methods
203 methods/repo-methods
203 methods/repo-methods
204 methods/repo-group-methods
204 methods/repo-group-methods
205 methods/server-methods
205 methods/server-methods
206 methods/user-methods
206 methods/user-methods
207 methods/user-group-methods
207 methods/user-group-methods
@@ -1,304 +1,304 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2017 RhodeCode GmbH
3 # Copyright (C) 2016-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import re
21 import re
22 import logging
22 import logging
23
23
24 from pyramid.view import view_config
24 from pyramid.view import view_config
25
25
26 from rhodecode.apps._base import BaseAppView
26 from rhodecode.apps._base import BaseAppView
27 from rhodecode.lib import helpers as h
27 from rhodecode.lib import helpers as h
28 from rhodecode.lib.auth import LoginRequired, NotAnonymous, \
28 from rhodecode.lib.auth import LoginRequired, NotAnonymous, \
29 HasRepoGroupPermissionAnyDecorator
29 HasRepoGroupPermissionAnyDecorator
30 from rhodecode.lib.index import searcher_from_config
30 from rhodecode.lib.index import searcher_from_config
31 from rhodecode.lib.utils2 import safe_unicode, str2bool
31 from rhodecode.lib.utils2 import safe_unicode, str2bool
32 from rhodecode.lib.ext_json import json
32 from rhodecode.lib.ext_json import json
33 from rhodecode.model.db import func, Repository, RepoGroup
33 from rhodecode.model.db import func, Repository, RepoGroup
34 from rhodecode.model.repo import RepoModel
34 from rhodecode.model.repo import RepoModel
35 from rhodecode.model.repo_group import RepoGroupModel
35 from rhodecode.model.repo_group import RepoGroupModel
36 from rhodecode.model.scm import ScmModel, RepoGroupList, RepoList
36 from rhodecode.model.scm import ScmModel, RepoGroupList, RepoList
37 from rhodecode.model.user import UserModel
37 from rhodecode.model.user import UserModel
38 from rhodecode.model.user_group import UserGroupModel
38 from rhodecode.model.user_group import UserGroupModel
39
39
40 log = logging.getLogger(__name__)
40 log = logging.getLogger(__name__)
41
41
42
42
43 class HomeView(BaseAppView):
43 class HomeView(BaseAppView):
44
44
45 def load_default_context(self):
45 def load_default_context(self):
46 c = self._get_local_tmpl_context()
46 c = self._get_local_tmpl_context()
47 c.user = c.auth_user.get_instance()
47 c.user = c.auth_user.get_instance()
48 self._register_global_c(c)
48 self._register_global_c(c)
49 return c
49 return c
50
50
51 @LoginRequired()
51 @LoginRequired()
52 @view_config(
52 @view_config(
53 route_name='user_autocomplete_data', request_method='GET',
53 route_name='user_autocomplete_data', request_method='GET',
54 renderer='json_ext', xhr=True)
54 renderer='json_ext', xhr=True)
55 def user_autocomplete_data(self):
55 def user_autocomplete_data(self):
56 query = self.request.GET.get('query')
56 query = self.request.GET.get('query')
57 active = str2bool(self.request.GET.get('active') or True)
57 active = str2bool(self.request.GET.get('active') or True)
58 include_groups = str2bool(self.request.GET.get('user_groups'))
58 include_groups = str2bool(self.request.GET.get('user_groups'))
59 expand_groups = str2bool(self.request.GET.get('user_groups_expand'))
59 expand_groups = str2bool(self.request.GET.get('user_groups_expand'))
60 skip_default_user = str2bool(self.request.GET.get('skip_default_user'))
60 skip_default_user = str2bool(self.request.GET.get('skip_default_user'))
61
61
62 log.debug('generating user list, query:%s, active:%s, with_groups:%s',
62 log.debug('generating user list, query:%s, active:%s, with_groups:%s',
63 query, active, include_groups)
63 query, active, include_groups)
64
64
65 _users = UserModel().get_users(
65 _users = UserModel().get_users(
66 name_contains=query, only_active=active)
66 name_contains=query, only_active=active)
67
67
68 def maybe_skip_default_user(usr):
68 def maybe_skip_default_user(usr):
69 if skip_default_user and usr['username'] == UserModel.cls.DEFAULT_USER:
69 if skip_default_user and usr['username'] == UserModel.cls.DEFAULT_USER:
70 return False
70 return False
71 return True
71 return True
72 _users = filter(maybe_skip_default_user, _users)
72 _users = filter(maybe_skip_default_user, _users)
73
73
74 if include_groups:
74 if include_groups:
75 # extend with user groups
75 # extend with user groups
76 _user_groups = UserGroupModel().get_user_groups(
76 _user_groups = UserGroupModel().get_user_groups(
77 name_contains=query, only_active=active,
77 name_contains=query, only_active=active,
78 expand_groups=expand_groups)
78 expand_groups=expand_groups)
79 _users = _users + _user_groups
79 _users = _users + _user_groups
80
80
81 return {'suggestions': _users}
81 return {'suggestions': _users}
82
82
83 @LoginRequired()
83 @LoginRequired()
84 @NotAnonymous()
84 @NotAnonymous()
85 @view_config(
85 @view_config(
86 route_name='user_group_autocomplete_data', request_method='GET',
86 route_name='user_group_autocomplete_data', request_method='GET',
87 renderer='json_ext', xhr=True)
87 renderer='json_ext', xhr=True)
88 def user_group_autocomplete_data(self):
88 def user_group_autocomplete_data(self):
89 query = self.request.GET.get('query')
89 query = self.request.GET.get('query')
90 active = str2bool(self.request.GET.get('active') or True)
90 active = str2bool(self.request.GET.get('active') or True)
91 expand_groups = str2bool(self.request.GET.get('user_groups_expand'))
91 expand_groups = str2bool(self.request.GET.get('user_groups_expand'))
92
92
93 log.debug('generating user group list, query:%s, active:%s',
93 log.debug('generating user group list, query:%s, active:%s',
94 query, active)
94 query, active)
95
95
96 _user_groups = UserGroupModel().get_user_groups(
96 _user_groups = UserGroupModel().get_user_groups(
97 name_contains=query, only_active=active,
97 name_contains=query, only_active=active,
98 expand_groups=expand_groups)
98 expand_groups=expand_groups)
99 _user_groups = _user_groups
99 _user_groups = _user_groups
100
100
101 return {'suggestions': _user_groups}
101 return {'suggestions': _user_groups}
102
102
103 def _get_repo_list(self, name_contains=None, repo_type=None, limit=20):
103 def _get_repo_list(self, name_contains=None, repo_type=None, limit=20):
104 query = Repository.query()\
104 query = Repository.query()\
105 .order_by(func.length(Repository.repo_name))\
105 .order_by(func.length(Repository.repo_name))\
106 .order_by(Repository.repo_name)
106 .order_by(Repository.repo_name)
107
107
108 if repo_type:
108 if repo_type:
109 query = query.filter(Repository.repo_type == repo_type)
109 query = query.filter(Repository.repo_type == repo_type)
110
110
111 if name_contains:
111 if name_contains:
112 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
112 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
113 query = query.filter(
113 query = query.filter(
114 Repository.repo_name.ilike(ilike_expression))
114 Repository.repo_name.ilike(ilike_expression))
115 query = query.limit(limit)
115 query = query.limit(limit)
116
116
117 all_repos = query.all()
117 all_repos = query.all()
118 # permission checks are inside this function
118 # permission checks are inside this function
119 repo_iter = ScmModel().get_repos(all_repos)
119 repo_iter = ScmModel().get_repos(all_repos)
120 return [
120 return [
121 {
121 {
122 'id': obj['name'],
122 'id': obj['name'],
123 'text': obj['name'],
123 'text': obj['name'],
124 'type': 'repo',
124 'type': 'repo',
125 'obj': obj['dbrepo'],
125 'obj': obj['dbrepo'],
126 'url': h.route_path('repo_summary', repo_name=obj['name'])
126 'url': h.route_path('repo_summary', repo_name=obj['name'])
127 }
127 }
128 for obj in repo_iter]
128 for obj in repo_iter]
129
129
130 def _get_repo_group_list(self, name_contains=None, limit=20):
130 def _get_repo_group_list(self, name_contains=None, limit=20):
131 query = RepoGroup.query()\
131 query = RepoGroup.query()\
132 .order_by(func.length(RepoGroup.group_name))\
132 .order_by(func.length(RepoGroup.group_name))\
133 .order_by(RepoGroup.group_name)
133 .order_by(RepoGroup.group_name)
134
134
135 if name_contains:
135 if name_contains:
136 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
136 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
137 query = query.filter(
137 query = query.filter(
138 RepoGroup.group_name.ilike(ilike_expression))
138 RepoGroup.group_name.ilike(ilike_expression))
139 query = query.limit(limit)
139 query = query.limit(limit)
140
140
141 all_groups = query.all()
141 all_groups = query.all()
142 repo_groups_iter = ScmModel().get_repo_groups(all_groups)
142 repo_groups_iter = ScmModel().get_repo_groups(all_groups)
143 return [
143 return [
144 {
144 {
145 'id': obj.group_name,
145 'id': obj.group_name,
146 'text': obj.group_name,
146 'text': obj.group_name,
147 'type': 'group',
147 'type': 'group',
148 'obj': {},
148 'obj': {},
149 'url': h.route_path('repo_group_home', repo_group_name=obj.group_name)
149 'url': h.route_path('repo_group_home', repo_group_name=obj.group_name)
150 }
150 }
151 for obj in repo_groups_iter]
151 for obj in repo_groups_iter]
152
152
153 def _get_hash_commit_list(self, auth_user, hash_starts_with=None):
153 def _get_hash_commit_list(self, auth_user, hash_starts_with=None):
154 if not hash_starts_with or len(hash_starts_with) < 3:
154 if not hash_starts_with or len(hash_starts_with) < 3:
155 return []
155 return []
156
156
157 commit_hashes = re.compile('([0-9a-f]{2,40})').findall(hash_starts_with)
157 commit_hashes = re.compile('([0-9a-f]{2,40})').findall(hash_starts_with)
158
158
159 if len(commit_hashes) != 1:
159 if len(commit_hashes) != 1:
160 return []
160 return []
161
161
162 commit_hash_prefix = commit_hashes[0]
162 commit_hash_prefix = commit_hashes[0]
163
163
164 searcher = searcher_from_config(self.request.registry.settings)
164 searcher = searcher_from_config(self.request.registry.settings)
165 result = searcher.search(
165 result = searcher.search(
166 'commit_id:%s*' % commit_hash_prefix, 'commit', auth_user,
166 'commit_id:%s*' % commit_hash_prefix, 'commit', auth_user,
167 raise_on_exc=False)
167 raise_on_exc=False)
168
168
169 return [
169 return [
170 {
170 {
171 'id': entry['commit_id'],
171 'id': entry['commit_id'],
172 'text': entry['commit_id'],
172 'text': entry['commit_id'],
173 'type': 'commit',
173 'type': 'commit',
174 'obj': {'repo': entry['repository']},
174 'obj': {'repo': entry['repository']},
175 'url': h.url('changeset_home',
175 'url': h.route_path('repo_commit',
176 repo_name=entry['repository'],
176 repo_name=entry['repository'],
177 revision=entry['commit_id'])
177 commit_id=entry['commit_id'])
178 }
178 }
179 for entry in result['results']]
179 for entry in result['results']]
180
180
181 @LoginRequired()
181 @LoginRequired()
182 @view_config(
182 @view_config(
183 route_name='repo_list_data', request_method='GET',
183 route_name='repo_list_data', request_method='GET',
184 renderer='json_ext', xhr=True)
184 renderer='json_ext', xhr=True)
185 def repo_list_data(self):
185 def repo_list_data(self):
186 _ = self.request.translate
186 _ = self.request.translate
187
187
188 query = self.request.GET.get('query')
188 query = self.request.GET.get('query')
189 repo_type = self.request.GET.get('repo_type')
189 repo_type = self.request.GET.get('repo_type')
190 log.debug('generating repo list, query:%s, repo_type:%s',
190 log.debug('generating repo list, query:%s, repo_type:%s',
191 query, repo_type)
191 query, repo_type)
192
192
193 res = []
193 res = []
194 repos = self._get_repo_list(query, repo_type=repo_type)
194 repos = self._get_repo_list(query, repo_type=repo_type)
195 if repos:
195 if repos:
196 res.append({
196 res.append({
197 'text': _('Repositories'),
197 'text': _('Repositories'),
198 'children': repos
198 'children': repos
199 })
199 })
200
200
201 data = {
201 data = {
202 'more': False,
202 'more': False,
203 'results': res
203 'results': res
204 }
204 }
205 return data
205 return data
206
206
207 @LoginRequired()
207 @LoginRequired()
208 @view_config(
208 @view_config(
209 route_name='goto_switcher_data', request_method='GET',
209 route_name='goto_switcher_data', request_method='GET',
210 renderer='json_ext', xhr=True)
210 renderer='json_ext', xhr=True)
211 def goto_switcher_data(self):
211 def goto_switcher_data(self):
212 c = self.load_default_context()
212 c = self.load_default_context()
213
213
214 _ = self.request.translate
214 _ = self.request.translate
215
215
216 query = self.request.GET.get('query')
216 query = self.request.GET.get('query')
217 log.debug('generating goto switcher list, query %s', query)
217 log.debug('generating goto switcher list, query %s', query)
218
218
219 res = []
219 res = []
220 repo_groups = self._get_repo_group_list(query)
220 repo_groups = self._get_repo_group_list(query)
221 if repo_groups:
221 if repo_groups:
222 res.append({
222 res.append({
223 'text': _('Groups'),
223 'text': _('Groups'),
224 'children': repo_groups
224 'children': repo_groups
225 })
225 })
226
226
227 repos = self._get_repo_list(query)
227 repos = self._get_repo_list(query)
228 if repos:
228 if repos:
229 res.append({
229 res.append({
230 'text': _('Repositories'),
230 'text': _('Repositories'),
231 'children': repos
231 'children': repos
232 })
232 })
233
233
234 commits = self._get_hash_commit_list(c.auth_user, query)
234 commits = self._get_hash_commit_list(c.auth_user, query)
235 if commits:
235 if commits:
236 unique_repos = {}
236 unique_repos = {}
237 for commit in commits:
237 for commit in commits:
238 unique_repos.setdefault(commit['obj']['repo'], []
238 unique_repos.setdefault(commit['obj']['repo'], []
239 ).append(commit)
239 ).append(commit)
240
240
241 for repo in unique_repos:
241 for repo in unique_repos:
242 res.append({
242 res.append({
243 'text': _('Commits in %(repo)s') % {'repo': repo},
243 'text': _('Commits in %(repo)s') % {'repo': repo},
244 'children': unique_repos[repo]
244 'children': unique_repos[repo]
245 })
245 })
246
246
247 data = {
247 data = {
248 'more': False,
248 'more': False,
249 'results': res
249 'results': res
250 }
250 }
251 return data
251 return data
252
252
253 def _get_groups_and_repos(self, repo_group_id=None):
253 def _get_groups_and_repos(self, repo_group_id=None):
254 # repo groups
254 # repo groups
255 repo_group_list = RepoGroup.get_all_repo_groups(group_id=repo_group_id)
255 repo_group_list = RepoGroup.get_all_repo_groups(group_id=repo_group_id)
256 _perms = ['group.read', 'group.write', 'group.admin']
256 _perms = ['group.read', 'group.write', 'group.admin']
257 repo_group_list_acl = RepoGroupList(repo_group_list, perm_set=_perms)
257 repo_group_list_acl = RepoGroupList(repo_group_list, perm_set=_perms)
258 repo_group_data = RepoGroupModel().get_repo_groups_as_dict(
258 repo_group_data = RepoGroupModel().get_repo_groups_as_dict(
259 repo_group_list=repo_group_list_acl, admin=False)
259 repo_group_list=repo_group_list_acl, admin=False)
260
260
261 # repositories
261 # repositories
262 repo_list = Repository.get_all_repos(group_id=repo_group_id)
262 repo_list = Repository.get_all_repos(group_id=repo_group_id)
263 _perms = ['repository.read', 'repository.write', 'repository.admin']
263 _perms = ['repository.read', 'repository.write', 'repository.admin']
264 repo_list_acl = RepoList(repo_list, perm_set=_perms)
264 repo_list_acl = RepoList(repo_list, perm_set=_perms)
265 repo_data = RepoModel().get_repos_as_dict(
265 repo_data = RepoModel().get_repos_as_dict(
266 repo_list=repo_list_acl, admin=False)
266 repo_list=repo_list_acl, admin=False)
267
267
268 return repo_data, repo_group_data
268 return repo_data, repo_group_data
269
269
270 @LoginRequired()
270 @LoginRequired()
271 @view_config(
271 @view_config(
272 route_name='home', request_method='GET',
272 route_name='home', request_method='GET',
273 renderer='rhodecode:templates/index.mako')
273 renderer='rhodecode:templates/index.mako')
274 def main_page(self):
274 def main_page(self):
275 c = self.load_default_context()
275 c = self.load_default_context()
276 c.repo_group = None
276 c.repo_group = None
277
277
278 repo_data, repo_group_data = self._get_groups_and_repos()
278 repo_data, repo_group_data = self._get_groups_and_repos()
279 # json used to render the grids
279 # json used to render the grids
280 c.repos_data = json.dumps(repo_data)
280 c.repos_data = json.dumps(repo_data)
281 c.repo_groups_data = json.dumps(repo_group_data)
281 c.repo_groups_data = json.dumps(repo_group_data)
282
282
283 return self._get_template_context(c)
283 return self._get_template_context(c)
284
284
285 @LoginRequired()
285 @LoginRequired()
286 @HasRepoGroupPermissionAnyDecorator(
286 @HasRepoGroupPermissionAnyDecorator(
287 'group.read', 'group.write', 'group.admin')
287 'group.read', 'group.write', 'group.admin')
288 @view_config(
288 @view_config(
289 route_name='repo_group_home', request_method='GET',
289 route_name='repo_group_home', request_method='GET',
290 renderer='rhodecode:templates/index_repo_group.mako')
290 renderer='rhodecode:templates/index_repo_group.mako')
291 @view_config(
291 @view_config(
292 route_name='repo_group_home_slash', request_method='GET',
292 route_name='repo_group_home_slash', request_method='GET',
293 renderer='rhodecode:templates/index_repo_group.mako')
293 renderer='rhodecode:templates/index_repo_group.mako')
294 def repo_group_main_page(self):
294 def repo_group_main_page(self):
295 c = self.load_default_context()
295 c = self.load_default_context()
296 c.repo_group = self.request.db_repo_group
296 c.repo_group = self.request.db_repo_group
297 repo_data, repo_group_data = self._get_groups_and_repos(
297 repo_data, repo_group_data = self._get_groups_and_repos(
298 c.repo_group.group_id)
298 c.repo_group.group_id)
299
299
300 # json used to render the grids
300 # json used to render the grids
301 c.repos_data = json.dumps(repo_data)
301 c.repos_data = json.dumps(repo_data)
302 c.repo_groups_data = json.dumps(repo_group_data)
302 c.repo_groups_data = json.dumps(repo_group_data)
303
303
304 return self._get_template_context(c)
304 return self._get_template_context(c)
@@ -1,530 +1,529 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import urlparse
21 import urlparse
22
22
23 import mock
23 import mock
24 import pytest
24 import pytest
25
25
26 from rhodecode.tests import (
26 from rhodecode.tests import (
27 assert_session_flash, HG_REPO, TEST_USER_ADMIN_LOGIN,
27 assert_session_flash, HG_REPO, TEST_USER_ADMIN_LOGIN,
28 no_newline_id_generator)
28 no_newline_id_generator)
29 from rhodecode.tests.fixture import Fixture
29 from rhodecode.tests.fixture import Fixture
30 from rhodecode.lib.auth import check_password
30 from rhodecode.lib.auth import check_password
31 from rhodecode.lib import helpers as h
31 from rhodecode.lib import helpers as h
32 from rhodecode.model.auth_token import AuthTokenModel
32 from rhodecode.model.auth_token import AuthTokenModel
33 from rhodecode.model import validators
33 from rhodecode.model import validators
34 from rhodecode.model.db import User, Notification, UserApiKeys
34 from rhodecode.model.db import User, Notification, UserApiKeys
35 from rhodecode.model.meta import Session
35 from rhodecode.model.meta import Session
36
36
37 fixture = Fixture()
37 fixture = Fixture()
38
38
39 whitelist_view = ['RepoCommitsView:repo_commit_raw']
40
39
41
40 def route_path(name, params=None, **kwargs):
42 def route_path(name, params=None, **kwargs):
41 import urllib
43 import urllib
42 from rhodecode.apps._base import ADMIN_PREFIX
44 from rhodecode.apps._base import ADMIN_PREFIX
43
45
44 base_url = {
46 base_url = {
45 'login': ADMIN_PREFIX + '/login',
47 'login': ADMIN_PREFIX + '/login',
46 'logout': ADMIN_PREFIX + '/logout',
48 'logout': ADMIN_PREFIX + '/logout',
47 'register': ADMIN_PREFIX + '/register',
49 'register': ADMIN_PREFIX + '/register',
48 'reset_password':
50 'reset_password':
49 ADMIN_PREFIX + '/password_reset',
51 ADMIN_PREFIX + '/password_reset',
50 'reset_password_confirmation':
52 'reset_password_confirmation':
51 ADMIN_PREFIX + '/password_reset_confirmation',
53 ADMIN_PREFIX + '/password_reset_confirmation',
52
54
53 'admin_permissions_application':
55 'admin_permissions_application':
54 ADMIN_PREFIX + '/permissions/application',
56 ADMIN_PREFIX + '/permissions/application',
55 'admin_permissions_application_update':
57 'admin_permissions_application_update':
56 ADMIN_PREFIX + '/permissions/application/update',
58 ADMIN_PREFIX + '/permissions/application/update',
57
59
58 'repo_commit_raw': '/{repo_name}/raw-changeset/{commit_id}'
60 'repo_commit_raw': '/{repo_name}/raw-changeset/{commit_id}'
59
61
60 }[name].format(**kwargs)
62 }[name].format(**kwargs)
61
63
62 if params:
64 if params:
63 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
65 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
64 return base_url
66 return base_url
65
67
66
68
67 @pytest.mark.usefixtures('app')
69 @pytest.mark.usefixtures('app')
68 class TestLoginController(object):
70 class TestLoginController(object):
69 destroy_users = set()
71 destroy_users = set()
70
72
71 @classmethod
73 @classmethod
72 def teardown_class(cls):
74 def teardown_class(cls):
73 fixture.destroy_users(cls.destroy_users)
75 fixture.destroy_users(cls.destroy_users)
74
76
75 def teardown_method(self, method):
77 def teardown_method(self, method):
76 for n in Notification.query().all():
78 for n in Notification.query().all():
77 Session().delete(n)
79 Session().delete(n)
78
80
79 Session().commit()
81 Session().commit()
80 assert Notification.query().all() == []
82 assert Notification.query().all() == []
81
83
82 def test_index(self):
84 def test_index(self):
83 response = self.app.get(route_path('login'))
85 response = self.app.get(route_path('login'))
84 assert response.status == '200 OK'
86 assert response.status == '200 OK'
85 # Test response...
87 # Test response...
86
88
87 def test_login_admin_ok(self):
89 def test_login_admin_ok(self):
88 response = self.app.post(route_path('login'),
90 response = self.app.post(route_path('login'),
89 {'username': 'test_admin',
91 {'username': 'test_admin',
90 'password': 'test12'})
92 'password': 'test12'})
91 assert response.status == '302 Found'
93 assert response.status == '302 Found'
92 session = response.get_session_from_response()
94 session = response.get_session_from_response()
93 username = session['rhodecode_user'].get('username')
95 username = session['rhodecode_user'].get('username')
94 assert username == 'test_admin'
96 assert username == 'test_admin'
95 response = response.follow()
97 response = response.follow()
96 response.mustcontain('/%s' % HG_REPO)
98 response.mustcontain('/%s' % HG_REPO)
97
99
98 def test_login_regular_ok(self):
100 def test_login_regular_ok(self):
99 response = self.app.post(route_path('login'),
101 response = self.app.post(route_path('login'),
100 {'username': 'test_regular',
102 {'username': 'test_regular',
101 'password': 'test12'})
103 'password': 'test12'})
102
104
103 assert response.status == '302 Found'
105 assert response.status == '302 Found'
104 session = response.get_session_from_response()
106 session = response.get_session_from_response()
105 username = session['rhodecode_user'].get('username')
107 username = session['rhodecode_user'].get('username')
106 assert username == 'test_regular'
108 assert username == 'test_regular'
107 response = response.follow()
109 response = response.follow()
108 response.mustcontain('/%s' % HG_REPO)
110 response.mustcontain('/%s' % HG_REPO)
109
111
110 def test_login_ok_came_from(self):
112 def test_login_ok_came_from(self):
111 test_came_from = '/_admin/users?branch=stable'
113 test_came_from = '/_admin/users?branch=stable'
112 _url = '{}?came_from={}'.format(route_path('login'), test_came_from)
114 _url = '{}?came_from={}'.format(route_path('login'), test_came_from)
113 response = self.app.post(
115 response = self.app.post(
114 _url, {'username': 'test_admin', 'password': 'test12'})
116 _url, {'username': 'test_admin', 'password': 'test12'})
115 assert response.status == '302 Found'
117 assert response.status == '302 Found'
116 assert 'branch=stable' in response.location
118 assert 'branch=stable' in response.location
117 response = response.follow()
119 response = response.follow()
118
120
119 assert response.status == '200 OK'
121 assert response.status == '200 OK'
120 response.mustcontain('Users administration')
122 response.mustcontain('Users administration')
121
123
122 def test_redirect_to_login_with_get_args(self):
124 def test_redirect_to_login_with_get_args(self):
123 with fixture.anon_access(False):
125 with fixture.anon_access(False):
124 kwargs = {'branch': 'stable'}
126 kwargs = {'branch': 'stable'}
125 response = self.app.get(
127 response = self.app.get(
126 h.route_path('repo_summary', repo_name=HG_REPO, _query=kwargs))
128 h.route_path('repo_summary', repo_name=HG_REPO, _query=kwargs))
127 assert response.status == '302 Found'
129 assert response.status == '302 Found'
128
130
129 response_query = urlparse.parse_qsl(response.location)
131 response_query = urlparse.parse_qsl(response.location)
130 assert 'branch=stable' in response_query[0][1]
132 assert 'branch=stable' in response_query[0][1]
131
133
132 def test_login_form_with_get_args(self):
134 def test_login_form_with_get_args(self):
133 _url = '{}?came_from=/_admin/users,branch=stable'.format(route_path('login'))
135 _url = '{}?came_from=/_admin/users,branch=stable'.format(route_path('login'))
134 response = self.app.get(_url)
136 response = self.app.get(_url)
135 assert 'branch%3Dstable' in response.form.action
137 assert 'branch%3Dstable' in response.form.action
136
138
137 @pytest.mark.parametrize("url_came_from", [
139 @pytest.mark.parametrize("url_came_from", [
138 'data:text/html,<script>window.alert("xss")</script>',
140 'data:text/html,<script>window.alert("xss")</script>',
139 'mailto:test@rhodecode.org',
141 'mailto:test@rhodecode.org',
140 'file:///etc/passwd',
142 'file:///etc/passwd',
141 'ftp://some.ftp.server',
143 'ftp://some.ftp.server',
142 'http://other.domain',
144 'http://other.domain',
143 '/\r\nX-Forwarded-Host: http://example.org',
145 '/\r\nX-Forwarded-Host: http://example.org',
144 ], ids=no_newline_id_generator)
146 ], ids=no_newline_id_generator)
145 def test_login_bad_came_froms(self, url_came_from):
147 def test_login_bad_came_froms(self, url_came_from):
146 _url = '{}?came_from={}'.format(route_path('login'), url_came_from)
148 _url = '{}?came_from={}'.format(route_path('login'), url_came_from)
147 response = self.app.post(
149 response = self.app.post(
148 _url,
150 _url,
149 {'username': 'test_admin', 'password': 'test12'})
151 {'username': 'test_admin', 'password': 'test12'})
150 assert response.status == '302 Found'
152 assert response.status == '302 Found'
151 response = response.follow()
153 response = response.follow()
152 assert response.status == '200 OK'
154 assert response.status == '200 OK'
153 assert response.request.path == '/'
155 assert response.request.path == '/'
154
156
155 def test_login_short_password(self):
157 def test_login_short_password(self):
156 response = self.app.post(route_path('login'),
158 response = self.app.post(route_path('login'),
157 {'username': 'test_admin',
159 {'username': 'test_admin',
158 'password': 'as'})
160 'password': 'as'})
159 assert response.status == '200 OK'
161 assert response.status == '200 OK'
160
162
161 response.mustcontain('Enter 3 characters or more')
163 response.mustcontain('Enter 3 characters or more')
162
164
163 def test_login_wrong_non_ascii_password(self, user_regular):
165 def test_login_wrong_non_ascii_password(self, user_regular):
164 response = self.app.post(
166 response = self.app.post(
165 route_path('login'),
167 route_path('login'),
166 {'username': user_regular.username,
168 {'username': user_regular.username,
167 'password': u'invalid-non-asci\xe4'.encode('utf8')})
169 'password': u'invalid-non-asci\xe4'.encode('utf8')})
168
170
169 response.mustcontain('invalid user name')
171 response.mustcontain('invalid user name')
170 response.mustcontain('invalid password')
172 response.mustcontain('invalid password')
171
173
172 def test_login_with_non_ascii_password(self, user_util):
174 def test_login_with_non_ascii_password(self, user_util):
173 password = u'valid-non-ascii\xe4'
175 password = u'valid-non-ascii\xe4'
174 user = user_util.create_user(password=password)
176 user = user_util.create_user(password=password)
175 response = self.app.post(
177 response = self.app.post(
176 route_path('login'),
178 route_path('login'),
177 {'username': user.username,
179 {'username': user.username,
178 'password': password.encode('utf-8')})
180 'password': password.encode('utf-8')})
179 assert response.status_code == 302
181 assert response.status_code == 302
180
182
181 def test_login_wrong_username_password(self):
183 def test_login_wrong_username_password(self):
182 response = self.app.post(route_path('login'),
184 response = self.app.post(route_path('login'),
183 {'username': 'error',
185 {'username': 'error',
184 'password': 'test12'})
186 'password': 'test12'})
185
187
186 response.mustcontain('invalid user name')
188 response.mustcontain('invalid user name')
187 response.mustcontain('invalid password')
189 response.mustcontain('invalid password')
188
190
189 def test_login_admin_ok_password_migration(self, real_crypto_backend):
191 def test_login_admin_ok_password_migration(self, real_crypto_backend):
190 from rhodecode.lib import auth
192 from rhodecode.lib import auth
191
193
192 # create new user, with sha256 password
194 # create new user, with sha256 password
193 temp_user = 'test_admin_sha256'
195 temp_user = 'test_admin_sha256'
194 user = fixture.create_user(temp_user)
196 user = fixture.create_user(temp_user)
195 user.password = auth._RhodeCodeCryptoSha256().hash_create(
197 user.password = auth._RhodeCodeCryptoSha256().hash_create(
196 b'test123')
198 b'test123')
197 Session().add(user)
199 Session().add(user)
198 Session().commit()
200 Session().commit()
199 self.destroy_users.add(temp_user)
201 self.destroy_users.add(temp_user)
200 response = self.app.post(route_path('login'),
202 response = self.app.post(route_path('login'),
201 {'username': temp_user,
203 {'username': temp_user,
202 'password': 'test123'})
204 'password': 'test123'})
203
205
204 assert response.status == '302 Found'
206 assert response.status == '302 Found'
205 session = response.get_session_from_response()
207 session = response.get_session_from_response()
206 username = session['rhodecode_user'].get('username')
208 username = session['rhodecode_user'].get('username')
207 assert username == temp_user
209 assert username == temp_user
208 response = response.follow()
210 response = response.follow()
209 response.mustcontain('/%s' % HG_REPO)
211 response.mustcontain('/%s' % HG_REPO)
210
212
211 # new password should be bcrypted, after log-in and transfer
213 # new password should be bcrypted, after log-in and transfer
212 user = User.get_by_username(temp_user)
214 user = User.get_by_username(temp_user)
213 assert user.password.startswith('$')
215 assert user.password.startswith('$')
214
216
215 # REGISTRATIONS
217 # REGISTRATIONS
216 def test_register(self):
218 def test_register(self):
217 response = self.app.get(route_path('register'))
219 response = self.app.get(route_path('register'))
218 response.mustcontain('Create an Account')
220 response.mustcontain('Create an Account')
219
221
220 def test_register_err_same_username(self):
222 def test_register_err_same_username(self):
221 uname = 'test_admin'
223 uname = 'test_admin'
222 response = self.app.post(
224 response = self.app.post(
223 route_path('register'),
225 route_path('register'),
224 {
226 {
225 'username': uname,
227 'username': uname,
226 'password': 'test12',
228 'password': 'test12',
227 'password_confirmation': 'test12',
229 'password_confirmation': 'test12',
228 'email': 'goodmail@domain.com',
230 'email': 'goodmail@domain.com',
229 'firstname': 'test',
231 'firstname': 'test',
230 'lastname': 'test'
232 'lastname': 'test'
231 }
233 }
232 )
234 )
233
235
234 assertr = response.assert_response()
236 assertr = response.assert_response()
235 msg = validators.ValidUsername()._messages['username_exists']
237 msg = validators.ValidUsername()._messages['username_exists']
236 msg = msg % {'username': uname}
238 msg = msg % {'username': uname}
237 assertr.element_contains('#username+.error-message', msg)
239 assertr.element_contains('#username+.error-message', msg)
238
240
239 def test_register_err_same_email(self):
241 def test_register_err_same_email(self):
240 response = self.app.post(
242 response = self.app.post(
241 route_path('register'),
243 route_path('register'),
242 {
244 {
243 'username': 'test_admin_0',
245 'username': 'test_admin_0',
244 'password': 'test12',
246 'password': 'test12',
245 'password_confirmation': 'test12',
247 'password_confirmation': 'test12',
246 'email': 'test_admin@mail.com',
248 'email': 'test_admin@mail.com',
247 'firstname': 'test',
249 'firstname': 'test',
248 'lastname': 'test'
250 'lastname': 'test'
249 }
251 }
250 )
252 )
251
253
252 assertr = response.assert_response()
254 assertr = response.assert_response()
253 msg = validators.UniqSystemEmail()()._messages['email_taken']
255 msg = validators.UniqSystemEmail()()._messages['email_taken']
254 assertr.element_contains('#email+.error-message', msg)
256 assertr.element_contains('#email+.error-message', msg)
255
257
256 def test_register_err_same_email_case_sensitive(self):
258 def test_register_err_same_email_case_sensitive(self):
257 response = self.app.post(
259 response = self.app.post(
258 route_path('register'),
260 route_path('register'),
259 {
261 {
260 'username': 'test_admin_1',
262 'username': 'test_admin_1',
261 'password': 'test12',
263 'password': 'test12',
262 'password_confirmation': 'test12',
264 'password_confirmation': 'test12',
263 'email': 'TesT_Admin@mail.COM',
265 'email': 'TesT_Admin@mail.COM',
264 'firstname': 'test',
266 'firstname': 'test',
265 'lastname': 'test'
267 'lastname': 'test'
266 }
268 }
267 )
269 )
268 assertr = response.assert_response()
270 assertr = response.assert_response()
269 msg = validators.UniqSystemEmail()()._messages['email_taken']
271 msg = validators.UniqSystemEmail()()._messages['email_taken']
270 assertr.element_contains('#email+.error-message', msg)
272 assertr.element_contains('#email+.error-message', msg)
271
273
272 def test_register_err_wrong_data(self):
274 def test_register_err_wrong_data(self):
273 response = self.app.post(
275 response = self.app.post(
274 route_path('register'),
276 route_path('register'),
275 {
277 {
276 'username': 'xs',
278 'username': 'xs',
277 'password': 'test',
279 'password': 'test',
278 'password_confirmation': 'test',
280 'password_confirmation': 'test',
279 'email': 'goodmailm',
281 'email': 'goodmailm',
280 'firstname': 'test',
282 'firstname': 'test',
281 'lastname': 'test'
283 'lastname': 'test'
282 }
284 }
283 )
285 )
284 assert response.status == '200 OK'
286 assert response.status == '200 OK'
285 response.mustcontain('An email address must contain a single @')
287 response.mustcontain('An email address must contain a single @')
286 response.mustcontain('Enter a value 6 characters long or more')
288 response.mustcontain('Enter a value 6 characters long or more')
287
289
288 def test_register_err_username(self):
290 def test_register_err_username(self):
289 response = self.app.post(
291 response = self.app.post(
290 route_path('register'),
292 route_path('register'),
291 {
293 {
292 'username': 'error user',
294 'username': 'error user',
293 'password': 'test12',
295 'password': 'test12',
294 'password_confirmation': 'test12',
296 'password_confirmation': 'test12',
295 'email': 'goodmailm',
297 'email': 'goodmailm',
296 'firstname': 'test',
298 'firstname': 'test',
297 'lastname': 'test'
299 'lastname': 'test'
298 }
300 }
299 )
301 )
300
302
301 response.mustcontain('An email address must contain a single @')
303 response.mustcontain('An email address must contain a single @')
302 response.mustcontain(
304 response.mustcontain(
303 'Username may only contain '
305 'Username may only contain '
304 'alphanumeric characters underscores, '
306 'alphanumeric characters underscores, '
305 'periods or dashes and must begin with '
307 'periods or dashes and must begin with '
306 'alphanumeric character')
308 'alphanumeric character')
307
309
308 def test_register_err_case_sensitive(self):
310 def test_register_err_case_sensitive(self):
309 usr = 'Test_Admin'
311 usr = 'Test_Admin'
310 response = self.app.post(
312 response = self.app.post(
311 route_path('register'),
313 route_path('register'),
312 {
314 {
313 'username': usr,
315 'username': usr,
314 'password': 'test12',
316 'password': 'test12',
315 'password_confirmation': 'test12',
317 'password_confirmation': 'test12',
316 'email': 'goodmailm',
318 'email': 'goodmailm',
317 'firstname': 'test',
319 'firstname': 'test',
318 'lastname': 'test'
320 'lastname': 'test'
319 }
321 }
320 )
322 )
321
323
322 assertr = response.assert_response()
324 assertr = response.assert_response()
323 msg = validators.ValidUsername()._messages['username_exists']
325 msg = validators.ValidUsername()._messages['username_exists']
324 msg = msg % {'username': usr}
326 msg = msg % {'username': usr}
325 assertr.element_contains('#username+.error-message', msg)
327 assertr.element_contains('#username+.error-message', msg)
326
328
327 def test_register_special_chars(self):
329 def test_register_special_chars(self):
328 response = self.app.post(
330 response = self.app.post(
329 route_path('register'),
331 route_path('register'),
330 {
332 {
331 'username': 'xxxaxn',
333 'username': 'xxxaxn',
332 'password': 'ąćźżąśśśś',
334 'password': 'ąćźżąśśśś',
333 'password_confirmation': 'ąćźżąśśśś',
335 'password_confirmation': 'ąćźżąśśśś',
334 'email': 'goodmailm@test.plx',
336 'email': 'goodmailm@test.plx',
335 'firstname': 'test',
337 'firstname': 'test',
336 'lastname': 'test'
338 'lastname': 'test'
337 }
339 }
338 )
340 )
339
341
340 msg = validators.ValidPassword()._messages['invalid_password']
342 msg = validators.ValidPassword()._messages['invalid_password']
341 response.mustcontain(msg)
343 response.mustcontain(msg)
342
344
343 def test_register_password_mismatch(self):
345 def test_register_password_mismatch(self):
344 response = self.app.post(
346 response = self.app.post(
345 route_path('register'),
347 route_path('register'),
346 {
348 {
347 'username': 'xs',
349 'username': 'xs',
348 'password': '123qwe',
350 'password': '123qwe',
349 'password_confirmation': 'qwe123',
351 'password_confirmation': 'qwe123',
350 'email': 'goodmailm@test.plxa',
352 'email': 'goodmailm@test.plxa',
351 'firstname': 'test',
353 'firstname': 'test',
352 'lastname': 'test'
354 'lastname': 'test'
353 }
355 }
354 )
356 )
355 msg = validators.ValidPasswordsMatch()._messages['password_mismatch']
357 msg = validators.ValidPasswordsMatch()._messages['password_mismatch']
356 response.mustcontain(msg)
358 response.mustcontain(msg)
357
359
358 def test_register_ok(self):
360 def test_register_ok(self):
359 username = 'test_regular4'
361 username = 'test_regular4'
360 password = 'qweqwe'
362 password = 'qweqwe'
361 email = 'marcin@test.com'
363 email = 'marcin@test.com'
362 name = 'testname'
364 name = 'testname'
363 lastname = 'testlastname'
365 lastname = 'testlastname'
364
366
365 response = self.app.post(
367 response = self.app.post(
366 route_path('register'),
368 route_path('register'),
367 {
369 {
368 'username': username,
370 'username': username,
369 'password': password,
371 'password': password,
370 'password_confirmation': password,
372 'password_confirmation': password,
371 'email': email,
373 'email': email,
372 'firstname': name,
374 'firstname': name,
373 'lastname': lastname,
375 'lastname': lastname,
374 'admin': True
376 'admin': True
375 }
377 }
376 ) # This should be overridden
378 ) # This should be overridden
377 assert response.status == '302 Found'
379 assert response.status == '302 Found'
378 assert_session_flash(
380 assert_session_flash(
379 response, 'You have successfully registered with RhodeCode')
381 response, 'You have successfully registered with RhodeCode')
380
382
381 ret = Session().query(User).filter(
383 ret = Session().query(User).filter(
382 User.username == 'test_regular4').one()
384 User.username == 'test_regular4').one()
383 assert ret.username == username
385 assert ret.username == username
384 assert check_password(password, ret.password)
386 assert check_password(password, ret.password)
385 assert ret.email == email
387 assert ret.email == email
386 assert ret.name == name
388 assert ret.name == name
387 assert ret.lastname == lastname
389 assert ret.lastname == lastname
388 assert ret.auth_tokens is not None
390 assert ret.auth_tokens is not None
389 assert not ret.admin
391 assert not ret.admin
390
392
391 def test_forgot_password_wrong_mail(self):
393 def test_forgot_password_wrong_mail(self):
392 bad_email = 'marcin@wrongmail.org'
394 bad_email = 'marcin@wrongmail.org'
393 response = self.app.post(
395 response = self.app.post(
394 route_path('reset_password'), {'email': bad_email, }
396 route_path('reset_password'), {'email': bad_email, }
395 )
397 )
396 assert_session_flash(response,
398 assert_session_flash(response,
397 'If such email exists, a password reset link was sent to it.')
399 'If such email exists, a password reset link was sent to it.')
398
400
399 def test_forgot_password(self, user_util):
401 def test_forgot_password(self, user_util):
400 response = self.app.get(route_path('reset_password'))
402 response = self.app.get(route_path('reset_password'))
401 assert response.status == '200 OK'
403 assert response.status == '200 OK'
402
404
403 user = user_util.create_user()
405 user = user_util.create_user()
404 user_id = user.user_id
406 user_id = user.user_id
405 email = user.email
407 email = user.email
406
408
407 response = self.app.post(route_path('reset_password'), {'email': email, })
409 response = self.app.post(route_path('reset_password'), {'email': email, })
408
410
409 assert_session_flash(response,
411 assert_session_flash(response,
410 'If such email exists, a password reset link was sent to it.')
412 'If such email exists, a password reset link was sent to it.')
411
413
412 # BAD KEY
414 # BAD KEY
413 confirm_url = '{}?key={}'.format(route_path('reset_password_confirmation'), 'badkey')
415 confirm_url = '{}?key={}'.format(route_path('reset_password_confirmation'), 'badkey')
414 response = self.app.get(confirm_url)
416 response = self.app.get(confirm_url)
415 assert response.status == '302 Found'
417 assert response.status == '302 Found'
416 assert response.location.endswith(route_path('reset_password'))
418 assert response.location.endswith(route_path('reset_password'))
417 assert_session_flash(response, 'Given reset token is invalid')
419 assert_session_flash(response, 'Given reset token is invalid')
418
420
419 response.follow() # cleanup flash
421 response.follow() # cleanup flash
420
422
421 # GOOD KEY
423 # GOOD KEY
422 key = UserApiKeys.query()\
424 key = UserApiKeys.query()\
423 .filter(UserApiKeys.user_id == user_id)\
425 .filter(UserApiKeys.user_id == user_id)\
424 .filter(UserApiKeys.role == UserApiKeys.ROLE_PASSWORD_RESET)\
426 .filter(UserApiKeys.role == UserApiKeys.ROLE_PASSWORD_RESET)\
425 .first()
427 .first()
426
428
427 assert key
429 assert key
428
430
429 confirm_url = '{}?key={}'.format(route_path('reset_password_confirmation'), key.api_key)
431 confirm_url = '{}?key={}'.format(route_path('reset_password_confirmation'), key.api_key)
430 response = self.app.get(confirm_url)
432 response = self.app.get(confirm_url)
431 assert response.status == '302 Found'
433 assert response.status == '302 Found'
432 assert response.location.endswith(route_path('login'))
434 assert response.location.endswith(route_path('login'))
433
435
434 assert_session_flash(
436 assert_session_flash(
435 response,
437 response,
436 'Your password reset was successful, '
438 'Your password reset was successful, '
437 'a new password has been sent to your email')
439 'a new password has been sent to your email')
438
440
439 response.follow()
441 response.follow()
440
442
441 def _get_api_whitelist(self, values=None):
443 def _get_api_whitelist(self, values=None):
442 config = {'api_access_controllers_whitelist': values or []}
444 config = {'api_access_controllers_whitelist': values or []}
443 return config
445 return config
444
446
445 @pytest.mark.parametrize("test_name, auth_token", [
447 @pytest.mark.parametrize("test_name, auth_token", [
446 ('none', None),
448 ('none', None),
447 ('empty_string', ''),
449 ('empty_string', ''),
448 ('fake_number', '123456'),
450 ('fake_number', '123456'),
449 ('proper_auth_token', None)
451 ('proper_auth_token', None)
450 ])
452 ])
451 def test_access_not_whitelisted_page_via_auth_token(
453 def test_access_not_whitelisted_page_via_auth_token(
452 self, test_name, auth_token, user_admin):
454 self, test_name, auth_token, user_admin):
453
455
454 whitelist = self._get_api_whitelist([])
456 whitelist = self._get_api_whitelist([])
455 with mock.patch.dict('rhodecode.CONFIG', whitelist):
457 with mock.patch.dict('rhodecode.CONFIG', whitelist):
456 assert [] == whitelist['api_access_controllers_whitelist']
458 assert [] == whitelist['api_access_controllers_whitelist']
457 if test_name == 'proper_auth_token':
459 if test_name == 'proper_auth_token':
458 # use builtin if api_key is None
460 # use builtin if api_key is None
459 auth_token = user_admin.api_key
461 auth_token = user_admin.api_key
460
462
461 with fixture.anon_access(False):
463 with fixture.anon_access(False):
462 self.app.get(
464 self.app.get(
463 route_path('repo_commit_raw',
465 route_path('repo_commit_raw',
464 repo_name=HG_REPO, commit_id='tip',
466 repo_name=HG_REPO, commit_id='tip',
465 params=dict(api_key=auth_token)),
467 params=dict(api_key=auth_token)),
466 status=302)
468 status=302)
467
469
468 @pytest.mark.parametrize("test_name, auth_token, code", [
470 @pytest.mark.parametrize("test_name, auth_token, code", [
469 ('none', None, 302),
471 ('none', None, 302),
470 ('empty_string', '', 302),
472 ('empty_string', '', 302),
471 ('fake_number', '123456', 302),
473 ('fake_number', '123456', 302),
472 ('proper_auth_token', None, 200)
474 ('proper_auth_token', None, 200)
473 ])
475 ])
474 def test_access_whitelisted_page_via_auth_token(
476 def test_access_whitelisted_page_via_auth_token(
475 self, test_name, auth_token, code, user_admin):
477 self, test_name, auth_token, code, user_admin):
476
478
477 whitelist_entry = ['ChangesetController:changeset_raw']
479 whitelist = self._get_api_whitelist(whitelist_view)
478 whitelist = self._get_api_whitelist(whitelist_entry)
479
480
480 with mock.patch.dict('rhodecode.CONFIG', whitelist):
481 with mock.patch.dict('rhodecode.CONFIG', whitelist):
481 assert whitelist_entry == whitelist['api_access_controllers_whitelist']
482 assert whitelist_view == whitelist['api_access_controllers_whitelist']
482
483
483 if test_name == 'proper_auth_token':
484 if test_name == 'proper_auth_token':
484 auth_token = user_admin.api_key
485 auth_token = user_admin.api_key
485 assert auth_token
486 assert auth_token
486
487
487 with fixture.anon_access(False):
488 with fixture.anon_access(False):
488 self.app.get(
489 self.app.get(
489 route_path('repo_commit_raw',
490 route_path('repo_commit_raw',
490 repo_name=HG_REPO, commit_id='tip',
491 repo_name=HG_REPO, commit_id='tip',
491 params=dict(api_key=auth_token)),
492 params=dict(api_key=auth_token)),
492 status=code)
493 status=code)
493
494
494 def test_access_page_via_extra_auth_token(self):
495 def test_access_page_via_extra_auth_token(self):
495 whitelist = self._get_api_whitelist(
496 whitelist = self._get_api_whitelist(whitelist_view)
496 ['ChangesetController:changeset_raw'])
497 with mock.patch.dict('rhodecode.CONFIG', whitelist):
497 with mock.patch.dict('rhodecode.CONFIG', whitelist):
498 assert ['ChangesetController:changeset_raw'] == \
498 assert whitelist_view == \
499 whitelist['api_access_controllers_whitelist']
499 whitelist['api_access_controllers_whitelist']
500
500
501 new_auth_token = AuthTokenModel().create(
501 new_auth_token = AuthTokenModel().create(
502 TEST_USER_ADMIN_LOGIN, 'test')
502 TEST_USER_ADMIN_LOGIN, 'test')
503 Session().commit()
503 Session().commit()
504 with fixture.anon_access(False):
504 with fixture.anon_access(False):
505 self.app.get(
505 self.app.get(
506 route_path('repo_commit_raw',
506 route_path('repo_commit_raw',
507 repo_name=HG_REPO, commit_id='tip',
507 repo_name=HG_REPO, commit_id='tip',
508 params=dict(api_key=new_auth_token.api_key)),
508 params=dict(api_key=new_auth_token.api_key)),
509 status=200)
509 status=200)
510
510
511 def test_access_page_via_expired_auth_token(self):
511 def test_access_page_via_expired_auth_token(self):
512 whitelist = self._get_api_whitelist(
512 whitelist = self._get_api_whitelist(whitelist_view)
513 ['ChangesetController:changeset_raw'])
514 with mock.patch.dict('rhodecode.CONFIG', whitelist):
513 with mock.patch.dict('rhodecode.CONFIG', whitelist):
515 assert ['ChangesetController:changeset_raw'] == \
514 assert whitelist_view == \
516 whitelist['api_access_controllers_whitelist']
515 whitelist['api_access_controllers_whitelist']
517
516
518 new_auth_token = AuthTokenModel().create(
517 new_auth_token = AuthTokenModel().create(
519 TEST_USER_ADMIN_LOGIN, 'test')
518 TEST_USER_ADMIN_LOGIN, 'test')
520 Session().commit()
519 Session().commit()
521 # patch the api key and make it expired
520 # patch the api key and make it expired
522 new_auth_token.expires = 0
521 new_auth_token.expires = 0
523 Session().add(new_auth_token)
522 Session().add(new_auth_token)
524 Session().commit()
523 Session().commit()
525 with fixture.anon_access(False):
524 with fixture.anon_access(False):
526 self.app.get(
525 self.app.get(
527 route_path('repo_commit_raw',
526 route_path('repo_commit_raw',
528 repo_name=HG_REPO, commit_id='tip',
527 repo_name=HG_REPO, commit_id='tip',
529 params=dict(api_key=new_auth_token.api_key)),
528 params=dict(api_key=new_auth_token.api_key)),
530 status=302)
529 status=302)
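The registration and password-reset tests above resolve their URLs through a local route_path helper rather than the old pylons url() builder. That helper sits outside this hunk; a minimal sketch, assuming standard /_admin/* patterns and following the same shape as the helpers defined for the commit tests further below, could look like this (the route names and URL patterns here are assumptions, not taken from the changeset):

import urllib


def route_path(name, params=None, **kwargs):
    # assumed URL patterns -- only the routes the tests above exercise
    base_url = {
        'login': '/_admin/login',
        'register': '/_admin/register',
        'reset_password': '/_admin/password_reset',
        'reset_password_confirmation': '/_admin/password_reset_confirmation',
    }[name].format(**kwargs)

    if params:
        base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
    return base_url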
@@ -1,277 +1,304 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2017 RhodeCode GmbH
3 # Copyright (C) 2016-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 from rhodecode.apps._base import add_route_with_slash
20 from rhodecode.apps._base import add_route_with_slash
21
21
22
22
23 def includeme(config):
23 def includeme(config):
24
24
25 # Summary
25 # Summary
26 # NOTE(marcink): one additional route is defined at the very bottom, catch
26 # NOTE(marcink): one additional route is defined at the very bottom, catch
27 # all pattern
27 # all pattern
28 config.add_route(
28 config.add_route(
29 name='repo_summary_explicit',
29 name='repo_summary_explicit',
30 pattern='/{repo_name:.*?[^/]}/summary', repo_route=True)
30 pattern='/{repo_name:.*?[^/]}/summary', repo_route=True)
31 config.add_route(
31 config.add_route(
32 name='repo_summary_commits',
32 name='repo_summary_commits',
33 pattern='/{repo_name:.*?[^/]}/summary-commits', repo_route=True)
33 pattern='/{repo_name:.*?[^/]}/summary-commits', repo_route=True)
34
34
35 # repo commits
35 # repo commits
36
36 config.add_route(
37 config.add_route(
37 name='repo_commit',
38 name='repo_commit',
38 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}', repo_route=True)
39 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}', repo_route=True)
39
40
41 config.add_route(
42 name='repo_commit_children',
43 pattern='/{repo_name:.*?[^/]}/changeset_children/{commit_id}', repo_route=True)
44
45 config.add_route(
46 name='repo_commit_parents',
47 pattern='/{repo_name:.*?[^/]}/changeset_parents/{commit_id}', repo_route=True)
48
49 # still working url for backward compat.
50 config.add_route(
51 name='repo_commit_raw_deprecated',
52 pattern='/{repo_name:.*?[^/]}/raw-changeset/{commit_id}', repo_route=True)
53
54 config.add_route(
55 name='repo_commit_raw',
56 pattern='/{repo_name:.*?[^/]}/changeset-diff/{commit_id}', repo_route=True)
57
58 config.add_route(
59 name='repo_commit_patch',
60 pattern='/{repo_name:.*?[^/]}/changeset-patch/{commit_id}', repo_route=True)
61
62 config.add_route(
63 name='repo_commit_download',
64 pattern='/{repo_name:.*?[^/]}/changeset-download/{commit_id}', repo_route=True)
65
66 config.add_route(
67 name='repo_commit_data',
68 pattern='/{repo_name:.*?[^/]}/changeset-data/{commit_id}', repo_route=True)
69
70 config.add_route(
71 name='repo_commit_comment_create',
72 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/create', repo_route=True)
73
74 config.add_route(
75 name='repo_commit_comment_preview',
76 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/preview', repo_route=True)
77
78 config.add_route(
79 name='repo_commit_comment_delete',
80 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/{comment_id}/delete', repo_route=True)
81
40 # repo files
82 # repo files
41 config.add_route(
83 config.add_route(
42 name='repo_archivefile',
84 name='repo_archivefile',
43 pattern='/{repo_name:.*?[^/]}/archive/{fname}', repo_route=True)
85 pattern='/{repo_name:.*?[^/]}/archive/{fname}', repo_route=True)
44
86
45 config.add_route(
87 config.add_route(
46 name='repo_files_diff',
88 name='repo_files_diff',
47 pattern='/{repo_name:.*?[^/]}/diff/{f_path:.*}', repo_route=True)
89 pattern='/{repo_name:.*?[^/]}/diff/{f_path:.*}', repo_route=True)
48 config.add_route( # legacy route to make old links work
90 config.add_route( # legacy route to make old links work
49 name='repo_files_diff_2way_redirect',
91 name='repo_files_diff_2way_redirect',
50 pattern='/{repo_name:.*?[^/]}/diff-2way/{f_path:.*}', repo_route=True)
92 pattern='/{repo_name:.*?[^/]}/diff-2way/{f_path:.*}', repo_route=True)
51
93
52 config.add_route(
94 config.add_route(
53 name='repo_files',
95 name='repo_files',
54 pattern='/{repo_name:.*?[^/]}/files/{commit_id}/{f_path:.*}', repo_route=True)
96 pattern='/{repo_name:.*?[^/]}/files/{commit_id}/{f_path:.*}', repo_route=True)
55 config.add_route(
97 config.add_route(
56 name='repo_files:default_path',
98 name='repo_files:default_path',
57 pattern='/{repo_name:.*?[^/]}/files/{commit_id}/', repo_route=True)
99 pattern='/{repo_name:.*?[^/]}/files/{commit_id}/', repo_route=True)
58 config.add_route(
100 config.add_route(
59 name='repo_files:default_commit',
101 name='repo_files:default_commit',
60 pattern='/{repo_name:.*?[^/]}/files', repo_route=True)
102 pattern='/{repo_name:.*?[^/]}/files', repo_route=True)
61
103
62 config.add_route(
104 config.add_route(
63 name='repo_files:rendered',
105 name='repo_files:rendered',
64 pattern='/{repo_name:.*?[^/]}/render/{commit_id}/{f_path:.*}', repo_route=True)
106 pattern='/{repo_name:.*?[^/]}/render/{commit_id}/{f_path:.*}', repo_route=True)
65
107
66 config.add_route(
108 config.add_route(
67 name='repo_files:annotated',
109 name='repo_files:annotated',
68 pattern='/{repo_name:.*?[^/]}/annotate/{commit_id}/{f_path:.*}', repo_route=True)
110 pattern='/{repo_name:.*?[^/]}/annotate/{commit_id}/{f_path:.*}', repo_route=True)
69 config.add_route(
111 config.add_route(
70 name='repo_files:annotated_previous',
112 name='repo_files:annotated_previous',
71 pattern='/{repo_name:.*?[^/]}/annotate-previous/{commit_id}/{f_path:.*}', repo_route=True)
113 pattern='/{repo_name:.*?[^/]}/annotate-previous/{commit_id}/{f_path:.*}', repo_route=True)
72
114
73 config.add_route(
115 config.add_route(
74 name='repo_nodetree_full',
116 name='repo_nodetree_full',
75 pattern='/{repo_name:.*?[^/]}/nodetree_full/{commit_id}/{f_path:.*}', repo_route=True)
117 pattern='/{repo_name:.*?[^/]}/nodetree_full/{commit_id}/{f_path:.*}', repo_route=True)
76 config.add_route(
118 config.add_route(
77 name='repo_nodetree_full:default_path',
119 name='repo_nodetree_full:default_path',
78 pattern='/{repo_name:.*?[^/]}/nodetree_full/{commit_id}/', repo_route=True)
120 pattern='/{repo_name:.*?[^/]}/nodetree_full/{commit_id}/', repo_route=True)
79
121
80 config.add_route(
122 config.add_route(
81 name='repo_files_nodelist',
123 name='repo_files_nodelist',
82 pattern='/{repo_name:.*?[^/]}/nodelist/{commit_id}/{f_path:.*}', repo_route=True)
124 pattern='/{repo_name:.*?[^/]}/nodelist/{commit_id}/{f_path:.*}', repo_route=True)
83
125
84 config.add_route(
126 config.add_route(
85 name='repo_file_raw',
127 name='repo_file_raw',
86 pattern='/{repo_name:.*?[^/]}/raw/{commit_id}/{f_path:.*}', repo_route=True)
128 pattern='/{repo_name:.*?[^/]}/raw/{commit_id}/{f_path:.*}', repo_route=True)
87
129
88 config.add_route(
130 config.add_route(
89 name='repo_file_download',
131 name='repo_file_download',
90 pattern='/{repo_name:.*?[^/]}/download/{commit_id}/{f_path:.*}', repo_route=True)
132 pattern='/{repo_name:.*?[^/]}/download/{commit_id}/{f_path:.*}', repo_route=True)
91 config.add_route( # backward compat to keep old links working
133 config.add_route( # backward compat to keep old links working
92 name='repo_file_download:legacy',
134 name='repo_file_download:legacy',
93 pattern='/{repo_name:.*?[^/]}/rawfile/{commit_id}/{f_path:.*}',
135 pattern='/{repo_name:.*?[^/]}/rawfile/{commit_id}/{f_path:.*}',
94 repo_route=True)
136 repo_route=True)
95
137
96 config.add_route(
138 config.add_route(
97 name='repo_file_history',
139 name='repo_file_history',
98 pattern='/{repo_name:.*?[^/]}/history/{commit_id}/{f_path:.*}', repo_route=True)
140 pattern='/{repo_name:.*?[^/]}/history/{commit_id}/{f_path:.*}', repo_route=True)
99
141
100 config.add_route(
142 config.add_route(
101 name='repo_file_authors',
143 name='repo_file_authors',
102 pattern='/{repo_name:.*?[^/]}/authors/{commit_id}/{f_path:.*}', repo_route=True)
144 pattern='/{repo_name:.*?[^/]}/authors/{commit_id}/{f_path:.*}', repo_route=True)
103
145
104 config.add_route(
146 config.add_route(
105 name='repo_files_remove_file',
147 name='repo_files_remove_file',
106 pattern='/{repo_name:.*?[^/]}/remove_file/{commit_id}/{f_path:.*}',
148 pattern='/{repo_name:.*?[^/]}/remove_file/{commit_id}/{f_path:.*}',
107 repo_route=True)
149 repo_route=True)
108 config.add_route(
150 config.add_route(
109 name='repo_files_delete_file',
151 name='repo_files_delete_file',
110 pattern='/{repo_name:.*?[^/]}/delete_file/{commit_id}/{f_path:.*}',
152 pattern='/{repo_name:.*?[^/]}/delete_file/{commit_id}/{f_path:.*}',
111 repo_route=True)
153 repo_route=True)
112 config.add_route(
154 config.add_route(
113 name='repo_files_edit_file',
155 name='repo_files_edit_file',
114 pattern='/{repo_name:.*?[^/]}/edit_file/{commit_id}/{f_path:.*}',
156 pattern='/{repo_name:.*?[^/]}/edit_file/{commit_id}/{f_path:.*}',
115 repo_route=True)
157 repo_route=True)
116 config.add_route(
158 config.add_route(
117 name='repo_files_update_file',
159 name='repo_files_update_file',
118 pattern='/{repo_name:.*?[^/]}/update_file/{commit_id}/{f_path:.*}',
160 pattern='/{repo_name:.*?[^/]}/update_file/{commit_id}/{f_path:.*}',
119 repo_route=True)
161 repo_route=True)
120 config.add_route(
162 config.add_route(
121 name='repo_files_add_file',
163 name='repo_files_add_file',
122 pattern='/{repo_name:.*?[^/]}/add_file/{commit_id}/{f_path:.*}',
164 pattern='/{repo_name:.*?[^/]}/add_file/{commit_id}/{f_path:.*}',
123 repo_route=True)
165 repo_route=True)
124 config.add_route(
166 config.add_route(
125 name='repo_files_create_file',
167 name='repo_files_create_file',
126 pattern='/{repo_name:.*?[^/]}/create_file/{commit_id}/{f_path:.*}',
168 pattern='/{repo_name:.*?[^/]}/create_file/{commit_id}/{f_path:.*}',
127 repo_route=True)
169 repo_route=True)
128
170
129 # refs data
171 # refs data
130 config.add_route(
172 config.add_route(
131 name='repo_refs_data',
173 name='repo_refs_data',
132 pattern='/{repo_name:.*?[^/]}/refs-data', repo_route=True)
174 pattern='/{repo_name:.*?[^/]}/refs-data', repo_route=True)
133
175
134 config.add_route(
176 config.add_route(
135 name='repo_refs_changelog_data',
177 name='repo_refs_changelog_data',
136 pattern='/{repo_name:.*?[^/]}/refs-data-changelog', repo_route=True)
178 pattern='/{repo_name:.*?[^/]}/refs-data-changelog', repo_route=True)
137
179
138 config.add_route(
180 config.add_route(
139 name='repo_stats',
181 name='repo_stats',
140 pattern='/{repo_name:.*?[^/]}/repo_stats/{commit_id}', repo_route=True)
182 pattern='/{repo_name:.*?[^/]}/repo_stats/{commit_id}', repo_route=True)
141
183
142 # Changelog
184 # Changelog
143 config.add_route(
185 config.add_route(
144 name='repo_changelog',
186 name='repo_changelog',
145 pattern='/{repo_name:.*?[^/]}/changelog', repo_route=True)
187 pattern='/{repo_name:.*?[^/]}/changelog', repo_route=True)
146 config.add_route(
188 config.add_route(
147 name='repo_changelog_file',
189 name='repo_changelog_file',
148 pattern='/{repo_name:.*?[^/]}/changelog/{commit_id}/{f_path:.*}', repo_route=True)
190 pattern='/{repo_name:.*?[^/]}/changelog/{commit_id}/{f_path:.*}', repo_route=True)
149 config.add_route(
191 config.add_route(
150 name='repo_changelog_elements',
192 name='repo_changelog_elements',
151 pattern='/{repo_name:.*?[^/]}/changelog_elements', repo_route=True)
193 pattern='/{repo_name:.*?[^/]}/changelog_elements', repo_route=True)
152
194
153 # Tags
195 # Tags
154 config.add_route(
196 config.add_route(
155 name='tags_home',
197 name='tags_home',
156 pattern='/{repo_name:.*?[^/]}/tags', repo_route=True)
198 pattern='/{repo_name:.*?[^/]}/tags', repo_route=True)
157
199
158 # Branches
200 # Branches
159 config.add_route(
201 config.add_route(
160 name='branches_home',
202 name='branches_home',
161 pattern='/{repo_name:.*?[^/]}/branches', repo_route=True)
203 pattern='/{repo_name:.*?[^/]}/branches', repo_route=True)
162
204
163 config.add_route(
205 config.add_route(
164 name='bookmarks_home',
206 name='bookmarks_home',
165 pattern='/{repo_name:.*?[^/]}/bookmarks', repo_route=True)
207 pattern='/{repo_name:.*?[^/]}/bookmarks', repo_route=True)
166
208
167 # Pull Requests
209 # Pull Requests
168 config.add_route(
210 config.add_route(
169 name='pullrequest_show',
211 name='pullrequest_show',
170 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id}',
212 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id}',
171 repo_route=True)
213 repo_route=True)
172
214
173 config.add_route(
215 config.add_route(
174 name='pullrequest_show_all',
216 name='pullrequest_show_all',
175 pattern='/{repo_name:.*?[^/]}/pull-request',
217 pattern='/{repo_name:.*?[^/]}/pull-request',
176 repo_route=True, repo_accepted_types=['hg', 'git'])
218 repo_route=True, repo_accepted_types=['hg', 'git'])
177
219
178 config.add_route(
220 config.add_route(
179 name='pullrequest_show_all_data',
221 name='pullrequest_show_all_data',
180 pattern='/{repo_name:.*?[^/]}/pull-request-data',
222 pattern='/{repo_name:.*?[^/]}/pull-request-data',
181 repo_route=True, repo_accepted_types=['hg', 'git'])
223 repo_route=True, repo_accepted_types=['hg', 'git'])
182
224
183 # commits aka changesets
184 # TODO(dan): handle default landing revision ?
185 config.add_route(
186 name='changeset_home',
187 pattern='/{repo_name:.*?[^/]}/changeset/{revision}',
188 repo_route=True)
189 config.add_route(
190 name='changeset_children',
191 pattern='/{repo_name:.*?[^/]}/changeset_children/{revision}',
192 repo_route=True)
193 config.add_route(
194 name='changeset_parents',
195 pattern='/{repo_name:.*?[^/]}/changeset_parents/{revision}',
196 repo_route=True)
197
198 # Settings
225 # Settings
199 config.add_route(
226 config.add_route(
200 name='edit_repo',
227 name='edit_repo',
201 pattern='/{repo_name:.*?[^/]}/settings', repo_route=True)
228 pattern='/{repo_name:.*?[^/]}/settings', repo_route=True)
202
229
203 # Settings advanced
230 # Settings advanced
204 config.add_route(
231 config.add_route(
205 name='edit_repo_advanced',
232 name='edit_repo_advanced',
206 pattern='/{repo_name:.*?[^/]}/settings/advanced', repo_route=True)
233 pattern='/{repo_name:.*?[^/]}/settings/advanced', repo_route=True)
207 config.add_route(
234 config.add_route(
208 name='edit_repo_advanced_delete',
235 name='edit_repo_advanced_delete',
209 pattern='/{repo_name:.*?[^/]}/settings/advanced/delete', repo_route=True)
236 pattern='/{repo_name:.*?[^/]}/settings/advanced/delete', repo_route=True)
210 config.add_route(
237 config.add_route(
211 name='edit_repo_advanced_locking',
238 name='edit_repo_advanced_locking',
212 pattern='/{repo_name:.*?[^/]}/settings/advanced/locking', repo_route=True)
239 pattern='/{repo_name:.*?[^/]}/settings/advanced/locking', repo_route=True)
213 config.add_route(
240 config.add_route(
214 name='edit_repo_advanced_journal',
241 name='edit_repo_advanced_journal',
215 pattern='/{repo_name:.*?[^/]}/settings/advanced/journal', repo_route=True)
242 pattern='/{repo_name:.*?[^/]}/settings/advanced/journal', repo_route=True)
216 config.add_route(
243 config.add_route(
217 name='edit_repo_advanced_fork',
244 name='edit_repo_advanced_fork',
218 pattern='/{repo_name:.*?[^/]}/settings/advanced/fork', repo_route=True)
245 pattern='/{repo_name:.*?[^/]}/settings/advanced/fork', repo_route=True)
219
246
220 # Caches
247 # Caches
221 config.add_route(
248 config.add_route(
222 name='edit_repo_caches',
249 name='edit_repo_caches',
223 pattern='/{repo_name:.*?[^/]}/settings/caches', repo_route=True)
250 pattern='/{repo_name:.*?[^/]}/settings/caches', repo_route=True)
224
251
225 # Permissions
252 # Permissions
226 config.add_route(
253 config.add_route(
227 name='edit_repo_perms',
254 name='edit_repo_perms',
228 pattern='/{repo_name:.*?[^/]}/settings/permissions', repo_route=True)
255 pattern='/{repo_name:.*?[^/]}/settings/permissions', repo_route=True)
229
256
230 # Repo Review Rules
257 # Repo Review Rules
231 config.add_route(
258 config.add_route(
232 name='repo_reviewers',
259 name='repo_reviewers',
233 pattern='/{repo_name:.*?[^/]}/settings/review/rules', repo_route=True)
260 pattern='/{repo_name:.*?[^/]}/settings/review/rules', repo_route=True)
234
261
235 config.add_route(
262 config.add_route(
236 name='repo_default_reviewers_data',
263 name='repo_default_reviewers_data',
237 pattern='/{repo_name:.*?[^/]}/settings/review/default-reviewers', repo_route=True)
264 pattern='/{repo_name:.*?[^/]}/settings/review/default-reviewers', repo_route=True)
238
265
239 # Maintenance
266 # Maintenance
240 config.add_route(
267 config.add_route(
241 name='repo_maintenance',
268 name='repo_maintenance',
242 pattern='/{repo_name:.*?[^/]}/settings/maintenance', repo_route=True)
269 pattern='/{repo_name:.*?[^/]}/settings/maintenance', repo_route=True)
243
270
244 config.add_route(
271 config.add_route(
245 name='repo_maintenance_execute',
272 name='repo_maintenance_execute',
246 pattern='/{repo_name:.*?[^/]}/settings/maintenance/execute', repo_route=True)
273 pattern='/{repo_name:.*?[^/]}/settings/maintenance/execute', repo_route=True)
247
274
248 # Strip
275 # Strip
249 config.add_route(
276 config.add_route(
250 name='strip',
277 name='strip',
251 pattern='/{repo_name:.*?[^/]}/settings/strip', repo_route=True)
278 pattern='/{repo_name:.*?[^/]}/settings/strip', repo_route=True)
252
279
253 config.add_route(
280 config.add_route(
254 name='strip_check',
281 name='strip_check',
255 pattern='/{repo_name:.*?[^/]}/settings/strip_check', repo_route=True)
282 pattern='/{repo_name:.*?[^/]}/settings/strip_check', repo_route=True)
256
283
257 config.add_route(
284 config.add_route(
258 name='strip_execute',
285 name='strip_execute',
259 pattern='/{repo_name:.*?[^/]}/settings/strip_execute', repo_route=True)
286 pattern='/{repo_name:.*?[^/]}/settings/strip_execute', repo_route=True)
260
287
261 # ATOM/RSS Feed
288 # ATOM/RSS Feed
262 config.add_route(
289 config.add_route(
263 name='rss_feed_home',
290 name='rss_feed_home',
264 pattern='/{repo_name:.*?[^/]}/feed/rss', repo_route=True)
291 pattern='/{repo_name:.*?[^/]}/feed/rss', repo_route=True)
265
292
266 config.add_route(
293 config.add_route(
267 name='atom_feed_home',
294 name='atom_feed_home',
268 pattern='/{repo_name:.*?[^/]}/feed/atom', repo_route=True)
295 pattern='/{repo_name:.*?[^/]}/feed/atom', repo_route=True)
269
296
270 # NOTE(marcink): needs to be at the end for catch-all
297 # NOTE(marcink): needs to be at the end for catch-all
271 add_route_with_slash(
298 add_route_with_slash(
272 config,
299 config,
273 name='repo_summary',
300 name='repo_summary',
274 pattern='/{repo_name:.*?[^/]}', repo_route=True)
301 pattern='/{repo_name:.*?[^/]}', repo_route=True)
275
302
276 # Scan module for configuration decorators.
303 # Scan module for configuration decorators.
277 config.scan()
304 config.scan()
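The routes registered above are only URL patterns; the view callables that answer them live in the new pyramid views module and are discovered by config.scan(). As a rough sketch of that wiring (the class, method, and renderer names below are illustrative assumptions, not the exact ones from this changeset), binding one of the new routes to a view typically looks like:

from pyramid.view import view_config


class RepoCommitsView(object):  # illustrative name
    def __init__(self, context, request):
        self.request = request

    # @view_config ties the 'repo_commit' route declared in routes.py to this
    # method; config.scan() picks the decorator up at startup.
    @view_config(route_name='repo_commit', request_method='GET',
                 renderer='json')
    def repo_commit_show(self):
        # {commit_id} from the route pattern is available via matchdict
        commit_id = self.request.matchdict['commit_id']
        return {'commit_id': commit_id}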
@@ -1,288 +1,313 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 from pylons.i18n import ungettext
22 import pytest
21 import pytest
23
22
24 from rhodecode.tests import *
23 from rhodecode.tests import TestController
24
25 from rhodecode.model.db import (
25 from rhodecode.model.db import (
26 ChangesetComment, Notification, UserNotification)
26 ChangesetComment, Notification, UserNotification)
27 from rhodecode.model.meta import Session
27 from rhodecode.model.meta import Session
28 from rhodecode.lib import helpers as h
28 from rhodecode.lib import helpers as h
29
29
30
30
31 def route_path(name, params=None, **kwargs):
32 import urllib
33
34 base_url = {
35 'repo_commit': '/{repo_name}/changeset/{commit_id}',
36 'repo_commit_comment_create': '/{repo_name}/changeset/{commit_id}/comment/create',
37 'repo_commit_comment_preview': '/{repo_name}/changeset/{commit_id}/comment/preview',
38 'repo_commit_comment_delete': '/{repo_name}/changeset/{commit_id}/comment/{comment_id}/delete',
39 }[name].format(**kwargs)
40
41 if params:
42 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
43 return base_url
44
45
31 @pytest.mark.backends("git", "hg", "svn")
46 @pytest.mark.backends("git", "hg", "svn")
32 class TestCommitCommentsController(TestController):
47 class TestRepoCommitCommentsView(TestController):
33
48
34 @pytest.fixture(autouse=True)
49 @pytest.fixture(autouse=True)
35 def prepare(self, request, pylonsapp):
50 def prepare(self, request, pylonsapp):
36 for x in ChangesetComment.query().all():
51 for x in ChangesetComment.query().all():
37 Session().delete(x)
52 Session().delete(x)
38 Session().commit()
53 Session().commit()
39
54
40 for x in Notification.query().all():
55 for x in Notification.query().all():
41 Session().delete(x)
56 Session().delete(x)
42 Session().commit()
57 Session().commit()
43
58
44 request.addfinalizer(self.cleanup)
59 request.addfinalizer(self.cleanup)
45
60
46 def cleanup(self):
61 def cleanup(self):
47 for x in ChangesetComment.query().all():
62 for x in ChangesetComment.query().all():
48 Session().delete(x)
63 Session().delete(x)
49 Session().commit()
64 Session().commit()
50
65
51 for x in Notification.query().all():
66 for x in Notification.query().all():
52 Session().delete(x)
67 Session().delete(x)
53 Session().commit()
68 Session().commit()
54
69
55 @pytest.mark.parametrize('comment_type', ChangesetComment.COMMENT_TYPES)
70 @pytest.mark.parametrize('comment_type', ChangesetComment.COMMENT_TYPES)
56 def test_create(self, comment_type, backend):
71 def test_create(self, comment_type, backend):
57 self.log_user()
72 self.log_user()
58 commit = backend.repo.get_commit('300')
73 commit = backend.repo.get_commit('300')
59 commit_id = commit.raw_id
74 commit_id = commit.raw_id
60 text = u'CommentOnCommit'
75 text = u'CommentOnCommit'
61
76
62 params = {'text': text, 'csrf_token': self.csrf_token,
77 params = {'text': text, 'csrf_token': self.csrf_token,
63 'comment_type': comment_type}
78 'comment_type': comment_type}
64 self.app.post(
79 self.app.post(
65 url(controller='changeset', action='comment',
80 route_path('repo_commit_comment_create',
66 repo_name=backend.repo_name, revision=commit_id), params=params)
81 repo_name=backend.repo_name, commit_id=commit_id),
82 params=params)
67
83
68 response = self.app.get(
84 response = self.app.get(
69 url(controller='changeset', action='index',
85 route_path('repo_commit',
70 repo_name=backend.repo_name, revision=commit_id))
86 repo_name=backend.repo_name, commit_id=commit_id))
71
87
72 # test DB
88 # test DB
73 assert ChangesetComment.query().count() == 1
89 assert ChangesetComment.query().count() == 1
74 assert_comment_links(response, ChangesetComment.query().count(), 0)
90 assert_comment_links(response, ChangesetComment.query().count(), 0)
75
91
76 assert Notification.query().count() == 1
92 assert Notification.query().count() == 1
77 assert ChangesetComment.query().count() == 1
93 assert ChangesetComment.query().count() == 1
78
94
79 notification = Notification.query().all()[0]
95 notification = Notification.query().all()[0]
80
96
81 comment_id = ChangesetComment.query().first().comment_id
97 comment_id = ChangesetComment.query().first().comment_id
82 assert notification.type_ == Notification.TYPE_CHANGESET_COMMENT
98 assert notification.type_ == Notification.TYPE_CHANGESET_COMMENT
83
99
84 sbj = 'left {0} on commit `{1}` in the {2} repository'.format(
100 sbj = 'left {0} on commit `{1}` in the {2} repository'.format(
85 comment_type, h.show_id(commit), backend.repo_name)
101 comment_type, h.show_id(commit), backend.repo_name)
86 assert sbj in notification.subject
102 assert sbj in notification.subject
87
103
88 lnk = (u'/{0}/changeset/{1}#comment-{2}'.format(
104 lnk = (u'/{0}/changeset/{1}#comment-{2}'.format(
89 backend.repo_name, commit_id, comment_id))
105 backend.repo_name, commit_id, comment_id))
90 assert lnk in notification.body
106 assert lnk in notification.body
91
107
92 @pytest.mark.parametrize('comment_type', ChangesetComment.COMMENT_TYPES)
108 @pytest.mark.parametrize('comment_type', ChangesetComment.COMMENT_TYPES)
93 def test_create_inline(self, comment_type, backend):
109 def test_create_inline(self, comment_type, backend):
94 self.log_user()
110 self.log_user()
95 commit = backend.repo.get_commit('300')
111 commit = backend.repo.get_commit('300')
96 commit_id = commit.raw_id
112 commit_id = commit.raw_id
97 text = u'CommentOnCommit'
113 text = u'CommentOnCommit'
98 f_path = 'vcs/web/simplevcs/views/repository.py'
114 f_path = 'vcs/web/simplevcs/views/repository.py'
99 line = 'n1'
115 line = 'n1'
100
116
101 params = {'text': text, 'f_path': f_path, 'line': line,
117 params = {'text': text, 'f_path': f_path, 'line': line,
102 'comment_type': comment_type,
118 'comment_type': comment_type,
103 'csrf_token': self.csrf_token}
119 'csrf_token': self.csrf_token}
104
120
105 self.app.post(
121 self.app.post(
106 url(controller='changeset', action='comment',
122 route_path('repo_commit_comment_create',
107 repo_name=backend.repo_name, revision=commit_id), params=params)
123 repo_name=backend.repo_name, commit_id=commit_id),
124 params=params)
108
125
109 response = self.app.get(
126 response = self.app.get(
110 url(controller='changeset', action='index',
127 route_path('repo_commit',
111 repo_name=backend.repo_name, revision=commit_id))
128 repo_name=backend.repo_name, commit_id=commit_id))
112
129
113 # test DB
130 # test DB
114 assert ChangesetComment.query().count() == 1
131 assert ChangesetComment.query().count() == 1
115 assert_comment_links(response, 0, ChangesetComment.query().count())
132 assert_comment_links(response, 0, ChangesetComment.query().count())
116
133
117 if backend.alias == 'svn':
134 if backend.alias == 'svn':
118 response.mustcontain(
135 response.mustcontain(
119 '''data-f-path="vcs/commands/summary.py" '''
136 '''data-f-path="vcs/commands/summary.py" '''
120 '''id="a_c--ad05457a43f8"'''
137 '''id="a_c--ad05457a43f8"'''
121 )
138 )
122 else:
139 else:
123 response.mustcontain(
140 response.mustcontain(
124 '''data-f-path="vcs/backends/hg.py" '''
141 '''data-f-path="vcs/backends/hg.py" '''
125 '''id="a_c--9c390eb52cd6"'''
142 '''id="a_c--9c390eb52cd6"'''
126 )
143 )
127
144
128 assert Notification.query().count() == 1
145 assert Notification.query().count() == 1
129 assert ChangesetComment.query().count() == 1
146 assert ChangesetComment.query().count() == 1
130
147
131 notification = Notification.query().all()[0]
148 notification = Notification.query().all()[0]
132 comment = ChangesetComment.query().first()
149 comment = ChangesetComment.query().first()
133 assert notification.type_ == Notification.TYPE_CHANGESET_COMMENT
150 assert notification.type_ == Notification.TYPE_CHANGESET_COMMENT
134
151
135 assert comment.revision == commit_id
152 assert comment.revision == commit_id
136 sbj = 'left {comment_type} on commit `{commit}` ' \
153 sbj = 'left {comment_type} on commit `{commit}` ' \
137 '(file: `{f_path}`) in the {repo} repository'.format(
154 '(file: `{f_path}`) in the {repo} repository'.format(
138 commit=h.show_id(commit),
155 commit=h.show_id(commit),
139 f_path=f_path, line=line, repo=backend.repo_name,
156 f_path=f_path, line=line, repo=backend.repo_name,
140 comment_type=comment_type)
157 comment_type=comment_type)
141 assert sbj in notification.subject
158 assert sbj in notification.subject
142
159
143 lnk = (u'/{0}/changeset/{1}#comment-{2}'.format(
160 lnk = (u'/{0}/changeset/{1}#comment-{2}'.format(
144 backend.repo_name, commit_id, comment.comment_id))
161 backend.repo_name, commit_id, comment.comment_id))
145 assert lnk in notification.body
162 assert lnk in notification.body
146 assert 'on line n1' in notification.body
163 assert 'on line n1' in notification.body
147
164
148 def test_create_with_mention(self, backend):
165 def test_create_with_mention(self, backend):
149 self.log_user()
166 self.log_user()
150
167
151 commit_id = backend.repo.get_commit('300').raw_id
168 commit_id = backend.repo.get_commit('300').raw_id
152 text = u'@test_regular check CommentOnCommit'
169 text = u'@test_regular check CommentOnCommit'
153
170
154 params = {'text': text, 'csrf_token': self.csrf_token}
171 params = {'text': text, 'csrf_token': self.csrf_token}
155 self.app.post(
172 self.app.post(
156 url(controller='changeset', action='comment',
173 route_path('repo_commit_comment_create',
157 repo_name=backend.repo_name, revision=commit_id), params=params)
174 repo_name=backend.repo_name, commit_id=commit_id),
175 params=params)
158
176
159 response = self.app.get(
177 response = self.app.get(
160 url(controller='changeset', action='index',
178 route_path('repo_commit',
161 repo_name=backend.repo_name, revision=commit_id))
179 repo_name=backend.repo_name, commit_id=commit_id))
162 # test DB
180 # test DB
163 assert ChangesetComment.query().count() == 1
181 assert ChangesetComment.query().count() == 1
164 assert_comment_links(response, ChangesetComment.query().count(), 0)
182 assert_comment_links(response, ChangesetComment.query().count(), 0)
165
183
166 notification = Notification.query().one()
184 notification = Notification.query().one()
167
185
168 assert len(notification.recipients) == 2
186 assert len(notification.recipients) == 2
169 users = [x.username for x in notification.recipients]
187 users = [x.username for x in notification.recipients]
170
188
171 # test_regular gets notification by @mention
189 # test_regular gets notification by @mention
172 assert sorted(users) == [u'test_admin', u'test_regular']
190 assert sorted(users) == [u'test_admin', u'test_regular']
173
191
174 def test_create_with_status_change(self, backend):
192 def test_create_with_status_change(self, backend):
175 self.log_user()
193 self.log_user()
176 commit = backend.repo.get_commit('300')
194 commit = backend.repo.get_commit('300')
177 commit_id = commit.raw_id
195 commit_id = commit.raw_id
178 text = u'CommentOnCommit'
196 text = u'CommentOnCommit'
179 f_path = 'vcs/web/simplevcs/views/repository.py'
197 f_path = 'vcs/web/simplevcs/views/repository.py'
180 line = 'n1'
198 line = 'n1'
181
199
182 params = {'text': text, 'changeset_status': 'approved',
200 params = {'text': text, 'changeset_status': 'approved',
183 'csrf_token': self.csrf_token}
201 'csrf_token': self.csrf_token}
184
202
185 self.app.post(
203 self.app.post(
186 url(controller='changeset', action='comment',
204 route_path(
187 repo_name=backend.repo_name, revision=commit_id), params=params)
205 'repo_commit_comment_create',
206 repo_name=backend.repo_name, commit_id=commit_id),
207 params=params)
188
208
189 response = self.app.get(
209 response = self.app.get(
190 url(controller='changeset', action='index',
210 route_path('repo_commit',
191 repo_name=backend.repo_name, revision=commit_id))
211 repo_name=backend.repo_name, commit_id=commit_id))
192
212
193 # test DB
213 # test DB
194 assert ChangesetComment.query().count() == 1
214 assert ChangesetComment.query().count() == 1
195 assert_comment_links(response, ChangesetComment.query().count(), 0)
215 assert_comment_links(response, ChangesetComment.query().count(), 0)
196
216
197 assert Notification.query().count() == 1
217 assert Notification.query().count() == 1
198 assert ChangesetComment.query().count() == 1
218 assert ChangesetComment.query().count() == 1
199
219
200 notification = Notification.query().all()[0]
220 notification = Notification.query().all()[0]
201
221
202 comment_id = ChangesetComment.query().first().comment_id
222 comment_id = ChangesetComment.query().first().comment_id
203 assert notification.type_ == Notification.TYPE_CHANGESET_COMMENT
223 assert notification.type_ == Notification.TYPE_CHANGESET_COMMENT
204
224
205 sbj = 'left note on commit `{0}` (status: Approved) ' \
225 sbj = 'left note on commit `{0}` (status: Approved) ' \
206 'in the {1} repository'.format(
226 'in the {1} repository'.format(
207 h.show_id(commit), backend.repo_name)
227 h.show_id(commit), backend.repo_name)
208 assert sbj in notification.subject
228 assert sbj in notification.subject
209
229
210 lnk = (u'/{0}/changeset/{1}#comment-{2}'.format(
230 lnk = (u'/{0}/changeset/{1}#comment-{2}'.format(
211 backend.repo_name, commit_id, comment_id))
231 backend.repo_name, commit_id, comment_id))
212 assert lnk in notification.body
232 assert lnk in notification.body
213
233
214 def test_delete(self, backend):
234 def test_delete(self, backend):
215 self.log_user()
235 self.log_user()
216 commit_id = backend.repo.get_commit('300').raw_id
236 commit_id = backend.repo.get_commit('300').raw_id
217 text = u'CommentOnCommit'
237 text = u'CommentOnCommit'
218
238
219 params = {'text': text, 'csrf_token': self.csrf_token}
239 params = {'text': text, 'csrf_token': self.csrf_token}
220 self.app.post(
240 self.app.post(
221 url(
241 route_path(
222 controller='changeset', action='comment',
242 'repo_commit_comment_create',
223 repo_name=backend.repo_name, revision=commit_id),
243 repo_name=backend.repo_name, commit_id=commit_id),
224 params=params)
244 params=params)
225
245
226 comments = ChangesetComment.query().all()
246 comments = ChangesetComment.query().all()
227 assert len(comments) == 1
247 assert len(comments) == 1
228 comment_id = comments[0].comment_id
248 comment_id = comments[0].comment_id
229
249
230 self.app.post(
250 self.app.post(
231 url(controller='changeset', action='delete_comment',
251 route_path('repo_commit_comment_delete',
232 repo_name=backend.repo_name, comment_id=comment_id),
252 repo_name=backend.repo_name,
233 params={'_method': 'delete', 'csrf_token': self.csrf_token})
253 commit_id=commit_id,
254 comment_id=comment_id),
255 params={'csrf_token': self.csrf_token})
234
256
235 comments = ChangesetComment.query().all()
257 comments = ChangesetComment.query().all()
236 assert len(comments) == 0
258 assert len(comments) == 0
237
259
238 response = self.app.get(
260 response = self.app.get(
239 url(controller='changeset', action='index',
261 route_path('repo_commit',
240 repo_name=backend.repo_name, revision=commit_id))
262 repo_name=backend.repo_name, commit_id=commit_id))
241 assert_comment_links(response, 0, 0)
263 assert_comment_links(response, 0, 0)
242
264
243 @pytest.mark.parametrize('renderer, input, output', [
265 @pytest.mark.parametrize('renderer, input, output', [
244 ('rst', 'plain text', '<p>plain text</p>'),
266 ('rst', 'plain text', '<p>plain text</p>'),
245 ('rst', 'header\n======', '<h1 class="title">header</h1>'),
267 ('rst', 'header\n======', '<h1 class="title">header</h1>'),
246 ('rst', '*italics*', '<em>italics</em>'),
268 ('rst', '*italics*', '<em>italics</em>'),
247 ('rst', '**bold**', '<strong>bold</strong>'),
269 ('rst', '**bold**', '<strong>bold</strong>'),
248 ('markdown', 'plain text', '<p>plain text</p>'),
270 ('markdown', 'plain text', '<p>plain text</p>'),
249 ('markdown', '# header', '<h1>header</h1>'),
271 ('markdown', '# header', '<h1>header</h1>'),
250 ('markdown', '*italics*', '<em>italics</em>'),
272 ('markdown', '*italics*', '<em>italics</em>'),
251 ('markdown', '**bold**', '<strong>bold</strong>'),
273 ('markdown', '**bold**', '<strong>bold</strong>'),
252 ], ids=['rst-plain', 'rst-header', 'rst-italics', 'rst-bold', 'md-plain',
274 ], ids=['rst-plain', 'rst-header', 'rst-italics', 'rst-bold', 'md-plain',
253 'md-header', 'md-italics', 'md-bold', ])
275 'md-header', 'md-italics', 'md-bold', ])
254 def test_preview(self, renderer, input, output, backend):
276 def test_preview(self, renderer, input, output, backend, xhr_header):
255 self.log_user()
277 self.log_user()
256 params = {
278 params = {
257 'renderer': renderer,
279 'renderer': renderer,
258 'text': input,
280 'text': input,
259 'csrf_token': self.csrf_token
281 'csrf_token': self.csrf_token
260 }
282 }
261 environ = {
283 commit_id = '0' * 16 # fake this for tests
262 'HTTP_X_PARTIAL_XHR': 'true'
263 }
264 response = self.app.post(
284 response = self.app.post(
265 url(controller='changeset',
285 route_path('repo_commit_comment_preview',
266 action='preview_comment',
286 repo_name=backend.repo_name, commit_id=commit_id,),
267 repo_name=backend.repo_name),
268 params=params,
287 params=params,
269 extra_environ=environ)
288 extra_environ=xhr_header)
270
289
271 response.mustcontain(output)
290 response.mustcontain(output)
272
291
273
292
274 def assert_comment_links(response, comments, inline_comments):
293 def assert_comment_links(response, comments, inline_comments):
275 comments_text = ungettext("%d Commit comment",
294 if comments == 1:
276 "%d Commit comments", comments) % comments
295 comments_text = "%d Commit comment" % comments
296 else:
297 comments_text = "%d Commit comments" % comments
298
299 if inline_comments == 1:
300 inline_comments_text = "%d Inline Comment" % inline_comments
301 else:
302 inline_comments_text = "%d Inline Comments" % inline_comments
303
277 if comments:
304 if comments:
278 response.mustcontain('<a href="#comments">%s</a>,' % comments_text)
305 response.mustcontain('<a href="#comments">%s</a>,' % comments_text)
279 else:
306 else:
280 response.mustcontain(comments_text)
307 response.mustcontain(comments_text)
281
308
282 inline_comments_text = ungettext("%d Inline Comment", "%d Inline Comments",
283 inline_comments) % inline_comments
284 if inline_comments:
309 if inline_comments:
285 response.mustcontain(
310 response.mustcontain(
286 'id="inline-comments-counter">%s</' % inline_comments_text)
311 'id="inline-comments-counter">%s</' % inline_comments_text)
287 else:
312 else:
288 response.mustcontain(inline_comments_text)
313 response.mustcontain(inline_comments_text)
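test_preview now receives an xhr_header fixture instead of assembling the HTTP_X_PARTIAL_XHR environ dict inline as the removed code did. The fixture itself is defined elsewhere in the test suite; a minimal sketch consistent with the removed inline dict might be:

import pytest


@pytest.fixture
def xhr_header():
    # WSGI environ entry marking the request as a partial/XHR request,
    # mirroring the dict the old test built by hand
    return {'HTTP_X_PARTIAL_XHR': 'true'}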
@@ -1,301 +1,318 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import pytest
21 import pytest
22
22
23 from rhodecode.lib.helpers import _shorten_commit_id
23 from rhodecode.lib.helpers import _shorten_commit_id
24 from rhodecode.tests import url
24
25
26 def route_path(name, params=None, **kwargs):
27 import urllib
28
29 base_url = {
30 'repo_commit': '/{repo_name}/changeset/{commit_id}',
31 'repo_commit_children': '/{repo_name}/changeset_children/{commit_id}',
32 'repo_commit_parents': '/{repo_name}/changeset_parents/{commit_id}',
33 'repo_commit_raw': '/{repo_name}/changeset-diff/{commit_id}',
34 'repo_commit_patch': '/{repo_name}/changeset-patch/{commit_id}',
35 'repo_commit_download': '/{repo_name}/changeset-download/{commit_id}',
36 'repo_commit_data': '/{repo_name}/changeset-data/{commit_id}',
37 'repo_compare': '/{repo_name}/compare/{source_ref_type}@{source_ref}...{target_ref_type}@{target_ref}',
38 }[name].format(**kwargs)
39
40 if params:
41 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
42 return base_url
25
43
26
44
27 @pytest.mark.usefixtures("app")
45 @pytest.mark.usefixtures("app")
28 class TestChangesetController(object):
46 class TestRepoCommitView(object):
29
47
30 def test_index(self, backend):
48 def test_show_commit(self, backend):
31 commit_id = self.commit_id[backend.alias]
49 commit_id = self.commit_id[backend.alias]
32 response = self.app.get(url(
50 response = self.app.get(route_path(
33 controller='changeset', action='index',
51 'repo_commit', repo_name=backend.repo_name, commit_id=commit_id))
34 repo_name=backend.repo_name, revision=commit_id))
35 response.mustcontain('Added a symlink')
52 response.mustcontain('Added a symlink')
36 response.mustcontain(commit_id)
53 response.mustcontain(commit_id)
37 response.mustcontain('No newline at end of file')
54 response.mustcontain('No newline at end of file')
38
55
39 def test_index_raw(self, backend):
56 def test_show_raw(self, backend):
40 commit_id = self.commit_id[backend.alias]
57 commit_id = self.commit_id[backend.alias]
41 response = self.app.get(url(
58 response = self.app.get(route_path(
42 controller='changeset', action='changeset_raw',
59 'repo_commit_raw',
43 repo_name=backend.repo_name, revision=commit_id))
60 repo_name=backend.repo_name, commit_id=commit_id))
44 assert response.body == self.diffs[backend.alias]
61 assert response.body == self.diffs[backend.alias]
45
62
46 def test_index_raw_patch(self, backend):
63 def test_show_raw_patch(self, backend):
47 response = self.app.get(url(
64 response = self.app.get(route_path(
48 controller='changeset', action='changeset_patch',
65 'repo_commit_patch', repo_name=backend.repo_name,
49 repo_name=backend.repo_name,
66 commit_id=self.commit_id[backend.alias]))
50 revision=self.commit_id[backend.alias]))
51 assert response.body == self.patches[backend.alias]
67 assert response.body == self.patches[backend.alias]
52
68
53 def test_index_changeset_download(self, backend):
69 def test_commit_download(self, backend):
54 response = self.app.get(url(
70 response = self.app.get(route_path(
55 controller='changeset', action='changeset_download',
71 'repo_commit_download',
56 repo_name=backend.repo_name,
72 repo_name=backend.repo_name,
57 revision=self.commit_id[backend.alias]))
73 commit_id=self.commit_id[backend.alias]))
58 assert response.body == self.diffs[backend.alias]
74 assert response.body == self.diffs[backend.alias]
59
75
60 def test_single_commit_page_different_ops(self, backend):
76 def test_single_commit_page_different_ops(self, backend):
61 commit_id = {
77 commit_id = {
62 'hg': '603d6c72c46d953420c89d36372f08d9f305f5dd',
78 'hg': '603d6c72c46d953420c89d36372f08d9f305f5dd',
63 'git': '03fa803d7e9fb14daa9a3089e0d1494eda75d986',
79 'git': '03fa803d7e9fb14daa9a3089e0d1494eda75d986',
64 'svn': '337',
80 'svn': '337',
65 }
81 }
66 commit_id = commit_id[backend.alias]
82 commit_id = commit_id[backend.alias]
67 response = self.app.get(url(
83 response = self.app.get(route_path(
68 controller='changeset', action='index',
84 'repo_commit',
69 repo_name=backend.repo_name, revision=commit_id))
85 repo_name=backend.repo_name, commit_id=commit_id))
70
86
71 response.mustcontain(_shorten_commit_id(commit_id))
87 response.mustcontain(_shorten_commit_id(commit_id))
72 response.mustcontain('21 files changed: 943 inserted, 288 deleted')
88 response.mustcontain('21 files changed: 943 inserted, 288 deleted')
73
89
74 # files op files
90 # files op files
75 response.mustcontain('File no longer present at commit: %s' %
91 response.mustcontain('File no longer present at commit: %s' %
76 _shorten_commit_id(commit_id))
92 _shorten_commit_id(commit_id))
77
93
78 # svn uses a different filename
94 # svn uses a different filename
79 if backend.alias == 'svn':
95 if backend.alias == 'svn':
80 response.mustcontain('new file 10644')
96 response.mustcontain('new file 10644')
81 else:
97 else:
82 response.mustcontain('new file 100644')
98 response.mustcontain('new file 100644')
83 response.mustcontain('Changed theme to ADC theme') # commit msg
99 response.mustcontain('Changed theme to ADC theme') # commit msg
84
100
85 self._check_new_diff_menus(response, right_menu=True)
101 self._check_new_diff_menus(response, right_menu=True)
86
102
87 def test_commit_range_page_different_ops(self, backend):
103 def test_commit_range_page_different_ops(self, backend):
88 commit_id_range = {
104 commit_id_range = {
89 'hg': (
105 'hg': (
90 '25d7e49c18b159446cadfa506a5cf8ad1cb04067',
106 '25d7e49c18b159446cadfa506a5cf8ad1cb04067',
91 '603d6c72c46d953420c89d36372f08d9f305f5dd'),
107 '603d6c72c46d953420c89d36372f08d9f305f5dd'),
92 'git': (
108 'git': (
93 '6fc9270775aaf5544c1deb014f4ddd60c952fcbb',
109 '6fc9270775aaf5544c1deb014f4ddd60c952fcbb',
94 '03fa803d7e9fb14daa9a3089e0d1494eda75d986'),
110 '03fa803d7e9fb14daa9a3089e0d1494eda75d986'),
95 'svn': (
111 'svn': (
96 '335',
112 '335',
97 '337'),
113 '337'),
98 }
114 }
99 commit_ids = commit_id_range[backend.alias]
115 commit_ids = commit_id_range[backend.alias]
100 commit_id = '%s...%s' % (commit_ids[0], commit_ids[1])
116 commit_id = '%s...%s' % (commit_ids[0], commit_ids[1])
101 response = self.app.get(url(
117 response = self.app.get(route_path(
102 controller='changeset', action='index',
118 'repo_commit',
103 repo_name=backend.repo_name, revision=commit_id))
119 repo_name=backend.repo_name, commit_id=commit_id))
104
120
105 response.mustcontain(_shorten_commit_id(commit_ids[0]))
121 response.mustcontain(_shorten_commit_id(commit_ids[0]))
106 response.mustcontain(_shorten_commit_id(commit_ids[1]))
122 response.mustcontain(_shorten_commit_id(commit_ids[1]))
107
123
108 # svn is special
124 # svn is special
109 if backend.alias == 'svn':
125 if backend.alias == 'svn':
110 response.mustcontain('new file 10644')
126 response.mustcontain('new file 10644')
111 response.mustcontain('1 file changed: 5 inserted, 1 deleted')
127 response.mustcontain('1 file changed: 5 inserted, 1 deleted')
112 response.mustcontain('12 files changed: 236 inserted, 22 deleted')
128 response.mustcontain('12 files changed: 236 inserted, 22 deleted')
113 response.mustcontain('21 files changed: 943 inserted, 288 deleted')
129 response.mustcontain('21 files changed: 943 inserted, 288 deleted')
114 else:
130 else:
115 response.mustcontain('new file 100644')
131 response.mustcontain('new file 100644')
116 response.mustcontain('12 files changed: 222 inserted, 20 deleted')
132 response.mustcontain('12 files changed: 222 inserted, 20 deleted')
117 response.mustcontain('21 files changed: 943 inserted, 288 deleted')
133 response.mustcontain('21 files changed: 943 inserted, 288 deleted')
118
134
119 # files op files
135 # files op files
120 response.mustcontain('File no longer present at commit: %s' %
136 response.mustcontain('File no longer present at commit: %s' %
121 _shorten_commit_id(commit_ids[1]))
137 _shorten_commit_id(commit_ids[1]))
122 response.mustcontain('Added docstrings to vcs.cli') # commit msg
138 response.mustcontain('Added docstrings to vcs.cli') # commit msg
123 response.mustcontain('Changed theme to ADC theme') # commit msg
139 response.mustcontain('Changed theme to ADC theme') # commit msg
124
140
125 self._check_new_diff_menus(response)
141 self._check_new_diff_menus(response)
126
142
127 def test_combined_compare_commit_page_different_ops(self, backend):
143 def test_combined_compare_commit_page_different_ops(self, backend):
128 commit_id_range = {
144 commit_id_range = {
129 'hg': (
145 'hg': (
130 '4fdd71e9427417b2e904e0464c634fdee85ec5a7',
146 '4fdd71e9427417b2e904e0464c634fdee85ec5a7',
131 '603d6c72c46d953420c89d36372f08d9f305f5dd'),
147 '603d6c72c46d953420c89d36372f08d9f305f5dd'),
132 'git': (
148 'git': (
133 'f5fbf9cfd5f1f1be146f6d3b38bcd791a7480c13',
149 'f5fbf9cfd5f1f1be146f6d3b38bcd791a7480c13',
134 '03fa803d7e9fb14daa9a3089e0d1494eda75d986'),
150 '03fa803d7e9fb14daa9a3089e0d1494eda75d986'),
135 'svn': (
151 'svn': (
136 '335',
152 '335',
137 '337'),
153 '337'),
138 }
154 }
139 commit_ids = commit_id_range[backend.alias]
155 commit_ids = commit_id_range[backend.alias]
140 response = self.app.get(url(
156 response = self.app.get(route_path(
141 controller='compare', action='compare',
157 'repo_compare',
142 repo_name=backend.repo_name,
158 repo_name=backend.repo_name,
143 source_ref_type='rev', source_ref=commit_ids[0],
159 source_ref_type='rev', source_ref=commit_ids[0],
144 target_ref_type='rev', target_ref=commit_ids[1], ))
160 target_ref_type='rev', target_ref=commit_ids[1], ))
145
161
146 response.mustcontain(_shorten_commit_id(commit_ids[0]))
162 response.mustcontain(_shorten_commit_id(commit_ids[0]))
147 response.mustcontain(_shorten_commit_id(commit_ids[1]))
163 response.mustcontain(_shorten_commit_id(commit_ids[1]))
148
164
149 # files op files
165 # files op files
150 response.mustcontain('File no longer present at commit: %s' %
166 response.mustcontain('File no longer present at commit: %s' %
151 _shorten_commit_id(commit_ids[1]))
167 _shorten_commit_id(commit_ids[1]))
152
168
153 # svn is special
169 # svn is special
154 if backend.alias == 'svn':
170 if backend.alias == 'svn':
155 response.mustcontain('new file 10644')
171 response.mustcontain('new file 10644')
156 response.mustcontain('32 files changed: 1179 inserted, 310 deleted')
172 response.mustcontain('32 files changed: 1179 inserted, 310 deleted')
157 else:
173 else:
158 response.mustcontain('new file 100644')
174 response.mustcontain('new file 100644')
159 response.mustcontain('32 files changed: 1165 inserted, 308 deleted')
175 response.mustcontain('32 files changed: 1165 inserted, 308 deleted')
160
176
161 response.mustcontain('Added docstrings to vcs.cli') # commit msg
177 response.mustcontain('Added docstrings to vcs.cli') # commit msg
162 response.mustcontain('Changed theme to ADC theme') # commit msg
178 response.mustcontain('Changed theme to ADC theme') # commit msg
163
179
164 self._check_new_diff_menus(response)
180 self._check_new_diff_menus(response)
165
181
166 def test_changeset_range(self, backend):
182 def test_changeset_range(self, backend):
167 self._check_changeset_range(
183 self._check_changeset_range(
168 backend, self.commit_id_range, self.commit_id_range_result)
184 backend, self.commit_id_range, self.commit_id_range_result)
169
185
170 def test_changeset_range_with_initial_commit(self, backend):
186 def test_changeset_range_with_initial_commit(self, backend):
171 commit_id_range = {
187 commit_id_range = {
172 'hg': (
188 'hg': (
173 'b986218ba1c9b0d6a259fac9b050b1724ed8e545'
189 'b986218ba1c9b0d6a259fac9b050b1724ed8e545'
174 '...6cba7170863a2411822803fa77a0a264f1310b35'),
190 '...6cba7170863a2411822803fa77a0a264f1310b35'),
175 'git': (
191 'git': (
176 'c1214f7e79e02fc37156ff215cd71275450cffc3'
192 'c1214f7e79e02fc37156ff215cd71275450cffc3'
177 '...fa6600f6848800641328adbf7811fd2372c02ab2'),
193 '...fa6600f6848800641328adbf7811fd2372c02ab2'),
178 'svn': '1...3',
194 'svn': '1...3',
179 }
195 }
180 commit_id_range_result = {
196 commit_id_range_result = {
181 'hg': ['b986218ba1c9', '3d8f361e72ab', '6cba7170863a'],
197 'hg': ['b986218ba1c9', '3d8f361e72ab', '6cba7170863a'],
182 'git': ['c1214f7e79e0', '38b5fe81f109', 'fa6600f68488'],
198 'git': ['c1214f7e79e0', '38b5fe81f109', 'fa6600f68488'],
183 'svn': ['1', '2', '3'],
199 'svn': ['1', '2', '3'],
184 }
200 }
185 self._check_changeset_range(
201 self._check_changeset_range(
186 backend, commit_id_range, commit_id_range_result)
202 backend, commit_id_range, commit_id_range_result)
187
203
188 def _check_changeset_range(
204 def _check_changeset_range(
189 self, backend, commit_id_ranges, commit_id_range_result):
205 self, backend, commit_id_ranges, commit_id_range_result):
190 response = self.app.get(
206 response = self.app.get(
191 url(controller='changeset', action='index',
207 route_path('repo_commit',
192 repo_name=backend.repo_name,
208 repo_name=backend.repo_name,
193 revision=commit_id_ranges[backend.alias]))
209 commit_id=commit_id_ranges[backend.alias]))
210
194 expected_result = commit_id_range_result[backend.alias]
211 expected_result = commit_id_range_result[backend.alias]
195 response.mustcontain('{} commits'.format(len(expected_result)))
212 response.mustcontain('{} commits'.format(len(expected_result)))
196 for commit_id in expected_result:
213 for commit_id in expected_result:
197 response.mustcontain(commit_id)
214 response.mustcontain(commit_id)
198
215
199 commit_id = {
216 commit_id = {
200 'hg': '2062ec7beeeaf9f44a1c25c41479565040b930b2',
217 'hg': '2062ec7beeeaf9f44a1c25c41479565040b930b2',
201 'svn': '393',
218 'svn': '393',
202 'git': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
219 'git': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
203 }
220 }
204
221
205 commit_id_range = {
222 commit_id_range = {
206 'hg': (
223 'hg': (
207 'a53d9201d4bc278910d416d94941b7ea007ecd52'
224 'a53d9201d4bc278910d416d94941b7ea007ecd52'
208 '...2062ec7beeeaf9f44a1c25c41479565040b930b2'),
225 '...2062ec7beeeaf9f44a1c25c41479565040b930b2'),
209 'git': (
226 'git': (
210 '7ab37bc680b4aa72c34d07b230c866c28e9fc204'
227 '7ab37bc680b4aa72c34d07b230c866c28e9fc204'
211 '...fd627b9e0dd80b47be81af07c4a98518244ed2f7'),
228 '...fd627b9e0dd80b47be81af07c4a98518244ed2f7'),
212 'svn': '391...393',
229 'svn': '391...393',
213 }
230 }
214
231
215 commit_id_range_result = {
232 commit_id_range_result = {
216 'hg': ['a53d9201d4bc', '96507bd11ecc', '2062ec7beeea'],
233 'hg': ['a53d9201d4bc', '96507bd11ecc', '2062ec7beeea'],
217 'git': ['7ab37bc680b4', '5f2c6ee19592', 'fd627b9e0dd8'],
234 'git': ['7ab37bc680b4', '5f2c6ee19592', 'fd627b9e0dd8'],
218 'svn': ['391', '392', '393'],
235 'svn': ['391', '392', '393'],
219 }
236 }
220
237
221 diffs = {
238 diffs = {
222 'hg': r"""diff --git a/README b/README
239 'hg': r"""diff --git a/README b/README
223 new file mode 120000
240 new file mode 120000
224 --- /dev/null
241 --- /dev/null
225 +++ b/README
242 +++ b/README
226 @@ -0,0 +1,1 @@
243 @@ -0,0 +1,1 @@
227 +README.rst
244 +README.rst
228 \ No newline at end of file
245 \ No newline at end of file
229 """,
246 """,
230 'git': r"""diff --git a/README b/README
247 'git': r"""diff --git a/README b/README
231 new file mode 120000
248 new file mode 120000
232 index 0000000000000000000000000000000000000000..92cacd285355271487b7e379dba6ca60f9a554a4
249 index 0000000000000000000000000000000000000000..92cacd285355271487b7e379dba6ca60f9a554a4
233 --- /dev/null
250 --- /dev/null
234 +++ b/README
251 +++ b/README
235 @@ -0,0 +1 @@
252 @@ -0,0 +1 @@
236 +README.rst
253 +README.rst
237 \ No newline at end of file
254 \ No newline at end of file
238 """,
255 """,
239 'svn': """Index: README
256 'svn': """Index: README
240 ===================================================================
257 ===================================================================
241 diff --git a/README b/README
258 diff --git a/README b/README
242 new file mode 10644
259 new file mode 10644
243 --- /dev/null\t(revision 0)
260 --- /dev/null\t(revision 0)
244 +++ b/README\t(revision 393)
261 +++ b/README\t(revision 393)
245 @@ -0,0 +1 @@
262 @@ -0,0 +1 @@
246 +link README.rst
263 +link README.rst
247 \\ No newline at end of file
264 \\ No newline at end of file
248 """,
265 """,
249 }
266 }
250
267
251 patches = {
268 patches = {
252 'hg': r"""# HG changeset patch
269 'hg': r"""# HG changeset patch
253 # User Marcin Kuzminski <marcin@python-works.com>
270 # User Marcin Kuzminski <marcin@python-works.com>
254 # Date 2014-01-07 12:21:40
271 # Date 2014-01-07 12:21:40
255 # Node ID 2062ec7beeeaf9f44a1c25c41479565040b930b2
272 # Node ID 2062ec7beeeaf9f44a1c25c41479565040b930b2
256 # Parent 96507bd11ecc815ebc6270fdf6db110928c09c1e
273 # Parent 96507bd11ecc815ebc6270fdf6db110928c09c1e
257
274
258 Added a symlink
275 Added a symlink
259
276
260 """ + diffs['hg'],
277 """ + diffs['hg'],
261 'git': r"""From fd627b9e0dd80b47be81af07c4a98518244ed2f7 2014-01-07 12:22:20
278 'git': r"""From fd627b9e0dd80b47be81af07c4a98518244ed2f7 2014-01-07 12:22:20
262 From: Marcin Kuzminski <marcin@python-works.com>
279 From: Marcin Kuzminski <marcin@python-works.com>
263 Date: 2014-01-07 12:22:20
280 Date: 2014-01-07 12:22:20
264 Subject: [PATCH] Added a symlink
281 Subject: [PATCH] Added a symlink
265
282
266 ---
283 ---
267
284
268 """ + diffs['git'],
285 """ + diffs['git'],
269 'svn': r"""# SVN changeset patch
286 'svn': r"""# SVN changeset patch
270 # User marcin
287 # User marcin
271 # Date 2014-09-02 12:25:22.071142
288 # Date 2014-09-02 12:25:22.071142
272 # Revision 393
289 # Revision 393
273
290
274 Added a symlink
291 Added a symlink
275
292
276 """ + diffs['svn'],
293 """ + diffs['svn'],
277 }
294 }
278
295
279 def _check_diff_menus(self, response, right_menu=False,):
296 def _check_diff_menus(self, response, right_menu=False,):
280 # diff menus
297 # diff menus
281 for elem in ['Show File', 'Unified Diff', 'Side-by-side Diff',
298 for elem in ['Show File', 'Unified Diff', 'Side-by-side Diff',
282 'Raw Diff', 'Download Diff']:
299 'Raw Diff', 'Download Diff']:
283 response.mustcontain(elem)
300 response.mustcontain(elem)
284
301
285 # right pane diff menus
302 # right pane diff menus
286 if right_menu:
303 if right_menu:
287 for elem in ['Ignore whitespace', 'Increase context',
304 for elem in ['Ignore whitespace', 'Increase context',
288 'Hide comments']:
305 'Hide comments']:
289 response.mustcontain(elem)
306 response.mustcontain(elem)
290
307
291 def _check_new_diff_menus(self, response, right_menu=False,):
308 def _check_new_diff_menus(self, response, right_menu=False,):
292 # diff menus
309 # diff menus
293 for elem in ['Show file before', 'Show file after',
310 for elem in ['Show file before', 'Show file after',
294 'Raw diff', 'Download diff']:
311 'Raw diff', 'Download diff']:
295 response.mustcontain(elem)
312 response.mustcontain(elem)
296
313
297 # right pane diff menus
314 # right pane diff menus
298 if right_menu:
315 if right_menu:
299 for elem in ['Ignore whitespace', 'Increase context',
316 for elem in ['Ignore whitespace', 'Increase context',
300 'Hide comments']:
317 'Hide comments']:
301 response.mustcontain(elem)
318 response.mustcontain(elem)
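As a usage note on the route_path helper defined at the top of this test module: it resolves the new pyramid route names to plain URL strings and appends any extra parameters as a query string. A short sketch, assuming the helper is importable; the repository name and the 'fulldiff' parameter are illustrative assumptions, while the commit id is the hg value from the fixtures above:

    repo_name = 'vcs_test_hg'   # assumed example name, not taken from the fixtures
    commit_id = '2062ec7beeeaf9f44a1c25c41479565040b930b2'

    url = route_path('repo_commit', repo_name=repo_name, commit_id=commit_id)
    # -> '/vcs_test_hg/changeset/2062ec7beeeaf9f44a1c25c41479565040b930b2'

    raw_url = route_path(
        'repo_commit_raw', repo_name=repo_name, commit_id=commit_id,
        params={'fulldiff': 1})  # params are urlencoded onto the base URL
    # -> '/vcs_test_hg/changeset-diff/<commit_id>?fulldiff=1'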
@@ -1,203 +1,203 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2017-2017 RhodeCode GmbH
3 # Copyright (C) 2017-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import pytz
21 import pytz
22 import logging
22 import logging
23
23
24 from beaker.cache import cache_region
24 from beaker.cache import cache_region
25 from pyramid.view import view_config
25 from pyramid.view import view_config
26 from pyramid.response import Response
26 from pyramid.response import Response
27 from webhelpers.feedgenerator import Rss201rev2Feed, Atom1Feed
27 from webhelpers.feedgenerator import Rss201rev2Feed, Atom1Feed
28
28
29 from rhodecode.apps._base import RepoAppView
29 from rhodecode.apps._base import RepoAppView
30 from rhodecode.lib import audit_logger
30 from rhodecode.lib import audit_logger
31 from rhodecode.lib import helpers as h
31 from rhodecode.lib import helpers as h
32 from rhodecode.lib.auth import (LoginRequired, HasRepoPermissionAnyDecorator,
32 from rhodecode.lib.auth import (LoginRequired, HasRepoPermissionAnyDecorator,
33 NotAnonymous, CSRFRequired)
33 NotAnonymous, CSRFRequired)
34 from rhodecode.lib.diffs import DiffProcessor, LimitedDiffContainer
34 from rhodecode.lib.diffs import DiffProcessor, LimitedDiffContainer
35 from rhodecode.lib.ext_json import json
35 from rhodecode.lib.ext_json import json
36 from rhodecode.lib.utils2 import str2bool, safe_int
36 from rhodecode.lib.utils2 import str2bool, safe_int
37 from rhodecode.model.db import UserApiKeys, CacheKey
37 from rhodecode.model.db import UserApiKeys, CacheKey
38
38
39 log = logging.getLogger(__name__)
39 log = logging.getLogger(__name__)
40
40
41
41
42 class RepoFeedView(RepoAppView):
42 class RepoFeedView(RepoAppView):
43 def load_default_context(self):
43 def load_default_context(self):
44 c = self._get_local_tmpl_context()
44 c = self._get_local_tmpl_context()
45
45
46 # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead
46 # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead
47 c.repo_info = self.db_repo
47 c.repo_info = self.db_repo
48
48
49 self._register_global_c(c)
49 self._register_global_c(c)
50 self._load_defaults()
50 self._load_defaults()
51 return c
51 return c
52
52
53 def _get_config(self):
53 def _get_config(self):
54 import rhodecode
54 import rhodecode
55 config = rhodecode.CONFIG
55 config = rhodecode.CONFIG
56
56
57 return {
57 return {
58 'language': 'en-us',
58 'language': 'en-us',
59 'feed_ttl': '5', # TTL of feed,
59 'feed_ttl': '5', # TTL of feed,
60 'feed_include_diff':
60 'feed_include_diff':
61 str2bool(config.get('rss_include_diff', False)),
61 str2bool(config.get('rss_include_diff', False)),
62 'feed_items_per_page':
62 'feed_items_per_page':
63 safe_int(config.get('rss_items_per_page', 20)),
63 safe_int(config.get('rss_items_per_page', 20)),
64 'feed_diff_limit':
64 'feed_diff_limit':
65 # we need to protect from parsing huge diffs here, otherwise
65 # we need to protect from parsing huge diffs here, otherwise
66 # we can kill the server
66 # we can kill the server
67 safe_int(config.get('rss_cut_off_limit', 32 * 1024)),
67 safe_int(config.get('rss_cut_off_limit', 32 * 1024)),
68 }
68 }
69
69
70 def _load_defaults(self):
70 def _load_defaults(self):
71 _ = self.request.translate
71 _ = self.request.translate
72 config = self._get_config()
72 config = self._get_config()
73 # common values for feeds
73 # common values for feeds
74 self.description = _('Changes on %s repository')
74 self.description = _('Changes on %s repository')
75 self.title = _('%s %s feed') % (self.db_repo_name, '%s')
75 self.title = _('%s %s feed') % (self.db_repo_name, '%s')
76 self.language = config["language"]
76 self.language = config["language"]
77 self.ttl = config["feed_ttl"]
77 self.ttl = config["feed_ttl"]
78 self.feed_include_diff = config['feed_include_diff']
78 self.feed_include_diff = config['feed_include_diff']
79 self.feed_diff_limit = config['feed_diff_limit']
79 self.feed_diff_limit = config['feed_diff_limit']
80 self.feed_items_per_page = config['feed_items_per_page']
80 self.feed_items_per_page = config['feed_items_per_page']
81
81
82 def _changes(self, commit):
82 def _changes(self, commit):
83 diff_processor = DiffProcessor(
83 diff_processor = DiffProcessor(
84 commit.diff(), diff_limit=self.feed_diff_limit)
84 commit.diff(), diff_limit=self.feed_diff_limit)
85 _parsed = diff_processor.prepare(inline_diff=False)
85 _parsed = diff_processor.prepare(inline_diff=False)
86 limited_diff = isinstance(_parsed, LimitedDiffContainer)
86 limited_diff = isinstance(_parsed, LimitedDiffContainer)
87
87
88 return _parsed, limited_diff
88 return _parsed, limited_diff
89
89
90 def _get_title(self, commit):
90 def _get_title(self, commit):
91 return h.shorter(commit.message, 160)
91 return h.shorter(commit.message, 160)
92
92
93 def _get_description(self, commit):
93 def _get_description(self, commit):
94 _renderer = self.request.get_partial_renderer(
94 _renderer = self.request.get_partial_renderer(
95 'feed/atom_feed_entry.mako')
95 'feed/atom_feed_entry.mako')
96 parsed_diff, limited_diff = self._changes(commit)
96 parsed_diff, limited_diff = self._changes(commit)
97 return _renderer(
97 return _renderer(
98 'body',
98 'body',
99 commit=commit,
99 commit=commit,
100 parsed_diff=parsed_diff,
100 parsed_diff=parsed_diff,
101 limited_diff=limited_diff,
101 limited_diff=limited_diff,
102 feed_include_diff=self.feed_include_diff,
102 feed_include_diff=self.feed_include_diff,
103 )
103 )
104
104
105 def _set_timezone(self, date, tzinfo=pytz.utc):
105 def _set_timezone(self, date, tzinfo=pytz.utc):
106 if not getattr(date, "tzinfo", None):
106 if not getattr(date, "tzinfo", None):
107 date = date.replace(tzinfo=tzinfo)
107 date = date.replace(tzinfo=tzinfo)
108 return date
108 return date
109
109
110 def _get_commits(self):
110 def _get_commits(self):
111 return list(self.rhodecode_vcs_repo[-self.feed_items_per_page:])
111 return list(self.rhodecode_vcs_repo[-self.feed_items_per_page:])
112
112
113 @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED])
113 @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED])
114 @HasRepoPermissionAnyDecorator(
114 @HasRepoPermissionAnyDecorator(
115 'repository.read', 'repository.write', 'repository.admin')
115 'repository.read', 'repository.write', 'repository.admin')
116 @view_config(
116 @view_config(
117 route_name='atom_feed_home', request_method='GET',
117 route_name='atom_feed_home', request_method='GET',
118 renderer=None)
118 renderer=None)
119 def atom(self):
119 def atom(self):
120 """
120 """
121 Produce an atom-1.0 feed via feedgenerator module
121 Produce an atom-1.0 feed via feedgenerator module
122 """
122 """
123 self.load_default_context()
123 self.load_default_context()
124
124
125 @cache_region('long_term')
125 @cache_region('long_term')
126 def _generate_feed(cache_key):
126 def _generate_feed(cache_key):
127 feed = Atom1Feed(
127 feed = Atom1Feed(
128 title=self.title % self.db_repo_name,
128 title=self.title % self.db_repo_name,
129 link=h.route_url('repo_summary', repo_name=self.db_repo_name),
129 link=h.route_url('repo_summary', repo_name=self.db_repo_name),
130 description=self.description % self.db_repo_name,
130 description=self.description % self.db_repo_name,
131 language=self.language,
131 language=self.language,
132 ttl=self.ttl
132 ttl=self.ttl
133 )
133 )
134
134
135 for commit in reversed(self._get_commits()):
135 for commit in reversed(self._get_commits()):
136 date = self._set_timezone(commit.date)
136 date = self._set_timezone(commit.date)
137 feed.add_item(
137 feed.add_item(
138 title=self._get_title(commit),
138 title=self._get_title(commit),
139 author_name=commit.author,
139 author_name=commit.author,
140 description=self._get_description(commit),
140 description=self._get_description(commit),
141 link=h.route_url(
141 link=h.route_url(
142 'changeset_home', repo_name=self.db_repo_name,
142 'repo_commit', repo_name=self.db_repo_name,
143 revision=commit.raw_id),
143 commit_id=commit.raw_id),
144 pubdate=date,)
144 pubdate=date,)
145
145
146 return feed.mime_type, feed.writeString('utf-8')
146 return feed.mime_type, feed.writeString('utf-8')
147
147
148 invalidator_context = CacheKey.repo_context_cache(
148 invalidator_context = CacheKey.repo_context_cache(
149 _generate_feed, self.db_repo_name, CacheKey.CACHE_TYPE_ATOM)
149 _generate_feed, self.db_repo_name, CacheKey.CACHE_TYPE_ATOM)
150
150
151 with invalidator_context as context:
151 with invalidator_context as context:
152 context.invalidate()
152 context.invalidate()
153 mime_type, feed = context.compute()
153 mime_type, feed = context.compute()
154
154
155 response = Response(feed)
155 response = Response(feed)
156 response.content_type = mime_type
156 response.content_type = mime_type
157 return response
157 return response
158
158
159 @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED])
159 @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED])
160 @HasRepoPermissionAnyDecorator(
160 @HasRepoPermissionAnyDecorator(
161 'repository.read', 'repository.write', 'repository.admin')
161 'repository.read', 'repository.write', 'repository.admin')
162 @view_config(
162 @view_config(
163 route_name='rss_feed_home', request_method='GET',
163 route_name='rss_feed_home', request_method='GET',
164 renderer=None)
164 renderer=None)
165 def rss(self):
165 def rss(self):
166 """
166 """
167 Produce an rss2 feed via feedgenerator module
167 Produce an rss2 feed via feedgenerator module
168 """
168 """
169 self.load_default_context()
169 self.load_default_context()
170
170
171 @cache_region('long_term')
171 @cache_region('long_term')
172 def _generate_feed(cache_key):
172 def _generate_feed(cache_key):
173 feed = Rss201rev2Feed(
173 feed = Rss201rev2Feed(
174 title=self.title % self.db_repo_name,
174 title=self.title % self.db_repo_name,
175 link=h.route_url('repo_summary', repo_name=self.db_repo_name),
175 link=h.route_url('repo_summary', repo_name=self.db_repo_name),
176 description=self.description % self.db_repo_name,
176 description=self.description % self.db_repo_name,
177 language=self.language,
177 language=self.language,
178 ttl=self.ttl
178 ttl=self.ttl
179 )
179 )
180
180
181 for commit in reversed(self._get_commits()):
181 for commit in reversed(self._get_commits()):
182 date = self._set_timezone(commit.date)
182 date = self._set_timezone(commit.date)
183 feed.add_item(
183 feed.add_item(
184 title=self._get_title(commit),
184 title=self._get_title(commit),
185 author_name=commit.author,
185 author_name=commit.author,
186 description=self._get_description(commit),
186 description=self._get_description(commit),
187 link=h.route_url(
187 link=h.route_url(
188 'changeset_home', repo_name=self.db_repo_name,
188 'repo_commit', repo_name=self.db_repo_name,
189 revision=commit.raw_id),
189 commit_id=commit.raw_id),
190 pubdate=date,)
190 pubdate=date,)
191
191
192 return feed.mime_type, feed.writeString('utf-8')
192 return feed.mime_type, feed.writeString('utf-8')
193
193
194 invalidator_context = CacheKey.repo_context_cache(
194 invalidator_context = CacheKey.repo_context_cache(
195 _generate_feed, self.db_repo_name, CacheKey.CACHE_TYPE_RSS)
195 _generate_feed, self.db_repo_name, CacheKey.CACHE_TYPE_RSS)
196
196
197 with invalidator_context as context:
197 with invalidator_context as context:
198 context.invalidate()
198 context.invalidate()
199 mime_type, feed = context.compute()
199 mime_type, feed = context.compute()
200
200
201 response = Response(feed)
201 response = Response(feed)
202 response.content_type = mime_type
202 response.content_type = mime_type
203 return response
203 return response
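To make the feed-generation flow above easier to follow outside of the view, here is a hedged, self-contained sketch of the same Atom1Feed pattern with hard-coded stand-ins: the host name, titles, and description text are assumptions, while the commit data mirrors the "Added a symlink" fixture used in the tests.

    import datetime
    import pytz
    from webhelpers.feedgenerator import Atom1Feed

    feed = Atom1Feed(
        title='my-repo atom feed',                 # assumed title text
        link='https://example.com/my-repo',        # assumed repo summary URL
        description='Changes on my-repo repository',
        language='en-us',
        ttl='5',
    )

    feed.add_item(
        title='Added a symlink',                   # shortened commit message
        author_name='Marcin Kuzminski <marcin@python-works.com>',
        # the view renders feed/atom_feed_entry.mako here; plain HTML stands in
        description='<pre>rendered diff snippet goes here</pre>',
        # entries now link through the 'repo_commit' route rather than the
        # old 'changeset_home' one
        link='https://example.com/my-repo/changeset/'
             '2062ec7beeeaf9f44a1c25c41479565040b930b2',
        pubdate=datetime.datetime(2014, 1, 7, 12, 21, 40, tzinfo=pytz.utc),
    )

    mime_type, body = feed.mime_type, feed.writeString('utf-8')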
@@ -1,1278 +1,1278 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2017 RhodeCode GmbH
3 # Copyright (C) 2011-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import itertools
21 import itertools
22 import logging
22 import logging
23 import os
23 import os
24 import shutil
24 import shutil
25 import tempfile
25 import tempfile
26 import collections
26 import collections
27
27
28 from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest, HTTPFound
28 from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest, HTTPFound
29 from pyramid.view import view_config
29 from pyramid.view import view_config
30 from pyramid.renderers import render
30 from pyramid.renderers import render
31 from pyramid.response import Response
31 from pyramid.response import Response
32
32
33 from rhodecode.apps._base import RepoAppView
33 from rhodecode.apps._base import RepoAppView
34
34
35 from rhodecode.controllers.utils import parse_path_ref
35 from rhodecode.controllers.utils import parse_path_ref
36 from rhodecode.lib import diffs, helpers as h, caches
36 from rhodecode.lib import diffs, helpers as h, caches
37 from rhodecode.lib import audit_logger
37 from rhodecode.lib import audit_logger
38 from rhodecode.lib.exceptions import NonRelativePathError
38 from rhodecode.lib.exceptions import NonRelativePathError
39 from rhodecode.lib.codeblocks import (
39 from rhodecode.lib.codeblocks import (
40 filenode_as_lines_tokens, filenode_as_annotated_lines_tokens)
40 filenode_as_lines_tokens, filenode_as_annotated_lines_tokens)
41 from rhodecode.lib.utils2 import (
41 from rhodecode.lib.utils2 import (
42 convert_line_endings, detect_mode, safe_str, str2bool)
42 convert_line_endings, detect_mode, safe_str, str2bool)
43 from rhodecode.lib.auth import (
43 from rhodecode.lib.auth import (
44 LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired)
44 LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired)
45 from rhodecode.lib.vcs import path as vcspath
45 from rhodecode.lib.vcs import path as vcspath
46 from rhodecode.lib.vcs.backends.base import EmptyCommit
46 from rhodecode.lib.vcs.backends.base import EmptyCommit
47 from rhodecode.lib.vcs.conf import settings
47 from rhodecode.lib.vcs.conf import settings
48 from rhodecode.lib.vcs.nodes import FileNode
48 from rhodecode.lib.vcs.nodes import FileNode
49 from rhodecode.lib.vcs.exceptions import (
49 from rhodecode.lib.vcs.exceptions import (
50 RepositoryError, CommitDoesNotExistError, EmptyRepositoryError,
50 RepositoryError, CommitDoesNotExistError, EmptyRepositoryError,
51 ImproperArchiveTypeError, VCSError, NodeAlreadyExistsError,
51 ImproperArchiveTypeError, VCSError, NodeAlreadyExistsError,
52 NodeDoesNotExistError, CommitError, NodeError)
52 NodeDoesNotExistError, CommitError, NodeError)
53
53
54 from rhodecode.model.scm import ScmModel
54 from rhodecode.model.scm import ScmModel
55 from rhodecode.model.db import Repository
55 from rhodecode.model.db import Repository
56
56
57 log = logging.getLogger(__name__)
57 log = logging.getLogger(__name__)
58
58
59
59
60 class RepoFilesView(RepoAppView):
60 class RepoFilesView(RepoAppView):
61
61
62 @staticmethod
62 @staticmethod
63 def adjust_file_path_for_svn(f_path, repo):
63 def adjust_file_path_for_svn(f_path, repo):
64 """
64 """
65 Computes the relative path of `f_path`.
65 Computes the relative path of `f_path`.
66
66
67 This is mainly based on prefix matching of the recognized tags and
67 This is mainly based on prefix matching of the recognized tags and
68 branches in the underlying repository.
68 branches in the underlying repository.
69 """
69 """
70 tags_and_branches = itertools.chain(
70 tags_and_branches = itertools.chain(
71 repo.branches.iterkeys(),
71 repo.branches.iterkeys(),
72 repo.tags.iterkeys())
72 repo.tags.iterkeys())
73 tags_and_branches = sorted(tags_and_branches, key=len, reverse=True)
73 tags_and_branches = sorted(tags_and_branches, key=len, reverse=True)
74
74
75 for name in tags_and_branches:
75 for name in tags_and_branches:
76 if f_path.startswith('{}/'.format(name)):
76 if f_path.startswith('{}/'.format(name)):
77 f_path = vcspath.relpath(f_path, name)
77 f_path = vcspath.relpath(f_path, name)
78 break
78 break
79 return f_path
79 return f_path
80
80
81 def load_default_context(self):
81 def load_default_context(self):
82 c = self._get_local_tmpl_context(include_app_defaults=True)
82 c = self._get_local_tmpl_context(include_app_defaults=True)
83
83
84 # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead
84 # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead
85 c.repo_info = self.db_repo
85 c.repo_info = self.db_repo
86 c.rhodecode_repo = self.rhodecode_vcs_repo
86 c.rhodecode_repo = self.rhodecode_vcs_repo
87
87
88 self._register_global_c(c)
88 self._register_global_c(c)
89 return c
89 return c
90
90
91 def _ensure_not_locked(self):
91 def _ensure_not_locked(self):
92 _ = self.request.translate
92 _ = self.request.translate
93
93
94 repo = self.db_repo
94 repo = self.db_repo
95 if repo.enable_locking and repo.locked[0]:
95 if repo.enable_locking and repo.locked[0]:
96 h.flash(_('This repository has been locked by %s on %s')
96 h.flash(_('This repository has been locked by %s on %s')
97 % (h.person_by_id(repo.locked[0]),
97 % (h.person_by_id(repo.locked[0]),
98 h.format_date(h.time_to_datetime(repo.locked[1]))),
98 h.format_date(h.time_to_datetime(repo.locked[1]))),
99 'warning')
99 'warning')
100 files_url = h.route_path(
100 files_url = h.route_path(
101 'repo_files:default_path',
101 'repo_files:default_path',
102 repo_name=self.db_repo_name, commit_id='tip')
102 repo_name=self.db_repo_name, commit_id='tip')
103 raise HTTPFound(files_url)
103 raise HTTPFound(files_url)
104
104
105 def _get_commit_and_path(self):
105 def _get_commit_and_path(self):
106 default_commit_id = self.db_repo.landing_rev[1]
106 default_commit_id = self.db_repo.landing_rev[1]
107 default_f_path = '/'
107 default_f_path = '/'
108
108
109 commit_id = self.request.matchdict.get(
109 commit_id = self.request.matchdict.get(
110 'commit_id', default_commit_id)
110 'commit_id', default_commit_id)
111 f_path = self._get_f_path(self.request.matchdict, default_f_path)
111 f_path = self._get_f_path(self.request.matchdict, default_f_path)
112 return commit_id, f_path
112 return commit_id, f_path
113
113
114 def _get_default_encoding(self, c):
114 def _get_default_encoding(self, c):
115 enc_list = getattr(c, 'default_encodings', [])
115 enc_list = getattr(c, 'default_encodings', [])
116 return enc_list[0] if enc_list else 'UTF-8'
116 return enc_list[0] if enc_list else 'UTF-8'
117
117
118 def _get_commit_or_redirect(self, commit_id, redirect_after=True):
118 def _get_commit_or_redirect(self, commit_id, redirect_after=True):
119 """
119 """
120 This is a safe way to get a commit. If an error occurs, it redirects
120 This is a safe way to get a commit. If an error occurs, it redirects
121 to tip with a proper message
121 to tip with a proper message
122
122
123 :param commit_id: id of commit to fetch
123 :param commit_id: id of commit to fetch
124 :param redirect_after: toggle redirection
124 :param redirect_after: toggle redirection
125 """
125 """
126 _ = self.request.translate
126 _ = self.request.translate
127
127
128 try:
128 try:
129 return self.rhodecode_vcs_repo.get_commit(commit_id)
129 return self.rhodecode_vcs_repo.get_commit(commit_id)
130 except EmptyRepositoryError:
130 except EmptyRepositoryError:
131 if not redirect_after:
131 if not redirect_after:
132 return None
132 return None
133
133
134 _url = h.route_path(
134 _url = h.route_path(
135 'repo_files_add_file',
135 'repo_files_add_file',
136 repo_name=self.db_repo_name, commit_id=0, f_path='',
136 repo_name=self.db_repo_name, commit_id=0, f_path='',
137 _anchor='edit')
137 _anchor='edit')
138
138
139 if h.HasRepoPermissionAny(
139 if h.HasRepoPermissionAny(
140 'repository.write', 'repository.admin')(self.db_repo_name):
140 'repository.write', 'repository.admin')(self.db_repo_name):
141 add_new = h.link_to(
141 add_new = h.link_to(
142 _('Click here to add a new file.'), _url, class_="alert-link")
142 _('Click here to add a new file.'), _url, class_="alert-link")
143 else:
143 else:
144 add_new = ""
144 add_new = ""
145
145
146 h.flash(h.literal(
146 h.flash(h.literal(
147 _('There are no files yet. %s') % add_new), category='warning')
147 _('There are no files yet. %s') % add_new), category='warning')
148 raise HTTPFound(
148 raise HTTPFound(
149 h.route_path('repo_summary', repo_name=self.db_repo_name))
149 h.route_path('repo_summary', repo_name=self.db_repo_name))
150
150
151 except (CommitDoesNotExistError, LookupError):
151 except (CommitDoesNotExistError, LookupError):
152 msg = _('No such commit exists for this repository')
152 msg = _('No such commit exists for this repository')
153 h.flash(msg, category='error')
153 h.flash(msg, category='error')
154 raise HTTPNotFound()
154 raise HTTPNotFound()
155 except RepositoryError as e:
155 except RepositoryError as e:
156 h.flash(safe_str(h.escape(e)), category='error')
156 h.flash(safe_str(h.escape(e)), category='error')
157 raise HTTPNotFound()
157 raise HTTPNotFound()
158
158
159 def _get_filenode_or_redirect(self, commit_obj, path):
159 def _get_filenode_or_redirect(self, commit_obj, path):
160 """
160 """
161 Returns file_node. If an error occurs, or the given path is a
161 Returns file_node. If an error occurs, or the given path is a
162 directory, it redirects to the top-level path
162 directory, it redirects to the top-level path
163 """
163 """
164 _ = self.request.translate
164 _ = self.request.translate
165
165
166 try:
166 try:
167 file_node = commit_obj.get_node(path)
167 file_node = commit_obj.get_node(path)
168 if file_node.is_dir():
168 if file_node.is_dir():
169 raise RepositoryError('The given path is a directory')
169 raise RepositoryError('The given path is a directory')
170 except CommitDoesNotExistError:
170 except CommitDoesNotExistError:
171 log.exception('No such commit exists for this repository')
171 log.exception('No such commit exists for this repository')
172 h.flash(_('No such commit exists for this repository'), category='error')
172 h.flash(_('No such commit exists for this repository'), category='error')
173 raise HTTPNotFound()
173 raise HTTPNotFound()
174 except RepositoryError as e:
174 except RepositoryError as e:
175 log.warning('Repository error while fetching '
175 log.warning('Repository error while fetching '
176 'filenode `%s`. Err:%s', path, e)
176 'filenode `%s`. Err:%s', path, e)
177 h.flash(safe_str(h.escape(e)), category='error')
177 h.flash(safe_str(h.escape(e)), category='error')
178 raise HTTPNotFound()
178 raise HTTPNotFound()
179
179
180 return file_node
180 return file_node
181
181
182 def _is_valid_head(self, commit_id, repo):
182 def _is_valid_head(self, commit_id, repo):
183 # check if commit is a branch identifier - basically we cannot
183 # check if commit is a branch identifier - basically we cannot
184 # create multiple heads via file editing
184 # create multiple heads via file editing
185 valid_heads = repo.branches.keys() + repo.branches.values()
185 valid_heads = repo.branches.keys() + repo.branches.values()
186
186
187 if h.is_svn(repo) and not repo.is_empty():
187 if h.is_svn(repo) and not repo.is_empty():
188 # Note: Subversion only has one head, we add it here in case there
188 # Note: Subversion only has one head, we add it here in case there
189 # is no branch matched.
189 # is no branch matched.
190 valid_heads.append(repo.get_commit(commit_idx=-1).raw_id)
190 valid_heads.append(repo.get_commit(commit_idx=-1).raw_id)
191
191
192 # check if commit is a branch name or branch hash
192 # check if commit is a branch name or branch hash
193 return commit_id in valid_heads
193 return commit_id in valid_heads
194
194
195 def _get_tree_cache_manager(self, namespace_type):
195 def _get_tree_cache_manager(self, namespace_type):
196 _namespace = caches.get_repo_namespace_key(
196 _namespace = caches.get_repo_namespace_key(
197 namespace_type, self.db_repo_name)
197 namespace_type, self.db_repo_name)
198 return caches.get_cache_manager('repo_cache_long', _namespace)
198 return caches.get_cache_manager('repo_cache_long', _namespace)
199
199
200 def _get_tree_at_commit(
200 def _get_tree_at_commit(
201 self, c, commit_id, f_path, full_load=False, force=False):
201 self, c, commit_id, f_path, full_load=False, force=False):
202 def _cached_tree():
202 def _cached_tree():
203 log.debug('Generating cached file tree for %s, %s, %s',
203 log.debug('Generating cached file tree for %s, %s, %s',
204 self.db_repo_name, commit_id, f_path)
204 self.db_repo_name, commit_id, f_path)
205
205
206 c.full_load = full_load
206 c.full_load = full_load
207 return render(
207 return render(
208 'rhodecode:templates/files/files_browser_tree.mako',
208 'rhodecode:templates/files/files_browser_tree.mako',
209 self._get_template_context(c), self.request)
209 self._get_template_context(c), self.request)
210
210
211 cache_manager = self._get_tree_cache_manager(caches.FILE_TREE)
211 cache_manager = self._get_tree_cache_manager(caches.FILE_TREE)
212
212
213 cache_key = caches.compute_key_from_params(
213 cache_key = caches.compute_key_from_params(
214 self.db_repo_name, commit_id, f_path)
214 self.db_repo_name, commit_id, f_path)
215
215
216 if force:
216 if force:
217 # we want to force recompute of caches
217 # we want to force recompute of caches
218 cache_manager.remove_value(cache_key)
218 cache_manager.remove_value(cache_key)
219
219
220 return cache_manager.get(cache_key, createfunc=_cached_tree)
220 return cache_manager.get(cache_key, createfunc=_cached_tree)
221
221
222 def _get_archive_spec(self, fname):
222 def _get_archive_spec(self, fname):
223 log.debug('Detecting archive spec for: `%s`', fname)
223 log.debug('Detecting archive spec for: `%s`', fname)
224
224
225 fileformat = None
225 fileformat = None
226 ext = None
226 ext = None
227 content_type = None
227 content_type = None
228 for a_type, ext_data in settings.ARCHIVE_SPECS.items():
228 for a_type, ext_data in settings.ARCHIVE_SPECS.items():
229 content_type, extension = ext_data
229 content_type, extension = ext_data
230
230
231 if fname.endswith(extension):
231 if fname.endswith(extension):
232 fileformat = a_type
232 fileformat = a_type
233 log.debug('archive is of type: %s', fileformat)
233 log.debug('archive is of type: %s', fileformat)
234 ext = extension
234 ext = extension
235 break
235 break
236
236
237 if not fileformat:
237 if not fileformat:
238 raise ValueError()
238 raise ValueError()
239
239
240 # left over part of whole fname is the commit
240 # left over part of whole fname is the commit
241 commit_id = fname[:-len(ext)]
241 commit_id = fname[:-len(ext)]
242
242
243 return commit_id, ext, fileformat, content_type
243 return commit_id, ext, fileformat, content_type
244
244
245 @LoginRequired()
245 @LoginRequired()
246 @HasRepoPermissionAnyDecorator(
246 @HasRepoPermissionAnyDecorator(
247 'repository.read', 'repository.write', 'repository.admin')
247 'repository.read', 'repository.write', 'repository.admin')
248 @view_config(
248 @view_config(
249 route_name='repo_archivefile', request_method='GET',
249 route_name='repo_archivefile', request_method='GET',
250 renderer=None)
250 renderer=None)
251 def repo_archivefile(self):
251 def repo_archivefile(self):
252 # archive cache config
252 # archive cache config
253 from rhodecode import CONFIG
253 from rhodecode import CONFIG
254 _ = self.request.translate
254 _ = self.request.translate
255 self.load_default_context()
255 self.load_default_context()
256
256
257 fname = self.request.matchdict['fname']
257 fname = self.request.matchdict['fname']
258 subrepos = self.request.GET.get('subrepos') == 'true'
258 subrepos = self.request.GET.get('subrepos') == 'true'
259
259
260 if not self.db_repo.enable_downloads:
260 if not self.db_repo.enable_downloads:
261 return Response(_('Downloads disabled'))
261 return Response(_('Downloads disabled'))
262
262
263 try:
263 try:
264 commit_id, ext, fileformat, content_type = \
264 commit_id, ext, fileformat, content_type = \
265 self._get_archive_spec(fname)
265 self._get_archive_spec(fname)
266 except ValueError:
266 except ValueError:
267 return Response(_('Unknown archive type for: `{}`').format(fname))
267 return Response(_('Unknown archive type for: `{}`').format(fname))
268
268
269 try:
269 try:
270 commit = self.rhodecode_vcs_repo.get_commit(commit_id)
270 commit = self.rhodecode_vcs_repo.get_commit(commit_id)
271 except CommitDoesNotExistError:
271 except CommitDoesNotExistError:
272 return Response(_('Unknown commit_id %s') % commit_id)
272 return Response(_('Unknown commit_id %s') % commit_id)
273 except EmptyRepositoryError:
273 except EmptyRepositoryError:
274 return Response(_('Empty repository'))
274 return Response(_('Empty repository'))
275
275
276 archive_name = '%s-%s%s%s' % (
276 archive_name = '%s-%s%s%s' % (
277 safe_str(self.db_repo_name.replace('/', '_')),
277 safe_str(self.db_repo_name.replace('/', '_')),
278 '-sub' if subrepos else '',
278 '-sub' if subrepos else '',
279 safe_str(commit.short_id), ext)
279 safe_str(commit.short_id), ext)
280
280
281 use_cached_archive = False
281 use_cached_archive = False
282 archive_cache_enabled = CONFIG.get(
282 archive_cache_enabled = CONFIG.get(
283 'archive_cache_dir') and not self.request.GET.get('no_cache')
283 'archive_cache_dir') and not self.request.GET.get('no_cache')
284
284
285 if archive_cache_enabled:
285 if archive_cache_enabled:
286 # check if it's ok to write
286 # check if it's ok to write
287 if not os.path.isdir(CONFIG['archive_cache_dir']):
287 if not os.path.isdir(CONFIG['archive_cache_dir']):
288 os.makedirs(CONFIG['archive_cache_dir'])
288 os.makedirs(CONFIG['archive_cache_dir'])
289 cached_archive_path = os.path.join(
289 cached_archive_path = os.path.join(
290 CONFIG['archive_cache_dir'], archive_name)
290 CONFIG['archive_cache_dir'], archive_name)
291 if os.path.isfile(cached_archive_path):
291 if os.path.isfile(cached_archive_path):
292 log.debug('Found cached archive in %s', cached_archive_path)
292 log.debug('Found cached archive in %s', cached_archive_path)
293 fd, archive = None, cached_archive_path
293 fd, archive = None, cached_archive_path
294 use_cached_archive = True
294 use_cached_archive = True
295 else:
295 else:
296 log.debug('Archive %s is not yet cached', archive_name)
296 log.debug('Archive %s is not yet cached', archive_name)
297
297
298 if not use_cached_archive:
298 if not use_cached_archive:
299 # generate new archive
299 # generate new archive
300 fd, archive = tempfile.mkstemp()
300 fd, archive = tempfile.mkstemp()
301 log.debug('Creating new temp archive in %s', archive)
301 log.debug('Creating new temp archive in %s', archive)
302 try:
302 try:
303 commit.archive_repo(archive, kind=fileformat, subrepos=subrepos)
303 commit.archive_repo(archive, kind=fileformat, subrepos=subrepos)
304 except ImproperArchiveTypeError:
304 except ImproperArchiveTypeError:
305 return _('Unknown archive type')
305 return _('Unknown archive type')
306 if archive_cache_enabled:
306 if archive_cache_enabled:
307 # if we generated the archive and we have cache enabled
307 # if we generated the archive and we have cache enabled
308 # let's use this for the future
308 # let's use this for the future
309 log.debug('Storing new archive in %s', cached_archive_path)
309 log.debug('Storing new archive in %s', cached_archive_path)
310 shutil.move(archive, cached_archive_path)
310 shutil.move(archive, cached_archive_path)
311 archive = cached_archive_path
311 archive = cached_archive_path
312
312
313 # store download action
313 # store download action
314 audit_logger.store_web(
314 audit_logger.store_web(
315 'repo.archive.download', action_data={
315 'repo.archive.download', action_data={
316 'user_agent': self.request.user_agent,
316 'user_agent': self.request.user_agent,
317 'archive_name': archive_name,
317 'archive_name': archive_name,
318 'archive_spec': fname,
318 'archive_spec': fname,
319 'archive_cached': use_cached_archive},
319 'archive_cached': use_cached_archive},
320 user=self._rhodecode_user,
320 user=self._rhodecode_user,
321 repo=self.db_repo,
321 repo=self.db_repo,
322 commit=True
322 commit=True
323 )
323 )
324
324
325 def get_chunked_archive(archive):
325 def get_chunked_archive(archive):
326 with open(archive, 'rb') as stream:
326 with open(archive, 'rb') as stream:
327 while True:
327 while True:
328 data = stream.read(16 * 1024)
328 data = stream.read(16 * 1024)
329 if not data:
329 if not data:
330 if fd: # fd means we used temporary file
330 if fd: # fd means we used temporary file
331 os.close(fd)
331 os.close(fd)
332 if not archive_cache_enabled:
332 if not archive_cache_enabled:
333 log.debug('Destroying temp archive %s', archive)
333 log.debug('Destroying temp archive %s', archive)
334 os.remove(archive)
334 os.remove(archive)
335 break
335 break
336 yield data
336 yield data
337
337
338 response = Response(app_iter=get_chunked_archive(archive))
338 response = Response(app_iter=get_chunked_archive(archive))
339 response.content_disposition = str(
339 response.content_disposition = str(
340 'attachment; filename=%s' % archive_name)
340 'attachment; filename=%s' % archive_name)
341 response.content_type = str(content_type)
341 response.content_type = str(content_type)
342
342
343 return response
343 return response
344
344
345 def _get_file_node(self, commit_id, f_path):
345 def _get_file_node(self, commit_id, f_path):
346 if commit_id not in ['', None, 'None', '0' * 12, '0' * 40]:
347 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
348 try:
349 node = commit.get_node(f_path)
350 if node.is_dir():
351 raise NodeError('%s path is a %s not a file'
352 % (node, type(node)))
353 except NodeDoesNotExistError:
354 commit = EmptyCommit(
355 commit_id=commit_id,
356 idx=commit.idx,
357 repo=commit.repository,
358 alias=commit.repository.alias,
359 message=commit.message,
360 author=commit.author,
361 date=commit.date)
362 node = FileNode(f_path, '', commit=commit)
363 else:
364 commit = EmptyCommit(
365 repo=self.rhodecode_vcs_repo,
366 alias=self.rhodecode_vcs_repo.alias)
367 node = FileNode(f_path, '', commit=commit)
368 return node
369
370 @LoginRequired()
371 @HasRepoPermissionAnyDecorator(
372 'repository.read', 'repository.write', 'repository.admin')
373 @view_config(
374 route_name='repo_files_diff', request_method='GET',
375 renderer=None)
376 def repo_files_diff(self):
377 c = self.load_default_context()
378 f_path = self._get_f_path(self.request.matchdict)
379 diff1 = self.request.GET.get('diff1', '')
380 diff2 = self.request.GET.get('diff2', '')
381
382 path1, diff1 = parse_path_ref(diff1, default_path=f_path)
383
384 ignore_whitespace = str2bool(self.request.GET.get('ignorews'))
385 line_context = self.request.GET.get('context', 3)
386
387 if not any((diff1, diff2)):
388 h.flash(
389 'Need query parameter "diff1" or "diff2" to generate a diff.',
390 category='error')
391 raise HTTPBadRequest()
392
393 c.action = self.request.GET.get('diff')
394 if c.action not in ['download', 'raw']:
395 compare_url = h.url(
396 'compare_url', repo_name=self.db_repo_name,
397 source_ref_type='rev',
398 source_ref=diff1,
399 target_repo=self.db_repo_name,
400 target_ref_type='rev',
401 target_ref=diff2,
402 f_path=f_path)
403 # redirect to new view if we render diff
404 raise HTTPFound(compare_url)
405
406 try:
407 node1 = self._get_file_node(diff1, path1)
408 node2 = self._get_file_node(diff2, f_path)
409 except (RepositoryError, NodeError):
410 log.exception("Exception while trying to get node from repository")
411 raise HTTPFound(
412 h.route_path('repo_files', repo_name=self.db_repo_name,
413 commit_id='tip', f_path=f_path))
414
415 if all(isinstance(node.commit, EmptyCommit)
416 for node in (node1, node2)):
417 raise HTTPNotFound()
418
419 c.commit_1 = node1.commit
420 c.commit_2 = node2.commit
421
422 if c.action == 'download':
423 _diff = diffs.get_gitdiff(node1, node2,
424 ignore_whitespace=ignore_whitespace,
425 context=line_context)
426 diff = diffs.DiffProcessor(_diff, format='gitdiff')
427
428 response = Response(diff.as_raw())
429 response.content_type = 'text/plain'
430 response.content_disposition = (
431 'attachment; filename=%s_%s_vs_%s.diff' % (f_path, diff1, diff2)
432 )
433 charset = self._get_default_encoding(c)
434 if charset:
435 response.charset = charset
436 return response
437
438 elif c.action == 'raw':
439 _diff = diffs.get_gitdiff(node1, node2,
440 ignore_whitespace=ignore_whitespace,
441 context=line_context)
442 diff = diffs.DiffProcessor(_diff, format='gitdiff')
443
444 response = Response(diff.as_raw())
445 response.content_type = 'text/plain'
446 charset = self._get_default_encoding(c)
447 if charset:
448 response.charset = charset
449 return response
450
451 # in case we ever end up here
452 raise HTTPNotFound()
453
454 @LoginRequired()
455 @HasRepoPermissionAnyDecorator(
456 'repository.read', 'repository.write', 'repository.admin')
457 @view_config(
458 route_name='repo_files_diff_2way_redirect', request_method='GET',
459 renderer=None)
460 def repo_files_diff_2way_redirect(self):
461 """
462 Kept only to make OLD links work
463 """
464 f_path = self._get_f_path(self.request.matchdict)
465 diff1 = self.request.GET.get('diff1', '')
466 diff2 = self.request.GET.get('diff2', '')
467
468 if not any((diff1, diff2)):
469 h.flash(
470 'Need query parameter "diff1" or "diff2" to generate a diff.',
471 category='error')
472 raise HTTPBadRequest()
473
474 compare_url = h.url(
475 'compare_url', repo_name=self.db_repo_name,
476 source_ref_type='rev',
477 source_ref=diff1,
478 target_repo=self.db_repo_name,
479 target_ref_type='rev',
480 target_ref=diff2,
481 f_path=f_path,
482 diffmode='sideside')
483 raise HTTPFound(compare_url)
484
485 @LoginRequired()
486 @HasRepoPermissionAnyDecorator(
487 'repository.read', 'repository.write', 'repository.admin')
488 @view_config(
489 route_name='repo_files', request_method='GET',
490 renderer=None)
491 @view_config(
492 route_name='repo_files:default_path', request_method='GET',
493 renderer=None)
494 @view_config(
495 route_name='repo_files:default_commit', request_method='GET',
496 renderer=None)
497 @view_config(
498 route_name='repo_files:rendered', request_method='GET',
499 renderer=None)
500 @view_config(
501 route_name='repo_files:annotated', request_method='GET',
502 renderer=None)
503 def repo_files(self):
504 c = self.load_default_context()
505
506 view_name = getattr(self.request.matched_route, 'name', None)
507
508 c.annotate = view_name == 'repo_files:annotated'
509 # default is false, but .rst/.md files later are auto rendered, we can
510 # overwrite auto rendering by setting this GET flag
511 c.renderer = view_name == 'repo_files:rendered' or \
512 not self.request.GET.get('no-render', False)
513
514 # redirect to given commit_id from form if given
515 get_commit_id = self.request.GET.get('at_rev', None)
516 if get_commit_id:
517 self._get_commit_or_redirect(get_commit_id)
518
519 commit_id, f_path = self._get_commit_and_path()
520 c.commit = self._get_commit_or_redirect(commit_id)
521 c.branch = self.request.GET.get('branch', None)
522 c.f_path = f_path
523
524 # prev link
525 try:
526 prev_commit = c.commit.prev(c.branch)
527 c.prev_commit = prev_commit
528 c.url_prev = h.route_path(
529 'repo_files', repo_name=self.db_repo_name,
530 commit_id=prev_commit.raw_id, f_path=f_path)
531 if c.branch:
532 c.url_prev += '?branch=%s' % c.branch
533 except (CommitDoesNotExistError, VCSError):
534 c.url_prev = '#'
535 c.prev_commit = EmptyCommit()
536
537 # next link
538 try:
539 next_commit = c.commit.next(c.branch)
540 c.next_commit = next_commit
541 c.url_next = h.route_path(
542 'repo_files', repo_name=self.db_repo_name,
543 commit_id=next_commit.raw_id, f_path=f_path)
544 if c.branch:
545 c.url_next += '?branch=%s' % c.branch
546 except (CommitDoesNotExistError, VCSError):
547 c.url_next = '#'
548 c.next_commit = EmptyCommit()
549
550 # files or dirs
551 try:
552 c.file = c.commit.get_node(f_path)
553 c.file_author = True
554 c.file_tree = ''
555
556 # load file content
557 if c.file.is_file():
558 c.lf_node = c.file.get_largefile_node()
559
560 c.file_source_page = 'true'
561 c.file_last_commit = c.file.last_commit
562 if c.file.size < c.visual.cut_off_limit_diff:
563 if c.annotate: # annotation has precedence over renderer
564 c.annotated_lines = filenode_as_annotated_lines_tokens(
565 c.file
566 )
567 else:
568 c.renderer = (
569 c.renderer and h.renderer_from_filename(c.file.path)
570 )
571 if not c.renderer:
572 c.lines = filenode_as_lines_tokens(c.file)
573
574 c.on_branch_head = self._is_valid_head(
575 commit_id, self.rhodecode_vcs_repo)
576
577 branch = c.commit.branch if (
578 c.commit.branch and '/' not in c.commit.branch) else None
579 c.branch_or_raw_id = branch or c.commit.raw_id
580 c.branch_name = c.commit.branch or h.short_id(c.commit.raw_id)
581
582 author = c.file_last_commit.author
583 c.authors = [[
584 h.email(author),
585 h.person(author, 'username_or_name_or_email'),
586 1
587 ]]
588
589 else: # load tree content at path
590 c.file_source_page = 'false'
591 c.authors = []
592 # this loads a simple tree without metadata to speed things up;
593 # later, via ajax, repo_nodetree_full is called to fetch the whole tree
594 c.file_tree = self._get_tree_at_commit(
595 c, c.commit.raw_id, f_path)
596
597 except RepositoryError as e:
598 h.flash(safe_str(h.escape(e)), category='error')
599 raise HTTPNotFound()
600
601 if self.request.environ.get('HTTP_X_PJAX'):
602 html = render('rhodecode:templates/files/files_pjax.mako',
603 self._get_template_context(c), self.request)
604 else:
605 html = render('rhodecode:templates/files/files.mako',
606 self._get_template_context(c), self.request)
607 return Response(html)
608
609 @HasRepoPermissionAnyDecorator(
610 'repository.read', 'repository.write', 'repository.admin')
611 @view_config(
612 route_name='repo_files:annotated_previous', request_method='GET',
613 renderer=None)
614 def repo_files_annotated_previous(self):
615 self.load_default_context()
616
617 commit_id, f_path = self._get_commit_and_path()
618 commit = self._get_commit_or_redirect(commit_id)
619 prev_commit_id = commit.raw_id
620 line_anchor = self.request.GET.get('line_anchor')
621 is_file = False
622 try:
623 _file = commit.get_node(f_path)
624 is_file = _file.is_file()
625 except (NodeDoesNotExistError, CommitDoesNotExistError, VCSError):
626 pass
627
628 if is_file:
629 history = commit.get_file_history(f_path)
630 prev_commit_id = history[1].raw_id \
631 if len(history) > 1 else prev_commit_id
632 prev_url = h.route_path(
633 'repo_files:annotated', repo_name=self.db_repo_name,
634 commit_id=prev_commit_id, f_path=f_path,
635 _anchor='L{}'.format(line_anchor))
636
637 raise HTTPFound(prev_url)
638
639 @LoginRequired()
640 @HasRepoPermissionAnyDecorator(
641 'repository.read', 'repository.write', 'repository.admin')
642 @view_config(
643 route_name='repo_nodetree_full', request_method='GET',
644 renderer=None, xhr=True)
645 @view_config(
646 route_name='repo_nodetree_full:default_path', request_method='GET',
647 renderer=None, xhr=True)
648 def repo_nodetree_full(self):
649 """
650 Returns rendered html of file tree that contains commit date,
651 author, commit_id for the specified combination of
652 repo, commit_id and file path
653 """
654 c = self.load_default_context()
655
656 commit_id, f_path = self._get_commit_and_path()
657 commit = self._get_commit_or_redirect(commit_id)
658 try:
659 dir_node = commit.get_node(f_path)
660 except RepositoryError as e:
661 return Response('error: {}'.format(safe_str(e)))
662
663 if dir_node.is_file():
664 return Response('')
665
666 c.file = dir_node
667 c.commit = commit
668
669 # force=True is a small trick: we flush the cache and recompute the tree
670 # under the same key that the earlier partial (non full_load) render used,
671 # so the fully loaded tree is returned instead of the partial one
672 # and is stored in the cache
673 html = self._get_tree_at_commit(
674 c, commit.raw_id, dir_node.path, full_load=True, force=True)
675
676 return Response(html)
677
678 def _get_attachement_disposition(self, f_path):
679 return 'attachment; filename=%s' % \
680 safe_str(f_path.split(Repository.NAME_SEP)[-1])
681
682 @LoginRequired()
683 @HasRepoPermissionAnyDecorator(
684 'repository.read', 'repository.write', 'repository.admin')
685 @view_config(
686 route_name='repo_file_raw', request_method='GET',
687 renderer=None)
688 def repo_file_raw(self):
689 """
690 Action for show as raw, some mimetypes are "rendered",
691 those include images, icons.
692 """
693 c = self.load_default_context()
694
695 commit_id, f_path = self._get_commit_and_path()
696 commit = self._get_commit_or_redirect(commit_id)
697 file_node = self._get_filenode_or_redirect(commit, f_path)
698
699 raw_mimetype_mapping = {
700 # map original mimetype to a mimetype used for "show as raw"
701 # you can also provide a content-disposition to override the
702 # default "attachment" disposition.
703 # orig_type: (new_type, new_dispo)
704
705 # show images inline:
706 # Do not re-add SVG: it is unsafe and permits XSS attacks. One can
707 # for example render an SVG with javascript inside or even render
708 # HTML.
709 'image/x-icon': ('image/x-icon', 'inline'),
710 'image/png': ('image/png', 'inline'),
711 'image/gif': ('image/gif', 'inline'),
712 'image/jpeg': ('image/jpeg', 'inline'),
713 'application/pdf': ('application/pdf', 'inline'),
714 }
715
716 mimetype = file_node.mimetype
717 try:
718 mimetype, disposition = raw_mimetype_mapping[mimetype]
719 except KeyError:
720 # we don't know anything special about this, handle it safely
721 if file_node.is_binary:
722 # do same as download raw for binary files
723 mimetype, disposition = 'application/octet-stream', 'attachment'
724 else:
725 # do not just use the original mimetype, but force text/plain,
726 # otherwise it would serve text/html and that might be unsafe.
727 # Note: the underlying vcs library fakes a text/plain mimetype if the
728 # mimetype cannot be determined and it thinks the file is not
729 # binary. This might lead to erroneous text display in some
730 # cases, but helps in other cases, like with text files
731 # without extension.
732 mimetype, disposition = 'text/plain', 'inline'
733
734 if disposition == 'attachment':
735 disposition = self._get_attachement_disposition(f_path)
736
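# editor's note: stream_node below wraps the file's raw bytes in a one-item
# generator so they can be handed to Response via app_iter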
737 def stream_node():
738 yield file_node.raw_bytes
739
740 response = Response(app_iter=stream_node())
741 response.content_disposition = disposition
742 response.content_type = mimetype
743
744 charset = self._get_default_encoding(c)
745 if charset:
746 response.charset = charset
747
748 return response
749
750 @LoginRequired()
751 @HasRepoPermissionAnyDecorator(
752 'repository.read', 'repository.write', 'repository.admin')
753 @view_config(
754 route_name='repo_file_download', request_method='GET',
755 renderer=None)
756 @view_config(
757 route_name='repo_file_download:legacy', request_method='GET',
758 renderer=None)
759 def repo_file_download(self):
760 c = self.load_default_context()
761
762 commit_id, f_path = self._get_commit_and_path()
763 commit = self._get_commit_or_redirect(commit_id)
764 file_node = self._get_filenode_or_redirect(commit, f_path)
765
766 if self.request.GET.get('lf'):
767 # only if lf get flag is passed, we download this file
768 # as LFS/Largefile
769 lf_node = file_node.get_largefile_node()
770 if lf_node:
771 # overwrite our pointer with the REAL large-file
772 file_node = lf_node
773
774 disposition = self._get_attachement_disposition(f_path)
775
776 def stream_node():
777 yield file_node.raw_bytes
778
779 response = Response(app_iter=stream_node())
780 response.content_disposition = disposition
781 response.content_type = file_node.mimetype
782
783 charset = self._get_default_encoding(c)
784 if charset:
785 response.charset = charset
786
787 return response
788
789 def _get_nodelist_at_commit(self, repo_name, commit_id, f_path):
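# editor's note: the nodelist is cached per (repo_name, commit_id, f_path);
# _cached_nodes below is only invoked when the key is not in the cache yet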
790 def _cached_nodes():
791 log.debug('Generating cached nodelist for %s, %s, %s',
792 repo_name, commit_id, f_path)
793 _d, _f = ScmModel().get_nodes(
794 repo_name, commit_id, f_path, flat=False)
795 return _d + _f
796
797 cache_manager = self._get_tree_cache_manager(caches.FILE_SEARCH_TREE_META)
798
799 cache_key = caches.compute_key_from_params(
800 repo_name, commit_id, f_path)
801 return cache_manager.get(cache_key, createfunc=_cached_nodes)
802
803 @LoginRequired()
804 @HasRepoPermissionAnyDecorator(
805 'repository.read', 'repository.write', 'repository.admin')
806 @view_config(
807 route_name='repo_files_nodelist', request_method='GET',
808 renderer='json_ext', xhr=True)
809 def repo_nodelist(self):
810 self.load_default_context()
811
812 commit_id, f_path = self._get_commit_and_path()
813 commit = self._get_commit_or_redirect(commit_id)
814
815 metadata = self._get_nodelist_at_commit(
816 self.db_repo_name, commit.raw_id, f_path)
817 return {'nodes': metadata}
818
819 def _create_references(
820 self, branches_or_tags, symbolic_reference, f_path):
821 items = []
822 for name, commit_id in branches_or_tags.items():
823 sym_ref = symbolic_reference(commit_id, name, f_path)
824 items.append((sym_ref, name))
825 return items
826
827 def _symbolic_reference(self, commit_id, name, f_path):
828 return commit_id
829
830 def _symbolic_reference_svn(self, commit_id, name, f_path):
831 new_f_path = vcspath.join(name, f_path)
832 return u'%s@%s' % (new_f_path, commit_id)
833
834 def _get_node_history(self, commit_obj, f_path, commits=None):
835 """
836 get commit history for given node
837
838 :param commit_obj: commit to calculate history
839 :param f_path: path for node to calculate history for
840 :param commits: if passed don't calculate history and take
841 commits defined in this list
842 """
843 _ = self.request.translate
844
845 # calculate history based on tip
846 tip = self.rhodecode_vcs_repo.get_commit()
847 if commits is None:
848 pre_load = ["author", "branch"]
849 try:
850 commits = tip.get_file_history(f_path, pre_load=pre_load)
851 except (NodeDoesNotExistError, CommitError):
852 # this node is not present at tip!
853 commits = commit_obj.get_file_history(f_path, pre_load=pre_load)
854
855 history = []
856 commits_group = ([], _("Changesets"))
857 for commit in commits:
858 branch = ' (%s)' % commit.branch if commit.branch else ''
859 n_desc = 'r%s:%s%s' % (commit.idx, commit.short_id, branch)
860 commits_group[0].append((commit.raw_id, n_desc,))
861 history.append(commits_group)
862
863 symbolic_reference = self._symbolic_reference
864
865 if self.rhodecode_vcs_repo.alias == 'svn':
866 adjusted_f_path = RepoFilesView.adjust_file_path_for_svn(
867 f_path, self.rhodecode_vcs_repo)
868 if adjusted_f_path != f_path:
869 log.debug(
870 'Recognized svn tag or branch in file "%s", using svn '
871 'specific symbolic references', f_path)
872 f_path = adjusted_f_path
873 symbolic_reference = self._symbolic_reference_svn
874
875 branches = self._create_references(
876 self.rhodecode_vcs_repo.branches, symbolic_reference, f_path)
877 branches_group = (branches, _("Branches"))
878
879 tags = self._create_references(
880 self.rhodecode_vcs_repo.tags, symbolic_reference, f_path)
881 tags_group = (tags, _("Tags"))
882
883 history.append(branches_group)
884 history.append(tags_group)
885
886 return history, commits
887
888 @LoginRequired()
889 @HasRepoPermissionAnyDecorator(
890 'repository.read', 'repository.write', 'repository.admin')
891 @view_config(
892 route_name='repo_file_history', request_method='GET',
893 renderer='json_ext')
894 def repo_file_history(self):
895 self.load_default_context()
896
897 commit_id, f_path = self._get_commit_and_path()
898 commit = self._get_commit_or_redirect(commit_id)
899 file_node = self._get_filenode_or_redirect(commit, f_path)
900
901 if file_node.is_file():
902 file_history, _hist = self._get_node_history(commit, f_path)
903
904 res = []
905 for obj in file_history:
906 res.append({
907 'text': obj[1],
908 'children': [{'id': o[0], 'text': o[1]} for o in obj[0]]
909 })
910
911 data = {
912 'more': False,
913 'results': res
914 }
915 return data
916
917 log.warning('Cannot fetch history for directory')
918 raise HTTPBadRequest()
919
920 @LoginRequired()
921 @HasRepoPermissionAnyDecorator(
922 'repository.read', 'repository.write', 'repository.admin')
923 @view_config(
924 route_name='repo_file_authors', request_method='GET',
925 renderer='rhodecode:templates/files/file_authors_box.mako')
926 def repo_file_authors(self):
927 c = self.load_default_context()
928
929 commit_id, f_path = self._get_commit_and_path()
930 commit = self._get_commit_or_redirect(commit_id)
931 file_node = self._get_filenode_or_redirect(commit, f_path)
932
933 if not file_node.is_file():
934 raise HTTPBadRequest()
935
936 c.file_last_commit = file_node.last_commit
937 if self.request.GET.get('annotate') == '1':
938 # use _hist from annotation if annotation mode is on
939 commit_ids = set(x[1] for x in file_node.annotate)
940 _hist = (
941 self.rhodecode_vcs_repo.get_commit(commit_id)
942 for commit_id in commit_ids)
943 else:
944 _f_history, _hist = self._get_node_history(commit, f_path)
945 c.file_author = False
946
947 unique = collections.OrderedDict()
948 for commit in _hist:
949 author = commit.author
950 if author not in unique:
951 unique[commit.author] = [
952 h.email(author),
953 h.person(author, 'username_or_name_or_email'),
954 1 # counter
955 ]
956
957 else:
958 # increase counter
959 unique[commit.author][2] += 1
960
961 c.authors = [val for val in unique.values()]
962
963 return self._get_template_context(c)
964
965 @LoginRequired()
966 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
967 @view_config(
968 route_name='repo_files_remove_file', request_method='GET',
969 renderer='rhodecode:templates/files/files_delete.mako')
970 def repo_files_remove_file(self):
971 _ = self.request.translate
972 c = self.load_default_context()
973 commit_id, f_path = self._get_commit_and_path()
974
975 self._ensure_not_locked()
976
977 if not self._is_valid_head(commit_id, self.rhodecode_vcs_repo):
978 h.flash(_('You can only delete files with commit '
979 'being a valid branch '), category='warning')
980 raise HTTPFound(
981 h.route_path('repo_files',
982 repo_name=self.db_repo_name, commit_id='tip',
983 f_path=f_path))
984
985 c.commit = self._get_commit_or_redirect(commit_id)
986 c.file = self._get_filenode_or_redirect(c.commit, f_path)
987
988 c.default_message = _(
989 'Deleted file {} via RhodeCode Enterprise').format(f_path)
990 c.f_path = f_path
991
992 return self._get_template_context(c)
993
994 @LoginRequired()
995 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
996 @CSRFRequired()
997 @view_config(
998 route_name='repo_files_delete_file', request_method='POST',
999 renderer=None)
1000 def repo_files_delete_file(self):
1001 _ = self.request.translate
1002
1003 c = self.load_default_context()
1004 commit_id, f_path = self._get_commit_and_path()
1005
1006 self._ensure_not_locked()
1007
1008 if not self._is_valid_head(commit_id, self.rhodecode_vcs_repo):
1009 h.flash(_('You can only delete files with commit '
1010 'being a valid branch '), category='warning')
1011 raise HTTPFound(
1012 h.route_path('repo_files',
1013 repo_name=self.db_repo_name, commit_id='tip',
1014 f_path=f_path))
1015
1016 c.commit = self._get_commit_or_redirect(commit_id)
1017 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1018
1019 c.default_message = _(
1020 'Deleted file {} via RhodeCode Enterprise').format(f_path)
1021 c.f_path = f_path
1022 node_path = f_path
1023 author = self._rhodecode_db_user.full_contact
1024 message = self.request.POST.get('message') or c.default_message
1025 try:
1026 nodes = {
1027 node_path: {
1028 'content': ''
1029 }
1030 }
1031 ScmModel().delete_nodes(
1032 user=self._rhodecode_db_user.user_id, repo=self.db_repo,
1033 message=message,
1034 nodes=nodes,
1035 parent_commit=c.commit,
1036 author=author,
1037 )
1038
1039 h.flash(
1040 _('Successfully deleted file `{}`').format(
1041 h.escape(f_path)), category='success')
1042 except Exception:
1043 log.exception('Error during commit operation')
1044 h.flash(_('Error occurred during commit'), category='error')
1045 raise HTTPFound(
1046 h.route_path('changeset_home', repo_name=self.db_repo_name,
1046 h.route_path('repo_commit', repo_name=self.db_repo_name,
1047 revision='tip'))
1047 commit_id='tip'))
1048
1049 @LoginRequired()
1050 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1051 @view_config(
1052 route_name='repo_files_edit_file', request_method='GET',
1053 renderer='rhodecode:templates/files/files_edit.mako')
1054 def repo_files_edit_file(self):
1055 _ = self.request.translate
1056 c = self.load_default_context()
1057 commit_id, f_path = self._get_commit_and_path()
1058
1059 self._ensure_not_locked()
1060
1061 if not self._is_valid_head(commit_id, self.rhodecode_vcs_repo):
1062 h.flash(_('You can only edit files with commit '
1063 'being a valid branch '), category='warning')
1064 raise HTTPFound(
1065 h.route_path('repo_files',
1066 repo_name=self.db_repo_name, commit_id='tip',
1067 f_path=f_path))
1068
1069 c.commit = self._get_commit_or_redirect(commit_id)
1070 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1071
1072 if c.file.is_binary:
1073 files_url = h.route_path(
1074 'repo_files',
1075 repo_name=self.db_repo_name,
1076 commit_id=c.commit.raw_id, f_path=f_path)
1077 raise HTTPFound(files_url)
1078
1079 c.default_message = _(
1080 'Edited file {} via RhodeCode Enterprise').format(f_path)
1081 c.f_path = f_path
1082
1083 return self._get_template_context(c)
1084
1085 @LoginRequired()
1086 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1087 @CSRFRequired()
1088 @view_config(
1089 route_name='repo_files_update_file', request_method='POST',
1090 renderer=None)
1091 def repo_files_update_file(self):
1092 _ = self.request.translate
1093 c = self.load_default_context()
1094 commit_id, f_path = self._get_commit_and_path()
1095
1096 self._ensure_not_locked()
1097
1098 if not self._is_valid_head(commit_id, self.rhodecode_vcs_repo):
1099 h.flash(_('You can only edit files with commit '
1100 'being a valid branch '), category='warning')
1101 raise HTTPFound(
1102 h.route_path('repo_files',
1103 repo_name=self.db_repo_name, commit_id='tip',
1104 f_path=f_path))
1105
1106 c.commit = self._get_commit_or_redirect(commit_id)
1107 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1108
1109 if c.file.is_binary:
1110 raise HTTPFound(
1111 h.route_path('repo_files',
1112 repo_name=self.db_repo_name,
1113 commit_id=c.commit.raw_id,
1114 f_path=f_path))
1115
1116 c.default_message = _(
1117 'Edited file {} via RhodeCode Enterprise').format(f_path)
1118 c.f_path = f_path
1119 old_content = c.file.content
1120 sl = old_content.splitlines(1)
1121 first_line = sl[0] if sl else ''
1122
1123 r_post = self.request.POST
1124 # modes: 0 - Unix, 1 - Mac, 2 - DOS
1125 mode = detect_mode(first_line, 0)
1126 content = convert_line_endings(r_post.get('content', ''), mode)
1127
1128 message = r_post.get('message') or c.default_message
1129 org_f_path = c.file.unicode_path
1130 filename = r_post['filename']
1131 org_filename = c.file.name
1132
1133 if content == old_content and filename == org_filename:
1134 h.flash(_('No changes'), category='warning')
1135 raise HTTPFound(
1136 h.route_path('changeset_home', repo_name=self.db_repo_name,
1136 h.route_path('repo_commit', repo_name=self.db_repo_name,
1137 revision='tip'))
1137 commit_id='tip'))
1138 try:
1139 mapping = {
1140 org_f_path: {
1141 'org_filename': org_f_path,
1142 'filename': os.path.join(c.file.dir_path, filename),
1143 'content': content,
1144 'lexer': '',
1145 'op': 'mod',
1146 }
1147 }
1148
1149 ScmModel().update_nodes(
1150 user=self._rhodecode_db_user.user_id,
1151 repo=self.db_repo,
1152 message=message,
1153 nodes=mapping,
1154 parent_commit=c.commit,
1155 )
1156
1157 h.flash(
1158 _('Successfully committed changes to file `{}`').format(
1159 h.escape(f_path)), category='success')
1160 except Exception:
1161 log.exception('Error occurred during commit')
1162 h.flash(_('Error occurred during commit'), category='error')
1163 raise HTTPFound(
1164 h.route_path('changeset_home', repo_name=self.db_repo_name,
1164 h.route_path('repo_commit', repo_name=self.db_repo_name,
1165 revision='tip'))
1165 commit_id='tip'))
1166
1167 @LoginRequired()
1168 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1169 @view_config(
1170 route_name='repo_files_add_file', request_method='GET',
1171 renderer='rhodecode:templates/files/files_add.mako')
1172 def repo_files_add_file(self):
1173 _ = self.request.translate
1174 c = self.load_default_context()
1175 commit_id, f_path = self._get_commit_and_path()
1176
1177 self._ensure_not_locked()
1178
1179 c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
1180 if c.commit is None:
1181 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1182 c.default_message = (_('Added file via RhodeCode Enterprise'))
1183 c.f_path = f_path
1184
1185 return self._get_template_context(c)
1186
1187 @LoginRequired()
1188 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1189 @CSRFRequired()
1190 @view_config(
1191 route_name='repo_files_create_file', request_method='POST',
1192 renderer=None)
1193 def repo_files_create_file(self):
1194 _ = self.request.translate
1195 c = self.load_default_context()
1196 commit_id, f_path = self._get_commit_and_path()
1197
1198 self._ensure_not_locked()
1199
1200 r_post = self.request.POST
1201
1202 c.commit = self._get_commit_or_redirect(
1203 commit_id, redirect_after=False)
1204 if c.commit is None:
1205 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1206 c.default_message = (_('Added file via RhodeCode Enterprise'))
1207 c.f_path = f_path
1208 unix_mode = 0
1209 content = convert_line_endings(r_post.get('content', ''), unix_mode)
1210
1211 message = r_post.get('message') or c.default_message
1212 filename = r_post.get('filename')
1213 location = r_post.get('location', '') # dir location
1214 file_obj = r_post.get('upload_file', None)
1215
1216 if file_obj is not None and hasattr(file_obj, 'filename'):
1217 filename = r_post.get('filename_upload')
1218 content = file_obj.file
1219
1220 if hasattr(content, 'file'):
1221 # non posix systems store real file under file attr
1222 content = content.file
1223
1224 default_redirect_url = h.route_path(
1225 'changeset_home', repo_name=self.db_repo_name, revision='tip')
1225 'repo_commit', repo_name=self.db_repo_name, commit_id='tip')
1226
1226
1227 # If there's no commit, redirect to repo summary
1227 # If there's no commit, redirect to repo summary
1228 if type(c.commit) is EmptyCommit:
1228 if type(c.commit) is EmptyCommit:
1229 redirect_url = h.route_path(
1229 redirect_url = h.route_path(
1230 'repo_summary', repo_name=self.db_repo_name)
1230 'repo_summary', repo_name=self.db_repo_name)
1231 else:
1231 else:
1232 redirect_url = default_redirect_url
1232 redirect_url = default_redirect_url
1233
1233
1234 if not filename:
1234 if not filename:
1235 h.flash(_('No filename'), category='warning')
1235 h.flash(_('No filename'), category='warning')
1236 raise HTTPFound(redirect_url)
1236 raise HTTPFound(redirect_url)
1237
1237
1238 # extract the location from the filename; this allows using the
1238 # extract the location from the filename; this allows using the
1239 # foo/bar.txt syntax to create subdirectories
1239 # foo/bar.txt syntax to create subdirectories
1240 subdir_loc = filename.rsplit('/', 1)
1240 subdir_loc = filename.rsplit('/', 1)
1241 if len(subdir_loc) == 2:
1241 if len(subdir_loc) == 2:
1242 location = os.path.join(location, subdir_loc[0])
1242 location = os.path.join(location, subdir_loc[0])
1243
1243
1244 # strip any path components from the filename, just leave the basename
1244 # strip any path components from the filename, just leave the basename
1245 filename = os.path.basename(filename)
1245 filename = os.path.basename(filename)
1246 node_path = os.path.join(location, filename)
1246 node_path = os.path.join(location, filename)
1247 author = self._rhodecode_db_user.full_contact
1247 author = self._rhodecode_db_user.full_contact
1248
1248
1249 try:
1249 try:
1250 nodes = {
1250 nodes = {
1251 node_path: {
1251 node_path: {
1252 'content': content
1252 'content': content
1253 }
1253 }
1254 }
1254 }
1255 ScmModel().create_nodes(
1255 ScmModel().create_nodes(
1256 user=self._rhodecode_db_user.user_id,
1256 user=self._rhodecode_db_user.user_id,
1257 repo=self.db_repo,
1257 repo=self.db_repo,
1258 message=message,
1258 message=message,
1259 nodes=nodes,
1259 nodes=nodes,
1260 parent_commit=c.commit,
1260 parent_commit=c.commit,
1261 author=author,
1261 author=author,
1262 )
1262 )
1263
1263
1264 h.flash(
1264 h.flash(
1265 _('Successfully committed new file `{}`').format(
1265 _('Successfully committed new file `{}`').format(
1266 h.escape(node_path)), category='success')
1266 h.escape(node_path)), category='success')
1267 except NonRelativePathError:
1267 except NonRelativePathError:
1268 h.flash(_(
1268 h.flash(_(
1269 'The location specified must be a relative path and must not '
1269 'The location specified must be a relative path and must not '
1270 'contain .. in the path'), category='warning')
1270 'contain .. in the path'), category='warning')
1271 raise HTTPFound(default_redirect_url)
1271 raise HTTPFound(default_redirect_url)
1272 except (NodeError, NodeAlreadyExistsError) as e:
1272 except (NodeError, NodeAlreadyExistsError) as e:
1273 h.flash(_(h.escape(e)), category='error')
1273 h.flash(_(h.escape(e)), category='error')
1274 except Exception:
1274 except Exception:
1275 log.exception('Error occurred during commit')
1275 log.exception('Error occurred during commit')
1276 h.flash(_('Error occurred during commit'), category='error')
1276 h.flash(_('Error occurred during commit'), category='error')
1277
1277
1278 raise HTTPFound(default_redirect_url)
1278 raise HTTPFound(default_redirect_url)
@@ -1,660 +1,599 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Routes configuration
22 Routes configuration
23
23
24 The more specific and detailed routes should be defined first so they
24 The more specific and detailed routes should be defined first so they
25 may take precedence over the more generic routes. For more information
25 may take precedence over the more generic routes. For more information
26 refer to the routes manual at http://routes.groovie.org/docs/
26 refer to the routes manual at http://routes.groovie.org/docs/
27
27
28 IMPORTANT: if you change any routing here, make sure to take a look at lib/base.py
28 IMPORTANT: if you change any routing here, make sure to take a look at lib/base.py
29 and the _route_name variable, which uses some of the route names stored here to do redirects.
29 and the _route_name variable, which uses some of the route names stored here to do redirects.
30 """
30 """
31 import os
31 import os
32 import re
32 import re
33 from routes import Mapper
33 from routes import Mapper
34
34
35 # prefix for non-repository related links; it needs to be prefixed with `/`
35 # prefix for non-repository related links; it needs to be prefixed with `/`
36 ADMIN_PREFIX = '/_admin'
36 ADMIN_PREFIX = '/_admin'
37 STATIC_FILE_PREFIX = '/_static'
37 STATIC_FILE_PREFIX = '/_static'
38
38
39 # Default requirements for URL parts
39 # Default requirements for URL parts
40 URL_NAME_REQUIREMENTS = {
40 URL_NAME_REQUIREMENTS = {
41 # group names can have a slash in them, but they must not end with a slash
41 # group names can have a slash in them, but they must not end with a slash
42 'group_name': r'.*?[^/]',
42 'group_name': r'.*?[^/]',
43 'repo_group_name': r'.*?[^/]',
43 'repo_group_name': r'.*?[^/]',
44 # repo names can have a slash in them, but they must not end with a slash
44 # repo names can have a slash in them, but they must not end with a slash
45 'repo_name': r'.*?[^/]',
45 'repo_name': r'.*?[^/]',
46 # file path eats up everything at the end
46 # file path eats up everything at the end
47 'f_path': r'.*',
47 'f_path': r'.*',
48 # reference types
48 # reference types
49 'source_ref_type': '(branch|book|tag|rev|\%\(source_ref_type\)s)',
49 'source_ref_type': '(branch|book|tag|rev|\%\(source_ref_type\)s)',
50 'target_ref_type': '(branch|book|tag|rev|\%\(target_ref_type\)s)',
50 'target_ref_type': '(branch|book|tag|rev|\%\(target_ref_type\)s)',
51 }
51 }
52
52
53
53
54 class JSRoutesMapper(Mapper):
54 class JSRoutesMapper(Mapper):
55 """
55 """
56 Wrapper around routes.Mapper to make URL definitions compatible with pyroutes
56 Wrapper around routes.Mapper to make URL definitions compatible with pyroutes
57 """
57 """
58 _named_route_regex = re.compile(r'^[a-z-_0-9A-Z]+$')
58 _named_route_regex = re.compile(r'^[a-z-_0-9A-Z]+$')
59 _argument_prog = re.compile('\{(.*?)\}|:\((.*)\)')
59 _argument_prog = re.compile('\{(.*?)\}|:\((.*)\)')
60 def __init__(self, *args, **kw):
60 def __init__(self, *args, **kw):
61 super(JSRoutesMapper, self).__init__(*args, **kw)
61 super(JSRoutesMapper, self).__init__(*args, **kw)
62 self._jsroutes = []
62 self._jsroutes = []
63
63
64 def connect(self, *args, **kw):
64 def connect(self, *args, **kw):
65 """
65 """
66 Wrapper for connect to take an extra argument jsroute=True
66 Wrapper for connect to take an extra argument jsroute=True
67
67
68 :param jsroute: boolean, if True will add the route to the pyroutes list
68 :param jsroute: boolean, if True will add the route to the pyroutes list
69 """
69 """
70 if kw.pop('jsroute', False):
70 if kw.pop('jsroute', False):
71 if not self._named_route_regex.match(args[0]):
71 if not self._named_route_regex.match(args[0]):
72 raise Exception('only named routes can be added to pyroutes')
72 raise Exception('only named routes can be added to pyroutes')
73 self._jsroutes.append(args[0])
73 self._jsroutes.append(args[0])
74
74
75 super(JSRoutesMapper, self).connect(*args, **kw)
75 super(JSRoutesMapper, self).connect(*args, **kw)
76
76
77 def _extract_route_information(self, route):
77 def _extract_route_information(self, route):
78 """
78 """
79 Convert a route into tuple(name, path, args), eg:
79 Convert a route into tuple(name, path, args), eg:
80 ('show_user', '/profile/%(username)s', ['username'])
80 ('show_user', '/profile/%(username)s', ['username'])
81 """
81 """
82 routepath = route.routepath
82 routepath = route.routepath
83 def replace(matchobj):
83 def replace(matchobj):
84 if matchobj.group(1):
84 if matchobj.group(1):
85 return "%%(%s)s" % matchobj.group(1).split(':')[0]
85 return "%%(%s)s" % matchobj.group(1).split(':')[0]
86 else:
86 else:
87 return "%%(%s)s" % matchobj.group(2)
87 return "%%(%s)s" % matchobj.group(2)
88
88
89 routepath = self._argument_prog.sub(replace, routepath)
89 routepath = self._argument_prog.sub(replace, routepath)
90 return (
90 return (
91 route.name,
91 route.name,
92 routepath,
92 routepath,
93 [(arg[0].split(':')[0] if arg[0] != '' else arg[1])
93 [(arg[0].split(':')[0] if arg[0] != '' else arg[1])
94 for arg in self._argument_prog.findall(route.routepath)]
94 for arg in self._argument_prog.findall(route.routepath)]
95 )
95 )
96
96
97 def jsroutes(self):
97 def jsroutes(self):
98 """
98 """
99 Return a list of pyroutes.js compatible routes
99 Return a list of pyroutes.js compatible routes
100 """
100 """
101 for route_name in self._jsroutes:
101 for route_name in self._jsroutes:
102 yield self._extract_route_information(self._routenames[route_name])
102 yield self._extract_route_information(self._routenames[route_name])
103
103
104
104
105 def make_map(config):
105 def make_map(config):
106 """Create, configure and return the routes Mapper"""
106 """Create, configure and return the routes Mapper"""
107 rmap = JSRoutesMapper(
107 rmap = JSRoutesMapper(
108 directory=config['pylons.paths']['controllers'],
108 directory=config['pylons.paths']['controllers'],
109 always_scan=config['debug'])
109 always_scan=config['debug'])
110 rmap.minimization = False
110 rmap.minimization = False
111 rmap.explicit = False
111 rmap.explicit = False
112
112
113 from rhodecode.lib.utils2 import str2bool
113 from rhodecode.lib.utils2 import str2bool
114 from rhodecode.model import repo, repo_group
114 from rhodecode.model import repo, repo_group
115
115
116 def check_repo(environ, match_dict):
116 def check_repo(environ, match_dict):
117 """
117 """
118 check for valid repository for proper 404 handling
118 check for valid repository for proper 404 handling
119
119
120 :param environ:
120 :param environ:
121 :param match_dict:
121 :param match_dict:
122 """
122 """
123 repo_name = match_dict.get('repo_name')
123 repo_name = match_dict.get('repo_name')
124
124
125 if match_dict.get('f_path'):
125 if match_dict.get('f_path'):
126 # fix for multiple initial slashes that cause errors
126 # fix for multiple initial slashes that cause errors
127 match_dict['f_path'] = match_dict['f_path'].lstrip('/')
127 match_dict['f_path'] = match_dict['f_path'].lstrip('/')
128 repo_model = repo.RepoModel()
128 repo_model = repo.RepoModel()
129 by_name_match = repo_model.get_by_repo_name(repo_name)
129 by_name_match = repo_model.get_by_repo_name(repo_name)
130 # if we match quickly from database, short circuit the operation,
130 # if we match quickly from database, short circuit the operation,
131 # and validate repo based on the type.
131 # and validate repo based on the type.
132 if by_name_match:
132 if by_name_match:
133 return True
133 return True
134
134
135 by_id_match = repo_model.get_repo_by_id(repo_name)
135 by_id_match = repo_model.get_repo_by_id(repo_name)
136 if by_id_match:
136 if by_id_match:
137 repo_name = by_id_match.repo_name
137 repo_name = by_id_match.repo_name
138 match_dict['repo_name'] = repo_name
138 match_dict['repo_name'] = repo_name
139 return True
139 return True
140
140
141 return False
141 return False
142
142
143 def check_group(environ, match_dict):
143 def check_group(environ, match_dict):
144 """
144 """
145 check for valid repository group path for proper 404 handling
145 check for valid repository group path for proper 404 handling
146
146
147 :param environ:
147 :param environ:
148 :param match_dict:
148 :param match_dict:
149 """
149 """
150 repo_group_name = match_dict.get('group_name')
150 repo_group_name = match_dict.get('group_name')
151 repo_group_model = repo_group.RepoGroupModel()
151 repo_group_model = repo_group.RepoGroupModel()
152 by_name_match = repo_group_model.get_by_group_name(repo_group_name)
152 by_name_match = repo_group_model.get_by_group_name(repo_group_name)
153 if by_name_match:
153 if by_name_match:
154 return True
154 return True
155
155
156 return False
156 return False
157
157
158 def check_user_group(environ, match_dict):
158 def check_user_group(environ, match_dict):
159 """
159 """
160 check for valid user group for proper 404 handling
160 check for valid user group for proper 404 handling
161
161
162 :param environ:
162 :param environ:
163 :param match_dict:
163 :param match_dict:
164 """
164 """
165 return True
165 return True
166
166
167 def check_int(environ, match_dict):
167 def check_int(environ, match_dict):
168 return match_dict.get('id').isdigit()
168 return match_dict.get('id').isdigit()
169
169
170
170
171 #==========================================================================
171 #==========================================================================
172 # CUSTOM ROUTES HERE
172 # CUSTOM ROUTES HERE
173 #==========================================================================
173 #==========================================================================
174
174
175 # ping and pylons error test
175 # ping and pylons error test
176 rmap.connect('ping', '%s/ping' % (ADMIN_PREFIX,), controller='home', action='ping')
176 rmap.connect('ping', '%s/ping' % (ADMIN_PREFIX,), controller='home', action='ping')
177 rmap.connect('error_test', '%s/error_test' % (ADMIN_PREFIX,), controller='home', action='error_test')
177 rmap.connect('error_test', '%s/error_test' % (ADMIN_PREFIX,), controller='home', action='error_test')
178
178
179 # ADMIN REPOSITORY ROUTES
179 # ADMIN REPOSITORY ROUTES
180 with rmap.submapper(path_prefix=ADMIN_PREFIX,
180 with rmap.submapper(path_prefix=ADMIN_PREFIX,
181 controller='admin/repos') as m:
181 controller='admin/repos') as m:
182 m.connect('repos', '/repos',
182 m.connect('repos', '/repos',
183 action='create', conditions={'method': ['POST']})
183 action='create', conditions={'method': ['POST']})
184 m.connect('repos', '/repos',
184 m.connect('repos', '/repos',
185 action='index', conditions={'method': ['GET']})
185 action='index', conditions={'method': ['GET']})
186 m.connect('new_repo', '/create_repository', jsroute=True,
186 m.connect('new_repo', '/create_repository', jsroute=True,
187 action='create_repository', conditions={'method': ['GET']})
187 action='create_repository', conditions={'method': ['GET']})
188 m.connect('delete_repo', '/repos/{repo_name}',
188 m.connect('delete_repo', '/repos/{repo_name}',
189 action='delete', conditions={'method': ['DELETE']},
189 action='delete', conditions={'method': ['DELETE']},
190 requirements=URL_NAME_REQUIREMENTS)
190 requirements=URL_NAME_REQUIREMENTS)
191 m.connect('repo', '/repos/{repo_name}',
191 m.connect('repo', '/repos/{repo_name}',
192 action='show', conditions={'method': ['GET'],
192 action='show', conditions={'method': ['GET'],
193 'function': check_repo},
193 'function': check_repo},
194 requirements=URL_NAME_REQUIREMENTS)
194 requirements=URL_NAME_REQUIREMENTS)
195
195
196 # ADMIN REPOSITORY GROUPS ROUTES
196 # ADMIN REPOSITORY GROUPS ROUTES
197 with rmap.submapper(path_prefix=ADMIN_PREFIX,
197 with rmap.submapper(path_prefix=ADMIN_PREFIX,
198 controller='admin/repo_groups') as m:
198 controller='admin/repo_groups') as m:
199 m.connect('repo_groups', '/repo_groups',
199 m.connect('repo_groups', '/repo_groups',
200 action='create', conditions={'method': ['POST']})
200 action='create', conditions={'method': ['POST']})
201 m.connect('repo_groups', '/repo_groups',
201 m.connect('repo_groups', '/repo_groups',
202 action='index', conditions={'method': ['GET']})
202 action='index', conditions={'method': ['GET']})
203 m.connect('new_repo_group', '/repo_groups/new',
203 m.connect('new_repo_group', '/repo_groups/new',
204 action='new', conditions={'method': ['GET']})
204 action='new', conditions={'method': ['GET']})
205 m.connect('update_repo_group', '/repo_groups/{group_name}',
205 m.connect('update_repo_group', '/repo_groups/{group_name}',
206 action='update', conditions={'method': ['PUT'],
206 action='update', conditions={'method': ['PUT'],
207 'function': check_group},
207 'function': check_group},
208 requirements=URL_NAME_REQUIREMENTS)
208 requirements=URL_NAME_REQUIREMENTS)
209
209
210 # EXTRAS REPO GROUP ROUTES
210 # EXTRAS REPO GROUP ROUTES
211 m.connect('edit_repo_group', '/repo_groups/{group_name}/edit',
211 m.connect('edit_repo_group', '/repo_groups/{group_name}/edit',
212 action='edit',
212 action='edit',
213 conditions={'method': ['GET'], 'function': check_group},
213 conditions={'method': ['GET'], 'function': check_group},
214 requirements=URL_NAME_REQUIREMENTS)
214 requirements=URL_NAME_REQUIREMENTS)
215 m.connect('edit_repo_group', '/repo_groups/{group_name}/edit',
215 m.connect('edit_repo_group', '/repo_groups/{group_name}/edit',
216 action='edit',
216 action='edit',
217 conditions={'method': ['PUT'], 'function': check_group},
217 conditions={'method': ['PUT'], 'function': check_group},
218 requirements=URL_NAME_REQUIREMENTS)
218 requirements=URL_NAME_REQUIREMENTS)
219
219
220 m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced',
220 m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced',
221 action='edit_repo_group_advanced',
221 action='edit_repo_group_advanced',
222 conditions={'method': ['GET'], 'function': check_group},
222 conditions={'method': ['GET'], 'function': check_group},
223 requirements=URL_NAME_REQUIREMENTS)
223 requirements=URL_NAME_REQUIREMENTS)
224 m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced',
224 m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced',
225 action='edit_repo_group_advanced',
225 action='edit_repo_group_advanced',
226 conditions={'method': ['PUT'], 'function': check_group},
226 conditions={'method': ['PUT'], 'function': check_group},
227 requirements=URL_NAME_REQUIREMENTS)
227 requirements=URL_NAME_REQUIREMENTS)
228
228
229 m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions',
229 m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions',
230 action='edit_repo_group_perms',
230 action='edit_repo_group_perms',
231 conditions={'method': ['GET'], 'function': check_group},
231 conditions={'method': ['GET'], 'function': check_group},
232 requirements=URL_NAME_REQUIREMENTS)
232 requirements=URL_NAME_REQUIREMENTS)
233 m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions',
233 m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions',
234 action='update_perms',
234 action='update_perms',
235 conditions={'method': ['PUT'], 'function': check_group},
235 conditions={'method': ['PUT'], 'function': check_group},
236 requirements=URL_NAME_REQUIREMENTS)
236 requirements=URL_NAME_REQUIREMENTS)
237
237
238 m.connect('delete_repo_group', '/repo_groups/{group_name}',
238 m.connect('delete_repo_group', '/repo_groups/{group_name}',
239 action='delete', conditions={'method': ['DELETE'],
239 action='delete', conditions={'method': ['DELETE'],
240 'function': check_group},
240 'function': check_group},
241 requirements=URL_NAME_REQUIREMENTS)
241 requirements=URL_NAME_REQUIREMENTS)
242
242
243 # ADMIN USER ROUTES
243 # ADMIN USER ROUTES
244 with rmap.submapper(path_prefix=ADMIN_PREFIX,
244 with rmap.submapper(path_prefix=ADMIN_PREFIX,
245 controller='admin/users') as m:
245 controller='admin/users') as m:
246 m.connect('users', '/users',
246 m.connect('users', '/users',
247 action='create', conditions={'method': ['POST']})
247 action='create', conditions={'method': ['POST']})
248 m.connect('new_user', '/users/new',
248 m.connect('new_user', '/users/new',
249 action='new', conditions={'method': ['GET']})
249 action='new', conditions={'method': ['GET']})
250 m.connect('update_user', '/users/{user_id}',
250 m.connect('update_user', '/users/{user_id}',
251 action='update', conditions={'method': ['PUT']})
251 action='update', conditions={'method': ['PUT']})
252 m.connect('delete_user', '/users/{user_id}',
252 m.connect('delete_user', '/users/{user_id}',
253 action='delete', conditions={'method': ['DELETE']})
253 action='delete', conditions={'method': ['DELETE']})
254 m.connect('edit_user', '/users/{user_id}/edit',
254 m.connect('edit_user', '/users/{user_id}/edit',
255 action='edit', conditions={'method': ['GET']}, jsroute=True)
255 action='edit', conditions={'method': ['GET']}, jsroute=True)
256 m.connect('user', '/users/{user_id}',
256 m.connect('user', '/users/{user_id}',
257 action='show', conditions={'method': ['GET']})
257 action='show', conditions={'method': ['GET']})
258 m.connect('force_password_reset_user', '/users/{user_id}/password_reset',
258 m.connect('force_password_reset_user', '/users/{user_id}/password_reset',
259 action='reset_password', conditions={'method': ['POST']})
259 action='reset_password', conditions={'method': ['POST']})
260 m.connect('create_personal_repo_group', '/users/{user_id}/create_repo_group',
260 m.connect('create_personal_repo_group', '/users/{user_id}/create_repo_group',
261 action='create_personal_repo_group', conditions={'method': ['POST']})
261 action='create_personal_repo_group', conditions={'method': ['POST']})
262
262
263 # EXTRAS USER ROUTES
263 # EXTRAS USER ROUTES
264 m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced',
264 m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced',
265 action='edit_advanced', conditions={'method': ['GET']})
265 action='edit_advanced', conditions={'method': ['GET']})
266 m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced',
266 m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced',
267 action='update_advanced', conditions={'method': ['PUT']})
267 action='update_advanced', conditions={'method': ['PUT']})
268
268
269 m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions',
269 m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions',
270 action='edit_global_perms', conditions={'method': ['GET']})
270 action='edit_global_perms', conditions={'method': ['GET']})
271 m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions',
271 m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions',
272 action='update_global_perms', conditions={'method': ['PUT']})
272 action='update_global_perms', conditions={'method': ['PUT']})
273
273
274 m.connect('edit_user_perms_summary', '/users/{user_id}/edit/permissions_summary',
274 m.connect('edit_user_perms_summary', '/users/{user_id}/edit/permissions_summary',
275 action='edit_perms_summary', conditions={'method': ['GET']})
275 action='edit_perms_summary', conditions={'method': ['GET']})
276
276
277 # ADMIN USER GROUPS REST ROUTES
277 # ADMIN USER GROUPS REST ROUTES
278 with rmap.submapper(path_prefix=ADMIN_PREFIX,
278 with rmap.submapper(path_prefix=ADMIN_PREFIX,
279 controller='admin/user_groups') as m:
279 controller='admin/user_groups') as m:
280 m.connect('users_groups', '/user_groups',
280 m.connect('users_groups', '/user_groups',
281 action='create', conditions={'method': ['POST']})
281 action='create', conditions={'method': ['POST']})
282 m.connect('users_groups', '/user_groups',
282 m.connect('users_groups', '/user_groups',
283 action='index', conditions={'method': ['GET']})
283 action='index', conditions={'method': ['GET']})
284 m.connect('new_users_group', '/user_groups/new',
284 m.connect('new_users_group', '/user_groups/new',
285 action='new', conditions={'method': ['GET']})
285 action='new', conditions={'method': ['GET']})
286 m.connect('update_users_group', '/user_groups/{user_group_id}',
286 m.connect('update_users_group', '/user_groups/{user_group_id}',
287 action='update', conditions={'method': ['PUT']})
287 action='update', conditions={'method': ['PUT']})
288 m.connect('delete_users_group', '/user_groups/{user_group_id}',
288 m.connect('delete_users_group', '/user_groups/{user_group_id}',
289 action='delete', conditions={'method': ['DELETE']})
289 action='delete', conditions={'method': ['DELETE']})
290 m.connect('edit_users_group', '/user_groups/{user_group_id}/edit',
290 m.connect('edit_users_group', '/user_groups/{user_group_id}/edit',
291 action='edit', conditions={'method': ['GET']},
291 action='edit', conditions={'method': ['GET']},
292 function=check_user_group)
292 function=check_user_group)
293
293
294 # EXTRAS USER GROUP ROUTES
294 # EXTRAS USER GROUP ROUTES
295 m.connect('edit_user_group_global_perms',
295 m.connect('edit_user_group_global_perms',
296 '/user_groups/{user_group_id}/edit/global_permissions',
296 '/user_groups/{user_group_id}/edit/global_permissions',
297 action='edit_global_perms', conditions={'method': ['GET']})
297 action='edit_global_perms', conditions={'method': ['GET']})
298 m.connect('edit_user_group_global_perms',
298 m.connect('edit_user_group_global_perms',
299 '/user_groups/{user_group_id}/edit/global_permissions',
299 '/user_groups/{user_group_id}/edit/global_permissions',
300 action='update_global_perms', conditions={'method': ['PUT']})
300 action='update_global_perms', conditions={'method': ['PUT']})
301 m.connect('edit_user_group_perms_summary',
301 m.connect('edit_user_group_perms_summary',
302 '/user_groups/{user_group_id}/edit/permissions_summary',
302 '/user_groups/{user_group_id}/edit/permissions_summary',
303 action='edit_perms_summary', conditions={'method': ['GET']})
303 action='edit_perms_summary', conditions={'method': ['GET']})
304
304
305 m.connect('edit_user_group_perms',
305 m.connect('edit_user_group_perms',
306 '/user_groups/{user_group_id}/edit/permissions',
306 '/user_groups/{user_group_id}/edit/permissions',
307 action='edit_perms', conditions={'method': ['GET']})
307 action='edit_perms', conditions={'method': ['GET']})
308 m.connect('edit_user_group_perms',
308 m.connect('edit_user_group_perms',
309 '/user_groups/{user_group_id}/edit/permissions',
309 '/user_groups/{user_group_id}/edit/permissions',
310 action='update_perms', conditions={'method': ['PUT']})
310 action='update_perms', conditions={'method': ['PUT']})
311
311
312 m.connect('edit_user_group_advanced',
312 m.connect('edit_user_group_advanced',
313 '/user_groups/{user_group_id}/edit/advanced',
313 '/user_groups/{user_group_id}/edit/advanced',
314 action='edit_advanced', conditions={'method': ['GET']})
314 action='edit_advanced', conditions={'method': ['GET']})
315
315
316 m.connect('edit_user_group_advanced_sync',
316 m.connect('edit_user_group_advanced_sync',
317 '/user_groups/{user_group_id}/edit/advanced/sync',
317 '/user_groups/{user_group_id}/edit/advanced/sync',
318 action='edit_advanced_set_synchronization', conditions={'method': ['POST']})
318 action='edit_advanced_set_synchronization', conditions={'method': ['POST']})
319
319
320 m.connect('edit_user_group_members',
320 m.connect('edit_user_group_members',
321 '/user_groups/{user_group_id}/edit/members', jsroute=True,
321 '/user_groups/{user_group_id}/edit/members', jsroute=True,
322 action='user_group_members', conditions={'method': ['GET']})
322 action='user_group_members', conditions={'method': ['GET']})
323
323
324 # ADMIN DEFAULTS REST ROUTES
324 # ADMIN DEFAULTS REST ROUTES
325 with rmap.submapper(path_prefix=ADMIN_PREFIX,
325 with rmap.submapper(path_prefix=ADMIN_PREFIX,
326 controller='admin/defaults') as m:
326 controller='admin/defaults') as m:
327 m.connect('admin_defaults_repositories', '/defaults/repositories',
327 m.connect('admin_defaults_repositories', '/defaults/repositories',
328 action='update_repository_defaults', conditions={'method': ['POST']})
328 action='update_repository_defaults', conditions={'method': ['POST']})
329 m.connect('admin_defaults_repositories', '/defaults/repositories',
329 m.connect('admin_defaults_repositories', '/defaults/repositories',
330 action='index', conditions={'method': ['GET']})
330 action='index', conditions={'method': ['GET']})
331
331
332 # ADMIN SETTINGS ROUTES
332 # ADMIN SETTINGS ROUTES
333 with rmap.submapper(path_prefix=ADMIN_PREFIX,
333 with rmap.submapper(path_prefix=ADMIN_PREFIX,
334 controller='admin/settings') as m:
334 controller='admin/settings') as m:
335
335
336 # default
336 # default
337 m.connect('admin_settings', '/settings',
337 m.connect('admin_settings', '/settings',
338 action='settings_global_update',
338 action='settings_global_update',
339 conditions={'method': ['POST']})
339 conditions={'method': ['POST']})
340 m.connect('admin_settings', '/settings',
340 m.connect('admin_settings', '/settings',
341 action='settings_global', conditions={'method': ['GET']})
341 action='settings_global', conditions={'method': ['GET']})
342
342
343 m.connect('admin_settings_vcs', '/settings/vcs',
343 m.connect('admin_settings_vcs', '/settings/vcs',
344 action='settings_vcs_update',
344 action='settings_vcs_update',
345 conditions={'method': ['POST']})
345 conditions={'method': ['POST']})
346 m.connect('admin_settings_vcs', '/settings/vcs',
346 m.connect('admin_settings_vcs', '/settings/vcs',
347 action='settings_vcs',
347 action='settings_vcs',
348 conditions={'method': ['GET']})
348 conditions={'method': ['GET']})
349 m.connect('admin_settings_vcs', '/settings/vcs',
349 m.connect('admin_settings_vcs', '/settings/vcs',
350 action='delete_svn_pattern',
350 action='delete_svn_pattern',
351 conditions={'method': ['DELETE']})
351 conditions={'method': ['DELETE']})
352
352
353 m.connect('admin_settings_mapping', '/settings/mapping',
353 m.connect('admin_settings_mapping', '/settings/mapping',
354 action='settings_mapping_update',
354 action='settings_mapping_update',
355 conditions={'method': ['POST']})
355 conditions={'method': ['POST']})
356 m.connect('admin_settings_mapping', '/settings/mapping',
356 m.connect('admin_settings_mapping', '/settings/mapping',
357 action='settings_mapping', conditions={'method': ['GET']})
357 action='settings_mapping', conditions={'method': ['GET']})
358
358
359 m.connect('admin_settings_global', '/settings/global',
359 m.connect('admin_settings_global', '/settings/global',
360 action='settings_global_update',
360 action='settings_global_update',
361 conditions={'method': ['POST']})
361 conditions={'method': ['POST']})
362 m.connect('admin_settings_global', '/settings/global',
362 m.connect('admin_settings_global', '/settings/global',
363 action='settings_global', conditions={'method': ['GET']})
363 action='settings_global', conditions={'method': ['GET']})
364
364
365 m.connect('admin_settings_visual', '/settings/visual',
365 m.connect('admin_settings_visual', '/settings/visual',
366 action='settings_visual_update',
366 action='settings_visual_update',
367 conditions={'method': ['POST']})
367 conditions={'method': ['POST']})
368 m.connect('admin_settings_visual', '/settings/visual',
368 m.connect('admin_settings_visual', '/settings/visual',
369 action='settings_visual', conditions={'method': ['GET']})
369 action='settings_visual', conditions={'method': ['GET']})
370
370
371 m.connect('admin_settings_issuetracker',
371 m.connect('admin_settings_issuetracker',
372 '/settings/issue-tracker', action='settings_issuetracker',
372 '/settings/issue-tracker', action='settings_issuetracker',
373 conditions={'method': ['GET']})
373 conditions={'method': ['GET']})
374 m.connect('admin_settings_issuetracker_save',
374 m.connect('admin_settings_issuetracker_save',
375 '/settings/issue-tracker/save',
375 '/settings/issue-tracker/save',
376 action='settings_issuetracker_save',
376 action='settings_issuetracker_save',
377 conditions={'method': ['POST']})
377 conditions={'method': ['POST']})
378 m.connect('admin_issuetracker_test', '/settings/issue-tracker/test',
378 m.connect('admin_issuetracker_test', '/settings/issue-tracker/test',
379 action='settings_issuetracker_test',
379 action='settings_issuetracker_test',
380 conditions={'method': ['POST']})
380 conditions={'method': ['POST']})
381 m.connect('admin_issuetracker_delete',
381 m.connect('admin_issuetracker_delete',
382 '/settings/issue-tracker/delete',
382 '/settings/issue-tracker/delete',
383 action='settings_issuetracker_delete',
383 action='settings_issuetracker_delete',
384 conditions={'method': ['DELETE']})
384 conditions={'method': ['DELETE']})
385
385
386 m.connect('admin_settings_email', '/settings/email',
386 m.connect('admin_settings_email', '/settings/email',
387 action='settings_email_update',
387 action='settings_email_update',
388 conditions={'method': ['POST']})
388 conditions={'method': ['POST']})
389 m.connect('admin_settings_email', '/settings/email',
389 m.connect('admin_settings_email', '/settings/email',
390 action='settings_email', conditions={'method': ['GET']})
390 action='settings_email', conditions={'method': ['GET']})
391
391
392 m.connect('admin_settings_hooks', '/settings/hooks',
392 m.connect('admin_settings_hooks', '/settings/hooks',
393 action='settings_hooks_update',
393 action='settings_hooks_update',
394 conditions={'method': ['POST', 'DELETE']})
394 conditions={'method': ['POST', 'DELETE']})
395 m.connect('admin_settings_hooks', '/settings/hooks',
395 m.connect('admin_settings_hooks', '/settings/hooks',
396 action='settings_hooks', conditions={'method': ['GET']})
396 action='settings_hooks', conditions={'method': ['GET']})
397
397
398 m.connect('admin_settings_search', '/settings/search',
398 m.connect('admin_settings_search', '/settings/search',
399 action='settings_search', conditions={'method': ['GET']})
399 action='settings_search', conditions={'method': ['GET']})
400
400
401 m.connect('admin_settings_supervisor', '/settings/supervisor',
401 m.connect('admin_settings_supervisor', '/settings/supervisor',
402 action='settings_supervisor', conditions={'method': ['GET']})
402 action='settings_supervisor', conditions={'method': ['GET']})
403 m.connect('admin_settings_supervisor_log', '/settings/supervisor/{procid}/log',
403 m.connect('admin_settings_supervisor_log', '/settings/supervisor/{procid}/log',
404 action='settings_supervisor_log', conditions={'method': ['GET']})
404 action='settings_supervisor_log', conditions={'method': ['GET']})
405
405
406 m.connect('admin_settings_labs', '/settings/labs',
406 m.connect('admin_settings_labs', '/settings/labs',
407 action='settings_labs_update',
407 action='settings_labs_update',
408 conditions={'method': ['POST']})
408 conditions={'method': ['POST']})
409 m.connect('admin_settings_labs', '/settings/labs',
409 m.connect('admin_settings_labs', '/settings/labs',
410 action='settings_labs', conditions={'method': ['GET']})
410 action='settings_labs', conditions={'method': ['GET']})
411
411
412 # ADMIN MY ACCOUNT
412 # ADMIN MY ACCOUNT
413 with rmap.submapper(path_prefix=ADMIN_PREFIX,
413 with rmap.submapper(path_prefix=ADMIN_PREFIX,
414 controller='admin/my_account') as m:
414 controller='admin/my_account') as m:
415
415
416 # NOTE(marcink): this needs to be kept for the password force flag to be
416 # NOTE(marcink): this needs to be kept for the password force flag to be
417 # handled in pylons controllers; remove after full migration to pyramid
417 # handled in pylons controllers; remove after full migration to pyramid
418 m.connect('my_account_password', '/my_account/password',
418 m.connect('my_account_password', '/my_account/password',
419 action='my_account_password', conditions={'method': ['GET']})
419 action='my_account_password', conditions={'method': ['GET']})
420
420
421 #==========================================================================
421 #==========================================================================
422 # REPOSITORY ROUTES
422 # REPOSITORY ROUTES
423 #==========================================================================
423 #==========================================================================
424
424
425 rmap.connect('repo_creating_home', '/{repo_name}/repo_creating',
425 rmap.connect('repo_creating_home', '/{repo_name}/repo_creating',
426 controller='admin/repos', action='repo_creating',
426 controller='admin/repos', action='repo_creating',
427 requirements=URL_NAME_REQUIREMENTS)
427 requirements=URL_NAME_REQUIREMENTS)
428 rmap.connect('repo_check_home', '/{repo_name}/crepo_check',
428 rmap.connect('repo_check_home', '/{repo_name}/crepo_check',
429 controller='admin/repos', action='repo_check',
429 controller='admin/repos', action='repo_check',
430 requirements=URL_NAME_REQUIREMENTS)
430 requirements=URL_NAME_REQUIREMENTS)
431
431
432 rmap.connect('changeset_home', '/{repo_name}/changeset/{revision}',
433 controller='changeset', revision='tip',
434 conditions={'function': check_repo},
435 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
436 rmap.connect('changeset_children', '/{repo_name}/changeset_children/{revision}',
437 controller='changeset', revision='tip', action='changeset_children',
438 conditions={'function': check_repo},
439 requirements=URL_NAME_REQUIREMENTS)
440 rmap.connect('changeset_parents', '/{repo_name}/changeset_parents/{revision}',
441 controller='changeset', revision='tip', action='changeset_parents',
442 conditions={'function': check_repo},
443 requirements=URL_NAME_REQUIREMENTS)
444
445 # repo edit options
432 # repo edit options
446 rmap.connect('edit_repo_fields', '/{repo_name}/settings/fields',
433 rmap.connect('edit_repo_fields', '/{repo_name}/settings/fields',
447 controller='admin/repos', action='edit_fields',
434 controller='admin/repos', action='edit_fields',
448 conditions={'method': ['GET'], 'function': check_repo},
435 conditions={'method': ['GET'], 'function': check_repo},
449 requirements=URL_NAME_REQUIREMENTS)
436 requirements=URL_NAME_REQUIREMENTS)
450 rmap.connect('create_repo_fields', '/{repo_name}/settings/fields/new',
437 rmap.connect('create_repo_fields', '/{repo_name}/settings/fields/new',
451 controller='admin/repos', action='create_repo_field',
438 controller='admin/repos', action='create_repo_field',
452 conditions={'method': ['PUT'], 'function': check_repo},
439 conditions={'method': ['PUT'], 'function': check_repo},
453 requirements=URL_NAME_REQUIREMENTS)
440 requirements=URL_NAME_REQUIREMENTS)
454 rmap.connect('delete_repo_fields', '/{repo_name}/settings/fields/{field_id}',
441 rmap.connect('delete_repo_fields', '/{repo_name}/settings/fields/{field_id}',
455 controller='admin/repos', action='delete_repo_field',
442 controller='admin/repos', action='delete_repo_field',
456 conditions={'method': ['DELETE'], 'function': check_repo},
443 conditions={'method': ['DELETE'], 'function': check_repo},
457 requirements=URL_NAME_REQUIREMENTS)
444 requirements=URL_NAME_REQUIREMENTS)
458
445
459 rmap.connect('toggle_locking', '/{repo_name}/settings/advanced/locking_toggle',
446 rmap.connect('toggle_locking', '/{repo_name}/settings/advanced/locking_toggle',
460 controller='admin/repos', action='toggle_locking',
447 controller='admin/repos', action='toggle_locking',
461 conditions={'method': ['GET'], 'function': check_repo},
448 conditions={'method': ['GET'], 'function': check_repo},
462 requirements=URL_NAME_REQUIREMENTS)
449 requirements=URL_NAME_REQUIREMENTS)
463
450
464 rmap.connect('edit_repo_remote', '/{repo_name}/settings/remote',
451 rmap.connect('edit_repo_remote', '/{repo_name}/settings/remote',
465 controller='admin/repos', action='edit_remote_form',
452 controller='admin/repos', action='edit_remote_form',
466 conditions={'method': ['GET'], 'function': check_repo},
453 conditions={'method': ['GET'], 'function': check_repo},
467 requirements=URL_NAME_REQUIREMENTS)
454 requirements=URL_NAME_REQUIREMENTS)
468 rmap.connect('edit_repo_remote', '/{repo_name}/settings/remote',
455 rmap.connect('edit_repo_remote', '/{repo_name}/settings/remote',
469 controller='admin/repos', action='edit_remote',
456 controller='admin/repos', action='edit_remote',
470 conditions={'method': ['PUT'], 'function': check_repo},
457 conditions={'method': ['PUT'], 'function': check_repo},
471 requirements=URL_NAME_REQUIREMENTS)
458 requirements=URL_NAME_REQUIREMENTS)
472
459
473 rmap.connect('edit_repo_statistics', '/{repo_name}/settings/statistics',
460 rmap.connect('edit_repo_statistics', '/{repo_name}/settings/statistics',
474 controller='admin/repos', action='edit_statistics_form',
461 controller='admin/repos', action='edit_statistics_form',
475 conditions={'method': ['GET'], 'function': check_repo},
462 conditions={'method': ['GET'], 'function': check_repo},
476 requirements=URL_NAME_REQUIREMENTS)
463 requirements=URL_NAME_REQUIREMENTS)
477 rmap.connect('edit_repo_statistics', '/{repo_name}/settings/statistics',
464 rmap.connect('edit_repo_statistics', '/{repo_name}/settings/statistics',
478 controller='admin/repos', action='edit_statistics',
465 controller='admin/repos', action='edit_statistics',
479 conditions={'method': ['PUT'], 'function': check_repo},
466 conditions={'method': ['PUT'], 'function': check_repo},
480 requirements=URL_NAME_REQUIREMENTS)
467 requirements=URL_NAME_REQUIREMENTS)
481 rmap.connect('repo_settings_issuetracker',
468 rmap.connect('repo_settings_issuetracker',
482 '/{repo_name}/settings/issue-tracker',
469 '/{repo_name}/settings/issue-tracker',
483 controller='admin/repos', action='repo_issuetracker',
470 controller='admin/repos', action='repo_issuetracker',
484 conditions={'method': ['GET'], 'function': check_repo},
471 conditions={'method': ['GET'], 'function': check_repo},
485 requirements=URL_NAME_REQUIREMENTS)
472 requirements=URL_NAME_REQUIREMENTS)
486 rmap.connect('repo_issuetracker_test',
473 rmap.connect('repo_issuetracker_test',
487 '/{repo_name}/settings/issue-tracker/test',
474 '/{repo_name}/settings/issue-tracker/test',
488 controller='admin/repos', action='repo_issuetracker_test',
475 controller='admin/repos', action='repo_issuetracker_test',
489 conditions={'method': ['POST'], 'function': check_repo},
476 conditions={'method': ['POST'], 'function': check_repo},
490 requirements=URL_NAME_REQUIREMENTS)
477 requirements=URL_NAME_REQUIREMENTS)
491 rmap.connect('repo_issuetracker_delete',
478 rmap.connect('repo_issuetracker_delete',
492 '/{repo_name}/settings/issue-tracker/delete',
479 '/{repo_name}/settings/issue-tracker/delete',
493 controller='admin/repos', action='repo_issuetracker_delete',
480 controller='admin/repos', action='repo_issuetracker_delete',
494 conditions={'method': ['DELETE'], 'function': check_repo},
481 conditions={'method': ['DELETE'], 'function': check_repo},
495 requirements=URL_NAME_REQUIREMENTS)
482 requirements=URL_NAME_REQUIREMENTS)
496 rmap.connect('repo_issuetracker_save',
483 rmap.connect('repo_issuetracker_save',
497 '/{repo_name}/settings/issue-tracker/save',
484 '/{repo_name}/settings/issue-tracker/save',
498 controller='admin/repos', action='repo_issuetracker_save',
485 controller='admin/repos', action='repo_issuetracker_save',
499 conditions={'method': ['POST'], 'function': check_repo},
486 conditions={'method': ['POST'], 'function': check_repo},
500 requirements=URL_NAME_REQUIREMENTS)
487 requirements=URL_NAME_REQUIREMENTS)
501 rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs',
488 rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs',
502 controller='admin/repos', action='repo_settings_vcs_update',
489 controller='admin/repos', action='repo_settings_vcs_update',
503 conditions={'method': ['POST'], 'function': check_repo},
490 conditions={'method': ['POST'], 'function': check_repo},
504 requirements=URL_NAME_REQUIREMENTS)
491 requirements=URL_NAME_REQUIREMENTS)
505 rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs',
492 rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs',
506 controller='admin/repos', action='repo_settings_vcs',
493 controller='admin/repos', action='repo_settings_vcs',
507 conditions={'method': ['GET'], 'function': check_repo},
494 conditions={'method': ['GET'], 'function': check_repo},
508 requirements=URL_NAME_REQUIREMENTS)
495 requirements=URL_NAME_REQUIREMENTS)
509 rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs',
496 rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs',
510 controller='admin/repos', action='repo_delete_svn_pattern',
497 controller='admin/repos', action='repo_delete_svn_pattern',
511 conditions={'method': ['DELETE'], 'function': check_repo},
498 conditions={'method': ['DELETE'], 'function': check_repo},
512 requirements=URL_NAME_REQUIREMENTS)
499 requirements=URL_NAME_REQUIREMENTS)
513 rmap.connect('repo_pullrequest_settings', '/{repo_name}/settings/pullrequest',
500 rmap.connect('repo_pullrequest_settings', '/{repo_name}/settings/pullrequest',
514 controller='admin/repos', action='repo_settings_pullrequest',
501 controller='admin/repos', action='repo_settings_pullrequest',
515 conditions={'method': ['GET', 'POST'], 'function': check_repo},
502 conditions={'method': ['GET', 'POST'], 'function': check_repo},
516 requirements=URL_NAME_REQUIREMENTS)
503 requirements=URL_NAME_REQUIREMENTS)
517
504
518 # still working url for backward compat.
519 rmap.connect('raw_changeset_home_depraced',
520 '/{repo_name}/raw-changeset/{revision}',
521 controller='changeset', action='changeset_raw',
522 revision='tip', conditions={'function': check_repo},
523 requirements=URL_NAME_REQUIREMENTS)
524
525 # new URLs
526 rmap.connect('changeset_raw_home',
527 '/{repo_name}/changeset-diff/{revision}',
528 controller='changeset', action='changeset_raw',
529 revision='tip', conditions={'function': check_repo},
530 requirements=URL_NAME_REQUIREMENTS)
531
532 rmap.connect('changeset_patch_home',
533 '/{repo_name}/changeset-patch/{revision}',
534 controller='changeset', action='changeset_patch',
535 revision='tip', conditions={'function': check_repo},
536 requirements=URL_NAME_REQUIREMENTS)
537
538 rmap.connect('changeset_download_home',
539 '/{repo_name}/changeset-download/{revision}',
540 controller='changeset', action='changeset_download',
541 revision='tip', conditions={'function': check_repo},
542 requirements=URL_NAME_REQUIREMENTS)
543
544 rmap.connect('changeset_comment',
545 '/{repo_name}/changeset/{revision}/comment', jsroute=True,
546 controller='changeset', revision='tip', action='comment',
547 conditions={'function': check_repo},
548 requirements=URL_NAME_REQUIREMENTS)
549
550 rmap.connect('changeset_comment_preview',
551 '/{repo_name}/changeset/comment/preview', jsroute=True,
552 controller='changeset', action='preview_comment',
553 conditions={'function': check_repo, 'method': ['POST']},
554 requirements=URL_NAME_REQUIREMENTS)
555
556 rmap.connect('changeset_comment_delete',
557 '/{repo_name}/changeset/comment/{comment_id}/delete',
558 controller='changeset', action='delete_comment',
559 conditions={'function': check_repo, 'method': ['DELETE']},
560 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
561
562 rmap.connect('changeset_info', '/{repo_name}/changeset_info/{revision}',
563 controller='changeset', action='changeset_info',
564 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
565
566 rmap.connect('compare_home',
505 rmap.connect('compare_home',
567 '/{repo_name}/compare',
506 '/{repo_name}/compare',
568 controller='compare', action='index',
507 controller='compare', action='index',
569 conditions={'function': check_repo},
508 conditions={'function': check_repo},
570 requirements=URL_NAME_REQUIREMENTS)
509 requirements=URL_NAME_REQUIREMENTS)
571
510
572 rmap.connect('compare_url',
511 rmap.connect('compare_url',
573 '/{repo_name}/compare/{source_ref_type}@{source_ref:.*?}...{target_ref_type}@{target_ref:.*?}',
512 '/{repo_name}/compare/{source_ref_type}@{source_ref:.*?}...{target_ref_type}@{target_ref:.*?}',
574 controller='compare', action='compare',
513 controller='compare', action='compare',
575 conditions={'function': check_repo},
514 conditions={'function': check_repo},
576 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
515 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
577
516
578 rmap.connect('pullrequest_home',
517 rmap.connect('pullrequest_home',
579 '/{repo_name}/pull-request/new', controller='pullrequests',
518 '/{repo_name}/pull-request/new', controller='pullrequests',
580 action='index', conditions={'function': check_repo,
519 action='index', conditions={'function': check_repo,
581 'method': ['GET']},
520 'method': ['GET']},
582 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
521 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
583
522
584 rmap.connect('pullrequest',
523 rmap.connect('pullrequest',
585 '/{repo_name}/pull-request/new', controller='pullrequests',
524 '/{repo_name}/pull-request/new', controller='pullrequests',
586 action='create', conditions={'function': check_repo,
525 action='create', conditions={'function': check_repo,
587 'method': ['POST']},
526 'method': ['POST']},
588 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
527 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
589
528
590 rmap.connect('pullrequest_repo_refs',
529 rmap.connect('pullrequest_repo_refs',
591 '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
530 '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
592 controller='pullrequests',
531 controller='pullrequests',
593 action='get_repo_refs',
532 action='get_repo_refs',
594 conditions={'function': check_repo, 'method': ['GET']},
533 conditions={'function': check_repo, 'method': ['GET']},
595 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
534 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
596
535
597 rmap.connect('pullrequest_repo_destinations',
536 rmap.connect('pullrequest_repo_destinations',
598 '/{repo_name}/pull-request/repo-destinations',
537 '/{repo_name}/pull-request/repo-destinations',
599 controller='pullrequests',
538 controller='pullrequests',
600 action='get_repo_destinations',
539 action='get_repo_destinations',
601 conditions={'function': check_repo, 'method': ['GET']},
540 conditions={'function': check_repo, 'method': ['GET']},
602 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
541 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
603
542
604 rmap.connect('pullrequest_show',
543 rmap.connect('pullrequest_show',
605 '/{repo_name}/pull-request/{pull_request_id}',
544 '/{repo_name}/pull-request/{pull_request_id}',
606 controller='pullrequests',
545 controller='pullrequests',
607 action='show', conditions={'function': check_repo,
546 action='show', conditions={'function': check_repo,
608 'method': ['GET']},
547 'method': ['GET']},
609 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
548 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
610
549
611 rmap.connect('pullrequest_update',
550 rmap.connect('pullrequest_update',
612 '/{repo_name}/pull-request/{pull_request_id}',
551 '/{repo_name}/pull-request/{pull_request_id}',
613 controller='pullrequests',
552 controller='pullrequests',
614 action='update', conditions={'function': check_repo,
553 action='update', conditions={'function': check_repo,
615 'method': ['PUT']},
554 'method': ['PUT']},
616 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
555 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
617
556
618 rmap.connect('pullrequest_merge',
557 rmap.connect('pullrequest_merge',
619 '/{repo_name}/pull-request/{pull_request_id}',
558 '/{repo_name}/pull-request/{pull_request_id}',
620 controller='pullrequests',
559 controller='pullrequests',
621 action='merge', conditions={'function': check_repo,
560 action='merge', conditions={'function': check_repo,
622 'method': ['POST']},
561 'method': ['POST']},
623 requirements=URL_NAME_REQUIREMENTS)
562 requirements=URL_NAME_REQUIREMENTS)
624
563
625 rmap.connect('pullrequest_delete',
564 rmap.connect('pullrequest_delete',
626 '/{repo_name}/pull-request/{pull_request_id}',
565 '/{repo_name}/pull-request/{pull_request_id}',
627 controller='pullrequests',
566 controller='pullrequests',
628 action='delete', conditions={'function': check_repo,
567 action='delete', conditions={'function': check_repo,
629 'method': ['DELETE']},
568 'method': ['DELETE']},
630 requirements=URL_NAME_REQUIREMENTS)
569 requirements=URL_NAME_REQUIREMENTS)
631
570
632 rmap.connect('pullrequest_comment',
571 rmap.connect('pullrequest_comment',
633 '/{repo_name}/pull-request-comment/{pull_request_id}',
572 '/{repo_name}/pull-request-comment/{pull_request_id}',
634 controller='pullrequests',
573 controller='pullrequests',
635 action='comment', conditions={'function': check_repo,
574 action='comment', conditions={'function': check_repo,
636 'method': ['POST']},
575 'method': ['POST']},
637 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
576 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
638
577
639 rmap.connect('pullrequest_comment_delete',
578 rmap.connect('pullrequest_comment_delete',
640 '/{repo_name}/pull-request-comment/{comment_id}/delete',
579 '/{repo_name}/pull-request-comment/{comment_id}/delete',
641 controller='pullrequests', action='delete_comment',
580 controller='pullrequests', action='delete_comment',
642 conditions={'function': check_repo, 'method': ['DELETE']},
581 conditions={'function': check_repo, 'method': ['DELETE']},
643 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
582 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
644
583
645 rmap.connect('repo_fork_create_home', '/{repo_name}/fork',
584 rmap.connect('repo_fork_create_home', '/{repo_name}/fork',
646 controller='forks', action='fork_create',
585 controller='forks', action='fork_create',
647 conditions={'function': check_repo, 'method': ['POST']},
586 conditions={'function': check_repo, 'method': ['POST']},
648 requirements=URL_NAME_REQUIREMENTS)
587 requirements=URL_NAME_REQUIREMENTS)
649
588
650 rmap.connect('repo_fork_home', '/{repo_name}/fork',
589 rmap.connect('repo_fork_home', '/{repo_name}/fork',
651 controller='forks', action='fork',
590 controller='forks', action='fork',
652 conditions={'function': check_repo},
591 conditions={'function': check_repo},
653 requirements=URL_NAME_REQUIREMENTS)
592 requirements=URL_NAME_REQUIREMENTS)
654
593
655 rmap.connect('repo_forks_home', '/{repo_name}/forks',
594 rmap.connect('repo_forks_home', '/{repo_name}/forks',
656 controller='forks', action='forks',
595 controller='forks', action='forks',
657 conditions={'function': check_repo},
596 conditions={'function': check_repo},
658 requirements=URL_NAME_REQUIREMENTS)
597 requirements=URL_NAME_REQUIREMENTS)
659
598
660 return rmap
599 return rmap
@@ -1,349 +1,351 b''
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2017 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import logging

-from pylons import url
from pylons.i18n.translation import _
from webhelpers.html.builder import literal
from webhelpers.html.tags import link_to

from rhodecode.lib.utils2 import AttributeDict
from rhodecode.lib.vcs.backends.base import BaseCommit
from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError


log = logging.getLogger(__name__)


def action_parser(user_log, feed=False, parse_cs=False):
    """
    This helper will action_map the specified string action into translated
    fancy names with icons and links

    :param user_log: user log instance
    :param feed: use output for feeds (no html and fancy icons)
    :param parse_cs: parse Changesets into VCS instances
    """
    if user_log.version == 'v2':
        ap = AuditLogParser(user_log)
        return ap.callbacks()
    else:
        # old style
        ap = ActionParser(user_log, feed=False, parse_commits=False)
        return ap.callbacks()


class ActionParser(object):

    commits_limit = 3  # display this amount always
    commits_top_limit = 50  # show up to this amount of commits hidden

    def __init__(self, user_log, feed=False, parse_commits=False):
        self.user_log = user_log
        self.feed = feed
        self.parse_commits = parse_commits

        self.action = user_log.action
        self.action_params = ' '
        x = self.action.split(':', 1)
        if len(x) > 1:
            self.action, self.action_params = x

    def callbacks(self):
        action_str = self.action_map.get(self.action, self.action)
        if self.feed:
            action = action_str[0].replace('[', '').replace(']', '')
        else:
            action = action_str[0]\
                .replace('[', '<span class="journal_highlight">')\
                .replace(']', '</span>')

        action_params_func = _no_params_func
        if callable(action_str[1]):
            action_params_func = action_str[1]

        # returned callbacks we need to call to get
        return [
            lambda: literal(action), action_params_func,
            self.action_parser_icon]

    @property
    def action_map(self):

        # action : translated str, callback(extractor), icon
        action_map = {
            'user_deleted_repo': (
                _('[deleted] repository'),
                None, 'icon-trash'),
            'user_created_repo': (
                _('[created] repository'),
                None, 'icon-plus icon-plus-colored'),
            'user_created_fork': (
                _('[created] repository as fork'),
                None, 'icon-code-fork'),
            'user_forked_repo': (
                _('[forked] repository'),
                self.get_fork_name, 'icon-code-fork'),
            'user_updated_repo': (
                _('[updated] repository'),
                None, 'icon-pencil icon-pencil-colored'),
            'user_downloaded_archive': (
                _('[downloaded] archive from repository'),
                self.get_archive_name, 'icon-download-alt'),
            'admin_deleted_repo': (
                _('[delete] repository'),
                None, 'icon-trash'),
            'admin_created_repo': (
                _('[created] repository'),
                None, 'icon-plus icon-plus-colored'),
            'admin_forked_repo': (
                _('[forked] repository'),
                None, 'icon-code-fork icon-fork-colored'),
            'admin_updated_repo': (
                _('[updated] repository'),
                None, 'icon-pencil icon-pencil-colored'),
            'admin_created_user': (
                _('[created] user'),
                self.get_user_name, 'icon-user icon-user-colored'),
            'admin_updated_user': (
                _('[updated] user'),
                self.get_user_name, 'icon-user icon-user-colored'),
            'admin_created_users_group': (
                _('[created] user group'),
                self.get_users_group, 'icon-pencil icon-pencil-colored'),
            'admin_updated_users_group': (
                _('[updated] user group'),
                self.get_users_group, 'icon-pencil icon-pencil-colored'),
            'user_commented_revision': (
                _('[commented] on commit in repository'),
                self.get_cs_links, 'icon-comment icon-comment-colored'),
            'user_commented_pull_request': (
                _('[commented] on pull request for'),
                self.get_pull_request, 'icon-comment icon-comment-colored'),
            'user_closed_pull_request': (
                _('[closed] pull request for'),
                self.get_pull_request, 'icon-check'),
            'user_merged_pull_request': (
                _('[merged] pull request for'),
                self.get_pull_request, 'icon-check'),
            'push': (
                _('[pushed] into'),
                self.get_cs_links, 'icon-arrow-up'),
            'push_local': (
                _('[committed via RhodeCode] into repository'),
                self.get_cs_links, 'icon-pencil icon-pencil-colored'),
            'push_remote': (
                _('[pulled from remote] into repository'),
                self.get_cs_links, 'icon-arrow-up'),
            'pull': (
                _('[pulled] from'),
                None, 'icon-arrow-down'),
            'started_following_repo': (
                _('[started following] repository'),
                None, 'icon-heart icon-heart-colored'),
            'stopped_following_repo': (
                _('[stopped following] repository'),
                None, 'icon-heart-empty icon-heart-colored'),
        }
        return action_map

    def get_fork_name(self):
        from rhodecode.lib import helpers as h
        repo_name = self.action_params
        _url = h.route_path('repo_summary', repo_name=repo_name)
        return _('fork name %s') % link_to(self.action_params, _url)

    def get_user_name(self):
        user_name = self.action_params
        return user_name

    def get_users_group(self):
        group_name = self.action_params
        return group_name

    def get_pull_request(self):
        from rhodecode.lib import helpers as h
        pull_request_id = self.action_params
        if self.is_deleted():
            repo_name = self.user_log.repository_name
        else:
            repo_name = self.user_log.repository.repo_name
        return link_to(
            _('Pull request #%s') % pull_request_id,
            h.route_path('pullrequest_show', repo_name=repo_name,
                         pull_request_id=pull_request_id))

    def get_archive_name(self):
        archive_name = self.action_params
        return archive_name

    def action_parser_icon(self):
        tmpl = """<i class="%s" alt="%s"></i>"""
        ico = self.action_map.get(self.action, ['', '', ''])[2]
        return literal(tmpl % (ico, self.action))

    def get_cs_links(self):
+       from rhodecode.lib import helpers as h
        if self.is_deleted():
            return self.action_params

        repo_name = self.user_log.repository.repo_name
        commit_ids = self.action_params.split(',')
        commits = self.get_commits(commit_ids)

        link_generator = (
            self.lnk(commit, repo_name)
            for commit in commits[:self.commits_limit])
        commit_links = [" " + ', '.join(link_generator)]
        _op1, _name1 = _get_op(commit_ids[0])
        _op2, _name2 = _get_op(commit_ids[-1])

        commit_id_range = '%s...%s' % (_name1, _name2)

        compare_view = (
            ' <div class="compare_view tooltip" title="%s">'
            '<a href="%s">%s</a> </div>' % (
                _('Show all combined commits %s->%s') % (
                    commit_ids[0][:12], commit_ids[-1][:12]
                ),
-               url('changeset_home', repo_name=repo_name,
-                   revision=commit_id_range), _('compare view')
+               h.route_path(
+                   'repo_commit', repo_name=repo_name,
+                   commit_id=commit_id_range), _('compare view')
            )
        )

        if len(commit_ids) > self.commits_limit:
            more_count = len(commit_ids) - self.commits_limit
            commit_links.append(
                _(' and %(num)s more commits') % {'num': more_count}
            )

        if len(commits) > 1:
            commit_links.append(compare_view)
        return ''.join(commit_links)

    def get_commits(self, commit_ids):
        commits = []
        if not filter(lambda v: v != '', commit_ids):
            return commits

        repo = None
        if self.parse_commits:
            repo = self.user_log.repository.scm_instance()

        for commit_id in commit_ids[:self.commits_top_limit]:
            _op, _name = _get_op(commit_id)

            # we want parsed commits, or new log store format is bad
            if self.parse_commits:
                try:
                    commit = repo.get_commit(commit_id=commit_id)
                    commits.append(commit)
                except CommitDoesNotExistError:
                    log.error(
                        'cannot find commit id %s in this repository',
                        commit_id)
                    commits.append(commit_id)
                    continue
            else:
                fake_commit = AttributeDict({
                    'short_id': commit_id[:12],
                    'raw_id': commit_id,
                    'message': '',
                    'op': _op,
                    'ref_name': _name
                })
                commits.append(fake_commit)

        return commits

    def lnk(self, commit_or_id, repo_name):
        from rhodecode.lib.helpers import tooltip
+       from rhodecode.lib import helpers as h

        if isinstance(commit_or_id, (BaseCommit, AttributeDict)):
            lazy_cs = True
            if (getattr(commit_or_id, 'op', None) and
                    getattr(commit_or_id, 'ref_name', None)):
                lazy_cs = False
                lbl = '?'
                if commit_or_id.op == 'delete_branch':
                    lbl = '%s' % _('Deleted branch: %s') % commit_or_id.ref_name
                    title = ''
                elif commit_or_id.op == 'tag':
                    lbl = '%s' % _('Created tag: %s') % commit_or_id.ref_name
                    title = ''
                _url = '#'

            else:
                lbl = '%s' % (commit_or_id.short_id[:8])
-               _url = url('changeset_home', repo_name=repo_name,
-                          revision=commit_or_id.raw_id)
+               _url = h.route_path('repo_commit', repo_name=repo_name,
+                                   commit_id=commit_or_id.raw_id)
                title = tooltip(commit_or_id.message)
        else:
            # commit cannot be found/striped/removed etc.
            lbl = ('%s' % commit_or_id)[:12]
            _url = '#'
            title = _('Commit not found')
        if self.parse_commits:
            return link_to(lbl, _url, title=title, class_='tooltip')
        return link_to(lbl, _url, raw_id=commit_or_id.raw_id, repo_name=repo_name,
                       class_='lazy-cs' if lazy_cs else '')

    def is_deleted(self):
        return self.user_log.repository is None


class AuditLogParser(object):
    def __init__(self, audit_log_entry):
        self.audit_log_entry = audit_log_entry

    def get_icon(self, action):
        return 'icon-rhodecode'

    def callbacks(self):
        action_str = self.audit_log_entry.action

        def callback():
            # returned callbacks we need to call to get
            action = action_str \
                .replace('[', '<span class="journal_highlight">')\
                .replace(']', '</span>')
            return literal(action)

        def icon():
            tmpl = """<i class="%s" alt="%s"></i>"""
            ico = self.get_icon(action_str)
            return literal(tmpl % (ico, action_str))

        action_params_func = _no_params_func

        return [
            callback, action_params_func, icon]


def _no_params_func():
    return ""


def _get_op(commit_id):
    _op = None
    _name = commit_id
    if len(commit_id.split('=>')) == 2:
        _op, _name = commit_id.split('=>')
    return _op, _name
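As context for the callbacks returned above: action_parser() hands back three callables (action text, params renderer, icon), which the journal and feed templates evaluate lazily. A minimal consumption sketch, assuming only what is defined in this file; the rendering function name itself is illustrative:

# Illustrative only: how a journal/feed renderer might consume the three
# callbacks returned by action_parser() for one user_log entry.
def render_journal_entry(user_log):
    action_cb, params_cb, icon_cb = action_parser(
        user_log, feed=False, parse_cs=False)
    # each element is a callable, so templates can evaluate them on demand
    return u'%s %s %s' % (icon_cb(), action_cb(), params_cb())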
@@ -1,2027 +1,2027 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2017 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

"""
authentication and permission libraries
"""

import os
import inspect
import collections
import fnmatch
import hashlib
import itertools
import logging
import random
import traceback
from functools import wraps

import ipaddress
from pyramid.httpexceptions import HTTPForbidden, HTTPFound, HTTPNotFound
from pylons.i18n.translation import _
# NOTE(marcink): this has to be removed only after pyramid migration,
# replace with _ = request.translate
from sqlalchemy.orm.exc import ObjectDeletedError
from sqlalchemy.orm import joinedload
from zope.cachedescriptors.property import Lazy as LazyProperty

import rhodecode
from rhodecode.model import meta
from rhodecode.model.meta import Session
from rhodecode.model.user import UserModel
from rhodecode.model.db import (
    User, Repository, Permission, UserToPerm, UserGroupToPerm, UserGroupMember,
    UserIpMap, UserApiKeys, RepoGroup)
from rhodecode.lib import caches
from rhodecode.lib.utils2 import safe_unicode, aslist, safe_str, md5
from rhodecode.lib.utils import (
    get_repo_slug, get_repo_group_slug, get_user_group_slug)
from rhodecode.lib.caching_query import FromCache


if rhodecode.is_unix:
    import bcrypt

log = logging.getLogger(__name__)

csrf_token_key = "csrf_token"


class PasswordGenerator(object):
    """
    This is a simple class for generating password from different sets of
    characters
    usage::

        passwd_gen = PasswordGenerator()
        #print 8-letter password containing only big and small letters
            of alphabet
        passwd_gen.gen_password(8, passwd_gen.ALPHABETS_BIG_SMALL)
    """
    ALPHABETS_NUM = r'''1234567890'''
    ALPHABETS_SMALL = r'''qwertyuiopasdfghjklzxcvbnm'''
    ALPHABETS_BIG = r'''QWERTYUIOPASDFGHJKLZXCVBNM'''
    ALPHABETS_SPECIAL = r'''`-=[]\;',./~!@#$%^&*()_+{}|:"<>?'''
    ALPHABETS_FULL = ALPHABETS_BIG + ALPHABETS_SMALL \
        + ALPHABETS_NUM + ALPHABETS_SPECIAL
    ALPHABETS_ALPHANUM = ALPHABETS_BIG + ALPHABETS_SMALL + ALPHABETS_NUM
    ALPHABETS_BIG_SMALL = ALPHABETS_BIG + ALPHABETS_SMALL
    ALPHABETS_ALPHANUM_BIG = ALPHABETS_BIG + ALPHABETS_NUM
    ALPHABETS_ALPHANUM_SMALL = ALPHABETS_SMALL + ALPHABETS_NUM

    def __init__(self, passwd=''):
        self.passwd = passwd

    def gen_password(self, length, type_=None):
        if type_ is None:
            type_ = self.ALPHABETS_FULL
        self.passwd = ''.join([random.choice(type_) for _ in xrange(length)])
        return self.passwd

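A short usage sketch for PasswordGenerator, mirroring the class docstring above; the lengths and alphabet choices shown are illustrative only (Python 2 syntax, as in the surrounding file):

# Illustrative usage of PasswordGenerator (see the class docstring above).
passwd_gen = PasswordGenerator()
# 8 characters drawn only from upper- and lower-case letters
print passwd_gen.gen_password(8, passwd_gen.ALPHABETS_BIG_SMALL)
# 20 characters from the default full alphabet (letters, digits, specials)
print passwd_gen.gen_password(20)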
class _RhodeCodeCryptoBase(object):
    ENC_PREF = None

    def hash_create(self, str_):
        """
        hash the string using

        :param str_: password to hash
        """
        raise NotImplementedError

    def hash_check_with_upgrade(self, password, hashed):
        """
        Returns tuple in which first element is boolean that states that
        given password matches it's hashed version, and the second is new hash
        of the password, in case this password should be migrated to new
        cipher.
        """
        checked_hash = self.hash_check(password, hashed)
        return checked_hash, None

    def hash_check(self, password, hashed):
        """
        Checks matching password with it's hashed value.

        :param password: password
        :param hashed: password in hashed form
        """
        raise NotImplementedError

    def _assert_bytes(self, value):
        """
        Passing in an `unicode` object can lead to hard to detect issues
        if passwords contain non-ascii characters. Doing a type check
        during runtime, so that such mistakes are detected early on.
        """
        if not isinstance(value, str):
            raise TypeError(
                "Bytestring required as input, got %r." % (value, ))


class _RhodeCodeCryptoBCrypt(_RhodeCodeCryptoBase):
    ENC_PREF = ('$2a$10', '$2b$10')

    def hash_create(self, str_):
        self._assert_bytes(str_)
        return bcrypt.hashpw(str_, bcrypt.gensalt(10))

    def hash_check_with_upgrade(self, password, hashed):
        """
        Returns tuple in which first element is boolean that states that
        given password matches it's hashed version, and the second is new hash
        of the password, in case this password should be migrated to new
        cipher.

        This implements special upgrade logic which works like that:
         - check if the given password == bcrypted hash, if yes then we
           properly used password and it was already in bcrypt. Proceed
           without any changes
         - if bcrypt hash check is not working try with sha256. If hash compare
           is ok, it means we using correct but old hashed password. indicate
           hash change and proceed
        """

        new_hash = None

        # regular pw check
        password_match_bcrypt = self.hash_check(password, hashed)

        # now we want to know if the password was maybe from sha256
        # basically calling _RhodeCodeCryptoSha256().hash_check()
        if not password_match_bcrypt:
            if _RhodeCodeCryptoSha256().hash_check(password, hashed):
                new_hash = self.hash_create(password)  # make new bcrypt hash
                password_match_bcrypt = True

        return password_match_bcrypt, new_hash

    def hash_check(self, password, hashed):
        """
        Checks matching password with it's hashed value.

        :param password: password
        :param hashed: password in hashed form
        """
        self._assert_bytes(password)
        try:
            return bcrypt.hashpw(password, hashed) == hashed
        except ValueError as e:
            # we're having a invalid salt here probably, we should not crash
            # just return with False as it would be a wrong password.
            log.debug('Failed to check password hash using bcrypt %s',
                      safe_str(e))

        return False


class _RhodeCodeCryptoSha256(_RhodeCodeCryptoBase):
    ENC_PREF = '_'

    def hash_create(self, str_):
        self._assert_bytes(str_)
        return hashlib.sha256(str_).hexdigest()

    def hash_check(self, password, hashed):
        """
        Checks matching password with it's hashed value.

        :param password: password
        :param hashed: password in hashed form
        """
        self._assert_bytes(password)
        return hashlib.sha256(password).hexdigest() == hashed


class _RhodeCodeCryptoMd5(_RhodeCodeCryptoBase):
    ENC_PREF = '_'

    def hash_create(self, str_):
        self._assert_bytes(str_)
        return hashlib.md5(str_).hexdigest()

    def hash_check(self, password, hashed):
        """
        Checks matching password with it's hashed value.

        :param password: password
        :param hashed: password in hashed form
        """
        self._assert_bytes(password)
        return hashlib.md5(password).hexdigest() == hashed


def crypto_backend():
    """
    Return the matching crypto backend.

    Selection is based on if we run tests or not, we pick md5 backend to run
    tests faster since BCRYPT is expensive to calculate
    """
    if rhodecode.is_test:
        RhodeCodeCrypto = _RhodeCodeCryptoMd5()
    else:
        RhodeCodeCrypto = _RhodeCodeCryptoBCrypt()

    return RhodeCodeCrypto


def get_crypt_password(password):
    """
    Create the hash of `password` with the active crypto backend.

    :param password: The cleartext password.
    :type password: unicode
    """
    password = safe_str(password)
    return crypto_backend().hash_create(password)


def check_password(password, hashed):
    """
    Check if the value in `password` matches the hash in `hashed`.

    :param password: The cleartext password.
    :type password: unicode

    :param hashed: The expected hashed version of the password.
    :type hashed: The hash has to be passed in in text representation.
    """
    password = safe_str(password)
    return crypto_backend().hash_check(password, hashed)


def generate_auth_token(data, salt=None):
    """
    Generates API KEY from given string
    """

    if salt is None:
        salt = os.urandom(16)
    return hashlib.sha1(safe_str(data) + salt).hexdigest()

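A hedged sketch of the password round trip built from the helpers above: hashing with the active backend, verifying, and the sha256-to-bcrypt upgrade path described in the hash_check_with_upgrade docstring. The `user` object and its persistence step are illustrative assumptions, not part of this file:

# Illustrative only: verify a password and transparently upgrade legacy hashes.
def verify_and_maybe_upgrade(user, plain_password):
    # get_crypt_password()/check_password() both delegate to crypto_backend()
    if check_password(plain_password, user.password):
        return True

    # Legacy sha256 hashes can be upgraded: the bcrypt backend re-checks the
    # password against sha256 and, on a match, returns a fresh bcrypt hash.
    valid, new_hash = _RhodeCodeCryptoBCrypt().hash_check_with_upgrade(
        safe_str(plain_password), user.password)
    if valid and new_hash:
        user.password = new_hash  # hypothetical persistence step
    return valid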
class CookieStoreWrapper(object):

    def __init__(self, cookie_store):
        self.cookie_store = cookie_store

    def __repr__(self):
        return 'CookieStore<%s>' % (self.cookie_store)

    def get(self, key, other=None):
        if isinstance(self.cookie_store, dict):
            return self.cookie_store.get(key, other)
        elif isinstance(self.cookie_store, AuthUser):
            return self.cookie_store.__dict__.get(key, other)


def _cached_perms_data(user_id, scope, user_is_admin,
                       user_inherit_default_permissions, explicit, algo):

    permissions = PermissionCalculator(
        user_id, scope, user_is_admin, user_inherit_default_permissions,
        explicit, algo)
    return permissions.calculate()


class PermOrigin(object):
    ADMIN = 'superadmin'

    REPO_USER = 'user:%s'
    REPO_USERGROUP = 'usergroup:%s'
    REPO_OWNER = 'repo.owner'
    REPO_DEFAULT = 'repo.default'
    REPO_PRIVATE = 'repo.private'

    REPOGROUP_USER = 'user:%s'
    REPOGROUP_USERGROUP = 'usergroup:%s'
    REPOGROUP_OWNER = 'group.owner'
    REPOGROUP_DEFAULT = 'group.default'

    USERGROUP_USER = 'user:%s'
    USERGROUP_USERGROUP = 'usergroup:%s'
    USERGROUP_OWNER = 'usergroup.owner'
    USERGROUP_DEFAULT = 'usergroup.default'


class PermOriginDict(dict):
    """
    A special dict used for tracking permissions along with their origins.

    `__setitem__` has been overridden to expect a tuple(perm, origin)
    `__getitem__` will return only the perm
    `.perm_origin_stack` will return the stack of (perm, origin) set per key

    >>> perms = PermOriginDict()
    >>> perms['resource'] = 'read', 'default'
    >>> perms['resource']
    'read'
    >>> perms['resource'] = 'write', 'admin'
    >>> perms['resource']
    'write'
    >>> perms.perm_origin_stack
    {'resource': [('read', 'default'), ('write', 'admin')]}
    """

    def __init__(self, *args, **kw):
        dict.__init__(self, *args, **kw)
        self.perm_origin_stack = {}

    def __setitem__(self, key, (perm, origin)):
        self.perm_origin_stack.setdefault(key, []).append((perm, origin))
        dict.__setitem__(self, key, perm)

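The doctest above already shows the basic behaviour; the sketch below simply connects it to the pattern the calculator that follows relies on, recording each permission together with its origin while keeping plain-dict reads cheap. The repository name and permission values used are illustrative:

# Illustrative: how a calculator records a permission plus its origin.
perms = PermOriginDict()
perms['group/repo'] = 'repository.read', PermOrigin.REPO_DEFAULT
perms['group/repo'] = 'repository.admin', PermOrigin.REPO_OWNER
assert perms['group/repo'] == 'repository.admin'
# the full history of (perm, origin) assignments stays available
assert perms.perm_origin_stack['group/repo'][0][1] == PermOrigin.REPO_DEFAULT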
354 class PermissionCalculator(object):
354 class PermissionCalculator(object):
355
355
356 def __init__(
356 def __init__(
357 self, user_id, scope, user_is_admin,
357 self, user_id, scope, user_is_admin,
358 user_inherit_default_permissions, explicit, algo):
358 user_inherit_default_permissions, explicit, algo):
359 self.user_id = user_id
359 self.user_id = user_id
360 self.user_is_admin = user_is_admin
360 self.user_is_admin = user_is_admin
361 self.inherit_default_permissions = user_inherit_default_permissions
361 self.inherit_default_permissions = user_inherit_default_permissions
362 self.explicit = explicit
362 self.explicit = explicit
363 self.algo = algo
363 self.algo = algo
364
364
365 scope = scope or {}
365 scope = scope or {}
366 self.scope_repo_id = scope.get('repo_id')
366 self.scope_repo_id = scope.get('repo_id')
367 self.scope_repo_group_id = scope.get('repo_group_id')
367 self.scope_repo_group_id = scope.get('repo_group_id')
368 self.scope_user_group_id = scope.get('user_group_id')
368 self.scope_user_group_id = scope.get('user_group_id')
369
369
370 self.default_user_id = User.get_default_user(cache=True).user_id
370 self.default_user_id = User.get_default_user(cache=True).user_id
371
371
372 self.permissions_repositories = PermOriginDict()
372 self.permissions_repositories = PermOriginDict()
373 self.permissions_repository_groups = PermOriginDict()
373 self.permissions_repository_groups = PermOriginDict()
374 self.permissions_user_groups = PermOriginDict()
374 self.permissions_user_groups = PermOriginDict()
375 self.permissions_global = set()
375 self.permissions_global = set()
376
376
377 self.default_repo_perms = Permission.get_default_repo_perms(
377 self.default_repo_perms = Permission.get_default_repo_perms(
378 self.default_user_id, self.scope_repo_id)
378 self.default_user_id, self.scope_repo_id)
379 self.default_repo_groups_perms = Permission.get_default_group_perms(
379 self.default_repo_groups_perms = Permission.get_default_group_perms(
380 self.default_user_id, self.scope_repo_group_id)
380 self.default_user_id, self.scope_repo_group_id)
381 self.default_user_group_perms = \
381 self.default_user_group_perms = \
382 Permission.get_default_user_group_perms(
382 Permission.get_default_user_group_perms(
383 self.default_user_id, self.scope_user_group_id)
383 self.default_user_id, self.scope_user_group_id)
384
384
385 def calculate(self):
385 def calculate(self):
386 if self.user_is_admin:
386 if self.user_is_admin:
387 return self._admin_permissions()
387 return self._admin_permissions()
388
388
389 self._calculate_global_default_permissions()
389 self._calculate_global_default_permissions()
390 self._calculate_global_permissions()
390 self._calculate_global_permissions()
391 self._calculate_default_permissions()
391 self._calculate_default_permissions()
392 self._calculate_repository_permissions()
392 self._calculate_repository_permissions()
393 self._calculate_repository_group_permissions()
393 self._calculate_repository_group_permissions()
394 self._calculate_user_group_permissions()
394 self._calculate_user_group_permissions()
395 return self._permission_structure()
395 return self._permission_structure()
396
396
397 def _admin_permissions(self):
397 def _admin_permissions(self):
398 """
398 """
399 admin user have all default rights for repositories
399 admin user have all default rights for repositories
400 and groups set to admin
400 and groups set to admin
401 """
401 """
402 self.permissions_global.add('hg.admin')
402 self.permissions_global.add('hg.admin')
403 self.permissions_global.add('hg.create.write_on_repogroup.true')
403 self.permissions_global.add('hg.create.write_on_repogroup.true')
404
404
405 # repositories
405 # repositories
406 for perm in self.default_repo_perms:
406 for perm in self.default_repo_perms:
407 r_k = perm.UserRepoToPerm.repository.repo_name
407 r_k = perm.UserRepoToPerm.repository.repo_name
408 p = 'repository.admin'
408 p = 'repository.admin'
409 self.permissions_repositories[r_k] = p, PermOrigin.ADMIN
409 self.permissions_repositories[r_k] = p, PermOrigin.ADMIN
410
410
411 # repository groups
411 # repository groups
412 for perm in self.default_repo_groups_perms:
412 for perm in self.default_repo_groups_perms:
413 rg_k = perm.UserRepoGroupToPerm.group.group_name
413 rg_k = perm.UserRepoGroupToPerm.group.group_name
414 p = 'group.admin'
414 p = 'group.admin'
415 self.permissions_repository_groups[rg_k] = p, PermOrigin.ADMIN
415 self.permissions_repository_groups[rg_k] = p, PermOrigin.ADMIN
416
416
417 # user groups
417 # user groups
418 for perm in self.default_user_group_perms:
418 for perm in self.default_user_group_perms:
419 u_k = perm.UserUserGroupToPerm.user_group.users_group_name
419 u_k = perm.UserUserGroupToPerm.user_group.users_group_name
420 p = 'usergroup.admin'
420 p = 'usergroup.admin'
421 self.permissions_user_groups[u_k] = p, PermOrigin.ADMIN
421 self.permissions_user_groups[u_k] = p, PermOrigin.ADMIN
422
422
423 return self._permission_structure()
423 return self._permission_structure()
424
424
425 def _calculate_global_default_permissions(self):
425 def _calculate_global_default_permissions(self):
426 """
426 """
427 global permissions taken from the default user
427 global permissions taken from the default user
428 """
428 """
429 default_global_perms = UserToPerm.query()\
429 default_global_perms = UserToPerm.query()\
430 .filter(UserToPerm.user_id == self.default_user_id)\
430 .filter(UserToPerm.user_id == self.default_user_id)\
431 .options(joinedload(UserToPerm.permission))
431 .options(joinedload(UserToPerm.permission))
432
432
433 for perm in default_global_perms:
433 for perm in default_global_perms:
434 self.permissions_global.add(perm.permission.permission_name)
434 self.permissions_global.add(perm.permission.permission_name)
435
435
436 def _calculate_global_permissions(self):
436 def _calculate_global_permissions(self):
437 """
437 """
438 Set global system permissions with user permissions or permissions
438 Set global system permissions with user permissions or permissions
439 taken from the user groups of the current user.
439 taken from the user groups of the current user.
440
440
441 The permissions include repo creating, repo group creating, forking
441 The permissions include repo creating, repo group creating, forking
442 etc.
442 etc.
443 """
443 """
444
444
445 # now we read the defined permissions and overwrite what we have set
445 # now we read the defined permissions and overwrite what we have set
446 # before those can be configured from groups or users explicitly.
446 # before those can be configured from groups or users explicitly.
447
447
448 # TODO: johbo: This seems to be out of sync, find out the reason
448 # TODO: johbo: This seems to be out of sync, find out the reason
449 # for the comment below and update it.
449 # for the comment below and update it.
450
450
451 # In case we want to extend this list we should be always in sync with
451 # In case we want to extend this list we should be always in sync with
452 # User.DEFAULT_USER_PERMISSIONS definitions
452 # User.DEFAULT_USER_PERMISSIONS definitions
453 _configurable = frozenset([
453 _configurable = frozenset([
454 'hg.fork.none', 'hg.fork.repository',
454 'hg.fork.none', 'hg.fork.repository',
455 'hg.create.none', 'hg.create.repository',
455 'hg.create.none', 'hg.create.repository',
456 'hg.usergroup.create.false', 'hg.usergroup.create.true',
456 'hg.usergroup.create.false', 'hg.usergroup.create.true',
457 'hg.repogroup.create.false', 'hg.repogroup.create.true',
457 'hg.repogroup.create.false', 'hg.repogroup.create.true',
458 'hg.create.write_on_repogroup.false',
458 'hg.create.write_on_repogroup.false',
459 'hg.create.write_on_repogroup.true',
459 'hg.create.write_on_repogroup.true',
460 'hg.inherit_default_perms.false', 'hg.inherit_default_perms.true'
460 'hg.inherit_default_perms.false', 'hg.inherit_default_perms.true'
461 ])
461 ])
462
462
463 # USER GROUPS comes first user group global permissions
463 # USER GROUPS comes first user group global permissions
464 user_perms_from_users_groups = Session().query(UserGroupToPerm)\
464 user_perms_from_users_groups = Session().query(UserGroupToPerm)\
465 .options(joinedload(UserGroupToPerm.permission))\
465 .options(joinedload(UserGroupToPerm.permission))\
466 .join((UserGroupMember, UserGroupToPerm.users_group_id ==
466 .join((UserGroupMember, UserGroupToPerm.users_group_id ==
467 UserGroupMember.users_group_id))\
467 UserGroupMember.users_group_id))\
468 .filter(UserGroupMember.user_id == self.user_id)\
468 .filter(UserGroupMember.user_id == self.user_id)\
469 .order_by(UserGroupToPerm.users_group_id)\
469 .order_by(UserGroupToPerm.users_group_id)\
470 .all()
470 .all()
471
471
472 # need to group here by groups since user can be in more than
472 # need to group here by groups since user can be in more than
473 # one group, so we get all groups
473 # one group, so we get all groups
474 _explicit_grouped_perms = [
474 _explicit_grouped_perms = [
475 [x, list(y)] for x, y in
475 [x, list(y)] for x, y in
476 itertools.groupby(user_perms_from_users_groups,
476 itertools.groupby(user_perms_from_users_groups,
477 lambda _x: _x.users_group)]
477 lambda _x: _x.users_group)]
478
478
479 for gr, perms in _explicit_grouped_perms:
479 for gr, perms in _explicit_grouped_perms:
480 # since user can be in multiple groups iterate over them and
480 # since user can be in multiple groups iterate over them and
481 # select the lowest permissions first (more explicit)
481 # select the lowest permissions first (more explicit)
482 # TODO: marcink: do this^^
482 # TODO: marcink: do this^^
483
483
484 # group doesn't inherit default permissions so we actually set them
484 # group doesn't inherit default permissions so we actually set them
485 if not gr.inherit_default_permissions:
485 if not gr.inherit_default_permissions:
486 # NEED TO IGNORE all previously set configurable permissions
486 # NEED TO IGNORE all previously set configurable permissions
487 # and replace them with explicitly set from this user
487 # and replace them with explicitly set from this user
488 # group permissions
488 # group permissions
489 self.permissions_global = self.permissions_global.difference(
489 self.permissions_global = self.permissions_global.difference(
490 _configurable)
490 _configurable)
491 for perm in perms:
491 for perm in perms:
492 self.permissions_global.add(perm.permission.permission_name)
492 self.permissions_global.add(perm.permission.permission_name)
493
493
494 # user explicit global permissions
494 # user explicit global permissions
495 user_perms = Session().query(UserToPerm)\
495 user_perms = Session().query(UserToPerm)\
496 .options(joinedload(UserToPerm.permission))\
496 .options(joinedload(UserToPerm.permission))\
497 .filter(UserToPerm.user_id == self.user_id).all()
497 .filter(UserToPerm.user_id == self.user_id).all()
498
498
499 if not self.inherit_default_permissions:
499 if not self.inherit_default_permissions:
500 # NEED TO IGNORE all configurable permissions and
500 # NEED TO IGNORE all configurable permissions and
501 # replace them with explicitly set from this user permissions
501 # replace them with explicitly set from this user permissions
502 self.permissions_global = self.permissions_global.difference(
502 self.permissions_global = self.permissions_global.difference(
503 _configurable)
503 _configurable)
504 for perm in user_perms:
504 for perm in user_perms:
505 self.permissions_global.add(perm.permission.permission_name)
505 self.permissions_global.add(perm.permission.permission_name)
506
506
507 def _calculate_default_permissions(self):
507 def _calculate_default_permissions(self):
508 """
508 """
509 Set default user permissions for repositories, repository groups
509 Set default user permissions for repositories, repository groups
510 taken from the default user.
510 taken from the default user.
511
511
512 Calculate inheritance of object permissions based on what we have now
512 Calculate inheritance of object permissions based on what we have now
513 in GLOBAL permissions. We check if .false is in GLOBAL since this is
513 in GLOBAL permissions. We check if .false is in GLOBAL since this is
514 explicitly set. Inherit is the opposite of .false being there.
514 explicitly set. Inherit is the opposite of .false being there.
515
515
516 .. note::
516 .. note::
517
517
518 the syntax is little bit odd but what we need to check here is
518 the syntax is little bit odd but what we need to check here is
519 the opposite of .false permission being in the list so even for
519 the opposite of .false permission being in the list so even for
520 inconsistent state when both .true/.false is there
520 inconsistent state when both .true/.false is there
521 .false is more important
521 .false is more important
522
522
523 """
523 """
524 user_inherit_object_permissions = not ('hg.inherit_default_perms.false'
524 user_inherit_object_permissions = not ('hg.inherit_default_perms.false'
525 in self.permissions_global)
525 in self.permissions_global)
526
526
527 # defaults for repositories, taken from `default` user permissions
527 # defaults for repositories, taken from `default` user permissions
528 # on given repo
528 # on given repo
529 for perm in self.default_repo_perms:
529 for perm in self.default_repo_perms:
530 r_k = perm.UserRepoToPerm.repository.repo_name
530 r_k = perm.UserRepoToPerm.repository.repo_name
531 o = PermOrigin.REPO_DEFAULT
531 o = PermOrigin.REPO_DEFAULT
532 if perm.Repository.private and not (
532 if perm.Repository.private and not (
533 perm.Repository.user_id == self.user_id):
533 perm.Repository.user_id == self.user_id):
534 # disable defaults for private repos,
534 # disable defaults for private repos,
535 p = 'repository.none'
535 p = 'repository.none'
536 o = PermOrigin.REPO_PRIVATE
536 o = PermOrigin.REPO_PRIVATE
537 elif perm.Repository.user_id == self.user_id:
537 elif perm.Repository.user_id == self.user_id:
538 # set admin if owner
538 # set admin if owner
539 p = 'repository.admin'
539 p = 'repository.admin'
540 o = PermOrigin.REPO_OWNER
540 o = PermOrigin.REPO_OWNER
541 else:
541 else:
542 p = perm.Permission.permission_name
542 p = perm.Permission.permission_name
543 # if we decide this user isn't inheriting permissions from
544 # the default user, we set them to .none so only explicit
545 # permissions work
546 if not user_inherit_object_permissions:
546 if not user_inherit_object_permissions:
547 p = 'repository.none'
547 p = 'repository.none'
548 self.permissions_repositories[r_k] = p, o
548 self.permissions_repositories[r_k] = p, o
549
549
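# Illustrative decision table (repo names omitted) for the loop above,
# summarising how a default repository permission is resolved; this is a
# reading aid, not code from the changeset:
#
#   private repo and not the owner      -> 'repository.none'   (REPO_PRIVATE)
#   owner of the repo                   -> 'repository.admin'  (REPO_OWNER)
#   otherwise, inheritance disabled     -> 'repository.none'
#   otherwise                           -> the `default` user's permission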
550 # defaults for repository groups taken from `default` user permission
550 # defaults for repository groups taken from `default` user permission
551 # on given group
551 # on given group
552 for perm in self.default_repo_groups_perms:
552 for perm in self.default_repo_groups_perms:
553 rg_k = perm.UserRepoGroupToPerm.group.group_name
553 rg_k = perm.UserRepoGroupToPerm.group.group_name
554 o = PermOrigin.REPOGROUP_DEFAULT
554 o = PermOrigin.REPOGROUP_DEFAULT
555 if perm.RepoGroup.user_id == self.user_id:
555 if perm.RepoGroup.user_id == self.user_id:
556 # set admin if owner
556 # set admin if owner
557 p = 'group.admin'
557 p = 'group.admin'
558 o = PermOrigin.REPOGROUP_OWNER
558 o = PermOrigin.REPOGROUP_OWNER
559 else:
559 else:
560 p = perm.Permission.permission_name
560 p = perm.Permission.permission_name
561
561
562 # if we decide this user isn't inheriting permissions from the default
563 # user, we set them to .none so only explicit permissions work
564 if not user_inherit_object_permissions:
564 if not user_inherit_object_permissions:
565 p = 'group.none'
565 p = 'group.none'
566 self.permissions_repository_groups[rg_k] = p, o
566 self.permissions_repository_groups[rg_k] = p, o
567
567
568 # defaults for user groups taken from `default` user permission
568 # defaults for user groups taken from `default` user permission
569 # on given user group
569 # on given user group
570 for perm in self.default_user_group_perms:
570 for perm in self.default_user_group_perms:
571 u_k = perm.UserUserGroupToPerm.user_group.users_group_name
571 u_k = perm.UserUserGroupToPerm.user_group.users_group_name
572 o = PermOrigin.USERGROUP_DEFAULT
572 o = PermOrigin.USERGROUP_DEFAULT
573 if perm.UserGroup.user_id == self.user_id:
573 if perm.UserGroup.user_id == self.user_id:
574 # set admin if owner
574 # set admin if owner
575 p = 'usergroup.admin'
575 p = 'usergroup.admin'
576 o = PermOrigin.USERGROUP_OWNER
576 o = PermOrigin.USERGROUP_OWNER
577 else:
577 else:
578 p = perm.Permission.permission_name
578 p = perm.Permission.permission_name
579
579
580 # if we decide this user isn't inheriting permissions from the default
581 # user, we set them to .none so only explicit permissions work
582 if not user_inherit_object_permissions:
582 if not user_inherit_object_permissions:
583 p = 'usergroup.none'
583 p = 'usergroup.none'
584 self.permissions_user_groups[u_k] = p, o
584 self.permissions_user_groups[u_k] = p, o
585
585
586 def _calculate_repository_permissions(self):
587 """
588 Repository permissions for the current user.
589
590 Check if the user is part of user groups for this repository and
591 fill in the permission from them. `_choose_permission` decides which
592 permission should be selected, based on the selected method.
593 """
594
594
595 # user group for repositories permissions
595 # user group for repositories permissions
596 user_repo_perms_from_user_group = Permission\
596 user_repo_perms_from_user_group = Permission\
597 .get_default_repo_perms_from_user_group(
597 .get_default_repo_perms_from_user_group(
598 self.user_id, self.scope_repo_id)
598 self.user_id, self.scope_repo_id)
599
599
600 multiple_counter = collections.defaultdict(int)
600 multiple_counter = collections.defaultdict(int)
601 for perm in user_repo_perms_from_user_group:
601 for perm in user_repo_perms_from_user_group:
602 r_k = perm.UserGroupRepoToPerm.repository.repo_name
602 r_k = perm.UserGroupRepoToPerm.repository.repo_name
603 ug_k = perm.UserGroupRepoToPerm.users_group.users_group_name
603 ug_k = perm.UserGroupRepoToPerm.users_group.users_group_name
604 multiple_counter[r_k] += 1
604 multiple_counter[r_k] += 1
605 p = perm.Permission.permission_name
605 p = perm.Permission.permission_name
606 o = PermOrigin.REPO_USERGROUP % ug_k
606 o = PermOrigin.REPO_USERGROUP % ug_k
607
607
608 if perm.Repository.user_id == self.user_id:
608 if perm.Repository.user_id == self.user_id:
609 # set admin if owner
609 # set admin if owner
610 p = 'repository.admin'
610 p = 'repository.admin'
611 o = PermOrigin.REPO_OWNER
611 o = PermOrigin.REPO_OWNER
612 else:
612 else:
613 if multiple_counter[r_k] > 1:
613 if multiple_counter[r_k] > 1:
614 cur_perm = self.permissions_repositories[r_k]
614 cur_perm = self.permissions_repositories[r_k]
615 p = self._choose_permission(p, cur_perm)
615 p = self._choose_permission(p, cur_perm)
616 self.permissions_repositories[r_k] = p, o
616 self.permissions_repositories[r_k] = p, o
617
617
618 # user explicit permissions for repositories, overrides any specified
618 # user explicit permissions for repositories, overrides any specified
619 # by the group permission
619 # by the group permission
620 user_repo_perms = Permission.get_default_repo_perms(
620 user_repo_perms = Permission.get_default_repo_perms(
621 self.user_id, self.scope_repo_id)
621 self.user_id, self.scope_repo_id)
622 for perm in user_repo_perms:
622 for perm in user_repo_perms:
623 r_k = perm.UserRepoToPerm.repository.repo_name
623 r_k = perm.UserRepoToPerm.repository.repo_name
624 o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username
624 o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username
625 # set admin if owner
625 # set admin if owner
626 if perm.Repository.user_id == self.user_id:
626 if perm.Repository.user_id == self.user_id:
627 p = 'repository.admin'
627 p = 'repository.admin'
628 o = PermOrigin.REPO_OWNER
628 o = PermOrigin.REPO_OWNER
629 else:
629 else:
630 p = perm.Permission.permission_name
630 p = perm.Permission.permission_name
631 if not self.explicit:
631 if not self.explicit:
632 cur_perm = self.permissions_repositories.get(
632 cur_perm = self.permissions_repositories.get(
633 r_k, 'repository.none')
633 r_k, 'repository.none')
634 p = self._choose_permission(p, cur_perm)
634 p = self._choose_permission(p, cur_perm)
635 self.permissions_repositories[r_k] = p, o
635 self.permissions_repositories[r_k] = p, o
636
636
637 def _calculate_repository_group_permissions(self):
638 """
639 Repository group permissions for the current user.
640
641 Check if the user is part of user groups for repository groups and
642 fill in the permissions from them. `_choose_permission` decides which
643 permission should be selected, based on the selected method.
644 """
645 # user group for repo groups permissions
645 # user group for repo groups permissions
646 user_repo_group_perms_from_user_group = Permission\
646 user_repo_group_perms_from_user_group = Permission\
647 .get_default_group_perms_from_user_group(
647 .get_default_group_perms_from_user_group(
648 self.user_id, self.scope_repo_group_id)
648 self.user_id, self.scope_repo_group_id)
649
649
650 multiple_counter = collections.defaultdict(int)
650 multiple_counter = collections.defaultdict(int)
651 for perm in user_repo_group_perms_from_user_group:
651 for perm in user_repo_group_perms_from_user_group:
652 g_k = perm.UserGroupRepoGroupToPerm.group.group_name
652 g_k = perm.UserGroupRepoGroupToPerm.group.group_name
653 ug_k = perm.UserGroupRepoGroupToPerm.users_group.users_group_name
653 ug_k = perm.UserGroupRepoGroupToPerm.users_group.users_group_name
654 o = PermOrigin.REPOGROUP_USERGROUP % ug_k
654 o = PermOrigin.REPOGROUP_USERGROUP % ug_k
655 multiple_counter[g_k] += 1
655 multiple_counter[g_k] += 1
656 p = perm.Permission.permission_name
656 p = perm.Permission.permission_name
657 if perm.RepoGroup.user_id == self.user_id:
657 if perm.RepoGroup.user_id == self.user_id:
658 # set admin if owner, even for member of other user group
658 # set admin if owner, even for member of other user group
659 p = 'group.admin'
659 p = 'group.admin'
660 o = PermOrigin.REPOGROUP_OWNER
660 o = PermOrigin.REPOGROUP_OWNER
661 else:
661 else:
662 if multiple_counter[g_k] > 1:
662 if multiple_counter[g_k] > 1:
663 cur_perm = self.permissions_repository_groups[g_k]
663 cur_perm = self.permissions_repository_groups[g_k]
664 p = self._choose_permission(p, cur_perm)
664 p = self._choose_permission(p, cur_perm)
665 self.permissions_repository_groups[g_k] = p, o
665 self.permissions_repository_groups[g_k] = p, o
666
666
667 # user explicit permissions for repository groups
667 # user explicit permissions for repository groups
668 user_repo_groups_perms = Permission.get_default_group_perms(
668 user_repo_groups_perms = Permission.get_default_group_perms(
669 self.user_id, self.scope_repo_group_id)
669 self.user_id, self.scope_repo_group_id)
670 for perm in user_repo_groups_perms:
670 for perm in user_repo_groups_perms:
671 rg_k = perm.UserRepoGroupToPerm.group.group_name
671 rg_k = perm.UserRepoGroupToPerm.group.group_name
672 u_k = perm.UserRepoGroupToPerm.user.username
672 u_k = perm.UserRepoGroupToPerm.user.username
673 o = PermOrigin.REPOGROUP_USER % u_k
673 o = PermOrigin.REPOGROUP_USER % u_k
674
674
675 if perm.RepoGroup.user_id == self.user_id:
675 if perm.RepoGroup.user_id == self.user_id:
676 # set admin if owner
676 # set admin if owner
677 p = 'group.admin'
677 p = 'group.admin'
678 o = PermOrigin.REPOGROUP_OWNER
678 o = PermOrigin.REPOGROUP_OWNER
679 else:
679 else:
680 p = perm.Permission.permission_name
680 p = perm.Permission.permission_name
681 if not self.explicit:
681 if not self.explicit:
682 cur_perm = self.permissions_repository_groups.get(
682 cur_perm = self.permissions_repository_groups.get(
683 rg_k, 'group.none')
683 rg_k, 'group.none')
684 p = self._choose_permission(p, cur_perm)
684 p = self._choose_permission(p, cur_perm)
685 self.permissions_repository_groups[rg_k] = p, o
685 self.permissions_repository_groups[rg_k] = p, o
686
686
687 def _calculate_user_group_permissions(self):
687 def _calculate_user_group_permissions(self):
688 """
688 """
689 User group permissions for the current user.
689 User group permissions for the current user.
690 """
690 """
691 # user group for user group permissions
691 # user group for user group permissions
692 user_group_from_user_group = Permission\
692 user_group_from_user_group = Permission\
693 .get_default_user_group_perms_from_user_group(
693 .get_default_user_group_perms_from_user_group(
694 self.user_id, self.scope_user_group_id)
694 self.user_id, self.scope_user_group_id)
695
695
696 multiple_counter = collections.defaultdict(int)
696 multiple_counter = collections.defaultdict(int)
697 for perm in user_group_from_user_group:
697 for perm in user_group_from_user_group:
698 g_k = perm.UserGroupUserGroupToPerm\
698 g_k = perm.UserGroupUserGroupToPerm\
699 .target_user_group.users_group_name
699 .target_user_group.users_group_name
700 u_k = perm.UserGroupUserGroupToPerm\
700 u_k = perm.UserGroupUserGroupToPerm\
701 .user_group.users_group_name
701 .user_group.users_group_name
702 o = PermOrigin.USERGROUP_USERGROUP % u_k
702 o = PermOrigin.USERGROUP_USERGROUP % u_k
703 multiple_counter[g_k] += 1
703 multiple_counter[g_k] += 1
704 p = perm.Permission.permission_name
704 p = perm.Permission.permission_name
705
705
706 if perm.UserGroup.user_id == self.user_id:
706 if perm.UserGroup.user_id == self.user_id:
707 # set admin if owner, even for member of other user group
707 # set admin if owner, even for member of other user group
708 p = 'usergroup.admin'
708 p = 'usergroup.admin'
709 o = PermOrigin.USERGROUP_OWNER
709 o = PermOrigin.USERGROUP_OWNER
710 else:
710 else:
711 if multiple_counter[g_k] > 1:
711 if multiple_counter[g_k] > 1:
712 cur_perm = self.permissions_user_groups[g_k]
712 cur_perm = self.permissions_user_groups[g_k]
713 p = self._choose_permission(p, cur_perm)
713 p = self._choose_permission(p, cur_perm)
714 self.permissions_user_groups[g_k] = p, o
714 self.permissions_user_groups[g_k] = p, o
715
715
716 # user explicit permission for user groups
716 # user explicit permission for user groups
717 user_user_groups_perms = Permission.get_default_user_group_perms(
717 user_user_groups_perms = Permission.get_default_user_group_perms(
718 self.user_id, self.scope_user_group_id)
718 self.user_id, self.scope_user_group_id)
719 for perm in user_user_groups_perms:
719 for perm in user_user_groups_perms:
720 ug_k = perm.UserUserGroupToPerm.user_group.users_group_name
720 ug_k = perm.UserUserGroupToPerm.user_group.users_group_name
721 u_k = perm.UserUserGroupToPerm.user.username
721 u_k = perm.UserUserGroupToPerm.user.username
722 o = PermOrigin.USERGROUP_USER % u_k
722 o = PermOrigin.USERGROUP_USER % u_k
723
723
724 if perm.UserGroup.user_id == self.user_id:
724 if perm.UserGroup.user_id == self.user_id:
725 # set admin if owner
725 # set admin if owner
726 p = 'usergroup.admin'
726 p = 'usergroup.admin'
727 o = PermOrigin.USERGROUP_OWNER
727 o = PermOrigin.USERGROUP_OWNER
728 else:
728 else:
729 p = perm.Permission.permission_name
729 p = perm.Permission.permission_name
730 if not self.explicit:
730 if not self.explicit:
731 cur_perm = self.permissions_user_groups.get(
731 cur_perm = self.permissions_user_groups.get(
732 ug_k, 'usergroup.none')
732 ug_k, 'usergroup.none')
733 p = self._choose_permission(p, cur_perm)
733 p = self._choose_permission(p, cur_perm)
734 self.permissions_user_groups[ug_k] = p, o
734 self.permissions_user_groups[ug_k] = p, o
735
735
736 def _choose_permission(self, new_perm, cur_perm):
736 def _choose_permission(self, new_perm, cur_perm):
737 new_perm_val = Permission.PERM_WEIGHTS[new_perm]
737 new_perm_val = Permission.PERM_WEIGHTS[new_perm]
738 cur_perm_val = Permission.PERM_WEIGHTS[cur_perm]
738 cur_perm_val = Permission.PERM_WEIGHTS[cur_perm]
739 if self.algo == 'higherwin':
739 if self.algo == 'higherwin':
740 if new_perm_val > cur_perm_val:
740 if new_perm_val > cur_perm_val:
741 return new_perm
741 return new_perm
742 return cur_perm
742 return cur_perm
743 elif self.algo == 'lowerwin':
743 elif self.algo == 'lowerwin':
744 if new_perm_val < cur_perm_val:
744 if new_perm_val < cur_perm_val:
745 return new_perm
745 return new_perm
746 return cur_perm
746 return cur_perm
747
747
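# Sketch of how the two merge strategies above resolve a conflict between a
# user permission and a user-group permission; the weight values are assumed
# for illustration (the real mapping lives in Permission.PERM_WEIGHTS):
#
#   weights = {'repository.none': 0, 'repository.read': 1,
#              'repository.write': 3, 'repository.admin': 4}
#   # algo='higherwin': read vs write -> write (higher weight wins)
#   # algo='lowerwin':  read vs write -> read  (lower weight wins)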
748 def _permission_structure(self):
748 def _permission_structure(self):
749 return {
749 return {
750 'global': self.permissions_global,
750 'global': self.permissions_global,
751 'repositories': self.permissions_repositories,
751 'repositories': self.permissions_repositories,
752 'repositories_groups': self.permissions_repository_groups,
752 'repositories_groups': self.permissions_repository_groups,
753 'user_groups': self.permissions_user_groups,
753 'user_groups': self.permissions_user_groups,
754 }
754 }
755
755
756
756
757 def allowed_auth_token_access(view_name, whitelist=None, auth_token=None):
758 """
759 Check if the given view_name is in the whitelist of auth token access
760 """
761 if not whitelist:
762 from rhodecode import CONFIG
763 whitelist = aslist(
764 CONFIG.get('api_access_controllers_whitelist'), sep=',')
765 log.debug(
766 'Allowed controllers for AUTH TOKEN access: %s' % (whitelist,))
767
768 auth_token_access_valid = False
769 for entry in whitelist:
770 if fnmatch.fnmatch(view_name, entry):
771 auth_token_access_valid = True
772 break
773
774 if auth_token_access_valid:
775 log.debug('view: `%s` matches entry in whitelist: %s'
776 % (view_name, whitelist))
777 else:
778 msg = ('view: `%s` does *NOT* match any entry in whitelist: %s'
779 % (view_name, whitelist))
780 if auth_token:
781 # if we use an auth token key and don't have access, it's a warning
782 log.warning(msg)
783 else:
784 log.debug(msg)
785
786 return auth_token_access_valid
787
787
788
788
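# Illustrative sketch (whitelist entries are assumed, typically configured
# via `api_access_controllers_whitelist` in the .ini file) of the
# fnmatch-based check performed by allowed_auth_token_access() above:
#
#   >>> import fnmatch
#   >>> whitelist = ['ChangesetController:changeset_raw', 'RepoCommitsView:*']
#   >>> any(fnmatch.fnmatch('RepoCommitsView:repo_commit_raw', e)
#   ...     for e in whitelist)
#   True
#   >>> any(fnmatch.fnmatch('RepoFilesView:repo_files', e) for e in whitelist)
#   False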
789 class AuthUser(object):
790 """
791 A simple object that handles all attributes of a user in RhodeCode.
792
793 It does a lookup based on API key, given user, or user present in session.
794 Then it fills in all required information for such a user. It also checks if
795 anonymous access is enabled and, if so, returns the default user as logged in.
796 """
797 GLOBAL_PERMS = [x[0] for x in Permission.PERMS]
797 GLOBAL_PERMS = [x[0] for x in Permission.PERMS]
798
798
799 def __init__(self, user_id=None, api_key=None, username=None, ip_addr=None):
799 def __init__(self, user_id=None, api_key=None, username=None, ip_addr=None):
800
800
801 self.user_id = user_id
801 self.user_id = user_id
802 self._api_key = api_key
802 self._api_key = api_key
803
803
804 self.api_key = None
804 self.api_key = None
805 self.feed_token = ''
805 self.feed_token = ''
806 self.username = username
806 self.username = username
807 self.ip_addr = ip_addr
807 self.ip_addr = ip_addr
808 self.name = ''
808 self.name = ''
809 self.lastname = ''
809 self.lastname = ''
810 self.first_name = ''
810 self.first_name = ''
811 self.last_name = ''
811 self.last_name = ''
812 self.email = ''
812 self.email = ''
813 self.is_authenticated = False
813 self.is_authenticated = False
814 self.admin = False
814 self.admin = False
815 self.inherit_default_permissions = False
815 self.inherit_default_permissions = False
816 self.password = ''
816 self.password = ''
817
817
818 self.anonymous_user = None # propagated on propagate_data
818 self.anonymous_user = None # propagated on propagate_data
819 self.propagate_data()
819 self.propagate_data()
820 self._instance = None
820 self._instance = None
821 self._permissions_scoped_cache = {} # used to bind scoped calculation
821 self._permissions_scoped_cache = {} # used to bind scoped calculation
822
822
823 @LazyProperty
823 @LazyProperty
824 def permissions(self):
824 def permissions(self):
825 return self.get_perms(user=self, cache=False)
825 return self.get_perms(user=self, cache=False)
826
826
827 def permissions_with_scope(self, scope):
828 """
829 Call the get_perms function with scoped data. The scope in that function
830 narrows the SQL calls to the given IDs of objects, resulting in fetching
831 just the particular permissions we want to obtain. If scope is an empty
832 dict, it narrows the scope to GLOBAL permissions only.
833
834 :param scope: dict
835 """
836 if 'repo_name' in scope:
836 if 'repo_name' in scope:
837 obj = Repository.get_by_repo_name(scope['repo_name'])
837 obj = Repository.get_by_repo_name(scope['repo_name'])
838 if obj:
838 if obj:
839 scope['repo_id'] = obj.repo_id
839 scope['repo_id'] = obj.repo_id
840 _scope = {
840 _scope = {
841 'repo_id': -1,
841 'repo_id': -1,
842 'user_group_id': -1,
842 'user_group_id': -1,
843 'repo_group_id': -1,
843 'repo_group_id': -1,
844 }
844 }
845 _scope.update(scope)
845 _scope.update(scope)
846 cache_key = "_".join(map(safe_str, reduce(lambda a, b: a+b,
846 cache_key = "_".join(map(safe_str, reduce(lambda a, b: a+b,
847 _scope.items())))
847 _scope.items())))
848 if cache_key not in self._permissions_scoped_cache:
848 if cache_key not in self._permissions_scoped_cache:
849 # store in cache to mimic how the @LazyProperty works,
849 # store in cache to mimic how the @LazyProperty works,
850 # the difference here is that we use the unique key calculated
850 # the difference here is that we use the unique key calculated
851 # from params and values
851 # from params and values
852 res = self.get_perms(user=self, cache=False, scope=_scope)
852 res = self.get_perms(user=self, cache=False, scope=_scope)
853 self._permissions_scoped_cache[cache_key] = res
853 self._permissions_scoped_cache[cache_key] = res
854 return self._permissions_scoped_cache[cache_key]
854 return self._permissions_scoped_cache[cache_key]
855
855
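# Hypothetical usage sketch (user id and repo name assumed): narrowing the
# permission calculation to a single repository keeps the SQL queries scoped
# to that repo_id instead of loading the full permission tree:
#
#   >>> auth_user = AuthUser(user_id=2)
#   >>> perms = auth_user.permissions_with_scope({'repo_name': 'some/repo'})
#   >>> 'repositories' in perms and 'global' in perms
#   True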
856 def get_instance(self):
856 def get_instance(self):
857 return User.get(self.user_id)
857 return User.get(self.user_id)
858
858
859 def update_lastactivity(self):
859 def update_lastactivity(self):
860 if self.user_id:
860 if self.user_id:
861 User.get(self.user_id).update_lastactivity()
861 User.get(self.user_id).update_lastactivity()
862
862
863 def propagate_data(self):
863 def propagate_data(self):
864 """
864 """
865 Fills in user data and propagates values to this instance. Maps fetched
865 Fills in user data and propagates values to this instance. Maps fetched
866 user attributes to this class instance attributes
866 user attributes to this class instance attributes
867 """
867 """
868 log.debug('starting data propagation for new potential AuthUser')
868 log.debug('starting data propagation for new potential AuthUser')
869 user_model = UserModel()
869 user_model = UserModel()
870 anon_user = self.anonymous_user = User.get_default_user(cache=True)
870 anon_user = self.anonymous_user = User.get_default_user(cache=True)
871 is_user_loaded = False
871 is_user_loaded = False
872
872
873 # lookup by userid
873 # lookup by userid
874 if self.user_id is not None and self.user_id != anon_user.user_id:
874 if self.user_id is not None and self.user_id != anon_user.user_id:
875 log.debug('Trying Auth User lookup by USER ID: `%s`' % self.user_id)
875 log.debug('Trying Auth User lookup by USER ID: `%s`' % self.user_id)
876 is_user_loaded = user_model.fill_data(self, user_id=self.user_id)
876 is_user_loaded = user_model.fill_data(self, user_id=self.user_id)
877
877
878 # try to get user by api key
879 elif self._api_key and self._api_key != anon_user.api_key:
879 elif self._api_key and self._api_key != anon_user.api_key:
880 log.debug('Trying Auth User lookup by API KEY: `%s`' % self._api_key)
880 log.debug('Trying Auth User lookup by API KEY: `%s`' % self._api_key)
881 is_user_loaded = user_model.fill_data(self, api_key=self._api_key)
881 is_user_loaded = user_model.fill_data(self, api_key=self._api_key)
882
882
883 # lookup by username
883 # lookup by username
884 elif self.username:
884 elif self.username:
885 log.debug('Trying Auth User lookup by USER NAME: `%s`' % self.username)
885 log.debug('Trying Auth User lookup by USER NAME: `%s`' % self.username)
886 is_user_loaded = user_model.fill_data(self, username=self.username)
886 is_user_loaded = user_model.fill_data(self, username=self.username)
887 else:
887 else:
888 log.debug('No data in %s that could be used to log in' % self)
889
889
890 if not is_user_loaded:
890 if not is_user_loaded:
891 log.debug('Failed to load user. Fallback to default user')
891 log.debug('Failed to load user. Fallback to default user')
892 # if we cannot authenticate user try anonymous
892 # if we cannot authenticate user try anonymous
893 if anon_user.active:
893 if anon_user.active:
894 user_model.fill_data(self, user_id=anon_user.user_id)
894 user_model.fill_data(self, user_id=anon_user.user_id)
895 # then we set this user as logged in
896 self.is_authenticated = True
896 self.is_authenticated = True
897 else:
897 else:
898 # in case of disabled anonymous user we reset some of the
898 # in case of disabled anonymous user we reset some of the
899 # parameters so such user is "corrupted", skipping the fill_data
899 # parameters so such user is "corrupted", skipping the fill_data
900 for attr in ['user_id', 'username', 'admin', 'active']:
900 for attr in ['user_id', 'username', 'admin', 'active']:
901 setattr(self, attr, None)
901 setattr(self, attr, None)
902 self.is_authenticated = False
902 self.is_authenticated = False
903
903
904 if not self.username:
904 if not self.username:
905 self.username = 'None'
905 self.username = 'None'
906
906
907 log.debug('Auth User is now %s' % self)
907 log.debug('Auth User is now %s' % self)
908
908
909 def get_perms(self, user, scope=None, explicit=True, algo='higherwin',
910 cache=False):
911 """
912 Fills the user permission attribute with permissions taken from the
913 database. Works for permissions given for repositories, and for
914 permissions that are granted to groups.
915
916 :param user: instance of User object from database
917 :param explicit: In case there are permissions both for the user and a
918 group that the user is part of, the explicit flag defines whether the
919 user explicitly overrides the permissions from the group; if it's
920 False, the decision is made based on the algo
921 :param algo: algorithm to decide which permission should be chosen if
922 multiple are defined, e.g. the user is in two different groups. It
923 also decides, if the explicit flag is turned off, how to pick the
924 permission when the user is in a group and has a separate permission
925 """
926 user_id = user.user_id
926 user_id = user.user_id
927 user_is_admin = user.is_admin
927 user_is_admin = user.is_admin
928
928
929 # inheritance of global permissions like create repo/fork repo etc
929 # inheritance of global permissions like create repo/fork repo etc
930 user_inherit_default_permissions = user.inherit_default_permissions
930 user_inherit_default_permissions = user.inherit_default_permissions
931
931
932 log.debug('Computing PERMISSION tree for scope %s' % (scope, ))
932 log.debug('Computing PERMISSION tree for scope %s' % (scope, ))
933 compute = caches.conditional_cache(
933 compute = caches.conditional_cache(
934 'short_term', 'cache_desc',
934 'short_term', 'cache_desc',
935 condition=cache, func=_cached_perms_data)
935 condition=cache, func=_cached_perms_data)
936 result = compute(user_id, scope, user_is_admin,
936 result = compute(user_id, scope, user_is_admin,
937 user_inherit_default_permissions, explicit, algo)
937 user_inherit_default_permissions, explicit, algo)
938
938
939 result_repr = []
939 result_repr = []
940 for k in result:
940 for k in result:
941 result_repr.append((k, len(result[k])))
941 result_repr.append((k, len(result[k])))
942
942
943 log.debug('PERMISSION tree computed %s' % (result_repr,))
943 log.debug('PERMISSION tree computed %s' % (result_repr,))
944 return result
944 return result
945
945
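# Hedged usage sketch (user id assumed): the permission structure returned
# above is the dict built by _permission_structure(), memoized either via
# the @LazyProperty `permissions` or per-scope via permissions_with_scope():
#
#   >>> auth_user = AuthUser(user_id=2)
#   >>> sorted(auth_user.permissions.keys())
#   ['global', 'repositories', 'repositories_groups', 'user_groups']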
946 @property
946 @property
947 def is_default(self):
947 def is_default(self):
948 return self.username == User.DEFAULT_USER
948 return self.username == User.DEFAULT_USER
949
949
950 @property
950 @property
951 def is_admin(self):
951 def is_admin(self):
952 return self.admin
952 return self.admin
953
953
954 @property
954 @property
955 def is_user_object(self):
955 def is_user_object(self):
956 return self.user_id is not None
956 return self.user_id is not None
957
957
958 @property
958 @property
959 def repositories_admin(self):
959 def repositories_admin(self):
960 """
960 """
961 Returns list of repositories you're an admin of
961 Returns list of repositories you're an admin of
962 """
962 """
963 return [
963 return [
964 x[0] for x in self.permissions['repositories'].iteritems()
964 x[0] for x in self.permissions['repositories'].iteritems()
965 if x[1] == 'repository.admin']
965 if x[1] == 'repository.admin']
966
966
967 @property
967 @property
968 def repository_groups_admin(self):
968 def repository_groups_admin(self):
969 """
969 """
970 Returns list of repository groups you're an admin of
970 Returns list of repository groups you're an admin of
971 """
971 """
972 return [
972 return [
973 x[0] for x in self.permissions['repositories_groups'].iteritems()
973 x[0] for x in self.permissions['repositories_groups'].iteritems()
974 if x[1] == 'group.admin']
974 if x[1] == 'group.admin']
975
975
976 @property
976 @property
977 def user_groups_admin(self):
977 def user_groups_admin(self):
978 """
978 """
979 Returns list of user groups you're an admin of
979 Returns list of user groups you're an admin of
980 """
980 """
981 return [
981 return [
982 x[0] for x in self.permissions['user_groups'].iteritems()
982 x[0] for x in self.permissions['user_groups'].iteritems()
983 if x[1] == 'usergroup.admin']
983 if x[1] == 'usergroup.admin']
984
984
985 @property
985 @property
986 def ip_allowed(self):
986 def ip_allowed(self):
987 """
987 """
988 Checks if ip_addr used in constructor is allowed from defined list of
988 Checks if ip_addr used in constructor is allowed from defined list of
989 allowed ip_addresses for user
989 allowed ip_addresses for user
990
990
991 :returns: boolean, True if ip is in allowed ip range
991 :returns: boolean, True if ip is in allowed ip range
992 """
992 """
993 # check IP
993 # check IP
994 inherit = self.inherit_default_permissions
994 inherit = self.inherit_default_permissions
995 return AuthUser.check_ip_allowed(self.user_id, self.ip_addr,
995 return AuthUser.check_ip_allowed(self.user_id, self.ip_addr,
996 inherit_from_default=inherit)

997 @property
998 def personal_repo_group(self):
998 def personal_repo_group(self):
999 return RepoGroup.get_user_personal_repo_group(self.user_id)
999 return RepoGroup.get_user_personal_repo_group(self.user_id)
1000
1000
1001 @classmethod
1001 @classmethod
1002 def check_ip_allowed(cls, user_id, ip_addr, inherit_from_default):
1002 def check_ip_allowed(cls, user_id, ip_addr, inherit_from_default):
1003 allowed_ips = AuthUser.get_allowed_ips(
1003 allowed_ips = AuthUser.get_allowed_ips(
1004 user_id, cache=True, inherit_from_default=inherit_from_default)
1004 user_id, cache=True, inherit_from_default=inherit_from_default)
1005 if check_ip_access(source_ip=ip_addr, allowed_ips=allowed_ips):
1005 if check_ip_access(source_ip=ip_addr, allowed_ips=allowed_ips):
1006 log.debug('IP:%s is in range of %s' % (ip_addr, allowed_ips))
1006 log.debug('IP:%s is in range of %s' % (ip_addr, allowed_ips))
1007 return True
1007 return True
1008 else:
1008 else:
1009 log.info('Access for IP:%s forbidden, '
1009 log.info('Access for IP:%s forbidden, '
1010 'not in %s' % (ip_addr, allowed_ips))
1010 'not in %s' % (ip_addr, allowed_ips))
1011 return False
1011 return False
1012
1012
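# Illustrative sketch (user id and address assumed) of the IP check above:
# when neither the user nor the `default` user has any UserIpMap entries,
# get_allowed_ips() falls back to {'0.0.0.0/0', '::/0'}, so every address
# is allowed:
#
#   >>> AuthUser.check_ip_allowed(user_id=2, ip_addr='10.0.0.5',
#   ...                           inherit_from_default=True)
#   True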
1013 def __repr__(self):
1013 def __repr__(self):
1014 return "<AuthUser('id:%s[%s] ip:%s auth:%s')>"\
1014 return "<AuthUser('id:%s[%s] ip:%s auth:%s')>"\
1015 % (self.user_id, self.username, self.ip_addr, self.is_authenticated)
1015 % (self.user_id, self.username, self.ip_addr, self.is_authenticated)
1016
1016
1017 def set_authenticated(self, authenticated=True):
1017 def set_authenticated(self, authenticated=True):
1018 if self.user_id != self.anonymous_user.user_id:
1018 if self.user_id != self.anonymous_user.user_id:
1019 self.is_authenticated = authenticated
1019 self.is_authenticated = authenticated
1020
1020
1021 def get_cookie_store(self):
1021 def get_cookie_store(self):
1022 return {
1022 return {
1023 'username': self.username,
1023 'username': self.username,
1024 'password': md5(self.password),
1024 'password': md5(self.password),
1025 'user_id': self.user_id,
1025 'user_id': self.user_id,
1026 'is_authenticated': self.is_authenticated
1026 'is_authenticated': self.is_authenticated
1027 }
1027 }
1028
1028
1029 @classmethod
1029 @classmethod
1030 def from_cookie_store(cls, cookie_store):
1030 def from_cookie_store(cls, cookie_store):
1031 """
1031 """
1032 Creates AuthUser from a cookie store
1032 Creates AuthUser from a cookie store
1033
1033
1034 :param cls:
1034 :param cls:
1035 :param cookie_store:
1035 :param cookie_store:
1036 """
1036 """
1037 user_id = cookie_store.get('user_id')
1037 user_id = cookie_store.get('user_id')
1038 username = cookie_store.get('username')
1038 username = cookie_store.get('username')
1039 api_key = cookie_store.get('api_key')
1039 api_key = cookie_store.get('api_key')
1040 return AuthUser(user_id, api_key, username)
1040 return AuthUser(user_id, api_key, username)
1041
1041
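# Round-trip sketch (assuming user id 2 exists and is active) of the cookie
# store helpers above; the dict written by get_cookie_store() is enough to
# rebuild an AuthUser on the next request:
#
#   >>> store = AuthUser(user_id=2).get_cookie_store()
#   >>> sorted(store)
#   ['is_authenticated', 'password', 'user_id', 'username']
#   >>> AuthUser.from_cookie_store(store).user_id
#   2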
1042 @classmethod
1042 @classmethod
1043 def get_allowed_ips(cls, user_id, cache=False, inherit_from_default=False):
1043 def get_allowed_ips(cls, user_id, cache=False, inherit_from_default=False):
1044 _set = set()
1044 _set = set()
1045
1045
1046 if inherit_from_default:
1046 if inherit_from_default:
1047 default_ips = UserIpMap.query().filter(
1047 default_ips = UserIpMap.query().filter(
1048 UserIpMap.user == User.get_default_user(cache=True))
1048 UserIpMap.user == User.get_default_user(cache=True))
1049 if cache:
1049 if cache:
1050 default_ips = default_ips.options(
1050 default_ips = default_ips.options(
1051 FromCache("sql_cache_short", "get_user_ips_default"))
1051 FromCache("sql_cache_short", "get_user_ips_default"))
1052
1052
1053 # populate from default user
1053 # populate from default user
1054 for ip in default_ips:
1054 for ip in default_ips:
1055 try:
1055 try:
1056 _set.add(ip.ip_addr)
1056 _set.add(ip.ip_addr)
1057 except ObjectDeletedError:
1057 except ObjectDeletedError:
1058 # since we use heavy caching sometimes it happens that
1058 # since we use heavy caching sometimes it happens that
1059 # we get deleted objects here, we just skip them
1059 # we get deleted objects here, we just skip them
1060 pass
1060 pass
1061
1061
1062 user_ips = UserIpMap.query().filter(UserIpMap.user_id == user_id)
1062 user_ips = UserIpMap.query().filter(UserIpMap.user_id == user_id)
1063 if cache:
1063 if cache:
1064 user_ips = user_ips.options(
1064 user_ips = user_ips.options(
1065 FromCache("sql_cache_short", "get_user_ips_%s" % user_id))
1065 FromCache("sql_cache_short", "get_user_ips_%s" % user_id))
1066
1066
1067 for ip in user_ips:
1067 for ip in user_ips:
1068 try:
1068 try:
1069 _set.add(ip.ip_addr)
1069 _set.add(ip.ip_addr)
1070 except ObjectDeletedError:
1070 except ObjectDeletedError:
1071 # since we use heavy caching sometimes it happens that we get
1071 # since we use heavy caching sometimes it happens that we get
1072 # deleted objects here, we just skip them
1072 # deleted objects here, we just skip them
1073 pass
1073 pass
1074 return _set or set(['0.0.0.0/0', '::/0'])
1074 return _set or set(['0.0.0.0/0', '::/0'])
1075
1075
1076
1076
1077 def set_available_permissions(config):
1078 """
1079 This function populates the pylons globals with all permissions defined
1080 in the db. We don't want to check the db for new permissions on every
1081 request, since adding a new permission also requires an application
1082 restart, i.e. to decorate new views with the newly created permission.
1083
1084 :param config: current pylons config instance
1085
1086 """
1087 log.info('getting information about all available permissions')
1087 log.info('getting information about all available permissions')
1088 try:
1088 try:
1089 sa = meta.Session
1089 sa = meta.Session
1090 all_perms = sa.query(Permission).all()
1090 all_perms = sa.query(Permission).all()
1091 config['available_permissions'] = [x.permission_name for x in all_perms]
1091 config['available_permissions'] = [x.permission_name for x in all_perms]
1092 except Exception:
1092 except Exception:
1093 log.error(traceback.format_exc())
1093 log.error(traceback.format_exc())
1094 finally:
1094 finally:
1095 meta.Session.remove()
1095 meta.Session.remove()
1096
1096
1097
1097
1098 def get_csrf_token(session=None, force_new=False, save_if_missing=True):
1098 def get_csrf_token(session=None, force_new=False, save_if_missing=True):
1099 """
1099 """
1100 Return the current authentication token, creating one if one doesn't
1100 Return the current authentication token, creating one if one doesn't
1101 already exist and the save_if_missing flag is present.
1101 already exist and the save_if_missing flag is present.
1102
1102
1103 :param session: pass in the pylons session, else we use the global ones
1103 :param session: pass in the pylons session, else we use the global ones
1104 :param force_new: force to re-generate the token and store it in session
1104 :param force_new: force to re-generate the token and store it in session
1105 :param save_if_missing: save the newly generated token if it's missing in
1105 :param save_if_missing: save the newly generated token if it's missing in
1106 session
1106 session
1107 """
1107 """
1108 # NOTE(marcink): probably should be replaced with below one from pyramid 1.9
1108 # NOTE(marcink): probably should be replaced with below one from pyramid 1.9
1109 # from pyramid.csrf import get_csrf_token
1109 # from pyramid.csrf import get_csrf_token
1110
1110
1111 if not session:
1111 if not session:
1112 from pylons import session
1112 from pylons import session
1113
1113
1114 if (csrf_token_key not in session and save_if_missing) or force_new:
1114 if (csrf_token_key not in session and save_if_missing) or force_new:
1115 token = hashlib.sha1(str(random.getrandbits(128))).hexdigest()
1115 token = hashlib.sha1(str(random.getrandbits(128))).hexdigest()
1116 session[csrf_token_key] = token
1116 session[csrf_token_key] = token
1117 if hasattr(session, 'save'):
1117 if hasattr(session, 'save'):
1118 session.save()
1118 session.save()
1119 return session.get(csrf_token_key)
1119 return session.get(csrf_token_key)
1120
1120
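# Minimal sketch (using a plain dict as a stand-in for the session) of the
# token helper above, as used together with the CSRFRequired decorator
# further down:
#
#   >>> fake_session = {}
#   >>> token = get_csrf_token(session=fake_session)
#   >>> fake_session[csrf_token_key] == token
#   True
#   >>> get_csrf_token(session=fake_session) == token   # reused, not regenerated
#   True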
1121
1121
1122 def get_request(perm_class):
1122 def get_request(perm_class):
1123 from pyramid.threadlocal import get_current_request
1123 from pyramid.threadlocal import get_current_request
1124 pyramid_request = get_current_request()
1124 pyramid_request = get_current_request()
1125 if not pyramid_request:
1125 if not pyramid_request:
1126 # return global request of pylons in case pyramid isn't available
1126 # return global request of pylons in case pyramid isn't available
1127 # NOTE(marcink): this should be removed after migration to pyramid
1127 # NOTE(marcink): this should be removed after migration to pyramid
1128 from pylons import request
1128 from pylons import request
1129 return request
1129 return request
1130 return pyramid_request
1130 return pyramid_request
1131
1131
1132
1132
1133 # CHECK DECORATORS
1133 # CHECK DECORATORS
1134 class CSRFRequired(object):
1134 class CSRFRequired(object):
1135 """
1135 """
1136 Decorator for authenticating a form
1136 Decorator for authenticating a form
1137
1137
1138 This decorator uses an authorization token stored in the client's
1138 This decorator uses an authorization token stored in the client's
1139 session for prevention of certain Cross-site request forgery (CSRF)
1139 session for prevention of certain Cross-site request forgery (CSRF)
1140 attacks (See
1140 attacks (See
1141 http://en.wikipedia.org/wiki/Cross-site_request_forgery for more
1141 http://en.wikipedia.org/wiki/Cross-site_request_forgery for more
1142 information).
1142 information).
1143
1143
1144 For use with the ``webhelpers.secure_form`` helper functions.
1144 For use with the ``webhelpers.secure_form`` helper functions.
1145
1145
1146 """
1146 """
1147 def __init__(self, token=csrf_token_key, header='X-CSRF-Token',
1147 def __init__(self, token=csrf_token_key, header='X-CSRF-Token',
1148 except_methods=None):
1148 except_methods=None):
1149 self.token = token
1149 self.token = token
1150 self.header = header
1150 self.header = header
1151 self.except_methods = except_methods or []
1151 self.except_methods = except_methods or []
1152
1152
1153 def __call__(self, func):
1153 def __call__(self, func):
1154 return get_cython_compat_decorator(self.__wrapper, func)
1154 return get_cython_compat_decorator(self.__wrapper, func)
1155
1155
1156 def _get_csrf(self, _request):
1156 def _get_csrf(self, _request):
1157 return _request.POST.get(self.token, _request.headers.get(self.header))
1157 return _request.POST.get(self.token, _request.headers.get(self.header))
1158
1158
1159 def check_csrf(self, _request, cur_token):
1159 def check_csrf(self, _request, cur_token):
1160 supplied_token = self._get_csrf(_request)
1160 supplied_token = self._get_csrf(_request)
1161 return supplied_token and supplied_token == cur_token
1161 return supplied_token and supplied_token == cur_token
1162
1162
1163 def _get_request(self):
1163 def _get_request(self):
1164 return get_request(self)
1164 return get_request(self)
1165
1165
1166 def __wrapper(self, func, *fargs, **fkwargs):
1166 def __wrapper(self, func, *fargs, **fkwargs):
1167 request = self._get_request()
1167 request = self._get_request()
1168
1168
1169 if request.method in self.except_methods:
1169 if request.method in self.except_methods:
1170 return func(*fargs, **fkwargs)
1170 return func(*fargs, **fkwargs)
1171
1171
1172 cur_token = get_csrf_token(save_if_missing=False)
1172 cur_token = get_csrf_token(save_if_missing=False)
1173 if self.check_csrf(request, cur_token):
1173 if self.check_csrf(request, cur_token):
1174 if request.POST.get(self.token):
1174 if request.POST.get(self.token):
1175 del request.POST[self.token]
1175 del request.POST[self.token]
1176 return func(*fargs, **fkwargs)
1176 return func(*fargs, **fkwargs)
1177 else:
1177 else:
1178 reason = 'token-missing'
1178 reason = 'token-missing'
1179 supplied_token = self._get_csrf(request)
1179 supplied_token = self._get_csrf(request)
1180 if supplied_token and cur_token != supplied_token:
1180 if supplied_token and cur_token != supplied_token:
1181 reason = 'token-mismatch [%s:%s]' % (
1182 (cur_token or '')[:6], (supplied_token or '')[:6])
1183
1183
1184 csrf_message = \
1184 csrf_message = \
1185 ("Cross-site request forgery detected, request denied. See "
1185 ("Cross-site request forgery detected, request denied. See "
1186 "http://en.wikipedia.org/wiki/Cross-site_request_forgery for "
1186 "http://en.wikipedia.org/wiki/Cross-site_request_forgery for "
1187 "more information.")
1187 "more information.")
1188 log.warn('Cross-site request forgery detected, request %r DENIED: %s '
1188 log.warn('Cross-site request forgery detected, request %r DENIED: %s '
1189 'REMOTE_ADDR:%s, HEADERS:%s' % (
1189 'REMOTE_ADDR:%s, HEADERS:%s' % (
1190 request, reason, request.remote_addr, request.headers))
1190 request, reason, request.remote_addr, request.headers))
1191
1191
1192 raise HTTPForbidden(explanation=csrf_message)
1192 raise HTTPForbidden(explanation=csrf_message)
1193
1193
1194
1194
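# Hedged usage sketch (view class and method names are assumed, not part of
# this changeset): the decorator is stacked on view methods that accept form
# POSTs, and expects the token rendered by the secure_form helpers:
#
#   class SomeRepoView(RepoAppView):
#
#       @LoginRequired()
#       @CSRFRequired()
#       def repo_some_post_action(self):
#           ...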
1195 class LoginRequired(object):
1195 class LoginRequired(object):
1196 """
1196 """
1197 Must be logged in to execute this function else
1197 Must be logged in to execute this function else
1198 redirect to login page
1198 redirect to login page
1199
1199
1200 :param api_access: if enabled this checks only for valid auth token
1200 :param api_access: if enabled this checks only for valid auth token
1201 and grants access based on valid token
1201 and grants access based on valid token
1202 """
1202 """
1203 def __init__(self, auth_token_access=None):
1203 def __init__(self, auth_token_access=None):
1204 self.auth_token_access = auth_token_access
1204 self.auth_token_access = auth_token_access
1205
1205
1206 def __call__(self, func):
1206 def __call__(self, func):
1207 return get_cython_compat_decorator(self.__wrapper, func)
1207 return get_cython_compat_decorator(self.__wrapper, func)
1208
1208
1209 def _get_request(self):
1209 def _get_request(self):
1210 return get_request(self)
1210 return get_request(self)
1211
1211
1212 def __wrapper(self, func, *fargs, **fkwargs):
1212 def __wrapper(self, func, *fargs, **fkwargs):
1213 from rhodecode.lib import helpers as h
1213 from rhodecode.lib import helpers as h
1214 cls = fargs[0]
1214 cls = fargs[0]
1215 user = cls._rhodecode_user
1215 user = cls._rhodecode_user
1216 request = self._get_request()
1216 request = self._get_request()
1217
1217
1218 loc = "%s:%s" % (cls.__class__.__name__, func.__name__)
1218 loc = "%s:%s" % (cls.__class__.__name__, func.__name__)
1219 log.debug('Starting login restriction checks for user: %s' % (user,))
1219 log.debug('Starting login restriction checks for user: %s' % (user,))
1220 # check if our IP is allowed
1220 # check if our IP is allowed
1221 ip_access_valid = True
1221 ip_access_valid = True
1222 if not user.ip_allowed:
1222 if not user.ip_allowed:
1223 h.flash(h.literal(_('IP %s not allowed' % (user.ip_addr,))),
1223 h.flash(h.literal(_('IP %s not allowed' % (user.ip_addr,))),
1224 category='warning')
1224 category='warning')
1225 ip_access_valid = False
1225 ip_access_valid = False
1226
1226
1227 # check if we used an APIKEY and it's a valid one
1227 # check if we used an APIKEY and it's a valid one
1228 # defined white-list of controllers for which API access will be enabled
1229 _auth_token = request.GET.get(
1229 _auth_token = request.GET.get(
1230 'auth_token', '') or request.GET.get('api_key', '')
1230 'auth_token', '') or request.GET.get('api_key', '')
1231 auth_token_access_valid = allowed_auth_token_access(
1231 auth_token_access_valid = allowed_auth_token_access(
1232 loc, auth_token=_auth_token)
1232 loc, auth_token=_auth_token)
1233
1233
1234 # explicit controller is enabled or API is in our whitelist
1234 # explicit controller is enabled or API is in our whitelist
1235 if self.auth_token_access or auth_token_access_valid:
1235 if self.auth_token_access or auth_token_access_valid:
1236 log.debug('Checking AUTH TOKEN access for %s' % (cls,))
1236 log.debug('Checking AUTH TOKEN access for %s' % (cls,))
1237 db_user = user.get_instance()
1237 db_user = user.get_instance()
1238
1238
1239 if db_user:
1239 if db_user:
1240 if self.auth_token_access:
1240 if self.auth_token_access:
1241 roles = self.auth_token_access
1241 roles = self.auth_token_access
1242 else:
1242 else:
1243 roles = [UserApiKeys.ROLE_HTTP]
1243 roles = [UserApiKeys.ROLE_HTTP]
1244 token_match = db_user.authenticate_by_token(
1244 token_match = db_user.authenticate_by_token(
1245 _auth_token, roles=roles)
1245 _auth_token, roles=roles)
1246 else:
1246 else:
1247 log.debug('Unable to fetch db instance for auth user: %s', user)
1247 log.debug('Unable to fetch db instance for auth user: %s', user)
1248 token_match = False
1248 token_match = False
1249
1249
1250 if _auth_token and token_match:
1250 if _auth_token and token_match:
1251 auth_token_access_valid = True
1251 auth_token_access_valid = True
1252 log.debug('AUTH TOKEN ****%s is VALID' % (_auth_token[-4:],))
1252 log.debug('AUTH TOKEN ****%s is VALID' % (_auth_token[-4:],))
1253 else:
1253 else:
1254 auth_token_access_valid = False
1254 auth_token_access_valid = False
1255 if not _auth_token:
1255 if not _auth_token:
1256 log.debug("AUTH TOKEN *NOT* present in request")
1256 log.debug("AUTH TOKEN *NOT* present in request")
1257 else:
1257 else:
1258 log.warning(
1258 log.warning(
1259 "AUTH TOKEN ****%s *NOT* valid" % _auth_token[-4:])
1259 "AUTH TOKEN ****%s *NOT* valid" % _auth_token[-4:])
1260
1260
1261 log.debug('Checking if %s is authenticated @ %s' % (user.username, loc))
1261 log.debug('Checking if %s is authenticated @ %s' % (user.username, loc))
1262 reason = 'RHODECODE_AUTH' if user.is_authenticated \
1262 reason = 'RHODECODE_AUTH' if user.is_authenticated \
1263 else 'AUTH_TOKEN_AUTH'
1263 else 'AUTH_TOKEN_AUTH'
1264
1264
1265 if ip_access_valid and (
1265 if ip_access_valid and (
1266 user.is_authenticated or auth_token_access_valid):
1266 user.is_authenticated or auth_token_access_valid):
1267 log.info(
1267 log.info(
1268 'user %s authenticating with:%s IS authenticated on func %s'
1268 'user %s authenticating with:%s IS authenticated on func %s'
1269 % (user, reason, loc))
1269 % (user, reason, loc))
1270
1270
1271 # update user data to check last activity
1271 # update user data to check last activity
1272 user.update_lastactivity()
1272 user.update_lastactivity()
1273 Session().commit()
1273 Session().commit()
1274 return func(*fargs, **fkwargs)
1274 return func(*fargs, **fkwargs)
1275 else:
1275 else:
1276 log.warning(
1276 log.warning(
1277 'user %s authenticating with:%s NOT authenticated on '
1277 'user %s authenticating with:%s NOT authenticated on '
1278 'func: %s: IP_ACCESS:%s AUTH_TOKEN_ACCESS:%s'
1278 'func: %s: IP_ACCESS:%s AUTH_TOKEN_ACCESS:%s'
1279 % (user, reason, loc, ip_access_valid,
1279 % (user, reason, loc, ip_access_valid,
1280 auth_token_access_valid))
1280 auth_token_access_valid))
1281 # we preserve the get PARAM
1281 # we preserve the get PARAM
1282 came_from = request.path_qs
1282 came_from = request.path_qs
1283 log.debug('redirecting to login page with %s' % (came_from,))
1283 log.debug('redirecting to login page with %s' % (came_from,))
1284 raise HTTPFound(
1284 raise HTTPFound(
1285 h.route_path('login', _query={'came_from': came_from}))
1285 h.route_path('login', _query={'came_from': came_from}))
1286
1286
1287
1287
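# Hedged usage sketch (view method and role name assumed): token-based access
# can be opened up per view by passing the accepted auth token roles to the
# decorator instead of relying on the global whitelist:
#
#   @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED])
#   def repo_atom_feed(self):
#       ...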
1288 class NotAnonymous(object):
1288 class NotAnonymous(object):
1289 """
1289 """
1290 Must be logged in to execute this function else
1290 Must be logged in to execute this function else
1291 redirect to login page
1291 redirect to login page
1292 """
1292 """
1293
1293
1294 def __call__(self, func):
1294 def __call__(self, func):
1295 return get_cython_compat_decorator(self.__wrapper, func)
1295 return get_cython_compat_decorator(self.__wrapper, func)
1296
1296
1297 def _get_request(self):
1297 def _get_request(self):
1298 return get_request(self)
1298 return get_request(self)
1299
1299
1300 def __wrapper(self, func, *fargs, **fkwargs):
1300 def __wrapper(self, func, *fargs, **fkwargs):
1301 import rhodecode.lib.helpers as h
1301 import rhodecode.lib.helpers as h
1302 cls = fargs[0]
1302 cls = fargs[0]
1303 self.user = cls._rhodecode_user
1303 self.user = cls._rhodecode_user
1304 request = self._get_request()
1304 request = self._get_request()
1305
1305
1306 log.debug('Checking if user is not anonymous @%s' % cls)
1306 log.debug('Checking if user is not anonymous @%s' % cls)
1307
1307
1308 anonymous = self.user.username == User.DEFAULT_USER
1308 anonymous = self.user.username == User.DEFAULT_USER
1309
1309
1310 if anonymous:
1310 if anonymous:
1311 came_from = request.path_qs
1311 came_from = request.path_qs
1312 h.flash(_('You need to be a registered user to '
1312 h.flash(_('You need to be a registered user to '
1313 'perform this action'),
1313 'perform this action'),
1314 category='warning')
1314 category='warning')
1315 raise HTTPFound(
1315 raise HTTPFound(
1316 h.route_path('login', _query={'came_from': came_from}))
1316 h.route_path('login', _query={'came_from': came_from}))
1317 else:
1317 else:
1318 return func(*fargs, **fkwargs)
1318 return func(*fargs, **fkwargs)
1319
1319
1320
1320
1321 class XHRRequired(object):
1321 class XHRRequired(object):
1322 # TODO(marcink): remove this in favor of the predicates in pyramid routes
1322 # TODO(marcink): remove this in favor of the predicates in pyramid routes
1323
1323
1324 def __call__(self, func):
1324 def __call__(self, func):
1325 return get_cython_compat_decorator(self.__wrapper, func)
1325 return get_cython_compat_decorator(self.__wrapper, func)
1326
1326
1327 def _get_request(self):
1327 def _get_request(self):
1328 return get_request(self)
1328 return get_request(self)
1329
1329
1330 def __wrapper(self, func, *fargs, **fkwargs):
1330 def __wrapper(self, func, *fargs, **fkwargs):
1331 from pylons.controllers.util import abort
1331 from pylons.controllers.util import abort
1332 request = self._get_request()
1332 request = self._get_request()
1333
1333
1334 log.debug('Checking if request is XMLHttpRequest (XHR)')
1334 log.debug('Checking if request is XMLHttpRequest (XHR)')
1335 xhr_message = 'This is not a valid XMLHttpRequest (XHR) request'
1335 xhr_message = 'This is not a valid XMLHttpRequest (XHR) request'
1336
1336
1337 if not request.is_xhr:
1337 if not request.is_xhr:
1338 abort(400, detail=xhr_message)
1338 abort(400, detail=xhr_message)
1339
1339
1340 return func(*fargs, **fkwargs)
1340 return func(*fargs, **fkwargs)
1341
1341
1342
1342
1343 class HasAcceptedRepoType(object):
1343 class HasAcceptedRepoType(object):
1344 """
1344 """
1345 Check if requested repo is within given repo type aliases
1345 Check if requested repo is within given repo type aliases
1346 """
1346 """
1347
1347
1348 # TODO(marcink): remove this in favor of the predicates in pyramid routes
1348 # TODO(marcink): remove this in favor of the predicates in pyramid routes
1349
1349
1350 def __init__(self, *repo_type_list):
1350 def __init__(self, *repo_type_list):
1351 self.repo_type_list = set(repo_type_list)
1351 self.repo_type_list = set(repo_type_list)
1352
1352
1353 def __call__(self, func):
1353 def __call__(self, func):
1354 return get_cython_compat_decorator(self.__wrapper, func)
1354 return get_cython_compat_decorator(self.__wrapper, func)
1355
1355
1356 def __wrapper(self, func, *fargs, **fkwargs):
1356 def __wrapper(self, func, *fargs, **fkwargs):
1357 import rhodecode.lib.helpers as h
1357 import rhodecode.lib.helpers as h
1358 cls = fargs[0]
1358 cls = fargs[0]
1359 rhodecode_repo = cls.rhodecode_repo
1359 rhodecode_repo = cls.rhodecode_repo
1360
1360
1361 log.debug('%s checking repo type for %s in %s',
1361 log.debug('%s checking repo type for %s in %s',
1362 self.__class__.__name__,
1362 self.__class__.__name__,
1363 rhodecode_repo.alias, self.repo_type_list)
1363 rhodecode_repo.alias, self.repo_type_list)
1364
1364
1365 if rhodecode_repo.alias in self.repo_type_list:
1365 if rhodecode_repo.alias in self.repo_type_list:
1366 return func(*fargs, **fkwargs)
1366 return func(*fargs, **fkwargs)
1367 else:
1367 else:
1368 h.flash(h.literal(
1368 h.flash(h.literal(
1369 _('Action not supported for %s.' % rhodecode_repo.alias)),
1369 _('Action not supported for %s.' % rhodecode_repo.alias)),
1370 category='warning')
1370 category='warning')
1371 raise HTTPFound(
1371 raise HTTPFound(
1372 h.route_path('repo_summary',
1372 h.route_path('repo_summary',
1373 repo_name=cls.rhodecode_db_repo.repo_name))
1373 repo_name=cls.rhodecode_db_repo.repo_name))
1374
1374
1375
1375
1376 class PermsDecorator(object):
1376 class PermsDecorator(object):
1377 """
1377 """
1378 Base class for controller decorators; we extract the current user from
1378 Base class for controller decorators; we extract the current user from
1379 the class itself, which has it stored in the base controllers
1379 the class itself, which has it stored in the base controllers
1380 """
1380 """
1381
1381
1382 def __init__(self, *required_perms):
1382 def __init__(self, *required_perms):
1383 self.required_perms = set(required_perms)
1383 self.required_perms = set(required_perms)
1384
1384
1385 def __call__(self, func):
1385 def __call__(self, func):
1386 return get_cython_compat_decorator(self.__wrapper, func)
1386 return get_cython_compat_decorator(self.__wrapper, func)
1387
1387
1388 def _get_request(self):
1388 def _get_request(self):
1389 return get_request(self)
1389 return get_request(self)
1390
1390
1391 def _get_came_from(self):
1391 def _get_came_from(self):
1392 _request = self._get_request()
1392 _request = self._get_request()
1393
1393
1394 # both pylons/pyramid has this attribute
1394 # both pylons/pyramid has this attribute
1395 return _request.path_qs
1395 return _request.path_qs
1396
1396
1397 def __wrapper(self, func, *fargs, **fkwargs):
1397 def __wrapper(self, func, *fargs, **fkwargs):
1398 import rhodecode.lib.helpers as h
1398 import rhodecode.lib.helpers as h
1399 cls = fargs[0]
1399 cls = fargs[0]
1400 _user = cls._rhodecode_user
1400 _user = cls._rhodecode_user
1401
1401
1402 log.debug('checking %s permissions %s for %s %s',
1402 log.debug('checking %s permissions %s for %s %s',
1403 self.__class__.__name__, self.required_perms, cls, _user)
1403 self.__class__.__name__, self.required_perms, cls, _user)
1404
1404
1405 if self.check_permissions(_user):
1405 if self.check_permissions(_user):
1406 log.debug('Permission granted for %s %s', cls, _user)
1406 log.debug('Permission granted for %s %s', cls, _user)
1407 return func(*fargs, **fkwargs)
1407 return func(*fargs, **fkwargs)
1408
1408
1409 else:
1409 else:
1410 log.debug('Permission denied for %s %s', cls, _user)
1410 log.debug('Permission denied for %s %s', cls, _user)
1411 anonymous = _user.username == User.DEFAULT_USER
1411 anonymous = _user.username == User.DEFAULT_USER
1412
1412
1413 if anonymous:
1413 if anonymous:
1414 came_from = self._get_came_from()
1414 came_from = self._get_came_from()
1415 h.flash(_('You need to be signed in to view this page'),
1415 h.flash(_('You need to be signed in to view this page'),
1416 category='warning')
1416 category='warning')
1417 raise HTTPFound(
1417 raise HTTPFound(
1418 h.route_path('login', _query={'came_from': came_from}))
1418 h.route_path('login', _query={'came_from': came_from}))
1419
1419
1420 else:
1420 else:
1421 # redirect with 404 to prevent resource discovery
1421 # redirect with 404 to prevent resource discovery
1422 raise HTTPNotFound()
1422 raise HTTPNotFound()
1423
1423
1424 def check_permissions(self, user):
1424 def check_permissions(self, user):
1425 """Dummy function for overriding"""
1425 """Dummy function for overriding"""
1426 raise NotImplementedError(
1426 raise NotImplementedError(
1427 'You have to write this function in child class')
1427 'You have to write this function in child class')
1428
1428
1429
1429
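A minimal sketch of how a concrete check plugs into this base class: only check_permissions() needs to be supplied, while PermsDecorator handles the wrapping, user extraction and the redirect/404 behaviour. The subclass, controller class and permission key below are illustrative, not taken from this changeset.

    # illustrative subclass: allow only when the user holds the given global perm
    class HasGlobalPermissionAllSketch(PermsDecorator):
        def check_permissions(self, user):
            perms = user.permissions_with_scope({})
            return self.required_perms.issubset(perms['global'])

    class AdminSettingsControllerSketch(object):  # stand-in for a base controller
        _rhodecode_user = None  # populated by the real base controller

        @HasGlobalPermissionAllSketch('hg.admin')
        def settings(self):
            return 'rendered settings page'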
1430 class HasPermissionAllDecorator(PermsDecorator):
1430 class HasPermissionAllDecorator(PermsDecorator):
1431 """
1431 """
1432 Checks for access permission for all given predicates. All of them
1432 Checks for access permission for all given predicates. All of them
1433 have to be met in order to fulfill the request
1433 have to be met in order to fulfill the request
1434 """
1434 """
1435
1435
1436 def check_permissions(self, user):
1436 def check_permissions(self, user):
1437 perms = user.permissions_with_scope({})
1437 perms = user.permissions_with_scope({})
1438 if self.required_perms.issubset(perms['global']):
1438 if self.required_perms.issubset(perms['global']):
1439 return True
1439 return True
1440 return False
1440 return False
1441
1441
1442
1442
1443 class HasPermissionAnyDecorator(PermsDecorator):
1443 class HasPermissionAnyDecorator(PermsDecorator):
1444 """
1444 """
1445 Checks for access permission for any of the given predicates. In order to
1445 Checks for access permission for any of the given predicates. In order to
1446 fulfill the request, any of the predicates must be met
1446 fulfill the request, any of the predicates must be met
1447 """
1447 """
1448
1448
1449 def check_permissions(self, user):
1449 def check_permissions(self, user):
1450 perms = user.permissions_with_scope({})
1450 perms = user.permissions_with_scope({})
1451 if self.required_perms.intersection(perms['global']):
1451 if self.required_perms.intersection(perms['global']):
1452 return True
1452 return True
1453 return False
1453 return False
1454
1454
1455
1455
1456 class HasRepoPermissionAllDecorator(PermsDecorator):
1456 class HasRepoPermissionAllDecorator(PermsDecorator):
1457 """
1457 """
1458 Checks for access permission for all given predicates for a specific
1458 Checks for access permission for all given predicates for a specific
1459 repository. All of them have to be met in order to fulfill the request
1459 repository. All of them have to be met in order to fulfill the request
1460 """
1460 """
1461 def _get_repo_name(self):
1461 def _get_repo_name(self):
1462 _request = self._get_request()
1462 _request = self._get_request()
1463 return get_repo_slug(_request)
1463 return get_repo_slug(_request)
1464
1464
1465 def check_permissions(self, user):
1465 def check_permissions(self, user):
1466 perms = user.permissions
1466 perms = user.permissions
1467 repo_name = self._get_repo_name()
1467 repo_name = self._get_repo_name()
1468
1468
1469 try:
1469 try:
1470 user_perms = set([perms['repositories'][repo_name]])
1470 user_perms = set([perms['repositories'][repo_name]])
1471 except KeyError:
1471 except KeyError:
1472 log.debug('cannot locate repo with name: `%s` in permissions defs',
1472 log.debug('cannot locate repo with name: `%s` in permissions defs',
1473 repo_name)
1473 repo_name)
1474 return False
1474 return False
1475
1475
1476 log.debug('checking `%s` permissions for repo `%s`',
1476 log.debug('checking `%s` permissions for repo `%s`',
1477 user_perms, repo_name)
1477 user_perms, repo_name)
1478 if self.required_perms.issubset(user_perms):
1478 if self.required_perms.issubset(user_perms):
1479 return True
1479 return True
1480 return False
1480 return False
1481
1481
1482
1482
1483 class HasRepoPermissionAnyDecorator(PermsDecorator):
1483 class HasRepoPermissionAnyDecorator(PermsDecorator):
1484 """
1484 """
1485 Checks for access permission for any of the given predicates for a specific
1485 Checks for access permission for any of the given predicates for a specific
1486 repository. In order to fulfill the request, any of the predicates must be met
1486 repository. In order to fulfill the request, any of the predicates must be met
1487 """
1487 """
1488 def _get_repo_name(self):
1488 def _get_repo_name(self):
1489 _request = self._get_request()
1489 _request = self._get_request()
1490 return get_repo_slug(_request)
1490 return get_repo_slug(_request)
1491
1491
1492 def check_permissions(self, user):
1492 def check_permissions(self, user):
1493 perms = user.permissions
1493 perms = user.permissions
1494 repo_name = self._get_repo_name()
1494 repo_name = self._get_repo_name()
1495
1495
1496 try:
1496 try:
1497 user_perms = set([perms['repositories'][repo_name]])
1497 user_perms = set([perms['repositories'][repo_name]])
1498 except KeyError:
1498 except KeyError:
1499 log.debug('cannot locate repo with name: `%s` in permissions defs',
1499 log.debug('cannot locate repo with name: `%s` in permissions defs',
1500 repo_name)
1500 repo_name)
1501 return False
1501 return False
1502
1502
1503 log.debug('checking `%s` permissions for repo `%s`',
1503 log.debug('checking `%s` permissions for repo `%s`',
1504 user_perms, repo_name)
1504 user_perms, repo_name)
1505 if self.required_perms.intersection(user_perms):
1505 if self.required_perms.intersection(user_perms):
1506 return True
1506 return True
1507 return False
1507 return False
1508
1508
1509
1509
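A usage sketch for the repo-scoped "any" decorator on a view method; the view class name and method are assumed for illustration, while the permission keys are RhodeCode's standard repository levels.

    class RepoSummaryViewSketch(RepoAppView):
        @LoginRequired()
        @HasRepoPermissionAnyDecorator(
            'repository.read', 'repository.write', 'repository.admin')
        def summary(self):
            # runs only if the user holds at least one of the listed
            # permissions on the repository resolved from the request
            ...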
1510 class HasRepoGroupPermissionAllDecorator(PermsDecorator):
1510 class HasRepoGroupPermissionAllDecorator(PermsDecorator):
1511 """
1511 """
1512 Checks for access permission for all given predicates for a specific
1512 Checks for access permission for all given predicates for a specific
1513 repository group. All of them have to be met in order to
1513 repository group. All of them have to be met in order to
1514 fulfill the request
1514 fulfill the request
1515 """
1515 """
1516 def _get_repo_group_name(self):
1516 def _get_repo_group_name(self):
1517 _request = self._get_request()
1517 _request = self._get_request()
1518 return get_repo_group_slug(_request)
1518 return get_repo_group_slug(_request)
1519
1519
1520 def check_permissions(self, user):
1520 def check_permissions(self, user):
1521 perms = user.permissions
1521 perms = user.permissions
1522 group_name = self._get_repo_group_name()
1522 group_name = self._get_repo_group_name()
1523 try:
1523 try:
1524 user_perms = set([perms['repositories_groups'][group_name]])
1524 user_perms = set([perms['repositories_groups'][group_name]])
1525 except KeyError:
1525 except KeyError:
1526 log.debug('cannot locate repo group with name: `%s` in permissions defs',
1526 log.debug('cannot locate repo group with name: `%s` in permissions defs',
1527 group_name)
1527 group_name)
1528 return False
1528 return False
1529
1529
1530 log.debug('checking `%s` permissions for repo group `%s`',
1530 log.debug('checking `%s` permissions for repo group `%s`',
1531 user_perms, group_name)
1531 user_perms, group_name)
1532 if self.required_perms.issubset(user_perms):
1532 if self.required_perms.issubset(user_perms):
1533 return True
1533 return True
1534 return False
1534 return False
1535
1535
1536
1536
1537 class HasRepoGroupPermissionAnyDecorator(PermsDecorator):
1537 class HasRepoGroupPermissionAnyDecorator(PermsDecorator):
1538 """
1538 """
1539 Checks for access permission for any of the given predicates for a specific
1539 Checks for access permission for any of the given predicates for a specific
1540 repository group. In order to fulfill the request, any
1540 repository group. In order to fulfill the request, any
1541 of the predicates must be met
1541 of the predicates must be met
1542 """
1542 """
1543 def _get_repo_group_name(self):
1543 def _get_repo_group_name(self):
1544 _request = self._get_request()
1544 _request = self._get_request()
1545 return get_repo_group_slug(_request)
1545 return get_repo_group_slug(_request)
1546
1546
1547 def check_permissions(self, user):
1547 def check_permissions(self, user):
1548 perms = user.permissions
1548 perms = user.permissions
1549 group_name = self._get_repo_group_name()
1549 group_name = self._get_repo_group_name()
1550
1550
1551 try:
1551 try:
1552 user_perms = set([perms['repositories_groups'][group_name]])
1552 user_perms = set([perms['repositories_groups'][group_name]])
1553 except KeyError:
1553 except KeyError:
1554 log.debug('cannot locate repo group with name: `%s` in permissions defs',
1554 log.debug('cannot locate repo group with name: `%s` in permissions defs',
1555 group_name)
1555 group_name)
1556 return False
1556 return False
1557
1557
1558 log.debug('checking `%s` permissions for repo group `%s`',
1558 log.debug('checking `%s` permissions for repo group `%s`',
1559 user_perms, group_name)
1559 user_perms, group_name)
1560 if self.required_perms.intersection(user_perms):
1560 if self.required_perms.intersection(user_perms):
1561 return True
1561 return True
1562 return False
1562 return False
1563
1563
1564
1564
1565 class HasUserGroupPermissionAllDecorator(PermsDecorator):
1565 class HasUserGroupPermissionAllDecorator(PermsDecorator):
1566 """
1566 """
1567 Checks for access permission for all given predicates for a specific
1567 Checks for access permission for all given predicates for a specific
1568 user group. All of them have to be met in order to fulfill the request
1568 user group. All of them have to be met in order to fulfill the request
1569 """
1569 """
1570 def _get_user_group_name(self):
1570 def _get_user_group_name(self):
1571 _request = self._get_request()
1571 _request = self._get_request()
1572 return get_user_group_slug(_request)
1572 return get_user_group_slug(_request)
1573
1573
1574 def check_permissions(self, user):
1574 def check_permissions(self, user):
1575 perms = user.permissions
1575 perms = user.permissions
1576 group_name = self._get_user_group_name()
1576 group_name = self._get_user_group_name()
1577 try:
1577 try:
1578 user_perms = set([perms['user_groups'][group_name]])
1578 user_perms = set([perms['user_groups'][group_name]])
1579 except KeyError:
1579 except KeyError:
1580 return False
1580 return False
1581
1581
1582 if self.required_perms.issubset(user_perms):
1582 if self.required_perms.issubset(user_perms):
1583 return True
1583 return True
1584 return False
1584 return False
1585
1585
1586
1586
1587 class HasUserGroupPermissionAnyDecorator(PermsDecorator):
1587 class HasUserGroupPermissionAnyDecorator(PermsDecorator):
1588 """
1588 """
1589 Checks for access permission for any of the given predicates for a specific
1589 Checks for access permission for any of the given predicates for a specific
1590 user group. In order to fulfill the request, any of the predicates must be met
1590 user group. In order to fulfill the request, any of the predicates must be met
1591 """
1591 """
1592 def _get_user_group_name(self):
1592 def _get_user_group_name(self):
1593 _request = self._get_request()
1593 _request = self._get_request()
1594 return get_user_group_slug(_request)
1594 return get_user_group_slug(_request)
1595
1595
1596 def check_permissions(self, user):
1596 def check_permissions(self, user):
1597 perms = user.permissions
1597 perms = user.permissions
1598 group_name = self._get_user_group_name()
1598 group_name = self._get_user_group_name()
1599 try:
1599 try:
1600 user_perms = set([perms['user_groups'][group_name]])
1600 user_perms = set([perms['user_groups'][group_name]])
1601 except KeyError:
1601 except KeyError:
1602 return False
1602 return False
1603
1603
1604 if self.required_perms.intersection(user_perms):
1604 if self.required_perms.intersection(user_perms):
1605 return True
1605 return True
1606 return False
1606 return False
1607
1607
1608
1608
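All of the decorators above differ only in the set operation they apply: the "All" variants use issubset() and the "Any" variants use intersection(). A plain-set sketch of the difference, with illustrative permission keys:

    required = {'repository.read', 'repository.write'}
    granted = {'repository.read'}

    required.issubset(granted)   # False -> an *All* decorator would deny
    bool(required & granted)     # True  -> an *Any* decorator would allow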
1609 # CHECK FUNCTIONS
1609 # CHECK FUNCTIONS
1610 class PermsFunction(object):
1610 class PermsFunction(object):
1611 """Base function for other check functions"""
1611 """Base function for other check functions"""
1612
1612
1613 def __init__(self, *perms):
1613 def __init__(self, *perms):
1614 self.required_perms = set(perms)
1614 self.required_perms = set(perms)
1615 self.repo_name = None
1615 self.repo_name = None
1616 self.repo_group_name = None
1616 self.repo_group_name = None
1617 self.user_group_name = None
1617 self.user_group_name = None
1618
1618
1619 def __bool__(self):
1619 def __bool__(self):
1620 frame = inspect.currentframe()
1620 frame = inspect.currentframe()
1621 stack_trace = traceback.format_stack(frame)
1621 stack_trace = traceback.format_stack(frame)
1622 log.error('Checking bool value on a class instance of perm '
1622 log.error('Checking bool value on a class instance of perm '
1623 'function is not allowed: %s' % ''.join(stack_trace))
1623 'function is not allowed: %s' % ''.join(stack_trace))
1624 # rather than throwing errors, here we always return False so if by
1624 # rather than throwing errors, here we always return False so if by
1625 # accident someone checks truth for just an instance it will always end
1625 # accident someone checks truth for just an instance it will always end
1626 # up in returning False
1626 # up in returning False
1627 return False
1627 return False
1628 __nonzero__ = __bool__
1628 __nonzero__ = __bool__
1629
1629
1630 def __call__(self, check_location='', user=None):
1630 def __call__(self, check_location='', user=None):
1631 if not user:
1631 if not user:
1632 log.debug('Using user attribute from global request')
1632 log.debug('Using user attribute from global request')
1633 # TODO: remove this someday, put user as an attribute here
1633 # TODO: remove this someday, put user as an attribute here
1634 request = self._get_request()
1634 request = self._get_request()
1635 user = request.user
1635 user = request.user
1636
1636
1637 # init auth user if not already given
1637 # init auth user if not already given
1638 if not isinstance(user, AuthUser):
1638 if not isinstance(user, AuthUser):
1639 log.debug('Wrapping user %s into AuthUser', user)
1639 log.debug('Wrapping user %s into AuthUser', user)
1640 user = AuthUser(user.user_id)
1640 user = AuthUser(user.user_id)
1641
1641
1642 cls_name = self.__class__.__name__
1642 cls_name = self.__class__.__name__
1643 check_scope = self._get_check_scope(cls_name)
1643 check_scope = self._get_check_scope(cls_name)
1644 check_location = check_location or 'unspecified location'
1644 check_location = check_location or 'unspecified location'
1645
1645
1646 log.debug('checking cls:%s %s usr:%s %s @ %s', cls_name,
1646 log.debug('checking cls:%s %s usr:%s %s @ %s', cls_name,
1647 self.required_perms, user, check_scope, check_location)
1647 self.required_perms, user, check_scope, check_location)
1648 if not user:
1648 if not user:
1649 log.warning('Empty user given for permission check')
1649 log.warning('Empty user given for permission check')
1650 return False
1650 return False
1651
1651
1652 if self.check_permissions(user):
1652 if self.check_permissions(user):
1653 log.debug('Permission to repo:`%s` GRANTED for user:`%s` @ %s',
1653 log.debug('Permission to repo:`%s` GRANTED for user:`%s` @ %s',
1654 check_scope, user, check_location)
1654 check_scope, user, check_location)
1655 return True
1655 return True
1656
1656
1657 else:
1657 else:
1658 log.debug('Permission to repo:`%s` DENIED for user:`%s` @ %s',
1658 log.debug('Permission to repo:`%s` DENIED for user:`%s` @ %s',
1659 check_scope, user, check_location)
1659 check_scope, user, check_location)
1660 return False
1660 return False
1661
1661
1662 def _get_request(self):
1662 def _get_request(self):
1663 return get_request(self)
1663 return get_request(self)
1664
1664
1665 def _get_check_scope(self, cls_name):
1665 def _get_check_scope(self, cls_name):
1666 return {
1666 return {
1667 'HasPermissionAll': 'GLOBAL',
1667 'HasPermissionAll': 'GLOBAL',
1668 'HasPermissionAny': 'GLOBAL',
1668 'HasPermissionAny': 'GLOBAL',
1669 'HasRepoPermissionAll': 'repo:%s' % self.repo_name,
1669 'HasRepoPermissionAll': 'repo:%s' % self.repo_name,
1670 'HasRepoPermissionAny': 'repo:%s' % self.repo_name,
1670 'HasRepoPermissionAny': 'repo:%s' % self.repo_name,
1671 'HasRepoGroupPermissionAll': 'repo_group:%s' % self.repo_group_name,
1671 'HasRepoGroupPermissionAll': 'repo_group:%s' % self.repo_group_name,
1672 'HasRepoGroupPermissionAny': 'repo_group:%s' % self.repo_group_name,
1672 'HasRepoGroupPermissionAny': 'repo_group:%s' % self.repo_group_name,
1673 'HasUserGroupPermissionAll': 'user_group:%s' % self.user_group_name,
1673 'HasUserGroupPermissionAll': 'user_group:%s' % self.user_group_name,
1674 'HasUserGroupPermissionAny': 'user_group:%s' % self.user_group_name,
1674 'HasUserGroupPermissionAny': 'user_group:%s' % self.user_group_name,
1675 }.get(cls_name, '?:%s' % cls_name)
1675 }.get(cls_name, '?:%s' % cls_name)
1676
1676
1677 def check_permissions(self, user):
1677 def check_permissions(self, user):
1678 """Dummy function for overriding"""
1678 """Dummy function for overriding"""
1679 raise Exception('You have to write this function in child class')
1679 raise Exception('You have to write this function in child class')
1680
1680
1681
1681
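Because __bool__ always returns False, a PermsFunction instance has to be called to perform the check; a sketch of the intended call style, where the repository name is illustrative and auth_user stands for an AuthUser already available to the caller:

    can_read = HasRepoPermissionAny(
        'repository.read', 'repository.write', 'repository.admin')

    # wrong: truthiness of the bare instance is always False (see __bool__)
    # if can_read: ...

    # right: call it; user falls back to request.user when not given
    if can_read(repo_name='some-group/some-repo',
                check_location='summary sidebar', user=auth_user):
        pass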
1682 class HasPermissionAll(PermsFunction):
1682 class HasPermissionAll(PermsFunction):
1683 def check_permissions(self, user):
1683 def check_permissions(self, user):
1684 perms = user.permissions_with_scope({})
1684 perms = user.permissions_with_scope({})
1685 if self.required_perms.issubset(perms.get('global')):
1685 if self.required_perms.issubset(perms.get('global')):
1686 return True
1686 return True
1687 return False
1687 return False
1688
1688
1689
1689
1690 class HasPermissionAny(PermsFunction):
1690 class HasPermissionAny(PermsFunction):
1691 def check_permissions(self, user):
1691 def check_permissions(self, user):
1692 perms = user.permissions_with_scope({})
1692 perms = user.permissions_with_scope({})
1693 if self.required_perms.intersection(perms.get('global')):
1693 if self.required_perms.intersection(perms.get('global')):
1694 return True
1694 return True
1695 return False
1695 return False
1696
1696
1697
1697
1698 class HasRepoPermissionAll(PermsFunction):
1698 class HasRepoPermissionAll(PermsFunction):
1699 def __call__(self, repo_name=None, check_location='', user=None):
1699 def __call__(self, repo_name=None, check_location='', user=None):
1700 self.repo_name = repo_name
1700 self.repo_name = repo_name
1701 return super(HasRepoPermissionAll, self).__call__(check_location, user)
1701 return super(HasRepoPermissionAll, self).__call__(check_location, user)
1702
1702
1703 def _get_repo_name(self):
1703 def _get_repo_name(self):
1704 if not self.repo_name:
1704 if not self.repo_name:
1705 _request = self._get_request()
1705 _request = self._get_request()
1706 self.repo_name = get_repo_slug(_request)
1706 self.repo_name = get_repo_slug(_request)
1707 return self.repo_name
1707 return self.repo_name
1708
1708
1709 def check_permissions(self, user):
1709 def check_permissions(self, user):
1710 self.repo_name = self._get_repo_name()
1710 self.repo_name = self._get_repo_name()
1711 perms = user.permissions
1711 perms = user.permissions
1712 try:
1712 try:
1713 user_perms = set([perms['repositories'][self.repo_name]])
1713 user_perms = set([perms['repositories'][self.repo_name]])
1714 except KeyError:
1714 except KeyError:
1715 return False
1715 return False
1716 if self.required_perms.issubset(user_perms):
1716 if self.required_perms.issubset(user_perms):
1717 return True
1717 return True
1718 return False
1718 return False
1719
1719
1720
1720
1721 class HasRepoPermissionAny(PermsFunction):
1721 class HasRepoPermissionAny(PermsFunction):
1722 def __call__(self, repo_name=None, check_location='', user=None):
1722 def __call__(self, repo_name=None, check_location='', user=None):
1723 self.repo_name = repo_name
1723 self.repo_name = repo_name
1724 return super(HasRepoPermissionAny, self).__call__(check_location, user)
1724 return super(HasRepoPermissionAny, self).__call__(check_location, user)
1725
1725
1726 def _get_repo_name(self):
1726 def _get_repo_name(self):
1727 if not self.repo_name:
1727 if not self.repo_name:
1728 _request = self._get_request()
1728 _request = self._get_request()
1729 self.repo_name = get_repo_slug(_request)
1729 self.repo_name = get_repo_slug(_request)
1730 return self.repo_name
1730 return self.repo_name
1731
1731
1732 def check_permissions(self, user):
1732 def check_permissions(self, user):
1733 self.repo_name = self._get_repo_name()
1733 self.repo_name = self._get_repo_name()
1734 perms = user.permissions
1734 perms = user.permissions
1735 try:
1735 try:
1736 user_perms = set([perms['repositories'][self.repo_name]])
1736 user_perms = set([perms['repositories'][self.repo_name]])
1737 except KeyError:
1737 except KeyError:
1738 return False
1738 return False
1739 if self.required_perms.intersection(user_perms):
1739 if self.required_perms.intersection(user_perms):
1740 return True
1740 return True
1741 return False
1741 return False
1742
1742
1743
1743
1744 class HasRepoGroupPermissionAny(PermsFunction):
1744 class HasRepoGroupPermissionAny(PermsFunction):
1745 def __call__(self, group_name=None, check_location='', user=None):
1745 def __call__(self, group_name=None, check_location='', user=None):
1746 self.repo_group_name = group_name
1746 self.repo_group_name = group_name
1747 return super(HasRepoGroupPermissionAny, self).__call__(
1747 return super(HasRepoGroupPermissionAny, self).__call__(
1748 check_location, user)
1748 check_location, user)
1749
1749
1750 def check_permissions(self, user):
1750 def check_permissions(self, user):
1751 perms = user.permissions
1751 perms = user.permissions
1752 try:
1752 try:
1753 user_perms = set(
1753 user_perms = set(
1754 [perms['repositories_groups'][self.repo_group_name]])
1754 [perms['repositories_groups'][self.repo_group_name]])
1755 except KeyError:
1755 except KeyError:
1756 return False
1756 return False
1757 if self.required_perms.intersection(user_perms):
1757 if self.required_perms.intersection(user_perms):
1758 return True
1758 return True
1759 return False
1759 return False
1760
1760
1761
1761
1762 class HasRepoGroupPermissionAll(PermsFunction):
1762 class HasRepoGroupPermissionAll(PermsFunction):
1763 def __call__(self, group_name=None, check_location='', user=None):
1763 def __call__(self, group_name=None, check_location='', user=None):
1764 self.repo_group_name = group_name
1764 self.repo_group_name = group_name
1765 return super(HasRepoGroupPermissionAll, self).__call__(
1765 return super(HasRepoGroupPermissionAll, self).__call__(
1766 check_location, user)
1766 check_location, user)
1767
1767
1768 def check_permissions(self, user):
1768 def check_permissions(self, user):
1769 perms = user.permissions
1769 perms = user.permissions
1770 try:
1770 try:
1771 user_perms = set(
1771 user_perms = set(
1772 [perms['repositories_groups'][self.repo_group_name]])
1772 [perms['repositories_groups'][self.repo_group_name]])
1773 except KeyError:
1773 except KeyError:
1774 return False
1774 return False
1775 if self.required_perms.issubset(user_perms):
1775 if self.required_perms.issubset(user_perms):
1776 return True
1776 return True
1777 return False
1777 return False
1778
1778
1779
1779
1780 class HasUserGroupPermissionAny(PermsFunction):
1780 class HasUserGroupPermissionAny(PermsFunction):
1781 def __call__(self, user_group_name=None, check_location='', user=None):
1781 def __call__(self, user_group_name=None, check_location='', user=None):
1782 self.user_group_name = user_group_name
1782 self.user_group_name = user_group_name
1783 return super(HasUserGroupPermissionAny, self).__call__(
1783 return super(HasUserGroupPermissionAny, self).__call__(
1784 check_location, user)
1784 check_location, user)
1785
1785
1786 def check_permissions(self, user):
1786 def check_permissions(self, user):
1787 perms = user.permissions
1787 perms = user.permissions
1788 try:
1788 try:
1789 user_perms = set([perms['user_groups'][self.user_group_name]])
1789 user_perms = set([perms['user_groups'][self.user_group_name]])
1790 except KeyError:
1790 except KeyError:
1791 return False
1791 return False
1792 if self.required_perms.intersection(user_perms):
1792 if self.required_perms.intersection(user_perms):
1793 return True
1793 return True
1794 return False
1794 return False
1795
1795
1796
1796
1797 class HasUserGroupPermissionAll(PermsFunction):
1797 class HasUserGroupPermissionAll(PermsFunction):
1798 def __call__(self, user_group_name=None, check_location='', user=None):
1798 def __call__(self, user_group_name=None, check_location='', user=None):
1799 self.user_group_name = user_group_name
1799 self.user_group_name = user_group_name
1800 return super(HasUserGroupPermissionAll, self).__call__(
1800 return super(HasUserGroupPermissionAll, self).__call__(
1801 check_location, user)
1801 check_location, user)
1802
1802
1803 def check_permissions(self, user):
1803 def check_permissions(self, user):
1804 perms = user.permissions
1804 perms = user.permissions
1805 try:
1805 try:
1806 user_perms = set([perms['user_groups'][self.user_group_name]])
1806 user_perms = set([perms['user_groups'][self.user_group_name]])
1807 except KeyError:
1807 except KeyError:
1808 return False
1808 return False
1809 if self.required_perms.issubset(user_perms):
1809 if self.required_perms.issubset(user_perms):
1810 return True
1810 return True
1811 return False
1811 return False
1812
1812
1813
1813
1814 # SPECIAL VERSION TO HANDLE MIDDLEWARE AUTH
1814 # SPECIAL VERSION TO HANDLE MIDDLEWARE AUTH
1815 class HasPermissionAnyMiddleware(object):
1815 class HasPermissionAnyMiddleware(object):
1816 def __init__(self, *perms):
1816 def __init__(self, *perms):
1817 self.required_perms = set(perms)
1817 self.required_perms = set(perms)
1818
1818
1819 def __call__(self, user, repo_name):
1819 def __call__(self, user, repo_name):
1820 # repo_name MUST be unicode, since we handle keys in permission
1820 # repo_name MUST be unicode, since we handle keys in permission
1821 # dict by unicode
1821 # dict by unicode
1822 repo_name = safe_unicode(repo_name)
1822 repo_name = safe_unicode(repo_name)
1823 user = AuthUser(user.user_id)
1823 user = AuthUser(user.user_id)
1824 log.debug(
1824 log.debug(
1825 'Checking VCS protocol permissions %s for user:%s repo:`%s`',
1825 'Checking VCS protocol permissions %s for user:%s repo:`%s`',
1826 self.required_perms, user, repo_name)
1826 self.required_perms, user, repo_name)
1827
1827
1828 if self.check_permissions(user, repo_name):
1828 if self.check_permissions(user, repo_name):
1829 log.debug('Permission to repo:`%s` GRANTED for user:%s @ %s',
1829 log.debug('Permission to repo:`%s` GRANTED for user:%s @ %s',
1830 repo_name, user, 'PermissionMiddleware')
1830 repo_name, user, 'PermissionMiddleware')
1831 return True
1831 return True
1832
1832
1833 else:
1833 else:
1834 log.debug('Permission to repo:`%s` DENIED for user:%s @ %s',
1834 log.debug('Permission to repo:`%s` DENIED for user:%s @ %s',
1835 repo_name, user, 'PermissionMiddleware')
1835 repo_name, user, 'PermissionMiddleware')
1836 return False
1836 return False
1837
1837
1838 def check_permissions(self, user, repo_name):
1838 def check_permissions(self, user, repo_name):
1839 perms = user.permissions_with_scope({'repo_name': repo_name})
1839 perms = user.permissions_with_scope({'repo_name': repo_name})
1840
1840
1841 try:
1841 try:
1842 user_perms = set([perms['repositories'][repo_name]])
1842 user_perms = set([perms['repositories'][repo_name]])
1843 except Exception:
1843 except Exception:
1844 log.exception('Error while accessing user permissions')
1844 log.exception('Error while accessing user permissions')
1845 return False
1845 return False
1846
1846
1847 if self.required_perms.intersection(user_perms):
1847 if self.required_perms.intersection(user_perms):
1848 return True
1848 return True
1849 return False
1849 return False
1850
1850
1851
1851
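A sketch of how the VCS middleware side might consult this check during a push or pull; db_user stands for the User row resolved during authentication and the repository name is illustrative:

    checker = HasPermissionAnyMiddleware(
        'repository.read', 'repository.write', 'repository.admin')

    # re-wraps the user into an AuthUser and scopes the permission lookup
    # to the single repository being accessed over the VCS protocol
    allowed = checker(db_user, u'some-group/some-repo')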
1852 # SPECIAL VERSION TO HANDLE API AUTH
1852 # SPECIAL VERSION TO HANDLE API AUTH
1853 class _BaseApiPerm(object):
1853 class _BaseApiPerm(object):
1854 def __init__(self, *perms):
1854 def __init__(self, *perms):
1855 self.required_perms = set(perms)
1855 self.required_perms = set(perms)
1856
1856
1857 def __call__(self, check_location=None, user=None, repo_name=None,
1857 def __call__(self, check_location=None, user=None, repo_name=None,
1858 group_name=None, user_group_name=None):
1858 group_name=None, user_group_name=None):
1859 cls_name = self.__class__.__name__
1859 cls_name = self.__class__.__name__
1860 check_scope = 'global:%s' % (self.required_perms,)
1860 check_scope = 'global:%s' % (self.required_perms,)
1861 if repo_name:
1861 if repo_name:
1862 check_scope += ', repo_name:%s' % (repo_name,)
1862 check_scope += ', repo_name:%s' % (repo_name,)
1863
1863
1864 if group_name:
1864 if group_name:
1865 check_scope += ', repo_group_name:%s' % (group_name,)
1865 check_scope += ', repo_group_name:%s' % (group_name,)
1866
1866
1867 if user_group_name:
1867 if user_group_name:
1868 check_scope += ', user_group_name:%s' % (user_group_name,)
1868 check_scope += ', user_group_name:%s' % (user_group_name,)
1869
1869
1870 log.debug(
1870 log.debug(
1871 'checking cls:%s %s %s @ %s'
1871 'checking cls:%s %s %s @ %s'
1872 % (cls_name, self.required_perms, check_scope, check_location))
1872 % (cls_name, self.required_perms, check_scope, check_location))
1873 if not user:
1873 if not user:
1874 log.debug('Empty User passed into arguments')
1874 log.debug('Empty User passed into arguments')
1875 return False
1875 return False
1876
1876
1877 # process user
1877 # process user
1878 if not isinstance(user, AuthUser):
1878 if not isinstance(user, AuthUser):
1879 user = AuthUser(user.user_id)
1879 user = AuthUser(user.user_id)
1880 if not check_location:
1880 if not check_location:
1881 check_location = 'unspecified'
1881 check_location = 'unspecified'
1882 if self.check_permissions(user.permissions, repo_name, group_name,
1882 if self.check_permissions(user.permissions, repo_name, group_name,
1883 user_group_name):
1883 user_group_name):
1884 log.debug('Permission to repo:`%s` GRANTED for user:`%s` @ %s',
1884 log.debug('Permission to repo:`%s` GRANTED for user:`%s` @ %s',
1885 check_scope, user, check_location)
1885 check_scope, user, check_location)
1886 return True
1886 return True
1887
1887
1888 else:
1888 else:
1889 log.debug('Permission to repo:`%s` DENIED for user:`%s` @ %s',
1889 log.debug('Permission to repo:`%s` DENIED for user:`%s` @ %s',
1890 check_scope, user, check_location)
1890 check_scope, user, check_location)
1891 return False
1891 return False
1892
1892
1893 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1893 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1894 user_group_name=None):
1894 user_group_name=None):
1895 """
1895 """
1896 Implement in a child class; should return True if permissions are ok,
1896 Implement in a child class; should return True if permissions are ok,
1897 False otherwise
1897 False otherwise
1898
1898
1899 :param perm_defs: dict with permission definitions
1899 :param perm_defs: dict with permission definitions
1900 :param repo_name: repo name
1900 :param repo_name: repo name
1901 """
1901 """
1902 raise NotImplementedError()
1902 raise NotImplementedError()
1903
1903
1904
1904
1905 class HasPermissionAllApi(_BaseApiPerm):
1905 class HasPermissionAllApi(_BaseApiPerm):
1906 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1906 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1907 user_group_name=None):
1907 user_group_name=None):
1908 if self.required_perms.issubset(perm_defs.get('global')):
1908 if self.required_perms.issubset(perm_defs.get('global')):
1909 return True
1909 return True
1910 return False
1910 return False
1911
1911
1912
1912
1913 class HasPermissionAnyApi(_BaseApiPerm):
1913 class HasPermissionAnyApi(_BaseApiPerm):
1914 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1914 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1915 user_group_name=None):
1915 user_group_name=None):
1916 if self.required_perms.intersection(perm_defs.get('global')):
1916 if self.required_perms.intersection(perm_defs.get('global')):
1917 return True
1917 return True
1918 return False
1918 return False
1919
1919
1920
1920
1921 class HasRepoPermissionAllApi(_BaseApiPerm):
1921 class HasRepoPermissionAllApi(_BaseApiPerm):
1922 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1922 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1923 user_group_name=None):
1923 user_group_name=None):
1924 try:
1924 try:
1925 _user_perms = set([perm_defs['repositories'][repo_name]])
1925 _user_perms = set([perm_defs['repositories'][repo_name]])
1926 except KeyError:
1926 except KeyError:
1927 log.warning(traceback.format_exc())
1927 log.warning(traceback.format_exc())
1928 return False
1928 return False
1929 if self.required_perms.issubset(_user_perms):
1929 if self.required_perms.issubset(_user_perms):
1930 return True
1930 return True
1931 return False
1931 return False
1932
1932
1933
1933
1934 class HasRepoPermissionAnyApi(_BaseApiPerm):
1934 class HasRepoPermissionAnyApi(_BaseApiPerm):
1935 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1935 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1936 user_group_name=None):
1936 user_group_name=None):
1937 try:
1937 try:
1938 _user_perms = set([perm_defs['repositories'][repo_name]])
1938 _user_perms = set([perm_defs['repositories'][repo_name]])
1939 except KeyError:
1939 except KeyError:
1940 log.warning(traceback.format_exc())
1940 log.warning(traceback.format_exc())
1941 return False
1941 return False
1942 if self.required_perms.intersection(_user_perms):
1942 if self.required_perms.intersection(_user_perms):
1943 return True
1943 return True
1944 return False
1944 return False
1945
1945
1946
1946
1947 class HasRepoGroupPermissionAnyApi(_BaseApiPerm):
1947 class HasRepoGroupPermissionAnyApi(_BaseApiPerm):
1948 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1948 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1949 user_group_name=None):
1949 user_group_name=None):
1950 try:
1950 try:
1951 _user_perms = set([perm_defs['repositories_groups'][group_name]])
1951 _user_perms = set([perm_defs['repositories_groups'][group_name]])
1952 except KeyError:
1952 except KeyError:
1953 log.warning(traceback.format_exc())
1953 log.warning(traceback.format_exc())
1954 return False
1954 return False
1955 if self.required_perms.intersection(_user_perms):
1955 if self.required_perms.intersection(_user_perms):
1956 return True
1956 return True
1957 return False
1957 return False
1958
1958
1959
1959
1960 class HasRepoGroupPermissionAllApi(_BaseApiPerm):
1960 class HasRepoGroupPermissionAllApi(_BaseApiPerm):
1961 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1961 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1962 user_group_name=None):
1962 user_group_name=None):
1963 try:
1963 try:
1964 _user_perms = set([perm_defs['repositories_groups'][group_name]])
1964 _user_perms = set([perm_defs['repositories_groups'][group_name]])
1965 except KeyError:
1965 except KeyError:
1966 log.warning(traceback.format_exc())
1966 log.warning(traceback.format_exc())
1967 return False
1967 return False
1968 if self.required_perms.issubset(_user_perms):
1968 if self.required_perms.issubset(_user_perms):
1969 return True
1969 return True
1970 return False
1970 return False
1971
1971
1972
1972
1973 class HasUserGroupPermissionAnyApi(_BaseApiPerm):
1973 class HasUserGroupPermissionAnyApi(_BaseApiPerm):
1974 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1974 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1975 user_group_name=None):
1975 user_group_name=None):
1976 try:
1976 try:
1977 _user_perms = set([perm_defs['user_groups'][user_group_name]])
1977 _user_perms = set([perm_defs['user_groups'][user_group_name]])
1978 except KeyError:
1978 except KeyError:
1979 log.warning(traceback.format_exc())
1979 log.warning(traceback.format_exc())
1980 return False
1980 return False
1981 if self.required_perms.intersection(_user_perms):
1981 if self.required_perms.intersection(_user_perms):
1982 return True
1982 return True
1983 return False
1983 return False
1984
1984
1985
1985
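The API-side variants are called with explicit keyword arguments and read permissions from user.permissions directly, so no request object is involved; a sketch in which apiuser and the repository name are illustrative:

    has_repo_access = HasRepoPermissionAnyApi(
        'repository.read', 'repository.write', 'repository.admin')

    granted = has_repo_access(
        user=apiuser, repo_name='some-group/some-repo',
        check_location='api.get_repo')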
1986 def check_ip_access(source_ip, allowed_ips=None):
1986 def check_ip_access(source_ip, allowed_ips=None):
1987 """
1987 """
1988 Checks if source_ip falls within any of the allowed_ips networks.
1988 Checks if source_ip falls within any of the allowed_ips networks.
1989
1989
1990 :param source_ip:
1990 :param source_ip:
1991 :param allowed_ips: list of allowed ips together with mask
1991 :param allowed_ips: list of allowed ips together with mask
1992 """
1992 """
1993 log.debug('checking if ip:%s is subnet of %s' % (source_ip, allowed_ips))
1993 log.debug('checking if ip:%s is subnet of %s' % (source_ip, allowed_ips))
1994 source_ip_address = ipaddress.ip_address(safe_unicode(source_ip))
1994 source_ip_address = ipaddress.ip_address(safe_unicode(source_ip))
1995 if isinstance(allowed_ips, (tuple, list, set)):
1995 if isinstance(allowed_ips, (tuple, list, set)):
1996 for ip in allowed_ips:
1996 for ip in allowed_ips:
1997 ip = safe_unicode(ip)
1997 ip = safe_unicode(ip)
1998 try:
1998 try:
1999 network_address = ipaddress.ip_network(ip, strict=False)
1999 network_address = ipaddress.ip_network(ip, strict=False)
2000 if source_ip_address in network_address:
2000 if source_ip_address in network_address:
2001 log.debug('IP %s is in network %s' %
2001 log.debug('IP %s is in network %s' %
2002 (source_ip_address, network_address))
2002 (source_ip_address, network_address))
2003 return True
2003 return True
2004 # for any case we cannot determine the IP, don't crash just
2004 # for any case we cannot determine the IP, don't crash just
2005 # skip it and log as error, we want to say forbidden still when
2005 # skip it and log as error, we want to say forbidden still when
2006 # sending bad IP
2006 # sending bad IP
2007 except Exception:
2007 except Exception:
2008 log.error(traceback.format_exc())
2008 log.error(traceback.format_exc())
2009 continue
2009 continue
2010 return False
2010 return False
2011
2011
2012
2012
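A quick sketch of the containment semantics, with arbitrarily chosen addresses:

    check_ip_access('192.168.1.7', ['192.168.1.0/24', '10.0.0.0/8'])  # True
    check_ip_access('172.16.0.5', ['192.168.1.0/24'])                 # False
    check_ip_access('172.16.0.5', None)                               # False, nothing allowed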
2013 def get_cython_compat_decorator(wrapper, func):
2013 def get_cython_compat_decorator(wrapper, func):
2014 """
2014 """
2015 Creates a cython compatible decorator. The previously used
2015 Creates a cython compatible decorator. The previously used
2016 decorator.decorator() function seems to be incompatible with cython.
2016 decorator.decorator() function seems to be incompatible with cython.
2017
2017
2018 :param wrapper: __wrapper method of the decorator class
2018 :param wrapper: __wrapper method of the decorator class
2019 :param func: decorated function
2019 :param func: decorated function
2020 """
2020 """
2021 @wraps(func)
2021 @wraps(func)
2022 def local_wrapper(*args, **kwds):
2022 def local_wrapper(*args, **kwds):
2023 return wrapper(func, *args, **kwds)
2023 return wrapper(func, *args, **kwds)
2024 local_wrapper.__wrapped__ = func
2024 local_wrapper.__wrapped__ = func
2025 return local_wrapper
2025 return local_wrapper
2026
2026
2027
2027
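A sketch of the pattern the decorator classes above follow when built on this helper; the timing behaviour and class name are illustrative only, and the snippet assumes `time` is imported:

    import time

    class TimedCallSketch(object):
        def __call__(self, func):
            return get_cython_compat_decorator(self.__wrapper, func)

        def __wrapper(self, func, *fargs, **fkwargs):
            start = time.time()
            try:
                return func(*fargs, **fkwargs)
            finally:
                log.debug('%s took %.3fs', func.__name__, time.time() - start)

    @TimedCallSketch()
    def expensive_call():
        return 42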
@@ -1,2045 +1,2045 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Helper functions
22 Helper functions
23
23
24 Consists of functions typically used within templates, but also
24 Consists of functions typically used within templates, but also
25 available to controllers. This module is available to both as 'h'.
25 available to controllers. This module is available to both as 'h'.
26 """
26 """
27
27
28 import random
28 import random
29 import hashlib
29 import hashlib
30 import StringIO
30 import StringIO
31 import urllib
31 import urllib
32 import math
32 import math
33 import logging
33 import logging
34 import re
34 import re
35 import urlparse
35 import urlparse
36 import time
36 import time
37 import string
37 import string
38 import hashlib
38 import hashlib
39 from collections import OrderedDict
39 from collections import OrderedDict
40
40
41 import pygments
41 import pygments
42 import itertools
42 import itertools
43 import fnmatch
43 import fnmatch
44
44
45 from datetime import datetime
45 from datetime import datetime
46 from functools import partial
46 from functools import partial
47 from pygments.formatters.html import HtmlFormatter
47 from pygments.formatters.html import HtmlFormatter
48 from pygments import highlight as code_highlight
48 from pygments import highlight as code_highlight
49 from pygments.lexers import (
49 from pygments.lexers import (
50 get_lexer_by_name, get_lexer_for_filename, get_lexer_for_mimetype)
50 get_lexer_by_name, get_lexer_for_filename, get_lexer_for_mimetype)
51
51
52 from pyramid.threadlocal import get_current_request
52 from pyramid.threadlocal import get_current_request
53
53
54 from webhelpers.html import literal, HTML, escape
54 from webhelpers.html import literal, HTML, escape
55 from webhelpers.html.tools import *
55 from webhelpers.html.tools import *
56 from webhelpers.html.builder import make_tag
56 from webhelpers.html.builder import make_tag
57 from webhelpers.html.tags import auto_discovery_link, checkbox, css_classes, \
57 from webhelpers.html.tags import auto_discovery_link, checkbox, css_classes, \
58 end_form, file, form as wh_form, hidden, image, javascript_link, link_to, \
58 end_form, file, form as wh_form, hidden, image, javascript_link, link_to, \
59 link_to_if, link_to_unless, ol, required_legend, select, stylesheet_link, \
59 link_to_if, link_to_unless, ol, required_legend, select, stylesheet_link, \
60 submit, text, password, textarea, title, ul, xml_declaration, radio
60 submit, text, password, textarea, title, ul, xml_declaration, radio
61 from webhelpers.html.tools import auto_link, button_to, highlight, \
61 from webhelpers.html.tools import auto_link, button_to, highlight, \
62 js_obfuscate, mail_to, strip_links, strip_tags, tag_re
62 js_obfuscate, mail_to, strip_links, strip_tags, tag_re
63 from webhelpers.pylonslib import Flash as _Flash
63 from webhelpers.pylonslib import Flash as _Flash
64 from webhelpers.text import chop_at, collapse, convert_accented_entities, \
64 from webhelpers.text import chop_at, collapse, convert_accented_entities, \
65 convert_misc_entities, lchop, plural, rchop, remove_formatting, \
65 convert_misc_entities, lchop, plural, rchop, remove_formatting, \
66 replace_whitespace, urlify, truncate, wrap_paragraphs
66 replace_whitespace, urlify, truncate, wrap_paragraphs
67 from webhelpers.date import time_ago_in_words
67 from webhelpers.date import time_ago_in_words
68 from webhelpers.paginate import Page as _Page
68 from webhelpers.paginate import Page as _Page
69 from webhelpers.html.tags import _set_input_attrs, _set_id_attr, \
69 from webhelpers.html.tags import _set_input_attrs, _set_id_attr, \
70 convert_boolean_attrs, NotGiven, _make_safe_id_component
70 convert_boolean_attrs, NotGiven, _make_safe_id_component
71 from webhelpers2.number import format_byte_size
71 from webhelpers2.number import format_byte_size
72
72
73 from rhodecode.lib.action_parser import action_parser
73 from rhodecode.lib.action_parser import action_parser
74 from rhodecode.lib.ext_json import json
74 from rhodecode.lib.ext_json import json
75 from rhodecode.lib.utils import repo_name_slug, get_custom_lexer
75 from rhodecode.lib.utils import repo_name_slug, get_custom_lexer
76 from rhodecode.lib.utils2 import str2bool, safe_unicode, safe_str, \
76 from rhodecode.lib.utils2 import str2bool, safe_unicode, safe_str, \
77 get_commit_safe, datetime_to_time, time_to_datetime, time_to_utcdatetime, \
77 get_commit_safe, datetime_to_time, time_to_datetime, time_to_utcdatetime, \
78 AttributeDict, safe_int, md5, md5_safe
78 AttributeDict, safe_int, md5, md5_safe
79 from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links
79 from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links
80 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
80 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
81 from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyCommit
81 from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyCommit
82 from rhodecode.config.conf import DATE_FORMAT, DATETIME_FORMAT
82 from rhodecode.config.conf import DATE_FORMAT, DATETIME_FORMAT
83 from rhodecode.model.changeset_status import ChangesetStatusModel
83 from rhodecode.model.changeset_status import ChangesetStatusModel
84 from rhodecode.model.db import Permission, User, Repository
84 from rhodecode.model.db import Permission, User, Repository
85 from rhodecode.model.repo_group import RepoGroupModel
85 from rhodecode.model.repo_group import RepoGroupModel
86 from rhodecode.model.settings import IssueTrackerSettingsModel
86 from rhodecode.model.settings import IssueTrackerSettingsModel
87
87
88 log = logging.getLogger(__name__)
88 log = logging.getLogger(__name__)
89
89
90
90
91 DEFAULT_USER = User.DEFAULT_USER
91 DEFAULT_USER = User.DEFAULT_USER
92 DEFAULT_USER_EMAIL = User.DEFAULT_USER_EMAIL
92 DEFAULT_USER_EMAIL = User.DEFAULT_USER_EMAIL
93
93
94
94
95 def url(*args, **kw):
95 def url(*args, **kw):
96 from pylons import url as pylons_url
96 from pylons import url as pylons_url
97 return pylons_url(*args, **kw)
97 return pylons_url(*args, **kw)
98
98
99
99
100 def pylons_url_current(*args, **kw):
100 def pylons_url_current(*args, **kw):
101 """
101 """
102 This function overrides pylons.url.current() which returns the current
102 This function overrides pylons.url.current() which returns the current
103 path so that it will also work from a pyramid only context. This
103 path so that it will also work from a pyramid-only context. This
103 path so that it will also work from a pyramid-only context. This
104 should be removed once the port to pyramid is complete.
104 should be removed once the port to pyramid is complete.
105 """
106 from pylons import url as pylons_url
106 from pylons import url as pylons_url
107 if not args and not kw:
107 if not args and not kw:
108 request = get_current_request()
108 request = get_current_request()
109 return request.path
109 return request.path
110 return pylons_url.current(*args, **kw)
110 return pylons_url.current(*args, **kw)
111
111
112 url.current = pylons_url_current
112 url.current = pylons_url_current
113
113
114
114
115 def url_replace(**qargs):
115 def url_replace(**qargs):
116 """ Returns the current request url while replacing query string args """
116 """ Returns the current request url while replacing query string args """
117
117
118 request = get_current_request()
118 request = get_current_request()
119 new_args = request.GET.mixed()
119 new_args = request.GET.mixed()
120 new_args.update(qargs)
120 new_args.update(qargs)
121 return url('', **new_args)
121 return url('', **new_args)
122
122
123
123
124 def asset(path, ver=None, **kwargs):
124 def asset(path, ver=None, **kwargs):
125 """
125 """
126 Helper to generate a static asset file path for rhodecode assets
126 Helper to generate a static asset file path for rhodecode assets
127
127
128 eg. h.asset('images/image.png', ver='3923')
128 eg. h.asset('images/image.png', ver='3923')
129
129
130 :param path: path of asset
130 :param path: path of asset
131 :param ver: optional version query param to append as ?ver=
131 :param ver: optional version query param to append as ?ver=
132 """
132 """
133 request = get_current_request()
133 request = get_current_request()
134 query = {}
134 query = {}
135 query.update(kwargs)
135 query.update(kwargs)
136 if ver:
136 if ver:
137 query = {'ver': ver}
137 query = {'ver': ver}
138 return request.static_path(
138 return request.static_path(
139 'rhodecode:public/{}'.format(path), _query=query)
139 'rhodecode:public/{}'.format(path), _query=query)
140
140
141
141
142 default_html_escape_table = {
142 default_html_escape_table = {
143 ord('&'): u'&amp;',
143 ord('&'): u'&amp;',
144 ord('<'): u'&lt;',
144 ord('<'): u'&lt;',
145 ord('>'): u'&gt;',
145 ord('>'): u'&gt;',
146 ord('"'): u'&quot;',
146 ord('"'): u'&quot;',
147 ord("'"): u'&#39;',
147 ord("'"): u'&#39;',
148 }
148 }
149
149
150
150
151 def html_escape(text, html_escape_table=default_html_escape_table):
151 def html_escape(text, html_escape_table=default_html_escape_table):
152 """Produce entities within text."""
152 """Produce entities within text."""
153 return text.translate(html_escape_table)
153 return text.translate(html_escape_table)
154
154
155
155
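A quick illustration; note that the translation table maps ordinals to unicode replacements, so the input is expected to be a unicode string:

    html_escape(u'<a href="#">Tom & Jerry</a>')
    # -> u'&lt;a href=&quot;#&quot;&gt;Tom &amp; Jerry&lt;/a&gt;'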
156 def chop_at_smart(s, sub, inclusive=False, suffix_if_chopped=None):
156 def chop_at_smart(s, sub, inclusive=False, suffix_if_chopped=None):
157 """
157 """
158 Truncate string ``s`` at the first occurrence of ``sub``.
158 Truncate string ``s`` at the first occurrence of ``sub``.
159
159
160 If ``inclusive`` is true, truncate just after ``sub`` rather than at it.
160 If ``inclusive`` is true, truncate just after ``sub`` rather than at it.
161 """
161 """
162 suffix_if_chopped = suffix_if_chopped or ''
162 suffix_if_chopped = suffix_if_chopped or ''
163 pos = s.find(sub)
163 pos = s.find(sub)
164 if pos == -1:
164 if pos == -1:
165 return s
165 return s
166
166
167 if inclusive:
167 if inclusive:
168 pos += len(sub)
168 pos += len(sub)
169
169
170 chopped = s[:pos]
170 chopped = s[:pos]
171 left = s[pos:].strip()
171 left = s[pos:].strip()
172
172
173 if left and suffix_if_chopped:
173 if left and suffix_if_chopped:
174 chopped += suffix_if_chopped
174 chopped += suffix_if_chopped
175
175
176 return chopped
176 return chopped
177
177
178
178
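Two illustrative calls, chopping a commit message at its first newline:

    chop_at_smart('commit title\nlong body', '\n')
    # -> 'commit title'
    chop_at_smart('commit title\nlong body', '\n', suffix_if_chopped='...')
    # -> 'commit title...'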
179 def shorter(text, size=20):
179 def shorter(text, size=20):
180 postfix = '...'
180 postfix = '...'
181 if len(text) > size:
181 if len(text) > size:
182 return text[:size - len(postfix)] + postfix
182 return text[:size - len(postfix)] + postfix
183 return text
183 return text
184
184
185
185
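For example, with the default size of 20 the postfix is counted inside the limit:

    shorter('abcdefghijklmnopqrstuvwxyz')  # -> 'abcdefghijklmnopq...' (20 chars)
    shorter('short text')                  # -> 'short text' (unchanged)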
186 def _reset(name, value=None, id=NotGiven, type="reset", **attrs):
186 def _reset(name, value=None, id=NotGiven, type="reset", **attrs):
187 """
187 """
188 Reset button
188 Reset button
189 """
189 """
190 _set_input_attrs(attrs, type, name, value)
190 _set_input_attrs(attrs, type, name, value)
191 _set_id_attr(attrs, id, name)
191 _set_id_attr(attrs, id, name)
192 convert_boolean_attrs(attrs, ["disabled"])
192 convert_boolean_attrs(attrs, ["disabled"])
193 return HTML.input(**attrs)
193 return HTML.input(**attrs)
194
194
195 reset = _reset
195 reset = _reset
196 safeid = _make_safe_id_component
196 safeid = _make_safe_id_component
197
197
198
198
199 def branding(name, length=40):
199 def branding(name, length=40):
200 return truncate(name, length, indicator="")
200 return truncate(name, length, indicator="")
201
201
202
202
203 def FID(raw_id, path):
203 def FID(raw_id, path):
204 """
204 """
205 Creates a unique ID for a filenode based on a hash of its path and commit;
205 Creates a unique ID for a filenode based on a hash of its path and commit;
206 it's safe to use in URLs
206 it's safe to use in URLs
207
207
208 :param raw_id:
208 :param raw_id:
209 :param path:
209 :param path:
210 """
210 """
211
211
212 return 'c-%s-%s' % (short_id(raw_id), md5_safe(path)[:12])
212 return 'c-%s-%s' % (short_id(raw_id), md5_safe(path)[:12])
213
213
214
214
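The resulting anchor looks roughly like the sketch below; the hash values are illustrative, not computed:

    FID('9fe4e1e0d4e2...', 'docs/index.rst')
    # -> 'c-<short commit id>-<first 12 hex chars of md5("docs/index.rst")>'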
215 class _GetError(object):
215 class _GetError(object):
216 """Get error from form_errors, and represent it as span wrapped error
216 Get error from form_errors, and represent it as a span-wrapped error
216 Get error from form_errors, and represent it as a span-wrapped error
217 message
218
218
219 :param field_name: field to fetch errors for
219 :param field_name: field to fetch errors for
220 :param form_errors: form errors dict
220 :param form_errors: form errors dict
221 """
221 """
222
222
223 def __call__(self, field_name, form_errors):
223 def __call__(self, field_name, form_errors):
224 tmpl = """<span class="error_msg">%s</span>"""
224 tmpl = """<span class="error_msg">%s</span>"""
225 if form_errors and field_name in form_errors:
225 if form_errors and field_name in form_errors:
226 return literal(tmpl % form_errors.get(field_name))
226 return literal(tmpl % form_errors.get(field_name))
227
227
228 get_error = _GetError()
228 get_error = _GetError()
229
229
230
230
231 class _ToolTip(object):
231 class _ToolTip(object):
232
232
233 def __call__(self, tooltip_title, trim_at=50):
233 def __call__(self, tooltip_title, trim_at=50):
234 """
234 """
235 Special function that wraps the given text into nicely formatted,
235 Special function that wraps the given text into nicely formatted,
236 auto-wrapped tooltip text
236 auto-wrapped tooltip text
237
237
238 :param tooltip_title:
238 :param tooltip_title:
239 """
239 """
240 tooltip_title = escape(tooltip_title)
240 tooltip_title = escape(tooltip_title)
241 tooltip_title = tooltip_title.replace('<', '&lt;').replace('>', '&gt;')
241 tooltip_title = tooltip_title.replace('<', '&lt;').replace('>', '&gt;')
242 return tooltip_title
242 return tooltip_title
243 tooltip = _ToolTip()
243 tooltip = _ToolTip()
244
244
245
245
246 def files_breadcrumbs(repo_name, commit_id, file_path):
246 def files_breadcrumbs(repo_name, commit_id, file_path):
247 if isinstance(file_path, str):
247 if isinstance(file_path, str):
248 file_path = safe_unicode(file_path)
248 file_path = safe_unicode(file_path)
249
249
250 # TODO: johbo: Is this always a url like path, or is this operating
250 # TODO: johbo: Is this always a url like path, or is this operating
251 # system dependent?
251 # system dependent?
252 path_segments = file_path.split('/')
252 path_segments = file_path.split('/')
253
253
254 repo_name_html = escape(repo_name)
254 repo_name_html = escape(repo_name)
255 if len(path_segments) == 1 and path_segments[0] == '':
255 if len(path_segments) == 1 and path_segments[0] == '':
256 url_segments = [repo_name_html]
256 url_segments = [repo_name_html]
257 else:
257 else:
258 url_segments = [
258 url_segments = [
259 link_to(
259 link_to(
260 repo_name_html,
260 repo_name_html,
261 route_path(
261 route_path(
262 'repo_files',
262 'repo_files',
263 repo_name=repo_name,
263 repo_name=repo_name,
264 commit_id=commit_id,
264 commit_id=commit_id,
265 f_path=''),
265 f_path=''),
266 class_='pjax-link')]
266 class_='pjax-link')]
267
267
268 last_cnt = len(path_segments) - 1
268 last_cnt = len(path_segments) - 1
269 for cnt, segment in enumerate(path_segments):
269 for cnt, segment in enumerate(path_segments):
270 if not segment:
270 if not segment:
271 continue
271 continue
272 segment_html = escape(segment)
272 segment_html = escape(segment)
273
273
274 if cnt != last_cnt:
274 if cnt != last_cnt:
275 url_segments.append(
275 url_segments.append(
276 link_to(
276 link_to(
277 segment_html,
277 segment_html,
278 route_path(
278 route_path(
279 'repo_files',
279 'repo_files',
280 repo_name=repo_name,
280 repo_name=repo_name,
281 commit_id=commit_id,
281 commit_id=commit_id,
282 f_path='/'.join(path_segments[:cnt + 1])),
282 f_path='/'.join(path_segments[:cnt + 1])),
283 class_='pjax-link'))
283 class_='pjax-link'))
284 else:
284 else:
285 url_segments.append(segment_html)
285 url_segments.append(segment_html)
286
286
287 return literal('/'.join(url_segments))
287 return literal('/'.join(url_segments))
288
288
289
289
290 class CodeHtmlFormatter(HtmlFormatter):
290 class CodeHtmlFormatter(HtmlFormatter):
291 """
291 """
292 Custom HTML formatter for source code
292 Custom HTML formatter for source code
293 """
293 """
294
294
295 def wrap(self, source, outfile):
295 def wrap(self, source, outfile):
296 return self._wrap_div(self._wrap_pre(self._wrap_code(source)))
296 return self._wrap_div(self._wrap_pre(self._wrap_code(source)))
297
297
298 def _wrap_code(self, source):
298 def _wrap_code(self, source):
299 for cnt, it in enumerate(source):
299 for cnt, it in enumerate(source):
300 i, t = it
300 i, t = it
301 t = '<div id="L%s">%s</div>' % (cnt + 1, t)
301 t = '<div id="L%s">%s</div>' % (cnt + 1, t)
302 yield i, t
302 yield i, t
303
303
304 def _wrap_tablelinenos(self, inner):
304 def _wrap_tablelinenos(self, inner):
305 dummyoutfile = StringIO.StringIO()
305 dummyoutfile = StringIO.StringIO()
306 lncount = 0
306 lncount = 0
307 for t, line in inner:
307 for t, line in inner:
308 if t:
308 if t:
309 lncount += 1
309 lncount += 1
310 dummyoutfile.write(line)
310 dummyoutfile.write(line)
311
311
312 fl = self.linenostart
312 fl = self.linenostart
313 mw = len(str(lncount + fl - 1))
313 mw = len(str(lncount + fl - 1))
314 sp = self.linenospecial
314 sp = self.linenospecial
315 st = self.linenostep
315 st = self.linenostep
316 la = self.lineanchors
316 la = self.lineanchors
317 aln = self.anchorlinenos
317 aln = self.anchorlinenos
318 nocls = self.noclasses
318 nocls = self.noclasses
319 if sp:
319 if sp:
320 lines = []
320 lines = []
321
321
322 for i in range(fl, fl + lncount):
322 for i in range(fl, fl + lncount):
323 if i % st == 0:
323 if i % st == 0:
324 if i % sp == 0:
324 if i % sp == 0:
325 if aln:
325 if aln:
326 lines.append('<a href="#%s%d" class="special">%*d</a>' %
326 lines.append('<a href="#%s%d" class="special">%*d</a>' %
327 (la, i, mw, i))
327 (la, i, mw, i))
328 else:
328 else:
329 lines.append('<span class="special">%*d</span>' % (mw, i))
329 lines.append('<span class="special">%*d</span>' % (mw, i))
330 else:
330 else:
331 if aln:
331 if aln:
332 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
332 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
333 else:
333 else:
334 lines.append('%*d' % (mw, i))
334 lines.append('%*d' % (mw, i))
335 else:
335 else:
336 lines.append('')
336 lines.append('')
337 ls = '\n'.join(lines)
337 ls = '\n'.join(lines)
338 else:
338 else:
339 lines = []
339 lines = []
340 for i in range(fl, fl + lncount):
340 for i in range(fl, fl + lncount):
341 if i % st == 0:
341 if i % st == 0:
342 if aln:
342 if aln:
343 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
343 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
344 else:
344 else:
345 lines.append('%*d' % (mw, i))
345 lines.append('%*d' % (mw, i))
346 else:
346 else:
347 lines.append('')
347 lines.append('')
348 ls = '\n'.join(lines)
348 ls = '\n'.join(lines)
349
349
350 # in case you wonder about the seemingly redundant <div> here: since the
350 # in case you wonder about the seemingly redundant <div> here: since the
351 # content in the other cell also is wrapped in a div, some browsers in
351 # content in the other cell also is wrapped in a div, some browsers in
352 # some configurations seem to mess up the formatting...
352 # some configurations seem to mess up the formatting...
353 if nocls:
353 if nocls:
354 yield 0, ('<table class="%stable">' % self.cssclass +
354 yield 0, ('<table class="%stable">' % self.cssclass +
355 '<tr><td><div class="linenodiv" '
355 '<tr><td><div class="linenodiv" '
356 'style="background-color: #f0f0f0; padding-right: 10px">'
356 'style="background-color: #f0f0f0; padding-right: 10px">'
357 '<pre style="line-height: 125%">' +
357 '<pre style="line-height: 125%">' +
358 ls + '</pre></div></td><td id="hlcode" class="code">')
358 ls + '</pre></div></td><td id="hlcode" class="code">')
359 else:
359 else:
360 yield 0, ('<table class="%stable">' % self.cssclass +
360 yield 0, ('<table class="%stable">' % self.cssclass +
361 '<tr><td class="linenos"><div class="linenodiv"><pre>' +
361 '<tr><td class="linenos"><div class="linenodiv"><pre>' +
362 ls + '</pre></div></td><td id="hlcode" class="code">')
362 ls + '</pre></div></td><td id="hlcode" class="code">')
363 yield 0, dummyoutfile.getvalue()
363 yield 0, dummyoutfile.getvalue()
364 yield 0, '</td></tr></table>'
364 yield 0, '</td></tr></table>'
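
A hedged usage sketch: the formatter plugs into the standard pygments highlight() call; the snippet and formatter options below are only an example.

    from pygments import highlight
    from pygments.lexers import get_lexer_by_name

    html = highlight(
        "def add(a, b):\n    return a + b\n",
        get_lexer_by_name('python'),
        CodeHtmlFormatter(linenos='table', lineanchors='L', anchorlinenos=True))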
365
365
366
366
367 class SearchContentCodeHtmlFormatter(CodeHtmlFormatter):
367 class SearchContentCodeHtmlFormatter(CodeHtmlFormatter):
368 def __init__(self, **kw):
368 def __init__(self, **kw):
369 # only show these line numbers if set
369 # only show these line numbers if set
370 self.only_lines = kw.pop('only_line_numbers', [])
370 self.only_lines = kw.pop('only_line_numbers', [])
371 self.query_terms = kw.pop('query_terms', [])
371 self.query_terms = kw.pop('query_terms', [])
372 self.max_lines = kw.pop('max_lines', 5)
372 self.max_lines = kw.pop('max_lines', 5)
373 self.line_context = kw.pop('line_context', 3)
373 self.line_context = kw.pop('line_context', 3)
374 self.url = kw.pop('url', None)
374 self.url = kw.pop('url', None)
375
375
376 super(CodeHtmlFormatter, self).__init__(**kw)
376 super(CodeHtmlFormatter, self).__init__(**kw)
377
377
378 def _wrap_code(self, source):
378 def _wrap_code(self, source):
379 for cnt, it in enumerate(source):
379 for cnt, it in enumerate(source):
380 i, t = it
380 i, t = it
381 t = '<pre>%s</pre>' % t
381 t = '<pre>%s</pre>' % t
382 yield i, t
382 yield i, t
383
383
384 def _wrap_tablelinenos(self, inner):
384 def _wrap_tablelinenos(self, inner):
385 yield 0, '<table class="code-highlight %stable">' % self.cssclass
385 yield 0, '<table class="code-highlight %stable">' % self.cssclass
386
386
387 last_shown_line_number = 0
387 last_shown_line_number = 0
388 current_line_number = 1
388 current_line_number = 1
389
389
390 for t, line in inner:
390 for t, line in inner:
391 if not t:
391 if not t:
392 yield t, line
392 yield t, line
393 continue
393 continue
394
394
395 if current_line_number in self.only_lines:
395 if current_line_number in self.only_lines:
396 if last_shown_line_number + 1 != current_line_number:
396 if last_shown_line_number + 1 != current_line_number:
397 yield 0, '<tr>'
397 yield 0, '<tr>'
398 yield 0, '<td class="line">...</td>'
398 yield 0, '<td class="line">...</td>'
399 yield 0, '<td id="hlcode" class="code"></td>'
399 yield 0, '<td id="hlcode" class="code"></td>'
400 yield 0, '</tr>'
400 yield 0, '</tr>'
401
401
402 yield 0, '<tr>'
402 yield 0, '<tr>'
403 if self.url:
403 if self.url:
404 yield 0, '<td class="line"><a href="%s#L%i">%i</a></td>' % (
404 yield 0, '<td class="line"><a href="%s#L%i">%i</a></td>' % (
405 self.url, current_line_number, current_line_number)
405 self.url, current_line_number, current_line_number)
406 else:
406 else:
407 yield 0, '<td class="line"><a href="">%i</a></td>' % (
407 yield 0, '<td class="line"><a href="">%i</a></td>' % (
408 current_line_number)
408 current_line_number)
409 yield 0, '<td id="hlcode" class="code">' + line + '</td>'
409 yield 0, '<td id="hlcode" class="code">' + line + '</td>'
410 yield 0, '</tr>'
410 yield 0, '</tr>'
411
411
412 last_shown_line_number = current_line_number
412 last_shown_line_number = current_line_number
413
413
414 current_line_number += 1
414 current_line_number += 1
415
415
416
416
417 yield 0, '</table>'
417 yield 0, '</table>'
418
418
419
419
420 def extract_phrases(text_query):
420 def extract_phrases(text_query):
421 """
421 """
422 Extracts phrases from a search term string, making sure phrases
422 Extracts phrases from a search term string, making sure phrases
423 contained in double quotes are kept together, and discarding empty
423 contained in double quotes are kept together, and discarding empty
424 or whitespace-only values, eg.
424 or whitespace-only values, eg.
425
425
426 'some text "a phrase" more' => ['some', 'text', 'a phrase', 'more']
426 'some text "a phrase" more' => ['some', 'text', 'a phrase', 'more']
427
427
428 """
428 """
429
429
430 in_phrase = False
430 in_phrase = False
431 buf = ''
431 buf = ''
432 phrases = []
432 phrases = []
433 for char in text_query:
433 for char in text_query:
434 if in_phrase:
434 if in_phrase:
435 if char == '"': # end phrase
435 if char == '"': # end phrase
436 phrases.append(buf)
436 phrases.append(buf)
437 buf = ''
437 buf = ''
438 in_phrase = False
438 in_phrase = False
439 continue
439 continue
440 else:
440 else:
441 buf += char
441 buf += char
442 continue
442 continue
443 else:
443 else:
444 if char == '"': # start phrase
444 if char == '"': # start phrase
445 in_phrase = True
445 in_phrase = True
446 phrases.append(buf)
446 phrases.append(buf)
447 buf = ''
447 buf = ''
448 continue
448 continue
449 elif char == ' ':
449 elif char == ' ':
450 phrases.append(buf)
450 phrases.append(buf)
451 buf = ''
451 buf = ''
452 continue
452 continue
453 else:
453 else:
454 buf += char
454 buf += char
455
455
456 phrases.append(buf)
456 phrases.append(buf)
457 phrases = [phrase.strip() for phrase in phrases if phrase.strip()]
457 phrases = [phrase.strip() for phrase in phrases if phrase.strip()]
458 return phrases
458 return phrases
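
For example, quoting keeps a phrase together while unquoted words are split (the query string is made up):

    extract_phrases('some text "a phrase" more')
    # -> ['some', 'text', 'a phrase', 'more']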
459
459
460
460
461 def get_matching_offsets(text, phrases):
461 def get_matching_offsets(text, phrases):
462 """
462 """
463 Returns a list of string offsets in `text` that the given `phrases` match
463 Returns a list of string offsets in `text` that the given `phrases` match
464
464
465 >>> get_matching_offsets('some text here', ['some', 'here'])
465 >>> get_matching_offsets('some text here', ['some', 'here'])
466 [(0, 4), (10, 14)]
466 [(0, 4), (10, 14)]
467
467
468 """
468 """
469 offsets = []
469 offsets = []
470 for phrase in phrases:
470 for phrase in phrases:
471 for match in re.finditer(phrase, text):
471 for match in re.finditer(phrase, text):
472 offsets.append((match.start(), match.end()))
472 offsets.append((match.start(), match.end()))
473
473
474 return offsets
474 return offsets
475
475
476
476
477 def normalize_text_for_matching(x):
477 def normalize_text_for_matching(x):
478 """
478 """
479 Replaces all non-alphanumeric characters with spaces and lower-cases the
479 Replaces all non-alphanumeric characters with spaces and lower-cases the
480 string, useful for comparing two text strings without punctuation
480 string, useful for comparing two text strings without punctuation
481 """
481 """
482 return re.sub(r'[^\w]', ' ', x.lower())
482 return re.sub(r'[^\w]', ' ', x.lower())
483
483
484
484
485 def get_matching_line_offsets(lines, terms):
485 def get_matching_line_offsets(lines, terms):
486 """ Return a set of `lines` indices (starting from 1) matching a
486 """ Return a set of `lines` indices (starting from 1) matching a
487 text search query, along with `context` lines above/below matching lines
487 text search query, along with `context` lines above/below matching lines
488
488
489 :param lines: list of strings representing lines
489 :param lines: list of strings representing lines
490 :param terms: search term string to match in lines eg. 'some text'
490 :param terms: search term string to match in lines eg. 'some text'
491 :param context: number of lines above/below a matching line to add to result
491 :param context: number of lines above/below a matching line to add to result
492 :param max_lines: cut off for lines of interest
492 :param max_lines: cut off for lines of interest
493 eg.
493 eg.
494
494
495 text = '''
495 text = '''
496 words words words
496 words words words
497 words words words
497 words words words
498 some text some
498 some text some
499 words words words
499 words words words
500 words words words
500 words words words
501 text here what
501 text here what
502 '''
502 '''
503 get_matching_line_offsets(text, 'text', context=1)
503 get_matching_line_offsets(text, 'text', context=1)
504 {3: [(5, 9)], 6: [(0, 4)]}
504 {3: [(5, 9)], 6: [(0, 4)]}
505
505
506 """
506 """
507 matching_lines = {}
507 matching_lines = {}
508 phrases = [normalize_text_for_matching(phrase)
508 phrases = [normalize_text_for_matching(phrase)
509 for phrase in extract_phrases(terms)]
509 for phrase in extract_phrases(terms)]
510
510
511 for line_index, line in enumerate(lines, start=1):
511 for line_index, line in enumerate(lines, start=1):
512 match_offsets = get_matching_offsets(
512 match_offsets = get_matching_offsets(
513 normalize_text_for_matching(line), phrases)
513 normalize_text_for_matching(line), phrases)
514 if match_offsets:
514 if match_offsets:
515 matching_lines[line_index] = match_offsets
515 matching_lines[line_index] = match_offsets
516
516
517 return matching_lines
517 return matching_lines
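
A small worked example with a list of lines (the sample data is made up); keys are 1-based line indices, values are character offsets within each line:

    lines = [
        'words words words',
        'some text some',
        'words words words',
        'text here what',
    ]
    get_matching_line_offsets(lines, 'text')
    # -> {2: [(5, 9)], 4: [(0, 4)]}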
518
518
519
519
520 def hsv_to_rgb(h, s, v):
520 def hsv_to_rgb(h, s, v):
521 """ Convert hsv color values to rgb """
521 """ Convert hsv color values to rgb """
522
522
523 if s == 0.0:
523 if s == 0.0:
524 return v, v, v
524 return v, v, v
525 i = int(h * 6.0) # XXX assume int() truncates!
525 i = int(h * 6.0) # XXX assume int() truncates!
526 f = (h * 6.0) - i
526 f = (h * 6.0) - i
527 p = v * (1.0 - s)
527 p = v * (1.0 - s)
528 q = v * (1.0 - s * f)
528 q = v * (1.0 - s * f)
529 t = v * (1.0 - s * (1.0 - f))
529 t = v * (1.0 - s * (1.0 - f))
530 i = i % 6
530 i = i % 6
531 if i == 0:
531 if i == 0:
532 return v, t, p
532 return v, t, p
533 if i == 1:
533 if i == 1:
534 return q, v, p
534 return q, v, p
535 if i == 2:
535 if i == 2:
536 return p, v, t
536 return p, v, t
537 if i == 3:
537 if i == 3:
538 return p, q, v
538 return p, q, v
539 if i == 4:
539 if i == 4:
540 return t, p, v
540 return t, p, v
541 if i == 5:
541 if i == 5:
542 return v, p, q
542 return v, p, q
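
Quick sanity checks of the conversion, with all components in the 0..1 range:

    hsv_to_rgb(0.0, 1.0, 1.0)  # -> (1.0, 0.0, 0.0), pure red
    hsv_to_rgb(0.5, 1.0, 1.0)  # -> (0.0, 1.0, 1.0), cyan
    hsv_to_rgb(0.5, 0.0, 0.8)  # -> (0.8, 0.8, 0.8), grey (s == 0 short-circuits)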
543
543
544
544
545 def unique_color_generator(n=10000, saturation=0.10, lightness=0.95):
545 def unique_color_generator(n=10000, saturation=0.10, lightness=0.95):
546 """
546 """
547 Generator for getting n evenly distributed colors using
547 Generator for getting n evenly distributed colors using
548 hsv color and the golden ratio. It always returns the same order of colors
548 hsv color and the golden ratio. It always returns the same order of colors
549
549
550 :param n: number of colors to generate
550 :param n: number of colors to generate
551 :param saturation: saturation of returned colors
551 :param saturation: saturation of returned colors
552 :param lightness: lightness of returned colors
552 :param lightness: lightness of returned colors
553 :returns: RGB tuple
553 :returns: RGB tuple
554 """
554 """
555
555
556 golden_ratio = 0.618033988749895
556 golden_ratio = 0.618033988749895
557 h = 0.22717784590367374
557 h = 0.22717784590367374
558
558
559 for _ in xrange(n):
559 for _ in xrange(n):
560 h += golden_ratio
560 h += golden_ratio
561 h %= 1
561 h %= 1
562 HSV_tuple = [h, saturation, lightness]
562 HSV_tuple = [h, saturation, lightness]
563 RGB_tuple = hsv_to_rgb(*HSV_tuple)
563 RGB_tuple = hsv_to_rgb(*HSV_tuple)
564 yield map(lambda x: str(int(x * 256)), RGB_tuple)
564 yield map(lambda x: str(int(x * 256)), RGB_tuple)
565
565
566
566
567 def color_hasher(n=10000, saturation=0.10, lightness=0.95):
567 def color_hasher(n=10000, saturation=0.10, lightness=0.95):
568 """
568 """
569 Returns a function which, when called with an argument, returns a unique
569 Returns a function which, when called with an argument, returns a unique
570 color for that argument, eg.
570 color for that argument, eg.
571
571
572 :param n: number of colors to generate
572 :param n: number of colors to generate
573 :param saturation: saturation of returned colors
573 :param saturation: saturation of returned colors
574 :param lightness: lightness of returned colors
574 :param lightness: lightness of returned colors
575 :returns: css RGB string
575 :returns: css RGB string
576
576
577 >>> color_hash = color_hasher()
577 >>> color_hash = color_hasher()
578 >>> color_hash('hello')
578 >>> color_hash('hello')
579 'rgb(34, 12, 59)'
579 'rgb(34, 12, 59)'
580 >>> color_hash('hello')
580 >>> color_hash('hello')
581 'rgb(34, 12, 59)'
581 'rgb(34, 12, 59)'
582 >>> color_hash('other')
582 >>> color_hash('other')
583 'rgb(90, 224, 159)'
583 'rgb(90, 224, 159)'
584 """
584 """
585
585
586 color_dict = {}
586 color_dict = {}
587 cgenerator = unique_color_generator(
587 cgenerator = unique_color_generator(
588 saturation=saturation, lightness=lightness)
588 saturation=saturation, lightness=lightness)
589
589
590 def get_color_string(thing):
590 def get_color_string(thing):
591 if thing in color_dict:
591 if thing in color_dict:
592 col = color_dict[thing]
592 col = color_dict[thing]
593 else:
593 else:
594 col = color_dict[thing] = cgenerator.next()
594 col = color_dict[thing] = cgenerator.next()
595 return "rgb(%s)" % (', '.join(col))
595 return "rgb(%s)" % (', '.join(col))
596
596
597 return get_color_string
597 return get_color_string
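
A usage sketch: one hasher instance keeps a stable color per key, so repeated calls with the same argument return the same css string (the keys below are made up):

    color_of = color_hasher()
    first = color_of('default')   # some 'rgb(r, g, b)' string
    color_of('stable')            # a different color
    color_of('default') == first  # True - stable per key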
598
598
599
599
600 def get_lexer_safe(mimetype=None, filepath=None):
600 def get_lexer_safe(mimetype=None, filepath=None):
601 """
601 """
602 Tries to return a relevant pygments lexer using mimetype/filepath name,
602 Tries to return a relevant pygments lexer using mimetype/filepath name,
603 defaulting to plain text if none could be found
603 defaulting to plain text if none could be found
604 """
604 """
605 lexer = None
605 lexer = None
606 try:
606 try:
607 if mimetype:
607 if mimetype:
608 lexer = get_lexer_for_mimetype(mimetype)
608 lexer = get_lexer_for_mimetype(mimetype)
609 if not lexer:
609 if not lexer:
610 lexer = get_lexer_for_filename(filepath)
610 lexer = get_lexer_for_filename(filepath)
611 except pygments.util.ClassNotFound:
611 except pygments.util.ClassNotFound:
612 pass
612 pass
613
613
614 if not lexer:
614 if not lexer:
615 lexer = get_lexer_by_name('text')
615 lexer = get_lexer_by_name('text')
616
616
617 return lexer
617 return lexer
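
Illustrative calls (the file names are hypothetical); mimetype is tried first, then the file name, and plain text is the final fallback:

    get_lexer_safe(mimetype='text/x-python')      # python lexer
    get_lexer_safe(filepath='README.rst')         # rst lexer, resolved by file name
    get_lexer_safe(filepath='no.such.extension')  # falls back to the 'text' lexer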
618
618
619
619
620 def get_lexer_for_filenode(filenode):
620 def get_lexer_for_filenode(filenode):
621 lexer = get_custom_lexer(filenode.extension) or filenode.lexer
621 lexer = get_custom_lexer(filenode.extension) or filenode.lexer
622 return lexer
622 return lexer
623
623
624
624
625 def pygmentize(filenode, **kwargs):
625 def pygmentize(filenode, **kwargs):
626 """
626 """
627 pygmentize function using pygments
627 pygmentize function using pygments
628
628
629 :param filenode:
629 :param filenode:
630 """
630 """
631 lexer = get_lexer_for_filenode(filenode)
631 lexer = get_lexer_for_filenode(filenode)
632 return literal(code_highlight(filenode.content, lexer,
632 return literal(code_highlight(filenode.content, lexer,
633 CodeHtmlFormatter(**kwargs)))
633 CodeHtmlFormatter(**kwargs)))
634
634
635
635
636 def is_following_repo(repo_name, user_id):
636 def is_following_repo(repo_name, user_id):
637 from rhodecode.model.scm import ScmModel
637 from rhodecode.model.scm import ScmModel
638 return ScmModel().is_following_repo(repo_name, user_id)
638 return ScmModel().is_following_repo(repo_name, user_id)
639
639
640
640
641 class _Message(object):
641 class _Message(object):
642 """A message returned by ``Flash.pop_messages()``.
642 """A message returned by ``Flash.pop_messages()``.
643
643
644 Converting the message to a string returns the message text. Instances
644 Converting the message to a string returns the message text. Instances
645 also have the following attributes:
645 also have the following attributes:
646
646
647 * ``message``: the message text.
647 * ``message``: the message text.
648 * ``category``: the category specified when the message was created.
648 * ``category``: the category specified when the message was created.
649 """
649 """
650
650
651 def __init__(self, category, message):
651 def __init__(self, category, message):
652 self.category = category
652 self.category = category
653 self.message = message
653 self.message = message
654
654
655 def __str__(self):
655 def __str__(self):
656 return self.message
656 return self.message
657
657
658 __unicode__ = __str__
658 __unicode__ = __str__
659
659
660 def __html__(self):
660 def __html__(self):
661 return escape(safe_unicode(self.message))
661 return escape(safe_unicode(self.message))
662
662
663
663
664 class Flash(_Flash):
664 class Flash(_Flash):
665
665
666 def pop_messages(self, request=None):
666 def pop_messages(self, request=None):
667 """Return all accumulated messages and delete them from the session.
667 """Return all accumulated messages and delete them from the session.
668
668
669 The return value is a list of ``Message`` objects.
669 The return value is a list of ``Message`` objects.
670 """
670 """
671 messages = []
671 messages = []
672
672
673 if request:
673 if request:
674 session = request.session
674 session = request.session
675 else:
675 else:
676 from pylons import session
676 from pylons import session
677
677
678 # Pop the 'old' pylons flash messages. They are tuples of the form
678 # Pop the 'old' pylons flash messages. They are tuples of the form
679 # (category, message)
679 # (category, message)
680 for cat, msg in session.pop(self.session_key, []):
680 for cat, msg in session.pop(self.session_key, []):
681 messages.append(_Message(cat, msg))
681 messages.append(_Message(cat, msg))
682
682
683 # Pop the 'new' pyramid flash messages for each category as list
683 # Pop the 'new' pyramid flash messages for each category as list
684 # of strings.
684 # of strings.
685 for cat in self.categories:
685 for cat in self.categories:
686 for msg in session.pop_flash(queue=cat):
686 for msg in session.pop_flash(queue=cat):
687 messages.append(_Message(cat, msg))
687 messages.append(_Message(cat, msg))
688 # Map messages from the default queue to the 'notice' category.
688 # Map messages from the default queue to the 'notice' category.
689 for msg in session.pop_flash():
689 for msg in session.pop_flash():
690 messages.append(_Message('notice', msg))
690 messages.append(_Message('notice', msg))
691
691
692 session.save()
692 session.save()
693 return messages
693 return messages
694
694
695 def json_alerts(self, request=None):
695 def json_alerts(self, request=None):
696 payloads = []
696 payloads = []
697 messages = flash.pop_messages(request=request)
697 messages = flash.pop_messages(request=request)
698 if messages:
698 if messages:
699 for message in messages:
699 for message in messages:
700 subdata = {}
700 subdata = {}
701 if hasattr(message.message, 'rsplit'):
701 if hasattr(message.message, 'rsplit'):
702 flash_data = message.message.rsplit('|DELIM|', 1)
702 flash_data = message.message.rsplit('|DELIM|', 1)
703 org_message = flash_data[0]
703 org_message = flash_data[0]
704 if len(flash_data) > 1:
704 if len(flash_data) > 1:
705 subdata = json.loads(flash_data[1])
705 subdata = json.loads(flash_data[1])
706 else:
706 else:
707 org_message = message.message
707 org_message = message.message
708 payloads.append({
708 payloads.append({
709 'message': {
709 'message': {
710 'message': u'{}'.format(org_message),
710 'message': u'{}'.format(org_message),
711 'level': message.category,
711 'level': message.category,
712 'force': True,
712 'force': True,
713 'subdata': subdata
713 'subdata': subdata
714 }
714 }
715 })
715 })
716 return json.dumps(payloads)
716 return json.dumps(payloads)
717
717
718 flash = Flash()
718 flash = Flash()
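
A hedged sketch of the message format json_alerts() understands: an optional JSON payload can ride along after a '|DELIM|' separator (the message text and payload below are made up):

    raw = u'Repository created|DELIM|{"redirect_url": "/foo"}'
    text, extra = raw.rsplit('|DELIM|', 1)
    # text              -> u'Repository created'
    # json.loads(extra) -> {u'redirect_url': u'/foo'}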
719
719
720 #==============================================================================
720 #==============================================================================
721 # SCM FILTERS available via h.
721 # SCM FILTERS available via h.
722 #==============================================================================
722 #==============================================================================
723 from rhodecode.lib.vcs.utils import author_name, author_email
723 from rhodecode.lib.vcs.utils import author_name, author_email
724 from rhodecode.lib.utils2 import credentials_filter, age as _age
724 from rhodecode.lib.utils2 import credentials_filter, age as _age
725 from rhodecode.model.db import User, ChangesetStatus
725 from rhodecode.model.db import User, ChangesetStatus
726
726
727 age = _age
727 age = _age
728 capitalize = lambda x: x.capitalize()
728 capitalize = lambda x: x.capitalize()
729 email = author_email
729 email = author_email
730 short_id = lambda x: x[:12]
730 short_id = lambda x: x[:12]
731 hide_credentials = lambda x: ''.join(credentials_filter(x))
731 hide_credentials = lambda x: ''.join(credentials_filter(x))
732
732
733
733
734 def age_component(datetime_iso, value=None, time_is_local=False):
734 def age_component(datetime_iso, value=None, time_is_local=False):
735 title = value or format_date(datetime_iso)
735 title = value or format_date(datetime_iso)
736 tzinfo = '+00:00'
736 tzinfo = '+00:00'
737
737
738 # detect if we have a timezone info, otherwise, add it
738 # detect if we have a timezone info, otherwise, add it
739 if isinstance(datetime_iso, datetime) and not datetime_iso.tzinfo:
739 if isinstance(datetime_iso, datetime) and not datetime_iso.tzinfo:
740 if time_is_local:
740 if time_is_local:
741 tzinfo = time.strftime("+%H:%M",
741 tzinfo = time.strftime("+%H:%M",
742 time.gmtime(
742 time.gmtime(
743 (datetime.now() - datetime.utcnow()).seconds + 1
743 (datetime.now() - datetime.utcnow()).seconds + 1
744 )
744 )
745 )
745 )
746
746
747 return literal(
747 return literal(
748 '<time class="timeago tooltip" '
748 '<time class="timeago tooltip" '
749 'title="{1}{2}" datetime="{0}{2}">{1}</time>'.format(
749 'title="{1}{2}" datetime="{0}{2}">{1}</time>'.format(
750 datetime_iso, title, tzinfo))
750 datetime_iso, title, tzinfo))
751
751
752
752
753 def _shorten_commit_id(commit_id):
753 def _shorten_commit_id(commit_id):
754 from rhodecode import CONFIG
754 from rhodecode import CONFIG
755 def_len = safe_int(CONFIG.get('rhodecode_show_sha_length', 12))
755 def_len = safe_int(CONFIG.get('rhodecode_show_sha_length', 12))
756 return commit_id[:def_len]
756 return commit_id[:def_len]
757
757
758
758
759 def show_id(commit):
759 def show_id(commit):
760 """
760 """
761 Configurable function that shows the commit ID;
761 Configurable function that shows the commit ID;
762 by default it's r123:fffeeefffeee
762 by default it's r123:fffeeefffeee
763
763
764 :param commit: commit instance
764 :param commit: commit instance
765 """
765 """
766 from rhodecode import CONFIG
766 from rhodecode import CONFIG
767 show_idx = str2bool(CONFIG.get('rhodecode_show_revision_number', True))
767 show_idx = str2bool(CONFIG.get('rhodecode_show_revision_number', True))
768
768
769 raw_id = _shorten_commit_id(commit.raw_id)
769 raw_id = _shorten_commit_id(commit.raw_id)
770 if show_idx:
770 if show_idx:
771 return 'r%s:%s' % (commit.idx, raw_id)
771 return 'r%s:%s' % (commit.idx, raw_id)
772 else:
772 else:
773 return '%s' % (raw_id, )
773 return '%s' % (raw_id, )
774
774
775
775
776 def format_date(date):
776 def format_date(date):
777 """
777 """
778 use a standardized formatting for dates used in RhodeCode
778 use a standardized formatting for dates used in RhodeCode
779
779
780 :param date: date/datetime object
780 :param date: date/datetime object
781 :return: formatted date
781 :return: formatted date
782 """
782 """
783
783
784 if date:
784 if date:
785 _fmt = "%a, %d %b %Y %H:%M:%S"
785 _fmt = "%a, %d %b %Y %H:%M:%S"
786 return safe_unicode(date.strftime(_fmt))
786 return safe_unicode(date.strftime(_fmt))
787
787
788 return u""
788 return u""
789
789
790
790
791 class _RepoChecker(object):
791 class _RepoChecker(object):
792
792
793 def __init__(self, backend_alias):
793 def __init__(self, backend_alias):
794 self._backend_alias = backend_alias
794 self._backend_alias = backend_alias
795
795
796 def __call__(self, repository):
796 def __call__(self, repository):
797 if hasattr(repository, 'alias'):
797 if hasattr(repository, 'alias'):
798 _type = repository.alias
798 _type = repository.alias
799 elif hasattr(repository, 'repo_type'):
799 elif hasattr(repository, 'repo_type'):
800 _type = repository.repo_type
800 _type = repository.repo_type
801 else:
801 else:
802 _type = repository
802 _type = repository
803 return _type == self._backend_alias
803 return _type == self._backend_alias
804
804
805 is_git = _RepoChecker('git')
805 is_git = _RepoChecker('git')
806 is_hg = _RepoChecker('hg')
806 is_hg = _RepoChecker('hg')
807 is_svn = _RepoChecker('svn')
807 is_svn = _RepoChecker('svn')
808
808
809
809
810 def get_repo_type_by_name(repo_name):
810 def get_repo_type_by_name(repo_name):
811 repo = Repository.get_by_repo_name(repo_name)
811 repo = Repository.get_by_repo_name(repo_name)
812 return repo.repo_type
812 return repo.repo_type
813
813
814
814
815 def is_svn_without_proxy(repository):
815 def is_svn_without_proxy(repository):
816 if is_svn(repository):
816 if is_svn(repository):
817 from rhodecode.model.settings import VcsSettingsModel
817 from rhodecode.model.settings import VcsSettingsModel
818 conf = VcsSettingsModel().get_ui_settings_as_config_obj()
818 conf = VcsSettingsModel().get_ui_settings_as_config_obj()
819 return not str2bool(conf.get('vcs_svn_proxy', 'http_requests_enabled'))
819 return not str2bool(conf.get('vcs_svn_proxy', 'http_requests_enabled'))
820 return False
820 return False
821
821
822
822
823 def discover_user(author):
823 def discover_user(author):
824 """
824 """
825 Tries to discover a RhodeCode User based on the author string. The author
825 Tries to discover a RhodeCode User based on the author string. The author
826 string is typically `FirstName LastName <email@address.com>`
826 string is typically `FirstName LastName <email@address.com>`
827 """
827 """
828
828
829 # if author is already an instance use it for extraction
829 # if author is already an instance use it for extraction
830 if isinstance(author, User):
830 if isinstance(author, User):
831 return author
831 return author
832
832
833 # Valid email in the attribute passed, see if they're in the system
833 # Valid email in the attribute passed, see if they're in the system
834 _email = author_email(author)
834 _email = author_email(author)
835 if _email != '':
835 if _email != '':
836 user = User.get_by_email(_email, case_insensitive=True, cache=True)
836 user = User.get_by_email(_email, case_insensitive=True, cache=True)
837 if user is not None:
837 if user is not None:
838 return user
838 return user
839
839
840 # Maybe it's a username; try to extract it and fetch the user by username
840 # Maybe it's a username; try to extract it and fetch the user by username
841 _author = author_name(author)
841 _author = author_name(author)
842 user = User.get_by_username(_author, case_insensitive=True, cache=True)
842 user = User.get_by_username(_author, case_insensitive=True, cache=True)
843 if user is not None:
843 if user is not None:
844 return user
844 return user
845
845
846 return None
846 return None
847
847
848
848
849 def email_or_none(author):
849 def email_or_none(author):
850 # extract email from the commit string
850 # extract email from the commit string
851 _email = author_email(author)
851 _email = author_email(author)
852
852
853 # If we have an email, use it, otherwise
853 # If we have an email, use it, otherwise
854 # see if it contains a username we can get an email from
854 # see if it contains a username we can get an email from
855 if _email != '':
855 if _email != '':
856 return _email
856 return _email
857 else:
857 else:
858 user = User.get_by_username(
858 user = User.get_by_username(
859 author_name(author), case_insensitive=True, cache=True)
859 author_name(author), case_insensitive=True, cache=True)
860
860
861 if user is not None:
861 if user is not None:
862 return user.email
862 return user.email
863
863
864 # No valid email, not a valid user in the system, none!
864 # No valid email, not a valid user in the system, none!
865 return None
865 return None
866
866
867
867
868 def link_to_user(author, length=0, **kwargs):
868 def link_to_user(author, length=0, **kwargs):
869 user = discover_user(author)
869 user = discover_user(author)
870 # user can be None, but if we already have it we can re-use it
870 # user can be None, but if we already have it we can re-use it
871 # in the person() function, saving one expensive query
871 # in the person() function, saving one expensive query
872 if user:
872 if user:
873 author = user
873 author = user
874
874
875 display_person = person(author, 'username_or_name_or_email')
875 display_person = person(author, 'username_or_name_or_email')
876 if length:
876 if length:
877 display_person = shorter(display_person, length)
877 display_person = shorter(display_person, length)
878
878
879 if user:
879 if user:
880 return link_to(
880 return link_to(
881 escape(display_person),
881 escape(display_person),
882 route_path('user_profile', username=user.username),
882 route_path('user_profile', username=user.username),
883 **kwargs)
883 **kwargs)
884 else:
884 else:
885 return escape(display_person)
885 return escape(display_person)
886
886
887
887
888 def person(author, show_attr="username_and_name"):
888 def person(author, show_attr="username_and_name"):
889 user = discover_user(author)
889 user = discover_user(author)
890 if user:
890 if user:
891 return getattr(user, show_attr)
891 return getattr(user, show_attr)
892 else:
892 else:
893 _author = author_name(author)
893 _author = author_name(author)
894 _email = email(author)
894 _email = email(author)
895 return _author or _email
895 return _author or _email
896
896
897
897
898 def author_string(email):
898 def author_string(email):
899 if email:
899 if email:
900 user = User.get_by_email(email, case_insensitive=True, cache=True)
900 user = User.get_by_email(email, case_insensitive=True, cache=True)
901 if user:
901 if user:
902 if user.first_name or user.last_name:
902 if user.first_name or user.last_name:
903 return '%s %s &lt;%s&gt;' % (
903 return '%s %s &lt;%s&gt;' % (
904 user.first_name, user.last_name, email)
904 user.first_name, user.last_name, email)
905 else:
905 else:
906 return email
906 return email
907 else:
907 else:
908 return email
908 return email
909 else:
909 else:
910 return None
910 return None
911
911
912
912
913 def person_by_id(id_, show_attr="username_and_name"):
913 def person_by_id(id_, show_attr="username_and_name"):
914 # attr to return from fetched user
914 # attr to return from fetched user
915 person_getter = lambda usr: getattr(usr, show_attr)
915 person_getter = lambda usr: getattr(usr, show_attr)
916
916
917 # maybe it's an ID?
917 # maybe it's an ID?
918 if str(id_).isdigit() or isinstance(id_, int):
918 if str(id_).isdigit() or isinstance(id_, int):
919 id_ = int(id_)
919 id_ = int(id_)
920 user = User.get(id_)
920 user = User.get(id_)
921 if user is not None:
921 if user is not None:
922 return person_getter(user)
922 return person_getter(user)
923 return id_
923 return id_
924
924
925
925
926 def gravatar_with_user(request, author, show_disabled=False):
926 def gravatar_with_user(request, author, show_disabled=False):
927 _render = request.get_partial_renderer('base/base.mako')
927 _render = request.get_partial_renderer('base/base.mako')
928 return _render('gravatar_with_user', author, show_disabled=show_disabled)
928 return _render('gravatar_with_user', author, show_disabled=show_disabled)
929
929
930
930
931 def desc_stylize(value):
931 def desc_stylize(value):
932 """
932 """
933 converts tags from value into html equivalent
933 converts tags from value into html equivalent
934
934
935 :param value:
935 :param value:
936 """
936 """
937 if not value:
937 if not value:
938 return ''
938 return ''
939
939
940 value = re.sub(r'\[see\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]',
940 value = re.sub(r'\[see\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]',
941 '<div class="metatag" tag="see">see =&gt; \\1 </div>', value)
941 '<div class="metatag" tag="see">see =&gt; \\1 </div>', value)
942 value = re.sub(r'\[license\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]',
942 value = re.sub(r'\[license\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]',
943 '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/\\1">\\1</a></div>', value)
943 '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/\\1">\\1</a></div>', value)
944 value = re.sub(r'\[(requires|recommends|conflicts|base)\ \=\>\ *([a-zA-Z0-9\-\/]*)\]',
944 value = re.sub(r'\[(requires|recommends|conflicts|base)\ \=\>\ *([a-zA-Z0-9\-\/]*)\]',
945 '<div class="metatag" tag="\\1">\\1 =&gt; <a href="/\\2">\\2</a></div>', value)
945 '<div class="metatag" tag="\\1">\\1 =&gt; <a href="/\\2">\\2</a></div>', value)
946 value = re.sub(r'\[(lang|language)\ \=\>\ *([a-zA-Z\-\/\#\+]*)\]',
946 value = re.sub(r'\[(lang|language)\ \=\>\ *([a-zA-Z\-\/\#\+]*)\]',
947 '<div class="metatag" tag="lang">\\2</div>', value)
947 '<div class="metatag" tag="lang">\\2</div>', value)
948 value = re.sub(r'\[([a-z]+)\]',
948 value = re.sub(r'\[([a-z]+)\]',
949 '<div class="metatag" tag="\\1">\\1</div>', value)
949 '<div class="metatag" tag="\\1">\\1</div>', value)
950
950
951 return value
951 return value
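
A worked example of the tag conversion, derived from the regexes above (the description text is made up):

    desc_stylize('[lang => python] [stable]')
    # -> '<div class="metatag" tag="lang">python</div> '
    #    '<div class="metatag" tag="stable">stable</div>'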
952
952
953
953
954 def escaped_stylize(value):
954 def escaped_stylize(value):
955 """
955 """
956 converts tags from value into html equivalent, but escaping its value first
956 converts tags from value into html equivalent, but escaping its value first
957 """
957 """
958 if not value:
958 if not value:
959 return ''
959 return ''
960
960
961 # Use the default webhelpers escape method, but force it into a plain
961 # Use the default webhelpers escape method, but force it into a plain
962 # unicode string instead of a markup tag so it can be used in regexes
962 # unicode string instead of a markup tag so it can be used in regexes
963 value = unicode(escape(safe_unicode(value)))
963 value = unicode(escape(safe_unicode(value)))
964
964
965 value = re.sub(r'\[see\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]',
965 value = re.sub(r'\[see\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]',
966 '<div class="metatag" tag="see">see =&gt; \\1 </div>', value)
966 '<div class="metatag" tag="see">see =&gt; \\1 </div>', value)
967 value = re.sub(r'\[license\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]',
967 value = re.sub(r'\[license\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]',
968 '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/\\1">\\1</a></div>', value)
968 '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/\\1">\\1</a></div>', value)
969 value = re.sub(r'\[(requires|recommends|conflicts|base)\ \=\&gt;\ *([a-zA-Z0-9\-\/]*)\]',
969 value = re.sub(r'\[(requires|recommends|conflicts|base)\ \=\&gt;\ *([a-zA-Z0-9\-\/]*)\]',
970 '<div class="metatag" tag="\\1">\\1 =&gt; <a href="/\\2">\\2</a></div>', value)
970 '<div class="metatag" tag="\\1">\\1 =&gt; <a href="/\\2">\\2</a></div>', value)
971 value = re.sub(r'\[(lang|language)\ \=\&gt;\ *([a-zA-Z\-\/\#\+]*)\]',
971 value = re.sub(r'\[(lang|language)\ \=\&gt;\ *([a-zA-Z\-\/\#\+]*)\]',
972 '<div class="metatag" tag="lang">\\2</div>', value)
972 '<div class="metatag" tag="lang">\\2</div>', value)
973 value = re.sub(r'\[([a-z]+)\]',
973 value = re.sub(r'\[([a-z]+)\]',
974 '<div class="metatag" tag="\\1">\\1</div>', value)
974 '<div class="metatag" tag="\\1">\\1</div>', value)
975
975
976 return value
976 return value
977
977
978
978
979 def bool2icon(value):
979 def bool2icon(value):
980 """
980 """
981 Returns the boolean value of a given value, represented as an html element
981 Returns the boolean value of a given value, represented as an html element
982 with classes that render the corresponding icon
982 with classes that render the corresponding icon
983
983
984 :param value: given value to convert to html node
984 :param value: given value to convert to html node
985 """
985 """
986
986
987 if value: # does bool conversion
987 if value: # does bool conversion
988 return HTML.tag('i', class_="icon-true")
988 return HTML.tag('i', class_="icon-true")
989 else: # not true as bool
989 else: # not true as bool
990 return HTML.tag('i', class_="icon-false")
990 return HTML.tag('i', class_="icon-false")
991
991
992
992
993 #==============================================================================
993 #==============================================================================
994 # PERMS
994 # PERMS
995 #==============================================================================
995 #==============================================================================
996 from rhodecode.lib.auth import HasPermissionAny, HasPermissionAll, \
996 from rhodecode.lib.auth import HasPermissionAny, HasPermissionAll, \
997 HasRepoPermissionAny, HasRepoPermissionAll, HasRepoGroupPermissionAll, \
997 HasRepoPermissionAny, HasRepoPermissionAll, HasRepoGroupPermissionAll, \
998 HasRepoGroupPermissionAny, HasRepoPermissionAnyApi, get_csrf_token, \
998 HasRepoGroupPermissionAny, HasRepoPermissionAnyApi, get_csrf_token, \
999 csrf_token_key
999 csrf_token_key
1000
1000
1001
1001
1002 #==============================================================================
1002 #==============================================================================
1003 # GRAVATAR URL
1003 # GRAVATAR URL
1004 #==============================================================================
1004 #==============================================================================
1005 class InitialsGravatar(object):
1005 class InitialsGravatar(object):
1006 def __init__(self, email_address, first_name, last_name, size=30,
1006 def __init__(self, email_address, first_name, last_name, size=30,
1007 background=None, text_color='#fff'):
1007 background=None, text_color='#fff'):
1008 self.size = size
1008 self.size = size
1009 self.first_name = first_name
1009 self.first_name = first_name
1010 self.last_name = last_name
1010 self.last_name = last_name
1011 self.email_address = email_address
1011 self.email_address = email_address
1012 self.background = background or self.str2color(email_address)
1012 self.background = background or self.str2color(email_address)
1013 self.text_color = text_color
1013 self.text_color = text_color
1014
1014
1015 def get_color_bank(self):
1015 def get_color_bank(self):
1016 """
1016 """
1017 returns a predefined list of colors that gravatars can use.
1017 returns a predefined list of colors that gravatars can use.
1018 Those are randomized distinct colors that guarantee readability and
1018 Those are randomized distinct colors that guarantee readability and
1019 uniqueness.
1019 uniqueness.
1020
1020
1021 generated with: http://phrogz.net/css/distinct-colors.html
1021 generated with: http://phrogz.net/css/distinct-colors.html
1022 """
1022 """
1023 return [
1023 return [
1024 '#bf3030', '#a67f53', '#00ff00', '#5989b3', '#392040', '#d90000',
1024 '#bf3030', '#a67f53', '#00ff00', '#5989b3', '#392040', '#d90000',
1025 '#402910', '#204020', '#79baf2', '#a700b3', '#bf6060', '#7f5320',
1025 '#402910', '#204020', '#79baf2', '#a700b3', '#bf6060', '#7f5320',
1026 '#008000', '#003059', '#ee00ff', '#ff0000', '#8c4b00', '#007300',
1026 '#008000', '#003059', '#ee00ff', '#ff0000', '#8c4b00', '#007300',
1027 '#005fb3', '#de73e6', '#ff4040', '#ffaa00', '#3df255', '#203140',
1027 '#005fb3', '#de73e6', '#ff4040', '#ffaa00', '#3df255', '#203140',
1028 '#47004d', '#591616', '#664400', '#59b365', '#0d2133', '#83008c',
1028 '#47004d', '#591616', '#664400', '#59b365', '#0d2133', '#83008c',
1029 '#592d2d', '#bf9f60', '#73e682', '#1d3f73', '#73006b', '#402020',
1029 '#592d2d', '#bf9f60', '#73e682', '#1d3f73', '#73006b', '#402020',
1030 '#b2862d', '#397341', '#597db3', '#e600d6', '#a60000', '#736039',
1030 '#b2862d', '#397341', '#597db3', '#e600d6', '#a60000', '#736039',
1031 '#00b318', '#79aaf2', '#330d30', '#ff8080', '#403010', '#16591f',
1031 '#00b318', '#79aaf2', '#330d30', '#ff8080', '#403010', '#16591f',
1032 '#002459', '#8c4688', '#e50000', '#ffbf40', '#00732e', '#102340',
1032 '#002459', '#8c4688', '#e50000', '#ffbf40', '#00732e', '#102340',
1033 '#bf60ac', '#8c4646', '#cc8800', '#00a642', '#1d3473', '#b32d98',
1033 '#bf60ac', '#8c4646', '#cc8800', '#00a642', '#1d3473', '#b32d98',
1034 '#660e00', '#ffd580', '#80ffb2', '#7391e6', '#733967', '#d97b6c',
1034 '#660e00', '#ffd580', '#80ffb2', '#7391e6', '#733967', '#d97b6c',
1035 '#8c5e00', '#59b389', '#3967e6', '#590047', '#73281d', '#665200',
1035 '#8c5e00', '#59b389', '#3967e6', '#590047', '#73281d', '#665200',
1036 '#00e67a', '#2d50b3', '#8c2377', '#734139', '#b2982d', '#16593a',
1036 '#00e67a', '#2d50b3', '#8c2377', '#734139', '#b2982d', '#16593a',
1037 '#001859', '#ff00aa', '#a65e53', '#ffcc00', '#0d3321', '#2d3959',
1037 '#001859', '#ff00aa', '#a65e53', '#ffcc00', '#0d3321', '#2d3959',
1038 '#731d56', '#401610', '#4c3d00', '#468c6c', '#002ca6', '#d936a3',
1038 '#731d56', '#401610', '#4c3d00', '#468c6c', '#002ca6', '#d936a3',
1039 '#d94c36', '#403920', '#36d9a3', '#0d1733', '#592d4a', '#993626',
1039 '#d94c36', '#403920', '#36d9a3', '#0d1733', '#592d4a', '#993626',
1040 '#cca300', '#00734d', '#46598c', '#8c005e', '#7f1100', '#8c7000',
1040 '#cca300', '#00734d', '#46598c', '#8c005e', '#7f1100', '#8c7000',
1041 '#00a66f', '#7382e6', '#b32d74', '#d9896c', '#ffe680', '#1d7362',
1041 '#00a66f', '#7382e6', '#b32d74', '#d9896c', '#ffe680', '#1d7362',
1042 '#364cd9', '#73003d', '#d93a00', '#998a4d', '#59b3a1', '#5965b3',
1042 '#364cd9', '#73003d', '#d93a00', '#998a4d', '#59b3a1', '#5965b3',
1043 '#e5007a', '#73341d', '#665f00', '#00b38f', '#0018b3', '#59163a',
1043 '#e5007a', '#73341d', '#665f00', '#00b38f', '#0018b3', '#59163a',
1044 '#b2502d', '#bfb960', '#00ffcc', '#23318c', '#a6537f', '#734939',
1044 '#b2502d', '#bfb960', '#00ffcc', '#23318c', '#a6537f', '#734939',
1045 '#b2a700', '#104036', '#3d3df2', '#402031', '#e56739', '#736f39',
1045 '#b2a700', '#104036', '#3d3df2', '#402031', '#e56739', '#736f39',
1046 '#79f2ea', '#000059', '#401029', '#4c1400', '#ffee00', '#005953',
1046 '#79f2ea', '#000059', '#401029', '#4c1400', '#ffee00', '#005953',
1047 '#101040', '#990052', '#402820', '#403d10', '#00ffee', '#0000d9',
1047 '#101040', '#990052', '#402820', '#403d10', '#00ffee', '#0000d9',
1048 '#ff80c4', '#a66953', '#eeff00', '#00ccbe', '#8080ff', '#e673a1',
1048 '#ff80c4', '#a66953', '#eeff00', '#00ccbe', '#8080ff', '#e673a1',
1049 '#a62c00', '#474d00', '#1a3331', '#46468c', '#733950', '#662900',
1049 '#a62c00', '#474d00', '#1a3331', '#46468c', '#733950', '#662900',
1050 '#858c23', '#238c85', '#0f0073', '#b20047', '#d9986c', '#becc00',
1050 '#858c23', '#238c85', '#0f0073', '#b20047', '#d9986c', '#becc00',
1051 '#396f73', '#281d73', '#ff0066', '#ff6600', '#dee673', '#59adb3',
1051 '#396f73', '#281d73', '#ff0066', '#ff6600', '#dee673', '#59adb3',
1052 '#6559b3', '#590024', '#b2622d', '#98b32d', '#36ced9', '#332d59',
1052 '#6559b3', '#590024', '#b2622d', '#98b32d', '#36ced9', '#332d59',
1053 '#40001a', '#733f1d', '#526600', '#005359', '#242040', '#bf6079',
1053 '#40001a', '#733f1d', '#526600', '#005359', '#242040', '#bf6079',
1054 '#735039', '#cef23d', '#007780', '#5630bf', '#66001b', '#b24700',
1054 '#735039', '#cef23d', '#007780', '#5630bf', '#66001b', '#b24700',
1055 '#acbf60', '#1d6273', '#25008c', '#731d34', '#a67453', '#50592d',
1055 '#acbf60', '#1d6273', '#25008c', '#731d34', '#a67453', '#50592d',
1056 '#00ccff', '#6600ff', '#ff0044', '#4c1f00', '#8a994d', '#79daf2',
1056 '#00ccff', '#6600ff', '#ff0044', '#4c1f00', '#8a994d', '#79daf2',
1057 '#a173e6', '#d93662', '#402310', '#aaff00', '#2d98b3', '#8c40ff',
1057 '#a173e6', '#d93662', '#402310', '#aaff00', '#2d98b3', '#8c40ff',
1058 '#592d39', '#ff8c40', '#354020', '#103640', '#1a0040', '#331a20',
1058 '#592d39', '#ff8c40', '#354020', '#103640', '#1a0040', '#331a20',
1059 '#331400', '#334d00', '#1d5673', '#583973', '#7f0022', '#4c3626',
1059 '#331400', '#334d00', '#1d5673', '#583973', '#7f0022', '#4c3626',
1060 '#88cc00', '#36a3d9', '#3d0073', '#d9364c', '#33241a', '#698c23',
1060 '#88cc00', '#36a3d9', '#3d0073', '#d9364c', '#33241a', '#698c23',
1061 '#5995b3', '#300059', '#e57382', '#7f3300', '#366600', '#00aaff',
1061 '#5995b3', '#300059', '#e57382', '#7f3300', '#366600', '#00aaff',
1062 '#3a1659', '#733941', '#663600', '#74b32d', '#003c59', '#7f53a6',
1062 '#3a1659', '#733941', '#663600', '#74b32d', '#003c59', '#7f53a6',
1063 '#73000f', '#ff8800', '#baf279', '#79caf2', '#291040', '#a6293a',
1063 '#73000f', '#ff8800', '#baf279', '#79caf2', '#291040', '#a6293a',
1064 '#b2742d', '#587339', '#0077b3', '#632699', '#400009', '#d9a66c',
1064 '#b2742d', '#587339', '#0077b3', '#632699', '#400009', '#d9a66c',
1065 '#294010', '#2d4a59', '#aa00ff', '#4c131b', '#b25f00', '#5ce600',
1065 '#294010', '#2d4a59', '#aa00ff', '#4c131b', '#b25f00', '#5ce600',
1066 '#267399', '#a336d9', '#990014', '#664e33', '#86bf60', '#0088ff',
1066 '#267399', '#a336d9', '#990014', '#664e33', '#86bf60', '#0088ff',
1067 '#7700b3', '#593a16', '#073300', '#1d4b73', '#ac60bf', '#e59539',
1067 '#7700b3', '#593a16', '#073300', '#1d4b73', '#ac60bf', '#e59539',
1068 '#4f8c46', '#368dd9', '#5c0073'
1068 '#4f8c46', '#368dd9', '#5c0073'
1069 ]
1069 ]
1070
1070
1071 def rgb_to_hex_color(self, rgb_tuple):
1071 def rgb_to_hex_color(self, rgb_tuple):
1072 """
1072 """
1073 Converts the passed rgb_tuple to a hex color.
1073 Converts the passed rgb_tuple to a hex color.
1074
1074
1075 :param rgb_tuple: tuple with 3 ints representing an rgb color
1075 :param rgb_tuple: tuple with 3 ints representing an rgb color
1076 """
1076 """
1077 return '#' + ("".join(map(chr, rgb_tuple)).encode('hex'))
1077 return '#' + ("".join(map(chr, rgb_tuple)).encode('hex'))
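
For instance (Python 2 str.encode('hex') semantics assumed, matching the rest of this module; the user details are made up):

    # (191, 48, 48) -> '#bf3030', the first entry of the color bank above
    InitialsGravatar('jane@example.com', 'Jane', 'Doe').rgb_to_hex_color((191, 48, 48))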
1078
1078
1079 def email_to_int_list(self, email_str):
1079 def email_to_int_list(self, email_str):
1080 """
1080 """
1081 Get every byte of the hex digest value of the email and turn it into an integer.
1081 Get every byte of the hex digest value of the email and turn it into an integer.
1082 Each value is always between 0-255
1082 Each value is always between 0-255
1083 """
1083 """
1084 digest = md5_safe(email_str.lower())
1084 digest = md5_safe(email_str.lower())
1085 return [int(digest[i * 2:i * 2 + 2], 16) for i in range(16)]
1085 return [int(digest[i * 2:i * 2 + 2], 16) for i in range(16)]
1086
1086
1087 def pick_color_bank_index(self, email_str, color_bank):
1087 def pick_color_bank_index(self, email_str, color_bank):
1088 return self.email_to_int_list(email_str)[0] % len(color_bank)
1088 return self.email_to_int_list(email_str)[0] % len(color_bank)
1089
1089
1090 def str2color(self, email_str):
1090 def str2color(self, email_str):
1091 """
1091 """
1092 Tries to map an email to a color using a stable algorithm
1092 Tries to map an email to a color using a stable algorithm
1093
1093
1094 :param email_str:
1094 :param email_str:
1095 """
1095 """
1096 color_bank = self.get_color_bank()
1096 color_bank = self.get_color_bank()
1097 # pick position (modulo its length so we always find it in the
1097 # pick position (modulo its length so we always find it in the
1098 # bank even if it's smaller than 256 values)
1098 # bank even if it's smaller than 256 values)
1099 pos = self.pick_color_bank_index(email_str, color_bank)
1099 pos = self.pick_color_bank_index(email_str, color_bank)
1100 return color_bank[pos]
1100 return color_bank[pos]
1101
1101
1102 def normalize_email(self, email_address):
1102 def normalize_email(self, email_address):
1103 import unicodedata
1103 import unicodedata
1104 # default host used to fill in the fake/missing email
1104 # default host used to fill in the fake/missing email
1105 default_host = u'localhost'
1105 default_host = u'localhost'
1106
1106
1107 if not email_address:
1107 if not email_address:
1108 email_address = u'%s@%s' % (User.DEFAULT_USER, default_host)
1108 email_address = u'%s@%s' % (User.DEFAULT_USER, default_host)
1109
1109
1110 email_address = safe_unicode(email_address)
1110 email_address = safe_unicode(email_address)
1111
1111
1112 if u'@' not in email_address:
1112 if u'@' not in email_address:
1113 email_address = u'%s@%s' % (email_address, default_host)
1113 email_address = u'%s@%s' % (email_address, default_host)
1114
1114
1115 if email_address.endswith(u'@'):
1115 if email_address.endswith(u'@'):
1116 email_address = u'%s%s' % (email_address, default_host)
1116 email_address = u'%s%s' % (email_address, default_host)
1117
1117
1118 email_address = unicodedata.normalize('NFKD', email_address)\
1118 email_address = unicodedata.normalize('NFKD', email_address)\
1119 .encode('ascii', 'ignore')
1119 .encode('ascii', 'ignore')
1120 return email_address
1120 return email_address
1121
1121
1122 def get_initials(self):
1122 def get_initials(self):
1123 """
1123 """
1124 Returns 2 letter initials calculated based on the input.
1124 Returns 2 letter initials calculated based on the input.
1125 The algorithm first uses the given email address: it takes the first letter
1125 The algorithm first uses the given email address: it takes the first letter
1126 of the part before @, and then the first letter of the server name. In case
1126 of the part before @, and then the first letter of the server name. In case
1127 the part before @ is in the format `somestring.somestring2`, it replaces
1127 the part before @ is in the format `somestring.somestring2`, it replaces
1128 the server letter with the first letter of somestring2
1128 the server letter with the first letter of somestring2
1129
1129
1130 In case the function was initialized with both first and last name, this
1130 In case the function was initialized with both first and last name, this
1131 overrides the extraction from email with the first letters of the first and
1131 overrides the extraction from email with the first letters of the first and
1132 last name. We add special logic to that functionality: in case the full name
1132 last name. We add special logic to that functionality: in case the full name
1133 is compound, like Guido Von Rossum, we use the last part of the last name
1133 is compound, like Guido Von Rossum, we use the last part of the last name
1134 (Von Rossum), picking `R`.
1134 (Von Rossum), picking `R`.
1135
1135
1136 The function also normalizes non-ascii characters to their ascii
1136 The function also normalizes non-ascii characters to their ascii
1137 representation, e.g. Ą => A
1137 representation, e.g. Ą => A
1138 """
1138 """
1139 import unicodedata
1139 import unicodedata
1140 # replace non-ascii to ascii
1140 # replace non-ascii to ascii
1141 first_name = unicodedata.normalize(
1141 first_name = unicodedata.normalize(
1142 'NFKD', safe_unicode(self.first_name)).encode('ascii', 'ignore')
1142 'NFKD', safe_unicode(self.first_name)).encode('ascii', 'ignore')
1143 last_name = unicodedata.normalize(
1143 last_name = unicodedata.normalize(
1144 'NFKD', safe_unicode(self.last_name)).encode('ascii', 'ignore')
1144 'NFKD', safe_unicode(self.last_name)).encode('ascii', 'ignore')
1145
1145
1146 # do NFKD encoding, and also make sure email has proper format
1146 # do NFKD encoding, and also make sure email has proper format
1147 email_address = self.normalize_email(self.email_address)
1147 email_address = self.normalize_email(self.email_address)
1148
1148
1149 # first push the email initials
1149 # first push the email initials
1150 prefix, server = email_address.split('@', 1)
1150 prefix, server = email_address.split('@', 1)
1151
1151
1152 # check if prefix is maybe a 'first_name.last_name' syntax
1152 # check if prefix is maybe a 'first_name.last_name' syntax
1153 _dot_split = prefix.rsplit('.', 1)
1153 _dot_split = prefix.rsplit('.', 1)
1154 if len(_dot_split) == 2:
1154 if len(_dot_split) == 2:
1155 initials = [_dot_split[0][0], _dot_split[1][0]]
1155 initials = [_dot_split[0][0], _dot_split[1][0]]
1156 else:
1156 else:
1157 initials = [prefix[0], server[0]]
1157 initials = [prefix[0], server[0]]
1158
1158
1159 # then try to replace either first_name or last_name
1159 # then try to replace either first_name or last_name
1160 fn_letter = (first_name or " ")[0].strip()
1160 fn_letter = (first_name or " ")[0].strip()
1161 ln_letter = (last_name.split(' ', 1)[-1] or " ")[0].strip()
1161 ln_letter = (last_name.split(' ', 1)[-1] or " ")[0].strip()
1162
1162
1163 if fn_letter:
1163 if fn_letter:
1164 initials[0] = fn_letter
1164 initials[0] = fn_letter
1165
1165
1166 if ln_letter:
1166 if ln_letter:
1167 initials[1] = ln_letter
1167 initials[1] = ln_letter
1168
1168
1169 return ''.join(initials).upper()
1169 return ''.join(initials).upper()
1170
1170
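Illustrative expectations for the rules described in the docstring above (hypothetical inputs; uses the four-argument constructor call seen in initials_gravatar() further down):

    ig = InitialsGravatar('john.doe@example.com', '', '', 30)
    assert ig.get_initials() == 'JD'   # 'first.last' style prefix -> both parts

    ig = InitialsGravatar('john@example.com', '', '', 30)
    assert ig.get_initials() == 'JE'   # prefix letter + server letter

    ig = InitialsGravatar('guido@python.org', 'Guido', 'Von Rossum', 30)
    assert ig.get_initials() == 'GR'   # compound last name -> last part 'Rossum'
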
1171 def get_img_data_by_type(self, font_family, img_type):
1171 def get_img_data_by_type(self, font_family, img_type):
1172 default_user = """
1172 default_user = """
1173 <svg xmlns="http://www.w3.org/2000/svg"
1173 <svg xmlns="http://www.w3.org/2000/svg"
1174 version="1.1" x="0px" y="0px" width="{size}" height="{size}"
1174 version="1.1" x="0px" y="0px" width="{size}" height="{size}"
1175 viewBox="-15 -10 439.165 429.164"
1175 viewBox="-15 -10 439.165 429.164"
1176
1176
1177 xml:space="preserve"
1177 xml:space="preserve"
1178 style="background:{background};" >
1178 style="background:{background};" >
1179
1179
1180 <path d="M204.583,216.671c50.664,0,91.74-48.075,
1180 <path d="M204.583,216.671c50.664,0,91.74-48.075,
1181 91.74-107.378c0-82.237-41.074-107.377-91.74-107.377
1181 91.74-107.378c0-82.237-41.074-107.377-91.74-107.377
1182 c-50.668,0-91.74,25.14-91.74,107.377C112.844,
1182 c-50.668,0-91.74,25.14-91.74,107.377C112.844,
1183 168.596,153.916,216.671,
1183 168.596,153.916,216.671,
1184 204.583,216.671z" fill="{text_color}"/>
1184 204.583,216.671z" fill="{text_color}"/>
1185 <path d="M407.164,374.717L360.88,
1185 <path d="M407.164,374.717L360.88,
1186 270.454c-2.117-4.771-5.836-8.728-10.465-11.138l-71.83-37.392
1186 270.454c-2.117-4.771-5.836-8.728-10.465-11.138l-71.83-37.392
1187 c-1.584-0.823-3.502-0.663-4.926,0.415c-20.316,
1187 c-1.584-0.823-3.502-0.663-4.926,0.415c-20.316,
1188 15.366-44.203,23.488-69.076,23.488c-24.877,
1188 15.366-44.203,23.488-69.076,23.488c-24.877,
1189 0-48.762-8.122-69.078-23.488
1189 0-48.762-8.122-69.078-23.488
1190 c-1.428-1.078-3.346-1.238-4.93-0.415L58.75,
1190 c-1.428-1.078-3.346-1.238-4.93-0.415L58.75,
1191 259.316c-4.631,2.41-8.346,6.365-10.465,11.138L2.001,374.717
1191 259.316c-4.631,2.41-8.346,6.365-10.465,11.138L2.001,374.717
1192 c-3.191,7.188-2.537,15.412,1.75,22.005c4.285,
1192 c-3.191,7.188-2.537,15.412,1.75,22.005c4.285,
1193 6.592,11.537,10.526,19.4,10.526h362.861c7.863,0,15.117-3.936,
1193 6.592,11.537,10.526,19.4,10.526h362.861c7.863,0,15.117-3.936,
1194 19.402-10.527 C409.699,390.129,
1194 19.402-10.527 C409.699,390.129,
1195 410.355,381.902,407.164,374.717z" fill="{text_color}"/>
1195 410.355,381.902,407.164,374.717z" fill="{text_color}"/>
1196 </svg>""".format(
1196 </svg>""".format(
1197 size=self.size,
1197 size=self.size,
1198 background='#979797', # @grey4
1198 background='#979797', # @grey4
1199 text_color=self.text_color,
1199 text_color=self.text_color,
1200 font_family=font_family)
1200 font_family=font_family)
1201
1201
1202 return {
1202 return {
1203 "default_user": default_user
1203 "default_user": default_user
1204 }[img_type]
1204 }[img_type]
1205
1205
1206 def get_img_data(self, svg_type=None):
1206 def get_img_data(self, svg_type=None):
1207 """
1207 """
1208 generates the svg markup for the avatar image
1208 generates the svg markup for the avatar image
1209 """
1209 """
1210
1210
1211 font_family = ','.join([
1211 font_family = ','.join([
1212 'proximanovaregular',
1212 'proximanovaregular',
1213 'Proxima Nova Regular',
1213 'Proxima Nova Regular',
1214 'Proxima Nova',
1214 'Proxima Nova',
1215 'Arial',
1215 'Arial',
1216 'Lucida Grande',
1216 'Lucida Grande',
1217 'sans-serif'
1217 'sans-serif'
1218 ])
1218 ])
1219 if svg_type:
1219 if svg_type:
1220 return self.get_img_data_by_type(font_family, svg_type)
1220 return self.get_img_data_by_type(font_family, svg_type)
1221
1221
1222 initials = self.get_initials()
1222 initials = self.get_initials()
1223 img_data = """
1223 img_data = """
1224 <svg xmlns="http://www.w3.org/2000/svg" pointer-events="none"
1224 <svg xmlns="http://www.w3.org/2000/svg" pointer-events="none"
1225 width="{size}" height="{size}"
1225 width="{size}" height="{size}"
1226 style="width: 100%; height: 100%; background-color: {background}"
1226 style="width: 100%; height: 100%; background-color: {background}"
1227 viewBox="0 0 {size} {size}">
1227 viewBox="0 0 {size} {size}">
1228 <text text-anchor="middle" y="50%" x="50%" dy="0.35em"
1228 <text text-anchor="middle" y="50%" x="50%" dy="0.35em"
1229 pointer-events="auto" fill="{text_color}"
1229 pointer-events="auto" fill="{text_color}"
1230 font-family="{font_family}"
1230 font-family="{font_family}"
1231 style="font-weight: 400; font-size: {f_size}px;">{text}
1231 style="font-weight: 400; font-size: {f_size}px;">{text}
1232 </text>
1232 </text>
1233 </svg>""".format(
1233 </svg>""".format(
1234 size=self.size,
1234 size=self.size,
1235 f_size=self.size/1.85, # scale the text inside the box nicely
1235 f_size=self.size/1.85, # scale the text inside the box nicely
1236 background=self.background,
1236 background=self.background,
1237 text_color=self.text_color,
1237 text_color=self.text_color,
1238 text=initials.upper(),
1238 text=initials.upper(),
1239 font_family=font_family)
1239 font_family=font_family)
1240
1240
1241 return img_data
1241 return img_data
1242
1242
1243 def generate_svg(self, svg_type=None):
1243 def generate_svg(self, svg_type=None):
1244 img_data = self.get_img_data(svg_type)
1244 img_data = self.get_img_data(svg_type)
1245 return "data:image/svg+xml;base64,%s" % img_data.encode('base64')
1245 return "data:image/svg+xml;base64,%s" % img_data.encode('base64')
1246
1246
1247
1247
1248 def initials_gravatar(email_address, first_name, last_name, size=30):
1248 def initials_gravatar(email_address, first_name, last_name, size=30):
1249 svg_type = None
1249 svg_type = None
1250 if email_address == User.DEFAULT_USER_EMAIL:
1250 if email_address == User.DEFAULT_USER_EMAIL:
1251 svg_type = 'default_user'
1251 svg_type = 'default_user'
1252 klass = InitialsGravatar(email_address, first_name, last_name, size)
1252 klass = InitialsGravatar(email_address, first_name, last_name, size)
1253 return klass.generate_svg(svg_type=svg_type)
1253 return klass.generate_svg(svg_type=svg_type)
1254
1254
1255
1255
1256 def gravatar_url(email_address, size=30, request=None):
1256 def gravatar_url(email_address, size=30, request=None):
1257 request = request or get_current_request()
1257 request = request or get_current_request()
1258 if request and hasattr(request, 'call_context'):
1258 if request and hasattr(request, 'call_context'):
1259 _use_gravatar = request.call_context.visual.use_gravatar
1259 _use_gravatar = request.call_context.visual.use_gravatar
1260 _gravatar_url = request.call_context.visual.gravatar_url
1260 _gravatar_url = request.call_context.visual.gravatar_url
1261 else:
1261 else:
1262 # doh, we need to re-import those to mock it later
1262 # doh, we need to re-import those to mock it later
1263 from pylons import tmpl_context as c
1263 from pylons import tmpl_context as c
1264
1264
1265 _use_gravatar = c.visual.use_gravatar
1265 _use_gravatar = c.visual.use_gravatar
1266 _gravatar_url = c.visual.gravatar_url
1266 _gravatar_url = c.visual.gravatar_url
1267
1267
1268 _gravatar_url = _gravatar_url or User.DEFAULT_GRAVATAR_URL
1268 _gravatar_url = _gravatar_url or User.DEFAULT_GRAVATAR_URL
1269
1269
1270 email_address = email_address or User.DEFAULT_USER_EMAIL
1270 email_address = email_address or User.DEFAULT_USER_EMAIL
1271 if isinstance(email_address, unicode):
1271 if isinstance(email_address, unicode):
1272 # hashlib crashes on unicode items
1272 # hashlib crashes on unicode items
1273 email_address = safe_str(email_address)
1273 email_address = safe_str(email_address)
1274
1274
1275 # empty email or default user
1275 # empty email or default user
1276 if not email_address or email_address == User.DEFAULT_USER_EMAIL:
1276 if not email_address or email_address == User.DEFAULT_USER_EMAIL:
1277 return initials_gravatar(User.DEFAULT_USER_EMAIL, '', '', size=size)
1277 return initials_gravatar(User.DEFAULT_USER_EMAIL, '', '', size=size)
1278
1278
1279 if _use_gravatar:
1279 if _use_gravatar:
1280 # TODO: Disuse pyramid thread locals. Think about another solution to
1280 # TODO: Disuse pyramid thread locals. Think about another solution to
1281 # get the host and schema here.
1281 # get the host and schema here.
1282 request = get_current_request()
1282 request = get_current_request()
1283 tmpl = safe_str(_gravatar_url)
1283 tmpl = safe_str(_gravatar_url)
1284 tmpl = tmpl.replace('{email}', email_address)\
1284 tmpl = tmpl.replace('{email}', email_address)\
1285 .replace('{md5email}', md5_safe(email_address.lower())) \
1285 .replace('{md5email}', md5_safe(email_address.lower())) \
1286 .replace('{netloc}', request.host)\
1286 .replace('{netloc}', request.host)\
1287 .replace('{scheme}', request.scheme)\
1287 .replace('{scheme}', request.scheme)\
1288 .replace('{size}', safe_str(size))
1288 .replace('{size}', safe_str(size))
1289 return tmpl
1289 return tmpl
1290 else:
1290 else:
1291 return initials_gravatar(email_address, '', '', size=size)
1291 return initials_gravatar(email_address, '', '', size=size)
1292
1292
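The helper above fills the configured gravatar URL template by plain string replacement. A sketch of the expansion, assuming a Gravatar-style template (the real template comes from the visual settings, falling back to User.DEFAULT_GRAVATAR_URL):

    import hashlib

    email = 'marcin@example.com'
    tmpl = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
    url = (tmpl
           .replace('{email}', email)
           .replace('{md5email}', hashlib.md5(email.lower().encode('utf8')).hexdigest())
           .replace('{size}', '30'))
    # -> 'https://secure.gravatar.com/avatar/<md5 of the email>?d=identicon&s=30'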
1293
1293
1294 class Page(_Page):
1294 class Page(_Page):
1295 """
1295 """
1296 Custom pager to match rendering style with paginator
1296 Custom pager to match rendering style with paginator
1297 """
1297 """
1298
1298
1299 def _get_pos(self, cur_page, max_page, items):
1299 def _get_pos(self, cur_page, max_page, items):
1300 edge = (items / 2) + 1
1300 edge = (items / 2) + 1
1301 if (cur_page <= edge):
1301 if (cur_page <= edge):
1302 radius = max(items / 2, items - cur_page)
1302 radius = max(items / 2, items - cur_page)
1303 elif (max_page - cur_page) < edge:
1303 elif (max_page - cur_page) < edge:
1304 radius = (items - 1) - (max_page - cur_page)
1304 radius = (items - 1) - (max_page - cur_page)
1305 else:
1305 else:
1306 radius = items / 2
1306 radius = items / 2
1307
1307
1308 left = max(1, (cur_page - (radius)))
1308 left = max(1, (cur_page - (radius)))
1309 right = min(max_page, cur_page + (radius))
1309 right = min(max_page, cur_page + (radius))
1310 return left, cur_page, right
1310 return left, cur_page, right
1311
1311
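Worked values for the window computed above with items=5, i.e. the radius-2 pager produced by the default '~2~' format; the tuple is (left, current, right):

    # _get_pos(cur_page=7,  max_page=12, items=5) -> (5, 7, 9)    '1 .. 5 6 [7] 8 9 .. 12'
    # _get_pos(cur_page=1,  max_page=12, items=5) -> (1, 1, 5)    window pushed right at the start
    # _get_pos(cur_page=12, max_page=12, items=5) -> (8, 12, 12)  window pushed left at the end
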
1312 def _range(self, regexp_match):
1312 def _range(self, regexp_match):
1313 """
1313 """
1314 Return range of linked pages (e.g. '1 2 [3] 4 5 6 7 8').
1314 Return range of linked pages (e.g. '1 2 [3] 4 5 6 7 8').
1315
1315
1316 Arguments:
1316 Arguments:
1317
1317
1318 regexp_match
1318 regexp_match
1319 A "re" (regular expressions) match object containing the
1319 A "re" (regular expressions) match object containing the
1320 radius of linked pages around the current page in
1320 radius of linked pages around the current page in
1321 regexp_match.group(1) as a string
1321 regexp_match.group(1) as a string
1322
1322
1323 This function is supposed to be called as a callable in
1323 This function is supposed to be called as a callable in
1324 re.sub.
1324 re.sub.
1325
1325
1326 """
1326 """
1327 radius = int(regexp_match.group(1))
1327 radius = int(regexp_match.group(1))
1328
1328
1329 # Compute the first and last page number within the radius
1329 # Compute the first and last page number within the radius
1330 # e.g. '1 .. 5 6 [7] 8 9 .. 12'
1330 # e.g. '1 .. 5 6 [7] 8 9 .. 12'
1331 # -> leftmost_page = 5
1331 # -> leftmost_page = 5
1332 # -> rightmost_page = 9
1332 # -> rightmost_page = 9
1333 leftmost_page, _cur, rightmost_page = self._get_pos(self.page,
1333 leftmost_page, _cur, rightmost_page = self._get_pos(self.page,
1334 self.last_page,
1334 self.last_page,
1335 (radius * 2) + 1)
1335 (radius * 2) + 1)
1336 nav_items = []
1336 nav_items = []
1337
1337
1338 # Create a link to the first page (unless we are on the first page
1338 # Create a link to the first page (unless we are on the first page
1339 # or there would be no need to insert '..' spacers)
1339 # or there would be no need to insert '..' spacers)
1340 if self.page != self.first_page and self.first_page < leftmost_page:
1340 if self.page != self.first_page and self.first_page < leftmost_page:
1341 nav_items.append(self._pagerlink(self.first_page, self.first_page))
1341 nav_items.append(self._pagerlink(self.first_page, self.first_page))
1342
1342
1343 # Insert dots if there are pages between the first page
1343 # Insert dots if there are pages between the first page
1344 # and the currently displayed page range
1344 # and the currently displayed page range
1345 if leftmost_page - self.first_page > 1:
1345 if leftmost_page - self.first_page > 1:
1346 # Wrap in a SPAN tag if dotdot_attr is set
1346 # Wrap in a SPAN tag if dotdot_attr is set
1347 text = '..'
1347 text = '..'
1348 if self.dotdot_attr:
1348 if self.dotdot_attr:
1349 text = HTML.span(c=text, **self.dotdot_attr)
1349 text = HTML.span(c=text, **self.dotdot_attr)
1350 nav_items.append(text)
1350 nav_items.append(text)
1351
1351
1352 for thispage in xrange(leftmost_page, rightmost_page + 1):
1352 for thispage in xrange(leftmost_page, rightmost_page + 1):
1353 # Highlight the current page number and do not use a link
1353 # Highlight the current page number and do not use a link
1354 if thispage == self.page:
1354 if thispage == self.page:
1355 text = '%s' % (thispage,)
1355 text = '%s' % (thispage,)
1356 # Wrap in a SPAN tag if curpage_attr is set
1356 # Wrap in a SPAN tag if curpage_attr is set
1357 if self.curpage_attr:
1357 if self.curpage_attr:
1358 text = HTML.span(c=text, **self.curpage_attr)
1358 text = HTML.span(c=text, **self.curpage_attr)
1359 nav_items.append(text)
1359 nav_items.append(text)
1360 # Otherwise create just a link to that page
1360 # Otherwise create just a link to that page
1361 else:
1361 else:
1362 text = '%s' % (thispage,)
1362 text = '%s' % (thispage,)
1363 nav_items.append(self._pagerlink(thispage, text))
1363 nav_items.append(self._pagerlink(thispage, text))
1364
1364
1365 # Insert dots if there are pages between the displayed
1365 # Insert dots if there are pages between the displayed
1366 # page numbers and the end of the page range
1366 # page numbers and the end of the page range
1367 if self.last_page - rightmost_page > 1:
1367 if self.last_page - rightmost_page > 1:
1368 text = '..'
1368 text = '..'
1369 # Wrap in a SPAN tag if dotdot_attr is set
1369 # Wrap in a SPAN tag if dotdot_attr is set
1370 if self.dotdot_attr:
1370 if self.dotdot_attr:
1371 text = HTML.span(c=text, **self.dotdot_attr)
1371 text = HTML.span(c=text, **self.dotdot_attr)
1372 nav_items.append(text)
1372 nav_items.append(text)
1373
1373
1374 # Create a link to the very last page (unless we are on the last
1374 # Create a link to the very last page (unless we are on the last
1375 # page or there would be no need to insert '..' spacers)
1375 # page or there would be no need to insert '..' spacers)
1376 if self.page != self.last_page and rightmost_page < self.last_page:
1376 if self.page != self.last_page and rightmost_page < self.last_page:
1377 nav_items.append(self._pagerlink(self.last_page, self.last_page))
1377 nav_items.append(self._pagerlink(self.last_page, self.last_page))
1378
1378
1379 ## prerender links
1379 ## prerender links
1380 #_page_link = url.current()
1380 #_page_link = url.current()
1381 #nav_items.append(literal('<link rel="prerender" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
1381 #nav_items.append(literal('<link rel="prerender" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
1382 #nav_items.append(literal('<link rel="prefetch" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
1382 #nav_items.append(literal('<link rel="prefetch" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
1383 return self.separator.join(nav_items)
1383 return self.separator.join(nav_items)
1384
1384
1385 def pager(self, format='~2~', page_param='page', partial_param='partial',
1385 def pager(self, format='~2~', page_param='page', partial_param='partial',
1386 show_if_single_page=False, separator=' ', onclick=None,
1386 show_if_single_page=False, separator=' ', onclick=None,
1387 symbol_first='<<', symbol_last='>>',
1387 symbol_first='<<', symbol_last='>>',
1388 symbol_previous='<', symbol_next='>',
1388 symbol_previous='<', symbol_next='>',
1389 link_attr={'class': 'pager_link', 'rel': 'prerender'},
1389 link_attr={'class': 'pager_link', 'rel': 'prerender'},
1390 curpage_attr={'class': 'pager_curpage'},
1390 curpage_attr={'class': 'pager_curpage'},
1391 dotdot_attr={'class': 'pager_dotdot'}, **kwargs):
1391 dotdot_attr={'class': 'pager_dotdot'}, **kwargs):
1392
1392
1393 self.curpage_attr = curpage_attr
1393 self.curpage_attr = curpage_attr
1394 self.separator = separator
1394 self.separator = separator
1395 self.pager_kwargs = kwargs
1395 self.pager_kwargs = kwargs
1396 self.page_param = page_param
1396 self.page_param = page_param
1397 self.partial_param = partial_param
1397 self.partial_param = partial_param
1398 self.onclick = onclick
1398 self.onclick = onclick
1399 self.link_attr = link_attr
1399 self.link_attr = link_attr
1400 self.dotdot_attr = dotdot_attr
1400 self.dotdot_attr = dotdot_attr
1401
1401
1402 # Don't show navigator if there is no more than one page
1402 # Don't show navigator if there is no more than one page
1403 if self.page_count == 0 or (self.page_count == 1 and not show_if_single_page):
1403 if self.page_count == 0 or (self.page_count == 1 and not show_if_single_page):
1404 return ''
1404 return ''
1405
1405
1406 from string import Template
1406 from string import Template
1407 # Replace ~...~ in token format by range of pages
1407 # Replace ~...~ in token format by range of pages
1408 result = re.sub(r'~(\d+)~', self._range, format)
1408 result = re.sub(r'~(\d+)~', self._range, format)
1409
1409
1410 # Interpolate '$' variables (string.Template syntax)
1410 # Interpolate '$' variables (string.Template syntax)
1411 result = Template(result).safe_substitute({
1411 result = Template(result).safe_substitute({
1412 'first_page': self.first_page,
1412 'first_page': self.first_page,
1413 'last_page': self.last_page,
1413 'last_page': self.last_page,
1414 'page': self.page,
1414 'page': self.page,
1415 'page_count': self.page_count,
1415 'page_count': self.page_count,
1416 'items_per_page': self.items_per_page,
1416 'items_per_page': self.items_per_page,
1417 'first_item': self.first_item,
1417 'first_item': self.first_item,
1418 'last_item': self.last_item,
1418 'last_item': self.last_item,
1419 'item_count': self.item_count,
1419 'item_count': self.item_count,
1420 'link_first': self.page > self.first_page and \
1420 'link_first': self.page > self.first_page and \
1421 self._pagerlink(self.first_page, symbol_first) or '',
1421 self._pagerlink(self.first_page, symbol_first) or '',
1422 'link_last': self.page < self.last_page and \
1422 'link_last': self.page < self.last_page and \
1423 self._pagerlink(self.last_page, symbol_last) or '',
1423 self._pagerlink(self.last_page, symbol_last) or '',
1424 'link_previous': self.previous_page and \
1424 'link_previous': self.previous_page and \
1425 self._pagerlink(self.previous_page, symbol_previous) \
1425 self._pagerlink(self.previous_page, symbol_previous) \
1426 or HTML.span(symbol_previous, class_="pg-previous disabled"),
1426 or HTML.span(symbol_previous, class_="pg-previous disabled"),
1427 'link_next': self.next_page and \
1427 'link_next': self.next_page and \
1428 self._pagerlink(self.next_page, symbol_next) \
1428 self._pagerlink(self.next_page, symbol_next) \
1429 or HTML.span(symbol_next, class_="pg-next disabled")
1429 or HTML.span(symbol_next, class_="pg-next disabled")
1430 })
1430 })
1431
1431
1432 return literal(result)
1432 return literal(result)
1433
1433
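A hypothetical call showing how the format string is expanded: `~N~` becomes the linked page range with radius N, and `$`-tokens are filled in via string.Template:

    # assumes `page_obj` is a Page built from a collection and a URL generator
    # page_obj.pager(format='$link_previous ~2~ $link_next')
    #
    #   '~2~'            -> page links with a radius of 2 around the current page
    #   '$link_previous' -> '<' link, or a disabled span on the first page
    #   '$link_next'     -> '>' link, or a disabled span on the last page
    #   '$page', '$page_count', '$item_count', ... are also available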
1434
1434
1435 #==============================================================================
1435 #==============================================================================
1436 # REPO PAGER, PAGER FOR REPOSITORY
1436 # REPO PAGER, PAGER FOR REPOSITORY
1437 #==============================================================================
1437 #==============================================================================
1438 class RepoPage(Page):
1438 class RepoPage(Page):
1439
1439
1440 def __init__(self, collection, page=1, items_per_page=20,
1440 def __init__(self, collection, page=1, items_per_page=20,
1441 item_count=None, url=None, **kwargs):
1441 item_count=None, url=None, **kwargs):
1442
1442
1443 """Create a "RepoPage" instance, a special pager for paging
1443 """Create a "RepoPage" instance, a special pager for paging
1444 repository commits
1444 repository commits
1445 """
1445 """
1446 self._url_generator = url
1446 self._url_generator = url
1447
1447
1448 # Save the kwargs class-wide so they can be used in the pager() method
1448 # Save the kwargs class-wide so they can be used in the pager() method
1449 self.kwargs = kwargs
1449 self.kwargs = kwargs
1450
1450
1451 # Save a reference to the collection
1451 # Save a reference to the collection
1452 self.original_collection = collection
1452 self.original_collection = collection
1453
1453
1454 self.collection = collection
1454 self.collection = collection
1455
1455
1456 # The self.page is the number of the current page.
1456 # The self.page is the number of the current page.
1457 # The first page has the number 1!
1457 # The first page has the number 1!
1458 try:
1458 try:
1459 self.page = int(page) # make it int() if we get it as a string
1459 self.page = int(page) # make it int() if we get it as a string
1460 except (ValueError, TypeError):
1460 except (ValueError, TypeError):
1461 self.page = 1
1461 self.page = 1
1462
1462
1463 self.items_per_page = items_per_page
1463 self.items_per_page = items_per_page
1464
1464
1465 # Unless the user tells us how many items the collection has
1465 # Unless the user tells us how many items the collection has
1466 # we calculate that ourselves.
1466 # we calculate that ourselves.
1467 if item_count is not None:
1467 if item_count is not None:
1468 self.item_count = item_count
1468 self.item_count = item_count
1469 else:
1469 else:
1470 self.item_count = len(self.collection)
1470 self.item_count = len(self.collection)
1471
1471
1472 # Compute the number of the first and last available page
1472 # Compute the number of the first and last available page
1473 if self.item_count > 0:
1473 if self.item_count > 0:
1474 self.first_page = 1
1474 self.first_page = 1
1475 self.page_count = int(math.ceil(float(self.item_count) /
1475 self.page_count = int(math.ceil(float(self.item_count) /
1476 self.items_per_page))
1476 self.items_per_page))
1477 self.last_page = self.first_page + self.page_count - 1
1477 self.last_page = self.first_page + self.page_count - 1
1478
1478
1479 # Make sure that the requested page number is in the range of
1479 # Make sure that the requested page number is in the range of
1480 # valid pages
1480 # valid pages
1481 if self.page > self.last_page:
1481 if self.page > self.last_page:
1482 self.page = self.last_page
1482 self.page = self.last_page
1483 elif self.page < self.first_page:
1483 elif self.page < self.first_page:
1484 self.page = self.first_page
1484 self.page = self.first_page
1485
1485
1486 # Note: the number of items on this page can be less than
1486 # Note: the number of items on this page can be less than
1487 # items_per_page if the last page is not full
1487 # items_per_page if the last page is not full
1488 self.first_item = max(0, (self.item_count) - (self.page *
1488 self.first_item = max(0, (self.item_count) - (self.page *
1489 items_per_page))
1489 items_per_page))
1490 self.last_item = ((self.item_count - 1) - items_per_page *
1490 self.last_item = ((self.item_count - 1) - items_per_page *
1491 (self.page - 1))
1491 (self.page - 1))
1492
1492
1493 self.items = list(self.collection[self.first_item:self.last_item + 1])
1493 self.items = list(self.collection[self.first_item:self.last_item + 1])
1494
1494
1495 # Links to previous and next page
1495 # Links to previous and next page
1496 if self.page > self.first_page:
1496 if self.page > self.first_page:
1497 self.previous_page = self.page - 1
1497 self.previous_page = self.page - 1
1498 else:
1498 else:
1499 self.previous_page = None
1499 self.previous_page = None
1500
1500
1501 if self.page < self.last_page:
1501 if self.page < self.last_page:
1502 self.next_page = self.page + 1
1502 self.next_page = self.page + 1
1503 else:
1503 else:
1504 self.next_page = None
1504 self.next_page = None
1505
1505
1506 # No items available
1506 # No items available
1507 else:
1507 else:
1508 self.first_page = None
1508 self.first_page = None
1509 self.page_count = 0
1509 self.page_count = 0
1510 self.last_page = None
1510 self.last_page = None
1511 self.first_item = None
1511 self.first_item = None
1512 self.last_item = None
1512 self.last_item = None
1513 self.previous_page = None
1513 self.previous_page = None
1514 self.next_page = None
1514 self.next_page = None
1515 self.items = []
1515 self.items = []
1516
1516
1517 # This is a subclass of the 'list' type. Initialise the list now.
1517 # This is a subclass of the 'list' type. Initialise the list now.
1518 list.__init__(self, reversed(self.items))
1518 list.__init__(self, reversed(self.items))
1519
1519
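Worked example of the slicing above, assuming item_count=45 and items_per_page=20; the slice is taken from the end of the collection and then reversed, so page 1 holds the newest items:

    # page 1: first_item = max(0, 45 - 1*20) = 25, last_item = 44 -> collection[25:45] (newest 20)
    # page 2: first_item = max(0, 45 - 2*20) = 5,  last_item = 24 -> collection[5:25]
    # page 3: first_item = max(0, 45 - 3*20) = 0,  last_item = 4  -> collection[0:5]  (oldest 5)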
1520
1520
1521 def breadcrumb_repo_link(repo):
1521 def breadcrumb_repo_link(repo):
1522 """
1522 """
1523 Makes a breadcrumb path link to a repo
1523 Makes a breadcrumb path link to a repo
1524
1524
1525 ex::
1525 ex::
1526 group >> subgroup >> repo
1526 group >> subgroup >> repo
1527
1527
1528 :param repo: a Repository instance
1528 :param repo: a Repository instance
1529 """
1529 """
1530
1530
1531 path = [
1531 path = [
1532 link_to(group.name, route_path('repo_group_home', repo_group_name=group.group_name))
1532 link_to(group.name, route_path('repo_group_home', repo_group_name=group.group_name))
1533 for group in repo.groups_with_parents
1533 for group in repo.groups_with_parents
1534 ] + [
1534 ] + [
1535 link_to(repo.just_name, route_path('repo_summary', repo_name=repo.repo_name))
1535 link_to(repo.just_name, route_path('repo_summary', repo_name=repo.repo_name))
1536 ]
1536 ]
1537
1537
1538 return literal(' &raquo; '.join(path))
1538 return literal(' &raquo; '.join(path))
1539
1539
1540
1540
1541 def format_byte_size_binary(file_size):
1541 def format_byte_size_binary(file_size):
1542 """
1542 """
1543 Formats file/folder sizes into a human-readable form using binary units.
1543 Formats file/folder sizes into a human-readable form using binary units.
1544 """
1544 """
1545 if file_size is None:
1545 if file_size is None:
1546 file_size = 0
1546 file_size = 0
1547
1547
1548 formatted_size = format_byte_size(file_size, binary=True)
1548 formatted_size = format_byte_size(file_size, binary=True)
1549 return formatted_size
1549 return formatted_size
1550
1550
1551
1551
1552 def urlify_text(text_, safe=True):
1552 def urlify_text(text_, safe=True):
1553 """
1553 """
1554 Extract URLs from text and make HTML links out of them
1554 Extract URLs from text and make HTML links out of them
1555
1555
1556 :param text_:
1556 :param text_:
1557 """
1557 """
1558
1558
1559 url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]'''
1559 url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]'''
1560 '''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''')
1560 '''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''')
1561
1561
1562 def url_func(match_obj):
1562 def url_func(match_obj):
1563 url_full = match_obj.groups()[0]
1563 url_full = match_obj.groups()[0]
1564 return '<a href="%(url)s">%(url)s</a>' % ({'url': url_full})
1564 return '<a href="%(url)s">%(url)s</a>' % ({'url': url_full})
1565 _newtext = url_pat.sub(url_func, text_)
1565 _newtext = url_pat.sub(url_func, text_)
1566 if safe:
1566 if safe:
1567 return literal(_newtext)
1567 return literal(_newtext)
1568 return _newtext
1568 return _newtext
1569
1569
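Illustrative behaviour of the helper above (hypothetical input):

    text = 'see https://example.com/docs for details'
    urlify_text(text)
    # -> literal('see <a href="https://example.com/docs">https://example.com/docs</a> for details')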
1570
1570
1571 def urlify_commits(text_, repository):
1571 def urlify_commits(text_, repository):
1572 """
1572 """
1573 Extract commit ids from text and make links out of them
1573 Extract commit ids from text and make links out of them
1574
1574
1575 :param text_:
1575 :param text_:
1576 :param repository: repo name to build the URL with
1576 :param repository: repo name to build the URL with
1577 """
1577 """
1578 from pylons import url # doh, we need to re-import url to mock it later
1578
1579 URL_PAT = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)')
1579 URL_PAT = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)')
1580
1580
1581 def url_func(match_obj):
1581 def url_func(match_obj):
1582 commit_id = match_obj.groups()[1]
1582 commit_id = match_obj.groups()[1]
1583 pref = match_obj.groups()[0]
1583 pref = match_obj.groups()[0]
1584 suf = match_obj.groups()[2]
1584 suf = match_obj.groups()[2]
1585
1585
1586 tmpl = (
1586 tmpl = (
1587 '%(pref)s<a class="%(cls)s" href="%(url)s">'
1587 '%(pref)s<a class="%(cls)s" href="%(url)s">'
1588 '%(commit_id)s</a>%(suf)s'
1588 '%(commit_id)s</a>%(suf)s'
1589 )
1589 )
1590 return tmpl % {
1590 return tmpl % {
1591 'pref': pref,
1591 'pref': pref,
1592 'cls': 'revision-link',
1592 'cls': 'revision-link',
1593 'url': url('changeset_home', repo_name=repository,
1593 'url': route_url('repo_commit', repo_name=repository,
1594 revision=commit_id, qualified=True),
1594 commit_id=commit_id),
1595 'commit_id': commit_id,
1595 'commit_id': commit_id,
1596 'suf': suf
1596 'suf': suf
1597 }
1597 }
1598
1598
1599 newtext = URL_PAT.sub(url_func, text_)
1599 newtext = URL_PAT.sub(url_func, text_)
1600
1600
1601 return newtext
1601 return newtext
1602
1602
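Illustrative behaviour of the helper above (hypothetical input); the href is generated from the 'repo_commit' route, and an id is only linked when it is 12-40 hex characters bounded by whitespace or the string ends:

    msg = 'fixes regression introduced in deadbeefcafe'
    urlify_commits(msg, 'my-repo')
    # -> 'fixes regression introduced in '
    #    '<a class="revision-link" href="...">deadbeefcafe</a>'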
1603
1603
1604 def _process_url_func(match_obj, repo_name, uid, entry,
1604 def _process_url_func(match_obj, repo_name, uid, entry,
1605 return_raw_data=False, link_format='html'):
1605 return_raw_data=False, link_format='html'):
1606 pref = ''
1606 pref = ''
1607 if match_obj.group().startswith(' '):
1607 if match_obj.group().startswith(' '):
1608 pref = ' '
1608 pref = ' '
1609
1609
1610 issue_id = ''.join(match_obj.groups())
1610 issue_id = ''.join(match_obj.groups())
1611
1611
1612 if link_format == 'html':
1612 if link_format == 'html':
1613 tmpl = (
1613 tmpl = (
1614 '%(pref)s<a class="%(cls)s" href="%(url)s">'
1614 '%(pref)s<a class="%(cls)s" href="%(url)s">'
1615 '%(issue-prefix)s%(id-repr)s'
1615 '%(issue-prefix)s%(id-repr)s'
1616 '</a>')
1616 '</a>')
1617 elif link_format == 'rst':
1617 elif link_format == 'rst':
1618 tmpl = '`%(issue-prefix)s%(id-repr)s <%(url)s>`_'
1618 tmpl = '`%(issue-prefix)s%(id-repr)s <%(url)s>`_'
1619 elif link_format == 'markdown':
1619 elif link_format == 'markdown':
1620 tmpl = '[%(issue-prefix)s%(id-repr)s](%(url)s)'
1620 tmpl = '[%(issue-prefix)s%(id-repr)s](%(url)s)'
1621 else:
1621 else:
1622 raise ValueError('Bad link_format:{}'.format(link_format))
1622 raise ValueError('Bad link_format:{}'.format(link_format))
1623
1623
1624 (repo_name_cleaned,
1624 (repo_name_cleaned,
1625 parent_group_name) = RepoGroupModel().\
1625 parent_group_name) = RepoGroupModel().\
1626 _get_group_name_and_parent(repo_name)
1626 _get_group_name_and_parent(repo_name)
1627
1627
1628 # variables replacement
1628 # variables replacement
1629 named_vars = {
1629 named_vars = {
1630 'id': issue_id,
1630 'id': issue_id,
1631 'repo': repo_name,
1631 'repo': repo_name,
1632 'repo_name': repo_name_cleaned,
1632 'repo_name': repo_name_cleaned,
1633 'group_name': parent_group_name
1633 'group_name': parent_group_name
1634 }
1634 }
1635 # named regex variables
1635 # named regex variables
1636 named_vars.update(match_obj.groupdict())
1636 named_vars.update(match_obj.groupdict())
1637 _url = string.Template(entry['url']).safe_substitute(**named_vars)
1637 _url = string.Template(entry['url']).safe_substitute(**named_vars)
1638
1638
1639 data = {
1639 data = {
1640 'pref': pref,
1640 'pref': pref,
1641 'cls': 'issue-tracker-link',
1641 'cls': 'issue-tracker-link',
1642 'url': _url,
1642 'url': _url,
1643 'id-repr': issue_id,
1643 'id-repr': issue_id,
1644 'issue-prefix': entry['pref'],
1644 'issue-prefix': entry['pref'],
1645 'serv': entry['url'],
1645 'serv': entry['url'],
1646 }
1646 }
1647 if return_raw_data:
1647 if return_raw_data:
1648 return {
1648 return {
1649 'id': issue_id,
1649 'id': issue_id,
1650 'url': _url
1650 'url': _url
1651 }
1651 }
1652 return tmpl % data
1652 return tmpl % data
1653
1653
1654
1654
1655 def process_patterns(text_string, repo_name, link_format='html'):
1655 def process_patterns(text_string, repo_name, link_format='html'):
1656 allowed_formats = ['html', 'rst', 'markdown']
1656 allowed_formats = ['html', 'rst', 'markdown']
1657 if link_format not in allowed_formats:
1657 if link_format not in allowed_formats:
1658 raise ValueError('Link format can be only one of:{} got {}'.format(
1658 raise ValueError('Link format can be only one of:{} got {}'.format(
1659 allowed_formats, link_format))
1659 allowed_formats, link_format))
1660
1660
1661 repo = None
1661 repo = None
1662 if repo_name:
1662 if repo_name:
1663 # Retrieve the repo instance to avoid an invalid repo_name exploding in
1663 # Retrieve the repo instance to avoid an invalid repo_name exploding in
1664 # IssueTrackerSettingsModel, while still passing the invalid name further down
1664 # IssueTrackerSettingsModel, while still passing the invalid name further down
1665 repo = Repository.get_by_repo_name(repo_name, cache=True)
1665 repo = Repository.get_by_repo_name(repo_name, cache=True)
1666
1666
1667 settings_model = IssueTrackerSettingsModel(repo=repo)
1667 settings_model = IssueTrackerSettingsModel(repo=repo)
1668 active_entries = settings_model.get_settings(cache=True)
1668 active_entries = settings_model.get_settings(cache=True)
1669
1669
1670 issues_data = []
1670 issues_data = []
1671 newtext = text_string
1671 newtext = text_string
1672
1672
1673 for uid, entry in active_entries.items():
1673 for uid, entry in active_entries.items():
1674 log.debug('found issue tracker entry with uid %s' % (uid,))
1674 log.debug('found issue tracker entry with uid %s' % (uid,))
1675
1675
1676 if not (entry['pat'] and entry['url']):
1676 if not (entry['pat'] and entry['url']):
1677 log.debug('skipping due to missing data')
1677 log.debug('skipping due to missing data')
1678 continue
1678 continue
1679
1679
1680 log.debug('issue tracker entry: uid: `%s` PAT:%s URL:%s PREFIX:%s'
1680 log.debug('issue tracker entry: uid: `%s` PAT:%s URL:%s PREFIX:%s'
1681 % (uid, entry['pat'], entry['url'], entry['pref']))
1681 % (uid, entry['pat'], entry['url'], entry['pref']))
1682
1682
1683 try:
1683 try:
1684 pattern = re.compile(r'%s' % entry['pat'])
1684 pattern = re.compile(r'%s' % entry['pat'])
1685 except re.error:
1685 except re.error:
1686 log.exception(
1686 log.exception(
1687 'issue tracker pattern: `%s` failed to compile',
1687 'issue tracker pattern: `%s` failed to compile',
1688 entry['pat'])
1688 entry['pat'])
1689 continue
1689 continue
1690
1690
1691 data_func = partial(
1691 data_func = partial(
1692 _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
1692 _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
1693 return_raw_data=True)
1693 return_raw_data=True)
1694
1694
1695 for match_obj in pattern.finditer(text_string):
1695 for match_obj in pattern.finditer(text_string):
1696 issues_data.append(data_func(match_obj))
1696 issues_data.append(data_func(match_obj))
1697
1697
1698 url_func = partial(
1698 url_func = partial(
1699 _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
1699 _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
1700 link_format=link_format)
1700 link_format=link_format)
1701
1701
1702 newtext = pattern.sub(url_func, newtext)
1702 newtext = pattern.sub(url_func, newtext)
1703 log.debug('processed prefix:uid `%s`' % (uid,))
1703 log.debug('processed prefix:uid `%s`' % (uid,))
1704
1704
1705 return newtext, issues_data
1705 return newtext, issues_data
1706
1706
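A hypothetical issue-tracker entry as consumed above: `pat` is a regex, `url` is a string.Template using `${id}`, `${repo}`, `${repo_name}`, `${group_name}` plus any named groups from the pattern, and `pref` is the displayed prefix:

    entry = {
        'pat': r'#(?P<issue_id>\d+)',    # matches '#123'
        'url': 'https://issues.example.com/${repo}/issue/${issue_id}',
        'pref': '#',
    }
    # For the text 'fixes #123' in repo 'my-repo' this produces a link to
    # https://issues.example.com/my-repo/issue/123, rendered as '#123'
    # in html, rst or markdown depending on link_format.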
1707
1707
1708 def urlify_commit_message(commit_text, repository=None):
1708 def urlify_commit_message(commit_text, repository=None):
1709 """
1709 """
1710 Parses the given text message and makes proper links.
1710 Parses the given text message and makes proper links.
1711 Issues are linked to the given issue server, and the rest become commit links
1711 Issues are linked to the given issue server, and the rest become commit links
1712
1712
1713 :param commit_text:
1713 :param commit_text:
1714 :param repository:
1714 :param repository:
1715 """
1715 """
1716 from pylons import url # doh, we need to re-import url to mock it later
1716 from pylons import url # doh, we need to re-import url to mock it later
1717
1717
1718 def escaper(string):
1718 def escaper(string):
1719 return string.replace('<', '&lt;').replace('>', '&gt;')
1719 return string.replace('<', '&lt;').replace('>', '&gt;')
1720
1720
1721 newtext = escaper(commit_text)
1721 newtext = escaper(commit_text)
1722
1722
1723 # extract http/https links and make them real urls
1723 # extract http/https links and make them real urls
1724 newtext = urlify_text(newtext, safe=False)
1724 newtext = urlify_text(newtext, safe=False)
1725
1725
1726 # urlify commits - extract commit ids and make link out of them, if we have
1726 # urlify commits - extract commit ids and make link out of them, if we have
1727 # the scope of repository present.
1727 # the scope of repository present.
1728 if repository:
1728 if repository:
1729 newtext = urlify_commits(newtext, repository)
1729 newtext = urlify_commits(newtext, repository)
1730
1730
1731 # process issue tracker patterns
1731 # process issue tracker patterns
1732 newtext, issues = process_patterns(newtext, repository or '')
1732 newtext, issues = process_patterns(newtext, repository or '')
1733
1733
1734 return literal(newtext)
1734 return literal(newtext)
1735
1735
1736
1736
1737 def render_binary(repo_name, file_obj):
1737 def render_binary(repo_name, file_obj):
1738 """
1738 """
1739 Choose how to render a binary file
1739 Choose how to render a binary file
1740 """
1740 """
1741 filename = file_obj.name
1741 filename = file_obj.name
1742
1742
1743 # images
1743 # images
1744 for ext in ['*.png', '*.jpg', '*.ico', '*.gif']:
1744 for ext in ['*.png', '*.jpg', '*.ico', '*.gif']:
1745 if fnmatch.fnmatch(filename, pat=ext):
1745 if fnmatch.fnmatch(filename, pat=ext):
1746 alt = filename
1746 alt = filename
1747 src = route_path(
1747 src = route_path(
1748 'repo_file_raw', repo_name=repo_name,
1748 'repo_file_raw', repo_name=repo_name,
1749 commit_id=file_obj.commit.raw_id, f_path=file_obj.path)
1749 commit_id=file_obj.commit.raw_id, f_path=file_obj.path)
1750 return literal('<img class="rendered-binary" alt="{}" src="{}">'.format(alt, src))
1750 return literal('<img class="rendered-binary" alt="{}" src="{}">'.format(alt, src))
1751
1751
1752
1752
1753 def renderer_from_filename(filename, exclude=None):
1753 def renderer_from_filename(filename, exclude=None):
1754 """
1754 """
1755 Choose a renderer based on the filename; this works only for text-based files
1755 Choose a renderer based on the filename; this works only for text-based files
1756 """
1756 """
1757
1757
1758 # ipython
1758 # ipython
1759 for ext in ['*.ipynb']:
1759 for ext in ['*.ipynb']:
1760 if fnmatch.fnmatch(filename, pat=ext):
1760 if fnmatch.fnmatch(filename, pat=ext):
1761 return 'jupyter'
1761 return 'jupyter'
1762
1762
1763 is_markup = MarkupRenderer.renderer_from_filename(filename, exclude=exclude)
1763 is_markup = MarkupRenderer.renderer_from_filename(filename, exclude=exclude)
1764 if is_markup:
1764 if is_markup:
1765 return is_markup
1765 return is_markup
1766 return None
1766 return None
1767
1767
1768
1768
1769 def render(source, renderer='rst', mentions=False, relative_url=None,
1769 def render(source, renderer='rst', mentions=False, relative_url=None,
1770 repo_name=None):
1770 repo_name=None):
1771
1771
1772 def maybe_convert_relative_links(html_source):
1772 def maybe_convert_relative_links(html_source):
1773 if relative_url:
1773 if relative_url:
1774 return relative_links(html_source, relative_url)
1774 return relative_links(html_source, relative_url)
1775 return html_source
1775 return html_source
1776
1776
1777 if renderer == 'rst':
1777 if renderer == 'rst':
1778 if repo_name:
1778 if repo_name:
1779 # process patterns on comments if we pass in repo name
1779 # process patterns on comments if we pass in repo name
1780 source, issues = process_patterns(
1780 source, issues = process_patterns(
1781 source, repo_name, link_format='rst')
1781 source, repo_name, link_format='rst')
1782
1782
1783 return literal(
1783 return literal(
1784 '<div class="rst-block">%s</div>' %
1784 '<div class="rst-block">%s</div>' %
1785 maybe_convert_relative_links(
1785 maybe_convert_relative_links(
1786 MarkupRenderer.rst(source, mentions=mentions)))
1786 MarkupRenderer.rst(source, mentions=mentions)))
1787 elif renderer == 'markdown':
1787 elif renderer == 'markdown':
1788 if repo_name:
1788 if repo_name:
1789 # process patterns on comments if we pass in repo name
1789 # process patterns on comments if we pass in repo name
1790 source, issues = process_patterns(
1790 source, issues = process_patterns(
1791 source, repo_name, link_format='markdown')
1791 source, repo_name, link_format='markdown')
1792
1792
1793 return literal(
1793 return literal(
1794 '<div class="markdown-block">%s</div>' %
1794 '<div class="markdown-block">%s</div>' %
1795 maybe_convert_relative_links(
1795 maybe_convert_relative_links(
1796 MarkupRenderer.markdown(source, flavored=True,
1796 MarkupRenderer.markdown(source, flavored=True,
1797 mentions=mentions)))
1797 mentions=mentions)))
1798 elif renderer == 'jupyter':
1798 elif renderer == 'jupyter':
1799 return literal(
1799 return literal(
1800 '<div class="ipynb">%s</div>' %
1800 '<div class="ipynb">%s</div>' %
1801 maybe_convert_relative_links(
1801 maybe_convert_relative_links(
1802 MarkupRenderer.jupyter(source)))
1802 MarkupRenderer.jupyter(source)))
1803
1803
1804 # None means just show the file-source
1804 # None means just show the file-source
1805 return None
1805 return None
1806
1806
1807
1807
1808 def commit_status(repo, commit_id):
1808 def commit_status(repo, commit_id):
1809 return ChangesetStatusModel().get_status(repo, commit_id)
1809 return ChangesetStatusModel().get_status(repo, commit_id)
1810
1810
1811
1811
1812 def commit_status_lbl(commit_status):
1812 def commit_status_lbl(commit_status):
1813 return dict(ChangesetStatus.STATUSES).get(commit_status)
1813 return dict(ChangesetStatus.STATUSES).get(commit_status)
1814
1814
1815
1815
1816 def commit_time(repo_name, commit_id):
1816 def commit_time(repo_name, commit_id):
1817 repo = Repository.get_by_repo_name(repo_name)
1817 repo = Repository.get_by_repo_name(repo_name)
1818 commit = repo.get_commit(commit_id=commit_id)
1818 commit = repo.get_commit(commit_id=commit_id)
1819 return commit.date
1819 return commit.date
1820
1820
1821
1821
1822 def get_permission_name(key):
1822 def get_permission_name(key):
1823 return dict(Permission.PERMS).get(key)
1823 return dict(Permission.PERMS).get(key)
1824
1824
1825
1825
1826 def journal_filter_help(request):
1826 def journal_filter_help(request):
1827 _ = request.translate
1827 _ = request.translate
1828
1828
1829 return _(
1829 return _(
1830 'Example filter terms:\n' +
1830 'Example filter terms:\n' +
1831 ' repository:vcs\n' +
1831 ' repository:vcs\n' +
1832 ' username:marcin\n' +
1832 ' username:marcin\n' +
1833 ' username:(NOT marcin)\n' +
1833 ' username:(NOT marcin)\n' +
1834 ' action:*push*\n' +
1834 ' action:*push*\n' +
1835 ' ip:127.0.0.1\n' +
1835 ' ip:127.0.0.1\n' +
1836 ' date:20120101\n' +
1836 ' date:20120101\n' +
1837 ' date:[20120101100000 TO 20120102]\n' +
1837 ' date:[20120101100000 TO 20120102]\n' +
1838 '\n' +
1838 '\n' +
1839 'Generate wildcards using \'*\' character:\n' +
1839 'Generate wildcards using \'*\' character:\n' +
1840 ' "repository:vcs*" - search everything starting with \'vcs\'\n' +
1840 ' "repository:vcs*" - search everything starting with \'vcs\'\n' +
1841 ' "repository:*vcs*" - search for repository containing \'vcs\'\n' +
1841 ' "repository:*vcs*" - search for repository containing \'vcs\'\n' +
1842 '\n' +
1842 '\n' +
1843 'Optional AND / OR operators in queries\n' +
1843 'Optional AND / OR operators in queries\n' +
1844 ' "repository:vcs OR repository:test"\n' +
1844 ' "repository:vcs OR repository:test"\n' +
1845 ' "username:test AND repository:test*"\n'
1845 ' "username:test AND repository:test*"\n'
1846 )
1846 )
1847
1847
1848
1848
1849 def search_filter_help(searcher, request):
1849 def search_filter_help(searcher, request):
1850 _ = request.translate
1850 _ = request.translate
1851
1851
1852 terms = ''
1852 terms = ''
1853 return _(
1853 return _(
1854 'Example filter terms for `{searcher}` search:\n' +
1854 'Example filter terms for `{searcher}` search:\n' +
1855 '{terms}\n' +
1855 '{terms}\n' +
1856 'Generate wildcards using \'*\' character:\n' +
1856 'Generate wildcards using \'*\' character:\n' +
1857 ' "repo_name:vcs*" - search everything starting with \'vcs\'\n' +
1857 ' "repo_name:vcs*" - search everything starting with \'vcs\'\n' +
1858 ' "repo_name:*vcs*" - search for repository containing \'vcs\'\n' +
1858 ' "repo_name:*vcs*" - search for repository containing \'vcs\'\n' +
1859 '\n' +
1859 '\n' +
1860 'Optional AND / OR operators in queries\n' +
1860 'Optional AND / OR operators in queries\n' +
1861 ' "repo_name:vcs OR repo_name:test"\n' +
1861 ' "repo_name:vcs OR repo_name:test"\n' +
1862 ' "owner:test AND repo_name:test*"\n' +
1862 ' "owner:test AND repo_name:test*"\n' +
1863 'More: {search_doc}'
1863 'More: {search_doc}'
1864 ).format(searcher=searcher.name,
1864 ).format(searcher=searcher.name,
1865 terms=terms, search_doc=searcher.query_lang_doc)
1865 terms=terms, search_doc=searcher.query_lang_doc)
1866
1866
1867
1867
1868 def not_mapped_error(repo_name):
1868 def not_mapped_error(repo_name):
1869 from rhodecode.translation import _
1869 from rhodecode.translation import _
1870 flash(_('%s repository is not mapped to db perhaps'
1870 flash(_('%s repository is not mapped to db perhaps'
1871 ' it was created or renamed from the filesystem'
1871 ' it was created or renamed from the filesystem'
1872 ' please run the application again'
1872 ' please run the application again'
1873 ' in order to rescan repositories') % repo_name, category='error')
1873 ' in order to rescan repositories') % repo_name, category='error')
1874
1874
1875
1875
1876 def ip_range(ip_addr):
1876 def ip_range(ip_addr):
1877 from rhodecode.model.db import UserIpMap
1877 from rhodecode.model.db import UserIpMap
1878 s, e = UserIpMap._get_ip_range(ip_addr)
1878 s, e = UserIpMap._get_ip_range(ip_addr)
1879 return '%s - %s' % (s, e)
1879 return '%s - %s' % (s, e)
1880
1880
1881
1881
1882 def form(url, method='post', needs_csrf_token=True, **attrs):
1882 def form(url, method='post', needs_csrf_token=True, **attrs):
1883 """Wrapper around webhelpers.tags.form to prevent CSRF attacks."""
1883 """Wrapper around webhelpers.tags.form to prevent CSRF attacks."""
1884 if method.lower() != 'get' and needs_csrf_token:
1884 if method.lower() != 'get' and needs_csrf_token:
1885 raise Exception(
1885 raise Exception(
1886 'Forms to POST/PUT/DELETE endpoints should have (in general) a ' +
1886 'Forms to POST/PUT/DELETE endpoints should have (in general) a ' +
1887 'CSRF token. If the endpoint does not require such token you can ' +
1887 'CSRF token. If the endpoint does not require such token you can ' +
1888 'explicitly set the parameter needs_csrf_token to false.')
1888 'explicitly set the parameter needs_csrf_token to false.')
1889
1889
1890 return wh_form(url, method=method, **attrs)
1890 return wh_form(url, method=method, **attrs)
1891
1891
1892
1892
1893 def secure_form(url, method="POST", multipart=False, **attrs):
1893 def secure_form(url, method="POST", multipart=False, **attrs):
1894 """Start a form tag that points the action to a url. This
1894 """Start a form tag that points the action to a url. This
1895 form tag will also include the hidden field containing
1895 form tag will also include the hidden field containing
1896 the auth token.
1896 the auth token.
1897
1897
1898 The url options should be given either as a string, or as a
1898 The url options should be given either as a string, or as a
1899 ``url()`` function. The method for the form defaults to POST.
1899 ``url()`` function. The method for the form defaults to POST.
1900
1900
1901 Options:
1901 Options:
1902
1902
1903 ``multipart``
1903 ``multipart``
1904 If set to True, the enctype is set to "multipart/form-data".
1904 If set to True, the enctype is set to "multipart/form-data".
1905 ``method``
1905 ``method``
1906 The method to use when submitting the form, usually either
1906 The method to use when submitting the form, usually either
1907 "GET" or "POST". If "PUT", "DELETE", or another verb is used, a
1907 "GET" or "POST". If "PUT", "DELETE", or another verb is used, a
1908 hidden input with name _method is added to simulate the verb
1908 hidden input with name _method is added to simulate the verb
1909 over POST.
1909 over POST.
1910
1910
1911 """
1911 """
1912 from webhelpers.pylonslib.secure_form import insecure_form
1912 from webhelpers.pylonslib.secure_form import insecure_form
1913 form = insecure_form(url, method, multipart, **attrs)
1913 form = insecure_form(url, method, multipart, **attrs)
1914
1914
1915 session = None
1915 session = None
1916 # TODO(marcink): after pyramid migration require request variable ALWAYS
1916 # TODO(marcink): after pyramid migration require request variable ALWAYS
1917 if 'request' in attrs:
1917 if 'request' in attrs:
1918 session = attrs['request'].session
1918 session = attrs['request'].session
1919
1919
1920 token = literal(
1920 token = literal(
1921 '<input type="hidden" id="{}" name="{}" value="{}">'.format(
1921 '<input type="hidden" id="{}" name="{}" value="{}">'.format(
1922 csrf_token_key, csrf_token_key, get_csrf_token(session)))
1922 csrf_token_key, csrf_token_key, get_csrf_token(session)))
1923
1923
1924 return literal("%s\n%s" % (form, token))
1924 return literal("%s\n%s" % (form, token))
1925
1925
1926
1926
1927 def dropdownmenu(name, selected, options, enable_filter=False, **attrs):
1927 def dropdownmenu(name, selected, options, enable_filter=False, **attrs):
1928 select_html = select(name, selected, options, **attrs)
1928 select_html = select(name, selected, options, **attrs)
1929 select2 = """
1929 select2 = """
1930 <script>
1930 <script>
1931 $(document).ready(function() {
1931 $(document).ready(function() {
1932 $('#%s').select2({
1932 $('#%s').select2({
1933 containerCssClass: 'drop-menu',
1933 containerCssClass: 'drop-menu',
1934 dropdownCssClass: 'drop-menu-dropdown',
1934 dropdownCssClass: 'drop-menu-dropdown',
1935 dropdownAutoWidth: true%s
1935 dropdownAutoWidth: true%s
1936 });
1936 });
1937 });
1937 });
1938 </script>
1938 </script>
1939 """
1939 """
1940 filter_option = """,
1940 filter_option = """,
1941 minimumResultsForSearch: -1
1941 minimumResultsForSearch: -1
1942 """
1942 """
1943 input_id = attrs.get('id') or name
1943 input_id = attrs.get('id') or name
1944 filter_enabled = "" if enable_filter else filter_option
1944 filter_enabled = "" if enable_filter else filter_option
1945 select_script = literal(select2 % (input_id, filter_enabled))
1945 select_script = literal(select2 % (input_id, filter_enabled))
1946
1946
1947 return literal(select_html+select_script)
1947 return literal(select_html+select_script)
1948
1948
1949
1949
1950 def get_visual_attr(tmpl_context_var, attr_name):
1950 def get_visual_attr(tmpl_context_var, attr_name):
1951 """
1951 """
1952 A safe way to get an attribute from the `visual` variable of the template context
1952 A safe way to get an attribute from the `visual` variable of the template context
1953
1953
1954 :param tmpl_context_var: instance of tmpl_context, usually present as `c`
1954 :param tmpl_context_var: instance of tmpl_context, usually present as `c`
1955 :param attr_name: name of the attribute we fetch from the c.visual
1955 :param attr_name: name of the attribute we fetch from the c.visual
1956 """
1956 """
1957 visual = getattr(tmpl_context_var, 'visual', None)
1957 visual = getattr(tmpl_context_var, 'visual', None)
1958 if not visual:
1958 if not visual:
1959 return
1959 return
1960 else:
1960 else:
1961 return getattr(visual, attr_name, None)
1961 return getattr(visual, attr_name, None)
1962
1962
1963
1963
1964 def get_last_path_part(file_node):
1964 def get_last_path_part(file_node):
1965 if not file_node.path:
1965 if not file_node.path:
1966 return u''
1966 return u''
1967
1967
1968 path = safe_unicode(file_node.path.split('/')[-1])
1968 path = safe_unicode(file_node.path.split('/')[-1])
1969 return u'../' + path
1969 return u'../' + path
1970
1970
1971
1971
1972 def route_url(*args, **kwargs):
1972 def route_url(*args, **kwargs):
1973 """
1973 """
1974 Wrapper around pyramid's `route_url` (fully qualified url) function.
1974 Wrapper around pyramid's `route_url` (fully qualified url) function.
1975 It is used to generate URLs from within pylons views or templates.
1975 It is used to generate URLs from within pylons views or templates.
1976 This will be removed when the pyramid migration is finished.
1976 This will be removed when the pyramid migration is finished.
1977 """
1977 """
1978 req = get_current_request()
1978 req = get_current_request()
1979 return req.route_url(*args, **kwargs)
1979 return req.route_url(*args, **kwargs)
1980
1980
1981
1981
1982 def route_path(*args, **kwargs):
1982 def route_path(*args, **kwargs):
1983 """
1983 """
1984 Wrapper around pyramid's `route_path` function. It is used to generate
1984 Wrapper around pyramid's `route_path` function. It is used to generate
1985 URLs from within pylons views or templates. This will be removed when
1985 URLs from within pylons views or templates. This will be removed when
1986 the pyramid migration is finished.
1986 the pyramid migration is finished.
1987 """
1987 """
1988 req = get_current_request()
1988 req = get_current_request()
1989 return req.route_path(*args, **kwargs)
1989 return req.route_path(*args, **kwargs)
1990
1990
1991
1991
1992 def route_path_or_none(*args, **kwargs):
1992 def route_path_or_none(*args, **kwargs):
1993 try:
1993 try:
1994 return route_path(*args, **kwargs)
1994 return route_path(*args, **kwargs)
1995 except KeyError:
1995 except KeyError:
1996 return None
1996 return None
1997
1997
1998
1998
1999 def static_url(*args, **kwds):
1999 def static_url(*args, **kwds):
2000 """
2000 """
2001 Wrapper around pyramid's `static_url` function. It is used to generate
2001 Wrapper around pyramid's `static_url` function. It is used to generate
2002 URLs from within pylons views or templates. This will be removed when
2002 URLs from within pylons views or templates. This will be removed when
2003 the pyramid migration is finished.
2003 the pyramid migration is finished.
2004 """
2004 """
2005 req = get_current_request()
2005 req = get_current_request()
2006 return req.static_url(*args, **kwds)
2006 return req.static_url(*args, **kwds)
2007
2007
2008
2008
2009 def resource_path(*args, **kwds):
2010     """
2011     Wrapper around pyramid's `resource_path` function. It is used to generate
2012     URLs from within pylons views or templates. This will be removed when
2013     the pyramid migration is finished.
2014     """
2015     req = get_current_request()
2016     return req.resource_path(*args, **kwds)
2017 
2018 
2019 def api_call_example(method, args):
2020     """
2021     Generates an example API call via curl.
2022     """
2023     args_json = json.dumps(OrderedDict([
2024         ('id', 1),
2025         ('auth_token', 'SECRET'),
2026         ('method', method),
2027         ('args', args)
2028     ]))
2029     return literal(
2030         "curl {api_url} -X POST -H 'content-type:text/plain' --data-binary '{data}'"
2031         "<br/><br/>SECRET can be found on the <a href=\"{token_url}\">auth-tokens</a> page, "
2032         "and needs to have the `api calls` role."
2033         .format(
2034             api_url=route_url('apiv2'),
2035             token_url=route_url('my_account_auth_tokens'),
2036             data=args_json))
2037 
2038 
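An illustrative rendering of the generated snippet, assuming the hypothetical call api_call_example('get_repo', {'repoid': 'my-repo'}) on an example host:

    curl https://rhodecode.example.com/_admin/api -X POST -H 'content-type:text/plain' \
      --data-binary '{"id": 1, "auth_token": "SECRET", "method": "get_repo", "args": {"repoid": "my-repo"}}'
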
2039 def notification_description(notification, request):
2040     """
2041     Generate a human-readable notification description based on its type.
2042     """
2043     from rhodecode.model.notification import NotificationModel
2044     return NotificationModel().make_description(
2045         notification, translate=request.translate)
@@ -1,214 +1,216 b''
1
1
2 /******************************************************************************
2 /******************************************************************************
3 * *
3 * *
4 * DO NOT CHANGE THIS FILE MANUALLY *
4 * DO NOT CHANGE THIS FILE MANUALLY *
5 * *
5 * *
6 * *
6 * *
7 * This file is automatically generated when the app starts up with *
7 * This file is automatically generated when the app starts up with *
8 * generate_js_files = true *
8 * generate_js_files = true *
9 * *
9 * *
10 * To add a route here pass jsroute=True to the route definition in the app *
10 * To add a route here pass jsroute=True to the route definition in the app *
11 * *
11 * *
12 ******************************************************************************/
12 ******************************************************************************/
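// Illustrative app-side sketch (route name and pattern are made-up examples;
// the exact registration helper may differ):
//     config.add_route('my_view', '/{repo_name}/my-view', jsroute=True)
// Routes flagged this way get dumped into registerRCRoutes() below.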
13 function registerRCRoutes() {
13 function registerRCRoutes() {
14 // routes registration
14 // routes registration
15 pyroutes.register('new_repo', '/_admin/create_repository', []);
15 pyroutes.register('new_repo', '/_admin/create_repository', []);
16 pyroutes.register('edit_user', '/_admin/users/%(user_id)s/edit', ['user_id']);
16 pyroutes.register('edit_user', '/_admin/users/%(user_id)s/edit', ['user_id']);
17 pyroutes.register('edit_user_group_members', '/_admin/user_groups/%(user_group_id)s/edit/members', ['user_group_id']);
17 pyroutes.register('edit_user_group_members', '/_admin/user_groups/%(user_group_id)s/edit/members', ['user_group_id']);
18 pyroutes.register('changeset_home', '/%(repo_name)s/changeset/%(revision)s', ['repo_name', 'revision']);
19 pyroutes.register('changeset_comment', '/%(repo_name)s/changeset/%(revision)s/comment', ['repo_name', 'revision']);
20 pyroutes.register('changeset_comment_preview', '/%(repo_name)s/changeset/comment/preview', ['repo_name']);
21 pyroutes.register('changeset_comment_delete', '/%(repo_name)s/changeset/comment/%(comment_id)s/delete', ['repo_name', 'comment_id']);
22 pyroutes.register('changeset_info', '/%(repo_name)s/changeset_info/%(revision)s', ['repo_name', 'revision']);
23 pyroutes.register('compare_url', '/%(repo_name)s/compare/%(source_ref_type)s@%(source_ref)s...%(target_ref_type)s@%(target_ref)s', ['repo_name', 'source_ref_type', 'source_ref', 'target_ref_type', 'target_ref']);
18 pyroutes.register('compare_url', '/%(repo_name)s/compare/%(source_ref_type)s@%(source_ref)s...%(target_ref_type)s@%(target_ref)s', ['repo_name', 'source_ref_type', 'source_ref', 'target_ref_type', 'target_ref']);
24 pyroutes.register('pullrequest_home', '/%(repo_name)s/pull-request/new', ['repo_name']);
19 pyroutes.register('pullrequest_home', '/%(repo_name)s/pull-request/new', ['repo_name']);
25 pyroutes.register('pullrequest', '/%(repo_name)s/pull-request/new', ['repo_name']);
20 pyroutes.register('pullrequest', '/%(repo_name)s/pull-request/new', ['repo_name']);
26 pyroutes.register('pullrequest_repo_refs', '/%(repo_name)s/pull-request/refs/%(target_repo_name)s', ['repo_name', 'target_repo_name']);
21 pyroutes.register('pullrequest_repo_refs', '/%(repo_name)s/pull-request/refs/%(target_repo_name)s', ['repo_name', 'target_repo_name']);
27 pyroutes.register('pullrequest_repo_destinations', '/%(repo_name)s/pull-request/repo-destinations', ['repo_name']);
22 pyroutes.register('pullrequest_repo_destinations', '/%(repo_name)s/pull-request/repo-destinations', ['repo_name']);
28 pyroutes.register('pullrequest_show', '/%(repo_name)s/pull-request/%(pull_request_id)s', ['repo_name', 'pull_request_id']);
23 pyroutes.register('pullrequest_show', '/%(repo_name)s/pull-request/%(pull_request_id)s', ['repo_name', 'pull_request_id']);
29 pyroutes.register('pullrequest_update', '/%(repo_name)s/pull-request/%(pull_request_id)s', ['repo_name', 'pull_request_id']);
24 pyroutes.register('pullrequest_update', '/%(repo_name)s/pull-request/%(pull_request_id)s', ['repo_name', 'pull_request_id']);
30 pyroutes.register('pullrequest_comment', '/%(repo_name)s/pull-request-comment/%(pull_request_id)s', ['repo_name', 'pull_request_id']);
25 pyroutes.register('pullrequest_comment', '/%(repo_name)s/pull-request-comment/%(pull_request_id)s', ['repo_name', 'pull_request_id']);
31 pyroutes.register('pullrequest_comment_delete', '/%(repo_name)s/pull-request-comment/%(comment_id)s/delete', ['repo_name', 'comment_id']);
26 pyroutes.register('pullrequest_comment_delete', '/%(repo_name)s/pull-request-comment/%(comment_id)s/delete', ['repo_name', 'comment_id']);
32 pyroutes.register('favicon', '/favicon.ico', []);
27 pyroutes.register('favicon', '/favicon.ico', []);
33 pyroutes.register('robots', '/robots.txt', []);
28 pyroutes.register('robots', '/robots.txt', []);
34 pyroutes.register('auth_home', '/_admin/auth*traverse', []);
29 pyroutes.register('auth_home', '/_admin/auth*traverse', []);
35 pyroutes.register('global_integrations_new', '/_admin/integrations/new', []);
30 pyroutes.register('global_integrations_new', '/_admin/integrations/new', []);
36 pyroutes.register('global_integrations_home', '/_admin/integrations', []);
31 pyroutes.register('global_integrations_home', '/_admin/integrations', []);
37 pyroutes.register('global_integrations_list', '/_admin/integrations/%(integration)s', ['integration']);
32 pyroutes.register('global_integrations_list', '/_admin/integrations/%(integration)s', ['integration']);
38 pyroutes.register('global_integrations_create', '/_admin/integrations/%(integration)s/new', ['integration']);
33 pyroutes.register('global_integrations_create', '/_admin/integrations/%(integration)s/new', ['integration']);
39 pyroutes.register('global_integrations_edit', '/_admin/integrations/%(integration)s/%(integration_id)s', ['integration', 'integration_id']);
34 pyroutes.register('global_integrations_edit', '/_admin/integrations/%(integration)s/%(integration_id)s', ['integration', 'integration_id']);
40 pyroutes.register('repo_group_integrations_home', '/%(repo_group_name)s/settings/integrations', ['repo_group_name']);
35 pyroutes.register('repo_group_integrations_home', '/%(repo_group_name)s/settings/integrations', ['repo_group_name']);
41 pyroutes.register('repo_group_integrations_list', '/%(repo_group_name)s/settings/integrations/%(integration)s', ['repo_group_name', 'integration']);
36 pyroutes.register('repo_group_integrations_list', '/%(repo_group_name)s/settings/integrations/%(integration)s', ['repo_group_name', 'integration']);
42 pyroutes.register('repo_group_integrations_new', '/%(repo_group_name)s/settings/integrations/new', ['repo_group_name']);
37 pyroutes.register('repo_group_integrations_new', '/%(repo_group_name)s/settings/integrations/new', ['repo_group_name']);
43 pyroutes.register('repo_group_integrations_create', '/%(repo_group_name)s/settings/integrations/%(integration)s/new', ['repo_group_name', 'integration']);
38 pyroutes.register('repo_group_integrations_create', '/%(repo_group_name)s/settings/integrations/%(integration)s/new', ['repo_group_name', 'integration']);
44 pyroutes.register('repo_group_integrations_edit', '/%(repo_group_name)s/settings/integrations/%(integration)s/%(integration_id)s', ['repo_group_name', 'integration', 'integration_id']);
39 pyroutes.register('repo_group_integrations_edit', '/%(repo_group_name)s/settings/integrations/%(integration)s/%(integration_id)s', ['repo_group_name', 'integration', 'integration_id']);
45 pyroutes.register('repo_integrations_home', '/%(repo_name)s/settings/integrations', ['repo_name']);
40 pyroutes.register('repo_integrations_home', '/%(repo_name)s/settings/integrations', ['repo_name']);
46 pyroutes.register('repo_integrations_list', '/%(repo_name)s/settings/integrations/%(integration)s', ['repo_name', 'integration']);
41 pyroutes.register('repo_integrations_list', '/%(repo_name)s/settings/integrations/%(integration)s', ['repo_name', 'integration']);
47 pyroutes.register('repo_integrations_new', '/%(repo_name)s/settings/integrations/new', ['repo_name']);
42 pyroutes.register('repo_integrations_new', '/%(repo_name)s/settings/integrations/new', ['repo_name']);
48 pyroutes.register('repo_integrations_create', '/%(repo_name)s/settings/integrations/%(integration)s/new', ['repo_name', 'integration']);
43 pyroutes.register('repo_integrations_create', '/%(repo_name)s/settings/integrations/%(integration)s/new', ['repo_name', 'integration']);
49 pyroutes.register('repo_integrations_edit', '/%(repo_name)s/settings/integrations/%(integration)s/%(integration_id)s', ['repo_name', 'integration', 'integration_id']);
44 pyroutes.register('repo_integrations_edit', '/%(repo_name)s/settings/integrations/%(integration)s/%(integration_id)s', ['repo_name', 'integration', 'integration_id']);
50 pyroutes.register('ops_ping', '/_admin/ops/ping', []);
45 pyroutes.register('ops_ping', '/_admin/ops/ping', []);
51 pyroutes.register('ops_error_test', '/_admin/ops/error', []);
46 pyroutes.register('ops_error_test', '/_admin/ops/error', []);
52 pyroutes.register('ops_redirect_test', '/_admin/ops/redirect', []);
47 pyroutes.register('ops_redirect_test', '/_admin/ops/redirect', []);
53 pyroutes.register('admin_home', '/_admin', []);
48 pyroutes.register('admin_home', '/_admin', []);
54 pyroutes.register('admin_audit_logs', '/_admin/audit_logs', []);
49 pyroutes.register('admin_audit_logs', '/_admin/audit_logs', []);
55 pyroutes.register('pull_requests_global_0', '/_admin/pull_requests/%(pull_request_id)s', ['pull_request_id']);
50 pyroutes.register('pull_requests_global_0', '/_admin/pull_requests/%(pull_request_id)s', ['pull_request_id']);
56 pyroutes.register('pull_requests_global_1', '/_admin/pull-requests/%(pull_request_id)s', ['pull_request_id']);
51 pyroutes.register('pull_requests_global_1', '/_admin/pull-requests/%(pull_request_id)s', ['pull_request_id']);
57 pyroutes.register('pull_requests_global', '/_admin/pull-request/%(pull_request_id)s', ['pull_request_id']);
52 pyroutes.register('pull_requests_global', '/_admin/pull-request/%(pull_request_id)s', ['pull_request_id']);
58 pyroutes.register('admin_settings_open_source', '/_admin/settings/open_source', []);
53 pyroutes.register('admin_settings_open_source', '/_admin/settings/open_source', []);
59 pyroutes.register('admin_settings_vcs_svn_generate_cfg', '/_admin/settings/vcs/svn_generate_cfg', []);
54 pyroutes.register('admin_settings_vcs_svn_generate_cfg', '/_admin/settings/vcs/svn_generate_cfg', []);
60 pyroutes.register('admin_settings_system', '/_admin/settings/system', []);
55 pyroutes.register('admin_settings_system', '/_admin/settings/system', []);
61 pyroutes.register('admin_settings_system_update', '/_admin/settings/system/updates', []);
56 pyroutes.register('admin_settings_system_update', '/_admin/settings/system/updates', []);
62 pyroutes.register('admin_settings_sessions', '/_admin/settings/sessions', []);
57 pyroutes.register('admin_settings_sessions', '/_admin/settings/sessions', []);
63 pyroutes.register('admin_settings_sessions_cleanup', '/_admin/settings/sessions/cleanup', []);
58 pyroutes.register('admin_settings_sessions_cleanup', '/_admin/settings/sessions/cleanup', []);
64 pyroutes.register('admin_settings_process_management', '/_admin/settings/process_management', []);
59 pyroutes.register('admin_settings_process_management', '/_admin/settings/process_management', []);
65 pyroutes.register('admin_settings_process_management_signal', '/_admin/settings/process_management/signal', []);
60 pyroutes.register('admin_settings_process_management_signal', '/_admin/settings/process_management/signal', []);
66 pyroutes.register('admin_permissions_application', '/_admin/permissions/application', []);
61 pyroutes.register('admin_permissions_application', '/_admin/permissions/application', []);
67 pyroutes.register('admin_permissions_application_update', '/_admin/permissions/application/update', []);
62 pyroutes.register('admin_permissions_application_update', '/_admin/permissions/application/update', []);
68 pyroutes.register('admin_permissions_global', '/_admin/permissions/global', []);
63 pyroutes.register('admin_permissions_global', '/_admin/permissions/global', []);
69 pyroutes.register('admin_permissions_global_update', '/_admin/permissions/global/update', []);
64 pyroutes.register('admin_permissions_global_update', '/_admin/permissions/global/update', []);
70 pyroutes.register('admin_permissions_object', '/_admin/permissions/object', []);
65 pyroutes.register('admin_permissions_object', '/_admin/permissions/object', []);
71 pyroutes.register('admin_permissions_object_update', '/_admin/permissions/object/update', []);
66 pyroutes.register('admin_permissions_object_update', '/_admin/permissions/object/update', []);
72 pyroutes.register('admin_permissions_ips', '/_admin/permissions/ips', []);
67 pyroutes.register('admin_permissions_ips', '/_admin/permissions/ips', []);
73 pyroutes.register('admin_permissions_overview', '/_admin/permissions/overview', []);
68 pyroutes.register('admin_permissions_overview', '/_admin/permissions/overview', []);
74 pyroutes.register('admin_permissions_auth_token_access', '/_admin/permissions/auth_token_access', []);
69 pyroutes.register('admin_permissions_auth_token_access', '/_admin/permissions/auth_token_access', []);
75 pyroutes.register('users', '/_admin/users', []);
70 pyroutes.register('users', '/_admin/users', []);
76 pyroutes.register('users_data', '/_admin/users_data', []);
71 pyroutes.register('users_data', '/_admin/users_data', []);
77 pyroutes.register('edit_user_auth_tokens', '/_admin/users/%(user_id)s/edit/auth_tokens', ['user_id']);
72 pyroutes.register('edit_user_auth_tokens', '/_admin/users/%(user_id)s/edit/auth_tokens', ['user_id']);
78 pyroutes.register('edit_user_auth_tokens_add', '/_admin/users/%(user_id)s/edit/auth_tokens/new', ['user_id']);
73 pyroutes.register('edit_user_auth_tokens_add', '/_admin/users/%(user_id)s/edit/auth_tokens/new', ['user_id']);
79 pyroutes.register('edit_user_auth_tokens_delete', '/_admin/users/%(user_id)s/edit/auth_tokens/delete', ['user_id']);
74 pyroutes.register('edit_user_auth_tokens_delete', '/_admin/users/%(user_id)s/edit/auth_tokens/delete', ['user_id']);
80 pyroutes.register('edit_user_emails', '/_admin/users/%(user_id)s/edit/emails', ['user_id']);
75 pyroutes.register('edit_user_emails', '/_admin/users/%(user_id)s/edit/emails', ['user_id']);
81 pyroutes.register('edit_user_emails_add', '/_admin/users/%(user_id)s/edit/emails/new', ['user_id']);
76 pyroutes.register('edit_user_emails_add', '/_admin/users/%(user_id)s/edit/emails/new', ['user_id']);
82 pyroutes.register('edit_user_emails_delete', '/_admin/users/%(user_id)s/edit/emails/delete', ['user_id']);
77 pyroutes.register('edit_user_emails_delete', '/_admin/users/%(user_id)s/edit/emails/delete', ['user_id']);
83 pyroutes.register('edit_user_ips', '/_admin/users/%(user_id)s/edit/ips', ['user_id']);
78 pyroutes.register('edit_user_ips', '/_admin/users/%(user_id)s/edit/ips', ['user_id']);
84 pyroutes.register('edit_user_ips_add', '/_admin/users/%(user_id)s/edit/ips/new', ['user_id']);
79 pyroutes.register('edit_user_ips_add', '/_admin/users/%(user_id)s/edit/ips/new', ['user_id']);
85 pyroutes.register('edit_user_ips_delete', '/_admin/users/%(user_id)s/edit/ips/delete', ['user_id']);
80 pyroutes.register('edit_user_ips_delete', '/_admin/users/%(user_id)s/edit/ips/delete', ['user_id']);
86 pyroutes.register('edit_user_groups_management', '/_admin/users/%(user_id)s/edit/groups_management', ['user_id']);
81 pyroutes.register('edit_user_groups_management', '/_admin/users/%(user_id)s/edit/groups_management', ['user_id']);
87 pyroutes.register('edit_user_groups_management_updates', '/_admin/users/%(user_id)s/edit/edit_user_groups_management/updates', ['user_id']);
82 pyroutes.register('edit_user_groups_management_updates', '/_admin/users/%(user_id)s/edit/edit_user_groups_management/updates', ['user_id']);
88 pyroutes.register('edit_user_audit_logs', '/_admin/users/%(user_id)s/edit/audit', ['user_id']);
83 pyroutes.register('edit_user_audit_logs', '/_admin/users/%(user_id)s/edit/audit', ['user_id']);
89 pyroutes.register('channelstream_connect', '/_admin/channelstream/connect', []);
84 pyroutes.register('channelstream_connect', '/_admin/channelstream/connect', []);
90 pyroutes.register('channelstream_subscribe', '/_admin/channelstream/subscribe', []);
85 pyroutes.register('channelstream_subscribe', '/_admin/channelstream/subscribe', []);
91 pyroutes.register('channelstream_proxy', '/_channelstream', []);
86 pyroutes.register('channelstream_proxy', '/_channelstream', []);
92 pyroutes.register('login', '/_admin/login', []);
87 pyroutes.register('login', '/_admin/login', []);
93 pyroutes.register('logout', '/_admin/logout', []);
88 pyroutes.register('logout', '/_admin/logout', []);
94 pyroutes.register('register', '/_admin/register', []);
89 pyroutes.register('register', '/_admin/register', []);
95 pyroutes.register('reset_password', '/_admin/password_reset', []);
90 pyroutes.register('reset_password', '/_admin/password_reset', []);
96 pyroutes.register('reset_password_confirmation', '/_admin/password_reset_confirmation', []);
91 pyroutes.register('reset_password_confirmation', '/_admin/password_reset_confirmation', []);
97 pyroutes.register('home', '/', []);
92 pyroutes.register('home', '/', []);
98 pyroutes.register('user_autocomplete_data', '/_users', []);
93 pyroutes.register('user_autocomplete_data', '/_users', []);
99 pyroutes.register('user_group_autocomplete_data', '/_user_groups', []);
94 pyroutes.register('user_group_autocomplete_data', '/_user_groups', []);
100 pyroutes.register('repo_list_data', '/_repos', []);
95 pyroutes.register('repo_list_data', '/_repos', []);
101 pyroutes.register('goto_switcher_data', '/_goto_data', []);
96 pyroutes.register('goto_switcher_data', '/_goto_data', []);
102 pyroutes.register('journal', '/_admin/journal', []);
97 pyroutes.register('journal', '/_admin/journal', []);
103 pyroutes.register('journal_rss', '/_admin/journal/rss', []);
98 pyroutes.register('journal_rss', '/_admin/journal/rss', []);
104 pyroutes.register('journal_atom', '/_admin/journal/atom', []);
99 pyroutes.register('journal_atom', '/_admin/journal/atom', []);
105 pyroutes.register('journal_public', '/_admin/public_journal', []);
100 pyroutes.register('journal_public', '/_admin/public_journal', []);
106 pyroutes.register('journal_public_atom', '/_admin/public_journal/atom', []);
101 pyroutes.register('journal_public_atom', '/_admin/public_journal/atom', []);
107 pyroutes.register('journal_public_atom_old', '/_admin/public_journal_atom', []);
102 pyroutes.register('journal_public_atom_old', '/_admin/public_journal_atom', []);
108 pyroutes.register('journal_public_rss', '/_admin/public_journal/rss', []);
103 pyroutes.register('journal_public_rss', '/_admin/public_journal/rss', []);
109 pyroutes.register('journal_public_rss_old', '/_admin/public_journal_rss', []);
104 pyroutes.register('journal_public_rss_old', '/_admin/public_journal_rss', []);
110 pyroutes.register('toggle_following', '/_admin/toggle_following', []);
105 pyroutes.register('toggle_following', '/_admin/toggle_following', []);
111 pyroutes.register('repo_summary_explicit', '/%(repo_name)s/summary', ['repo_name']);
106 pyroutes.register('repo_summary_explicit', '/%(repo_name)s/summary', ['repo_name']);
112 pyroutes.register('repo_summary_commits', '/%(repo_name)s/summary-commits', ['repo_name']);
107 pyroutes.register('repo_summary_commits', '/%(repo_name)s/summary-commits', ['repo_name']);
113 pyroutes.register('repo_commit', '/%(repo_name)s/changeset/%(commit_id)s', ['repo_name', 'commit_id']);
108 pyroutes.register('repo_commit', '/%(repo_name)s/changeset/%(commit_id)s', ['repo_name', 'commit_id']);
109 pyroutes.register('repo_commit_children', '/%(repo_name)s/changeset_children/%(commit_id)s', ['repo_name', 'commit_id']);
110 pyroutes.register('repo_commit_parents', '/%(repo_name)s/changeset_parents/%(commit_id)s', ['repo_name', 'commit_id']);
111 pyroutes.register('repo_commit_raw_deprecated', '/%(repo_name)s/raw-changeset/%(commit_id)s', ['repo_name', 'commit_id']);
112 pyroutes.register('repo_commit_raw', '/%(repo_name)s/changeset-diff/%(commit_id)s', ['repo_name', 'commit_id']);
113 pyroutes.register('repo_commit_patch', '/%(repo_name)s/changeset-patch/%(commit_id)s', ['repo_name', 'commit_id']);
114 pyroutes.register('repo_commit_download', '/%(repo_name)s/changeset-download/%(commit_id)s', ['repo_name', 'commit_id']);
115 pyroutes.register('repo_commit_data', '/%(repo_name)s/changeset-data/%(commit_id)s', ['repo_name', 'commit_id']);
116 pyroutes.register('repo_commit_comment_create', '/%(repo_name)s/changeset/%(commit_id)s/comment/create', ['repo_name', 'commit_id']);
117 pyroutes.register('repo_commit_comment_preview', '/%(repo_name)s/changeset/%(commit_id)s/comment/preview', ['repo_name', 'commit_id']);
118 pyroutes.register('repo_commit_comment_delete', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_id)s/delete', ['repo_name', 'commit_id', 'comment_id']);
114 pyroutes.register('repo_archivefile', '/%(repo_name)s/archive/%(fname)s', ['repo_name', 'fname']);
119 pyroutes.register('repo_archivefile', '/%(repo_name)s/archive/%(fname)s', ['repo_name', 'fname']);
115 pyroutes.register('repo_files_diff', '/%(repo_name)s/diff/%(f_path)s', ['repo_name', 'f_path']);
120 pyroutes.register('repo_files_diff', '/%(repo_name)s/diff/%(f_path)s', ['repo_name', 'f_path']);
116 pyroutes.register('repo_files_diff_2way_redirect', '/%(repo_name)s/diff-2way/%(f_path)s', ['repo_name', 'f_path']);
121 pyroutes.register('repo_files_diff_2way_redirect', '/%(repo_name)s/diff-2way/%(f_path)s', ['repo_name', 'f_path']);
117 pyroutes.register('repo_files', '/%(repo_name)s/files/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
122 pyroutes.register('repo_files', '/%(repo_name)s/files/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
118 pyroutes.register('repo_files:default_path', '/%(repo_name)s/files/%(commit_id)s/', ['repo_name', 'commit_id']);
123 pyroutes.register('repo_files:default_path', '/%(repo_name)s/files/%(commit_id)s/', ['repo_name', 'commit_id']);
119 pyroutes.register('repo_files:default_commit', '/%(repo_name)s/files', ['repo_name']);
124 pyroutes.register('repo_files:default_commit', '/%(repo_name)s/files', ['repo_name']);
120 pyroutes.register('repo_files:rendered', '/%(repo_name)s/render/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
125 pyroutes.register('repo_files:rendered', '/%(repo_name)s/render/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
121 pyroutes.register('repo_files:annotated', '/%(repo_name)s/annotate/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
126 pyroutes.register('repo_files:annotated', '/%(repo_name)s/annotate/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
122 pyroutes.register('repo_files:annotated_previous', '/%(repo_name)s/annotate-previous/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
127 pyroutes.register('repo_files:annotated_previous', '/%(repo_name)s/annotate-previous/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
123 pyroutes.register('repo_nodetree_full', '/%(repo_name)s/nodetree_full/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
128 pyroutes.register('repo_nodetree_full', '/%(repo_name)s/nodetree_full/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
124 pyroutes.register('repo_nodetree_full:default_path', '/%(repo_name)s/nodetree_full/%(commit_id)s/', ['repo_name', 'commit_id']);
129 pyroutes.register('repo_nodetree_full:default_path', '/%(repo_name)s/nodetree_full/%(commit_id)s/', ['repo_name', 'commit_id']);
125 pyroutes.register('repo_files_nodelist', '/%(repo_name)s/nodelist/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
130 pyroutes.register('repo_files_nodelist', '/%(repo_name)s/nodelist/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
126 pyroutes.register('repo_file_raw', '/%(repo_name)s/raw/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
131 pyroutes.register('repo_file_raw', '/%(repo_name)s/raw/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
127 pyroutes.register('repo_file_download', '/%(repo_name)s/download/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
132 pyroutes.register('repo_file_download', '/%(repo_name)s/download/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
128 pyroutes.register('repo_file_download:legacy', '/%(repo_name)s/rawfile/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
133 pyroutes.register('repo_file_download:legacy', '/%(repo_name)s/rawfile/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
129 pyroutes.register('repo_file_history', '/%(repo_name)s/history/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
134 pyroutes.register('repo_file_history', '/%(repo_name)s/history/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
130 pyroutes.register('repo_file_authors', '/%(repo_name)s/authors/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
135 pyroutes.register('repo_file_authors', '/%(repo_name)s/authors/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
131 pyroutes.register('repo_files_remove_file', '/%(repo_name)s/remove_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
136 pyroutes.register('repo_files_remove_file', '/%(repo_name)s/remove_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
132 pyroutes.register('repo_files_delete_file', '/%(repo_name)s/delete_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
137 pyroutes.register('repo_files_delete_file', '/%(repo_name)s/delete_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
133 pyroutes.register('repo_files_edit_file', '/%(repo_name)s/edit_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
138 pyroutes.register('repo_files_edit_file', '/%(repo_name)s/edit_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
134 pyroutes.register('repo_files_update_file', '/%(repo_name)s/update_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
139 pyroutes.register('repo_files_update_file', '/%(repo_name)s/update_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
135 pyroutes.register('repo_files_add_file', '/%(repo_name)s/add_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
140 pyroutes.register('repo_files_add_file', '/%(repo_name)s/add_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
136 pyroutes.register('repo_files_create_file', '/%(repo_name)s/create_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
141 pyroutes.register('repo_files_create_file', '/%(repo_name)s/create_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
137 pyroutes.register('repo_refs_data', '/%(repo_name)s/refs-data', ['repo_name']);
142 pyroutes.register('repo_refs_data', '/%(repo_name)s/refs-data', ['repo_name']);
138 pyroutes.register('repo_refs_changelog_data', '/%(repo_name)s/refs-data-changelog', ['repo_name']);
143 pyroutes.register('repo_refs_changelog_data', '/%(repo_name)s/refs-data-changelog', ['repo_name']);
139 pyroutes.register('repo_stats', '/%(repo_name)s/repo_stats/%(commit_id)s', ['repo_name', 'commit_id']);
144 pyroutes.register('repo_stats', '/%(repo_name)s/repo_stats/%(commit_id)s', ['repo_name', 'commit_id']);
140 pyroutes.register('repo_changelog', '/%(repo_name)s/changelog', ['repo_name']);
145 pyroutes.register('repo_changelog', '/%(repo_name)s/changelog', ['repo_name']);
141 pyroutes.register('repo_changelog_file', '/%(repo_name)s/changelog/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
146 pyroutes.register('repo_changelog_file', '/%(repo_name)s/changelog/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
142 pyroutes.register('repo_changelog_elements', '/%(repo_name)s/changelog_elements', ['repo_name']);
147 pyroutes.register('repo_changelog_elements', '/%(repo_name)s/changelog_elements', ['repo_name']);
143 pyroutes.register('tags_home', '/%(repo_name)s/tags', ['repo_name']);
148 pyroutes.register('tags_home', '/%(repo_name)s/tags', ['repo_name']);
144 pyroutes.register('branches_home', '/%(repo_name)s/branches', ['repo_name']);
149 pyroutes.register('branches_home', '/%(repo_name)s/branches', ['repo_name']);
145 pyroutes.register('bookmarks_home', '/%(repo_name)s/bookmarks', ['repo_name']);
150 pyroutes.register('bookmarks_home', '/%(repo_name)s/bookmarks', ['repo_name']);
146 pyroutes.register('pullrequest_show', '/%(repo_name)s/pull-request/%(pull_request_id)s', ['repo_name', 'pull_request_id']);
151 pyroutes.register('pullrequest_show', '/%(repo_name)s/pull-request/%(pull_request_id)s', ['repo_name', 'pull_request_id']);
147 pyroutes.register('pullrequest_show_all', '/%(repo_name)s/pull-request', ['repo_name']);
152 pyroutes.register('pullrequest_show_all', '/%(repo_name)s/pull-request', ['repo_name']);
148 pyroutes.register('pullrequest_show_all_data', '/%(repo_name)s/pull-request-data', ['repo_name']);
153 pyroutes.register('pullrequest_show_all_data', '/%(repo_name)s/pull-request-data', ['repo_name']);
149 pyroutes.register('changeset_home', '/%(repo_name)s/changeset/%(revision)s', ['repo_name', 'revision']);
150 pyroutes.register('changeset_children', '/%(repo_name)s/changeset_children/%(revision)s', ['repo_name', 'revision']);
151 pyroutes.register('changeset_parents', '/%(repo_name)s/changeset_parents/%(revision)s', ['repo_name', 'revision']);
152 pyroutes.register('edit_repo', '/%(repo_name)s/settings', ['repo_name']);
154 pyroutes.register('edit_repo', '/%(repo_name)s/settings', ['repo_name']);
153 pyroutes.register('edit_repo_advanced', '/%(repo_name)s/settings/advanced', ['repo_name']);
155 pyroutes.register('edit_repo_advanced', '/%(repo_name)s/settings/advanced', ['repo_name']);
154 pyroutes.register('edit_repo_advanced_delete', '/%(repo_name)s/settings/advanced/delete', ['repo_name']);
156 pyroutes.register('edit_repo_advanced_delete', '/%(repo_name)s/settings/advanced/delete', ['repo_name']);
155 pyroutes.register('edit_repo_advanced_locking', '/%(repo_name)s/settings/advanced/locking', ['repo_name']);
157 pyroutes.register('edit_repo_advanced_locking', '/%(repo_name)s/settings/advanced/locking', ['repo_name']);
156 pyroutes.register('edit_repo_advanced_journal', '/%(repo_name)s/settings/advanced/journal', ['repo_name']);
158 pyroutes.register('edit_repo_advanced_journal', '/%(repo_name)s/settings/advanced/journal', ['repo_name']);
157 pyroutes.register('edit_repo_advanced_fork', '/%(repo_name)s/settings/advanced/fork', ['repo_name']);
159 pyroutes.register('edit_repo_advanced_fork', '/%(repo_name)s/settings/advanced/fork', ['repo_name']);
158 pyroutes.register('edit_repo_caches', '/%(repo_name)s/settings/caches', ['repo_name']);
160 pyroutes.register('edit_repo_caches', '/%(repo_name)s/settings/caches', ['repo_name']);
159 pyroutes.register('edit_repo_perms', '/%(repo_name)s/settings/permissions', ['repo_name']);
161 pyroutes.register('edit_repo_perms', '/%(repo_name)s/settings/permissions', ['repo_name']);
160 pyroutes.register('repo_reviewers', '/%(repo_name)s/settings/review/rules', ['repo_name']);
162 pyroutes.register('repo_reviewers', '/%(repo_name)s/settings/review/rules', ['repo_name']);
161 pyroutes.register('repo_default_reviewers_data', '/%(repo_name)s/settings/review/default-reviewers', ['repo_name']);
163 pyroutes.register('repo_default_reviewers_data', '/%(repo_name)s/settings/review/default-reviewers', ['repo_name']);
162 pyroutes.register('repo_maintenance', '/%(repo_name)s/settings/maintenance', ['repo_name']);
164 pyroutes.register('repo_maintenance', '/%(repo_name)s/settings/maintenance', ['repo_name']);
163 pyroutes.register('repo_maintenance_execute', '/%(repo_name)s/settings/maintenance/execute', ['repo_name']);
165 pyroutes.register('repo_maintenance_execute', '/%(repo_name)s/settings/maintenance/execute', ['repo_name']);
164 pyroutes.register('strip', '/%(repo_name)s/settings/strip', ['repo_name']);
166 pyroutes.register('strip', '/%(repo_name)s/settings/strip', ['repo_name']);
165 pyroutes.register('strip_check', '/%(repo_name)s/settings/strip_check', ['repo_name']);
167 pyroutes.register('strip_check', '/%(repo_name)s/settings/strip_check', ['repo_name']);
166 pyroutes.register('strip_execute', '/%(repo_name)s/settings/strip_execute', ['repo_name']);
168 pyroutes.register('strip_execute', '/%(repo_name)s/settings/strip_execute', ['repo_name']);
167 pyroutes.register('rss_feed_home', '/%(repo_name)s/feed/rss', ['repo_name']);
169 pyroutes.register('rss_feed_home', '/%(repo_name)s/feed/rss', ['repo_name']);
168 pyroutes.register('atom_feed_home', '/%(repo_name)s/feed/atom', ['repo_name']);
170 pyroutes.register('atom_feed_home', '/%(repo_name)s/feed/atom', ['repo_name']);
169 pyroutes.register('repo_summary', '/%(repo_name)s', ['repo_name']);
171 pyroutes.register('repo_summary', '/%(repo_name)s', ['repo_name']);
170 pyroutes.register('repo_summary_slash', '/%(repo_name)s/', ['repo_name']);
172 pyroutes.register('repo_summary_slash', '/%(repo_name)s/', ['repo_name']);
171 pyroutes.register('repo_group_home', '/%(repo_group_name)s', ['repo_group_name']);
173 pyroutes.register('repo_group_home', '/%(repo_group_name)s', ['repo_group_name']);
172 pyroutes.register('repo_group_home_slash', '/%(repo_group_name)s/', ['repo_group_name']);
174 pyroutes.register('repo_group_home_slash', '/%(repo_group_name)s/', ['repo_group_name']);
173 pyroutes.register('search', '/_admin/search', []);
175 pyroutes.register('search', '/_admin/search', []);
174 pyroutes.register('search_repo', '/%(repo_name)s/search', ['repo_name']);
176 pyroutes.register('search_repo', '/%(repo_name)s/search', ['repo_name']);
175 pyroutes.register('user_profile', '/_profiles/%(username)s', ['username']);
177 pyroutes.register('user_profile', '/_profiles/%(username)s', ['username']);
176 pyroutes.register('my_account_profile', '/_admin/my_account/profile', []);
178 pyroutes.register('my_account_profile', '/_admin/my_account/profile', []);
177 pyroutes.register('my_account_edit', '/_admin/my_account/edit', []);
179 pyroutes.register('my_account_edit', '/_admin/my_account/edit', []);
178 pyroutes.register('my_account_update', '/_admin/my_account/update', []);
180 pyroutes.register('my_account_update', '/_admin/my_account/update', []);
179 pyroutes.register('my_account_password', '/_admin/my_account/password', []);
181 pyroutes.register('my_account_password', '/_admin/my_account/password', []);
180 pyroutes.register('my_account_password_update', '/_admin/my_account/password/update', []);
182 pyroutes.register('my_account_password_update', '/_admin/my_account/password/update', []);
181 pyroutes.register('my_account_auth_tokens', '/_admin/my_account/auth_tokens', []);
183 pyroutes.register('my_account_auth_tokens', '/_admin/my_account/auth_tokens', []);
182 pyroutes.register('my_account_auth_tokens_add', '/_admin/my_account/auth_tokens/new', []);
184 pyroutes.register('my_account_auth_tokens_add', '/_admin/my_account/auth_tokens/new', []);
183 pyroutes.register('my_account_auth_tokens_delete', '/_admin/my_account/auth_tokens/delete', []);
185 pyroutes.register('my_account_auth_tokens_delete', '/_admin/my_account/auth_tokens/delete', []);
184 pyroutes.register('my_account_emails', '/_admin/my_account/emails', []);
186 pyroutes.register('my_account_emails', '/_admin/my_account/emails', []);
185 pyroutes.register('my_account_emails_add', '/_admin/my_account/emails/new', []);
187 pyroutes.register('my_account_emails_add', '/_admin/my_account/emails/new', []);
186 pyroutes.register('my_account_emails_delete', '/_admin/my_account/emails/delete', []);
188 pyroutes.register('my_account_emails_delete', '/_admin/my_account/emails/delete', []);
187 pyroutes.register('my_account_repos', '/_admin/my_account/repos', []);
189 pyroutes.register('my_account_repos', '/_admin/my_account/repos', []);
188 pyroutes.register('my_account_watched', '/_admin/my_account/watched', []);
190 pyroutes.register('my_account_watched', '/_admin/my_account/watched', []);
189 pyroutes.register('my_account_perms', '/_admin/my_account/perms', []);
191 pyroutes.register('my_account_perms', '/_admin/my_account/perms', []);
190 pyroutes.register('my_account_notifications', '/_admin/my_account/notifications', []);
192 pyroutes.register('my_account_notifications', '/_admin/my_account/notifications', []);
191 pyroutes.register('my_account_notifications_toggle_visibility', '/_admin/my_account/toggle_visibility', []);
193 pyroutes.register('my_account_notifications_toggle_visibility', '/_admin/my_account/toggle_visibility', []);
192 pyroutes.register('my_account_pullrequests', '/_admin/my_account/pull_requests', []);
194 pyroutes.register('my_account_pullrequests', '/_admin/my_account/pull_requests', []);
193 pyroutes.register('my_account_pullrequests_data', '/_admin/my_account/pull_requests/data', []);
195 pyroutes.register('my_account_pullrequests_data', '/_admin/my_account/pull_requests/data', []);
194 pyroutes.register('notifications_show_all', '/_admin/notifications', []);
196 pyroutes.register('notifications_show_all', '/_admin/notifications', []);
195 pyroutes.register('notifications_mark_all_read', '/_admin/notifications/mark_all_read', []);
197 pyroutes.register('notifications_mark_all_read', '/_admin/notifications/mark_all_read', []);
196 pyroutes.register('notifications_show', '/_admin/notifications/%(notification_id)s', ['notification_id']);
198 pyroutes.register('notifications_show', '/_admin/notifications/%(notification_id)s', ['notification_id']);
197 pyroutes.register('notifications_update', '/_admin/notifications/%(notification_id)s/update', ['notification_id']);
199 pyroutes.register('notifications_update', '/_admin/notifications/%(notification_id)s/update', ['notification_id']);
198 pyroutes.register('notifications_delete', '/_admin/notifications/%(notification_id)s/delete', ['notification_id']);
200 pyroutes.register('notifications_delete', '/_admin/notifications/%(notification_id)s/delete', ['notification_id']);
199 pyroutes.register('my_account_notifications_test_channelstream', '/_admin/my_account/test_channelstream', []);
201 pyroutes.register('my_account_notifications_test_channelstream', '/_admin/my_account/test_channelstream', []);
200 pyroutes.register('gists_show', '/_admin/gists', []);
202 pyroutes.register('gists_show', '/_admin/gists', []);
201 pyroutes.register('gists_new', '/_admin/gists/new', []);
203 pyroutes.register('gists_new', '/_admin/gists/new', []);
202 pyroutes.register('gists_create', '/_admin/gists/create', []);
204 pyroutes.register('gists_create', '/_admin/gists/create', []);
203 pyroutes.register('gist_show', '/_admin/gists/%(gist_id)s', ['gist_id']);
205 pyroutes.register('gist_show', '/_admin/gists/%(gist_id)s', ['gist_id']);
204 pyroutes.register('gist_delete', '/_admin/gists/%(gist_id)s/delete', ['gist_id']);
206 pyroutes.register('gist_delete', '/_admin/gists/%(gist_id)s/delete', ['gist_id']);
205 pyroutes.register('gist_edit', '/_admin/gists/%(gist_id)s/edit', ['gist_id']);
207 pyroutes.register('gist_edit', '/_admin/gists/%(gist_id)s/edit', ['gist_id']);
206 pyroutes.register('gist_edit_check_revision', '/_admin/gists/%(gist_id)s/edit/check_revision', ['gist_id']);
208 pyroutes.register('gist_edit_check_revision', '/_admin/gists/%(gist_id)s/edit/check_revision', ['gist_id']);
207 pyroutes.register('gist_update', '/_admin/gists/%(gist_id)s/update', ['gist_id']);
209 pyroutes.register('gist_update', '/_admin/gists/%(gist_id)s/update', ['gist_id']);
208 pyroutes.register('gist_show_rev', '/_admin/gists/%(gist_id)s/%(revision)s', ['gist_id', 'revision']);
210 pyroutes.register('gist_show_rev', '/_admin/gists/%(gist_id)s/%(revision)s', ['gist_id', 'revision']);
209 pyroutes.register('gist_show_formatted', '/_admin/gists/%(gist_id)s/%(revision)s/%(format)s', ['gist_id', 'revision', 'format']);
211 pyroutes.register('gist_show_formatted', '/_admin/gists/%(gist_id)s/%(revision)s/%(format)s', ['gist_id', 'revision', 'format']);
210 pyroutes.register('gist_show_formatted_path', '/_admin/gists/%(gist_id)s/%(revision)s/%(format)s/%(f_path)s', ['gist_id', 'revision', 'format', 'f_path']);
212 pyroutes.register('gist_show_formatted_path', '/_admin/gists/%(gist_id)s/%(revision)s/%(format)s/%(f_path)s', ['gist_id', 'revision', 'format', 'f_path']);
211 pyroutes.register('debug_style_home', '/_admin/debug_style', []);
213 pyroutes.register('debug_style_home', '/_admin/debug_style', []);
212 pyroutes.register('debug_style_template', '/_admin/debug_style/t/%(t_path)s', ['t_path']);
214 pyroutes.register('debug_style_template', '/_admin/debug_style/t/%(t_path)s', ['t_path']);
213 pyroutes.register('apiv2', '/_admin/api', []);
215 pyroutes.register('apiv2', '/_admin/api', []);
214 }
216 }
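A minimal consumption sketch (illustrative values): once registerRCRoutes() has run, client-side code resolves these patterns through pyroutes.url(), as the comment form code below does:

    pyroutes.url('repo_commit', {'repo_name': 'my-repo', 'commit_id': 'abcdef12'});
    // -> '/my-repo/changeset/abcdef12'
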
@@ -1,830 +1,831 b''
1 // # Copyright (C) 2010-2017 RhodeCode GmbH
1 // # Copyright (C) 2010-2017 RhodeCode GmbH
2 // #
2 // #
3 // # This program is free software: you can redistribute it and/or modify
3 // # This program is free software: you can redistribute it and/or modify
4 // # it under the terms of the GNU Affero General Public License, version 3
4 // # it under the terms of the GNU Affero General Public License, version 3
5 // # (only), as published by the Free Software Foundation.
5 // # (only), as published by the Free Software Foundation.
6 // #
6 // #
7 // # This program is distributed in the hope that it will be useful,
7 // # This program is distributed in the hope that it will be useful,
8 // # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 // # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 // # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 // # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 // # GNU General Public License for more details.
10 // # GNU General Public License for more details.
11 // #
11 // #
12 // # You should have received a copy of the GNU Affero General Public License
12 // # You should have received a copy of the GNU Affero General Public License
13 // # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 // # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 // #
14 // #
15 // # This program is dual-licensed. If you wish to learn more about the
15 // # This program is dual-licensed. If you wish to learn more about the
16 // # RhodeCode Enterprise Edition, including its added features, Support services,
16 // # RhodeCode Enterprise Edition, including its added features, Support services,
17 // # and proprietary license terms, please see https://rhodecode.com/licenses/
17 // # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 var firefoxAnchorFix = function() {
19 var firefoxAnchorFix = function() {
20 // hack to make anchor links behave properly on firefox, in our inline
20 // hack to make anchor links behave properly on firefox, in our inline
21 // comments generation when comments are injected firefox is misbehaving
21 // comments generation when comments are injected firefox is misbehaving
22 // when jumping to anchor links
22 // when jumping to anchor links
23 if (location.href.indexOf('#') > -1) {
23 if (location.href.indexOf('#') > -1) {
24 location.href += '';
24 location.href += '';
25 }
25 }
26 };
26 };
27
27
28 var linkifyComments = function(comments) {
28 var linkifyComments = function(comments) {
29 var firstCommentId = null;
29 var firstCommentId = null;
30 if (comments) {
30 if (comments) {
31 firstCommentId = $(comments[0]).data('comment-id');
31 firstCommentId = $(comments[0]).data('comment-id');
32 }
32 }
33
33
34 if (firstCommentId){
34 if (firstCommentId){
35 $('#inline-comments-counter').attr('href', '#comment-' + firstCommentId);
35 $('#inline-comments-counter').attr('href', '#comment-' + firstCommentId);
36 }
36 }
37 };
37 };
38
38
39 var bindToggleButtons = function() {
39 var bindToggleButtons = function() {
40 $('.comment-toggle').on('click', function() {
40 $('.comment-toggle').on('click', function() {
41 $(this).parent().nextUntil('tr.line').toggle('inline-comments');
41 $(this).parent().nextUntil('tr.line').toggle('inline-comments');
42 });
42 });
43 };
43 };
44
44
45 /* Comment form for main and inline comments */
45 /* Comment form for main and inline comments */
46 (function(mod) {
46 (function(mod) {
47
47
48 if (typeof exports == "object" && typeof module == "object") {
48 if (typeof exports == "object" && typeof module == "object") {
49 // CommonJS
49 // CommonJS
50 module.exports = mod();
50 module.exports = mod();
51 }
51 }
52 else {
52 else {
53 // Plain browser env
53 // Plain browser env
54 (this || window).CommentForm = mod();
54 (this || window).CommentForm = mod();
55 }
55 }
56
56
57 })(function() {
57 })(function() {
58 "use strict";
58 "use strict";
59
59
60 function CommentForm(formElement, commitId, pullRequestId, lineNo, initAutocompleteActions, resolvesCommentId) {
60 function CommentForm(formElement, commitId, pullRequestId, lineNo, initAutocompleteActions, resolvesCommentId) {
61 if (!(this instanceof CommentForm)) {
61 if (!(this instanceof CommentForm)) {
62 return new CommentForm(formElement, commitId, pullRequestId, lineNo, initAutocompleteActions, resolvesCommentId);
62 return new CommentForm(formElement, commitId, pullRequestId, lineNo, initAutocompleteActions, resolvesCommentId);
63 }
63 }
64
64
65 // bind the element instance to our Form
65 // bind the element instance to our Form
66 $(formElement).get(0).CommentForm = this;
66 $(formElement).get(0).CommentForm = this;
67
67
68 this.withLineNo = function(selector) {
68 this.withLineNo = function(selector) {
69 var lineNo = this.lineNo;
69 var lineNo = this.lineNo;
70 if (lineNo === undefined) {
70 if (lineNo === undefined) {
71 return selector
71 return selector
72 } else {
72 } else {
73 return selector + '_' + lineNo;
73 return selector + '_' + lineNo;
74 }
74 }
75 };
75 };
76
76
77 this.commitId = commitId;
77 this.commitId = commitId;
78 this.pullRequestId = pullRequestId;
78 this.pullRequestId = pullRequestId;
79 this.lineNo = lineNo;
79 this.lineNo = lineNo;
80 this.initAutocompleteActions = initAutocompleteActions;
80 this.initAutocompleteActions = initAutocompleteActions;
81
81
82 this.previewButton = this.withLineNo('#preview-btn');
82 this.previewButton = this.withLineNo('#preview-btn');
83 this.previewContainer = this.withLineNo('#preview-container');
83 this.previewContainer = this.withLineNo('#preview-container');
84
84
85 this.previewBoxSelector = this.withLineNo('#preview-box');
85 this.previewBoxSelector = this.withLineNo('#preview-box');
86
86
87 this.editButton = this.withLineNo('#edit-btn');
87 this.editButton = this.withLineNo('#edit-btn');
88 this.editContainer = this.withLineNo('#edit-container');
88 this.editContainer = this.withLineNo('#edit-container');
89 this.cancelButton = this.withLineNo('#cancel-btn');
89 this.cancelButton = this.withLineNo('#cancel-btn');
90 this.commentType = this.withLineNo('#comment_type');
90 this.commentType = this.withLineNo('#comment_type');
91
91
92 this.resolvesId = null;
92 this.resolvesId = null;
93 this.resolvesActionId = null;
93 this.resolvesActionId = null;
94
94
95 this.closesPr = '#close_pull_request';
95 this.closesPr = '#close_pull_request';
96
96
97 this.cmBox = this.withLineNo('#text');
97 this.cmBox = this.withLineNo('#text');
98 this.cm = initCommentBoxCodeMirror(this, this.cmBox, this.initAutocompleteActions);
98 this.cm = initCommentBoxCodeMirror(this, this.cmBox, this.initAutocompleteActions);
99
99
100 this.statusChange = this.withLineNo('#change_status');
100 this.statusChange = this.withLineNo('#change_status');
101
101
102 this.submitForm = formElement;
102 this.submitForm = formElement;
103 this.submitButton = $(this.submitForm).find('input[type="submit"]');
103 this.submitButton = $(this.submitForm).find('input[type="submit"]');
104 this.submitButtonText = this.submitButton.val();
104 this.submitButtonText = this.submitButton.val();
105
105
106 this.previewUrl = pyroutes.url('changeset_comment_preview',
106 this.previewUrl = pyroutes.url('repo_commit_comment_preview',
107 {'repo_name': templateContext.repo_name});
107 {'repo_name': templateContext.repo_name,
108 'commit_id': templateContext.commit_data.commit_id});
108
109
109 if (resolvesCommentId){
110 if (resolvesCommentId){
110 this.resolvesId = '#resolve_comment_{0}'.format(resolvesCommentId);
111 this.resolvesId = '#resolve_comment_{0}'.format(resolvesCommentId);
111 this.resolvesActionId = '#resolve_comment_action_{0}'.format(resolvesCommentId);
112 this.resolvesActionId = '#resolve_comment_action_{0}'.format(resolvesCommentId);
112 $(this.commentType).prop('disabled', true);
113 $(this.commentType).prop('disabled', true);
113 $(this.commentType).addClass('disabled');
114 $(this.commentType).addClass('disabled');
114
115
115 // disable select
116 // disable select
116 setTimeout(function() {
117 setTimeout(function() {
117 $(self.statusChange).select2('readonly', true);
118 $(self.statusChange).select2('readonly', true);
118 }, 10);
119 }, 10);
119
120
120 var resolvedInfo = (
121 var resolvedInfo = (
121 '<li class="resolve-action">' +
122 '<li class="resolve-action">' +
122 '<input type="hidden" id="resolve_comment_{0}" name="resolve_comment_{0}" value="{0}">' +
123 '<input type="hidden" id="resolve_comment_{0}" name="resolve_comment_{0}" value="{0}">' +
123 '<button id="resolve_comment_action_{0}" class="resolve-text btn btn-sm" onclick="return Rhodecode.comments.submitResolution({0})">{1} #{0}</button>' +
124 '<button id="resolve_comment_action_{0}" class="resolve-text btn btn-sm" onclick="return Rhodecode.comments.submitResolution({0})">{1} #{0}</button>' +
124 '</li>'
125 '</li>'
125 ).format(resolvesCommentId, _gettext('resolve comment'));
126 ).format(resolvesCommentId, _gettext('resolve comment'));
126 $(resolvedInfo).insertAfter($(this.commentType).parent());
127 $(resolvedInfo).insertAfter($(this.commentType).parent());
127 }
128 }
128
129
129 // based on commitId or pullRequestId, decide where we submit
130 // based on commitId or pullRequestId, decide where we submit
130 // our data
131 // our data
131 if (this.commitId){
132 if (this.commitId){
132 this.submitUrl = pyroutes.url('changeset_comment',
133 this.submitUrl = pyroutes.url('repo_commit_comment_create',
133 {'repo_name': templateContext.repo_name,
134 {'repo_name': templateContext.repo_name,
134 'revision': this.commitId});
135 'commit_id': this.commitId});
135 this.selfUrl = pyroutes.url('changeset_home',
136 this.selfUrl = pyroutes.url('repo_commit',
136 {'repo_name': templateContext.repo_name,
137 {'repo_name': templateContext.repo_name,
137 'revision': this.commitId});
138 'commit_id': this.commitId});
138
139
139 } else if (this.pullRequestId) {
140 } else if (this.pullRequestId) {
140 this.submitUrl = pyroutes.url('pullrequest_comment',
141 this.submitUrl = pyroutes.url('pullrequest_comment',
141 {'repo_name': templateContext.repo_name,
142 {'repo_name': templateContext.repo_name,
142 'pull_request_id': this.pullRequestId});
143 'pull_request_id': this.pullRequestId});
143 this.selfUrl = pyroutes.url('pullrequest_show',
144 this.selfUrl = pyroutes.url('pullrequest_show',
144 {'repo_name': templateContext.repo_name,
145 {'repo_name': templateContext.repo_name,
145 'pull_request_id': this.pullRequestId});
146 'pull_request_id': this.pullRequestId});
146
147
147 } else {
148 } else {
148 throw new Error(
149 throw new Error(
149 'CommentForm requires pullRequestId, or commitId to be specified.')
150 'CommentForm requires pullRequestId, or commitId to be specified.')
150 }
151 }
151
152
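// Illustrative resolved values (repo/commit are example data): for a commit
// comment, submitUrl follows 'repo_commit_comment_create', e.g.
// '/my-repo/changeset/abcdef12/comment/create', while selfUrl follows
// 'repo_commit', e.g. '/my-repo/changeset/abcdef12'.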
152 // FUNCTIONS and helpers
153 // FUNCTIONS and helpers
153 var self = this;
154 var self = this;
154
155
155 this.isInline = function(){
156 this.isInline = function(){
156 return this.lineNo && this.lineNo != 'general';
157 return this.lineNo && this.lineNo != 'general';
157 };
158 };
158
159
159 this.getCmInstance = function(){
160 this.getCmInstance = function(){
160 return this.cm
161 return this.cm
161 };
162 };
162
163
163 this.setPlaceholder = function(placeholder) {
164 this.setPlaceholder = function(placeholder) {
164 var cm = this.getCmInstance();
165 var cm = this.getCmInstance();
165 if (cm){
166 if (cm){
166 cm.setOption('placeholder', placeholder);
167 cm.setOption('placeholder', placeholder);
167 }
168 }
168 };
169 };
169
170
170 this.getCommentStatus = function() {
171 this.getCommentStatus = function() {
171 return $(this.submitForm).find(this.statusChange).val();
172 return $(this.submitForm).find(this.statusChange).val();
172 };
173 };
173 this.getCommentType = function() {
174 this.getCommentType = function() {
174 return $(this.submitForm).find(this.commentType).val();
175 return $(this.submitForm).find(this.commentType).val();
175 };
176 };
176
177
177 this.getResolvesId = function() {
178 this.getResolvesId = function() {
178 return $(this.submitForm).find(this.resolvesId).val() || null;
179 return $(this.submitForm).find(this.resolvesId).val() || null;
179 };
180 };
180
181
181 this.getClosePr = function() {
182 this.getClosePr = function() {
182 return $(this.submitForm).find(this.closesPr).val() || null;
183 return $(this.submitForm).find(this.closesPr).val() || null;
183 };
184 };
184
185
185 this.markCommentResolved = function(resolvedCommentId){
186 this.markCommentResolved = function(resolvedCommentId){
186 $('#comment-label-{0}'.format(resolvedCommentId)).find('.resolved').show();
187 $('#comment-label-{0}'.format(resolvedCommentId)).find('.resolved').show();
187 $('#comment-label-{0}'.format(resolvedCommentId)).find('.resolve').hide();
188 $('#comment-label-{0}'.format(resolvedCommentId)).find('.resolve').hide();
188 };
189 };
189
190
190 this.isAllowedToSubmit = function() {
191 this.isAllowedToSubmit = function() {
191 return !$(this.submitButton).prop('disabled');
192 return !$(this.submitButton).prop('disabled');
192 };
193 };
193
194
194 this.initStatusChangeSelector = function(){
195 this.initStatusChangeSelector = function(){
195 var formatChangeStatus = function(state, escapeMarkup) {
196 var formatChangeStatus = function(state, escapeMarkup) {
196 var originalOption = state.element;
197 var originalOption = state.element;
197 return '<div class="flag_status ' + $(originalOption).data('status') + ' pull-left"></div>' +
198 return '<div class="flag_status ' + $(originalOption).data('status') + ' pull-left"></div>' +
198 '<span>' + escapeMarkup(state.text) + '</span>';
199 '<span>' + escapeMarkup(state.text) + '</span>';
199 };
200 };
200 var formatResult = function(result, container, query, escapeMarkup) {
201 var formatResult = function(result, container, query, escapeMarkup) {
201 return formatChangeStatus(result, escapeMarkup);
202 return formatChangeStatus(result, escapeMarkup);
202 };
203 };
203
204
204 var formatSelection = function(data, container, escapeMarkup) {
205 var formatSelection = function(data, container, escapeMarkup) {
205 return formatChangeStatus(data, escapeMarkup);
206 return formatChangeStatus(data, escapeMarkup);
206 };
207 };
207
208
208 $(this.submitForm).find(this.statusChange).select2({
209 $(this.submitForm).find(this.statusChange).select2({
209 placeholder: _gettext('Status Review'),
210 placeholder: _gettext('Status Review'),
210 formatResult: formatResult,
211 formatResult: formatResult,
211 formatSelection: formatSelection,
212 formatSelection: formatSelection,
212 containerCssClass: "drop-menu status_box_menu",
213 containerCssClass: "drop-menu status_box_menu",
213 dropdownCssClass: "drop-menu-dropdown",
214 dropdownCssClass: "drop-menu-dropdown",
214 dropdownAutoWidth: true,
215 dropdownAutoWidth: true,
215 minimumResultsForSearch: -1
216 minimumResultsForSearch: -1
216 });
217 });
217 $(this.submitForm).find(this.statusChange).on('change', function() {
218 $(this.submitForm).find(this.statusChange).on('change', function() {
218 var status = self.getCommentStatus();
219 var status = self.getCommentStatus();
219
220
220 if (status && !self.isInline()) {
221 if (status && !self.isInline()) {
221 $(self.submitButton).prop('disabled', false);
222 $(self.submitButton).prop('disabled', false);
222 }
223 }
223
224
224 var placeholderText = _gettext('Comment text will be set automatically based on currently selected status ({0}) ...').format(status);
225 var placeholderText = _gettext('Comment text will be set automatically based on currently selected status ({0}) ...').format(status);
225 self.setPlaceholder(placeholderText)
226 self.setPlaceholder(placeholderText)
226 })
227 })
227 };
228 };
228
229
229 // reset the comment form into its original state
230 // reset the comment form into its original state
230 this.resetCommentFormState = function(content) {
231 this.resetCommentFormState = function(content) {
231 content = content || '';
232 content = content || '';
232
233
233 $(this.editContainer).show();
234 $(this.editContainer).show();
234 $(this.editButton).parent().addClass('active');
235 $(this.editButton).parent().addClass('active');
235
236
236 $(this.previewContainer).hide();
237 $(this.previewContainer).hide();
237 $(this.previewButton).parent().removeClass('active');
238 $(this.previewButton).parent().removeClass('active');
238
239
239 this.setActionButtonsDisabled(true);
240 this.setActionButtonsDisabled(true);
240 self.cm.setValue(content);
241 self.cm.setValue(content);
241 self.cm.setOption("readOnly", false);
242 self.cm.setOption("readOnly", false);
242
243
243 if (this.resolvesId) {
244 if (this.resolvesId) {
244 // destroy the resolve action
245 // destroy the resolve action
245 $(this.resolvesId).parent().remove();
246 $(this.resolvesId).parent().remove();
246 }
247 }
247 // reset closingPR flag
248 // reset closingPR flag
248 $('.close-pr-input').remove();
249 $('.close-pr-input').remove();
249
250
250 $(this.statusChange).select2('readonly', false);
251 $(this.statusChange).select2('readonly', false);
251 };
252 };
252
253
253 this.globalSubmitSuccessCallback = function(){
254 this.globalSubmitSuccessCallback = function(){
254 // default behaviour is to call GLOBAL hook, if it's registered.
255 // default behaviour is to call GLOBAL hook, if it's registered.
255 if (window.commentFormGlobalSubmitSuccessCallback !== undefined){
256 if (window.commentFormGlobalSubmitSuccessCallback !== undefined){
256 commentFormGlobalSubmitSuccessCallback()
257 commentFormGlobalSubmitSuccessCallback()
257 }
258 }
258 };
259 };
259
260
260 this.submitAjaxPOST = function(url, postData, successHandler, failHandler) {
261 this.submitAjaxPOST = function(url, postData, successHandler, failHandler) {
261 failHandler = failHandler || function() {};
262 failHandler = failHandler || function() {};
262 postData = toQueryString(postData);
263 postData = toQueryString(postData);
263 var request = $.ajax({
264 var request = $.ajax({
264 url: url,
265 url: url,
265 type: 'POST',
266 type: 'POST',
266 data: postData,
267 data: postData,
267 headers: {'X-PARTIAL-XHR': true}
268 headers: {'X-PARTIAL-XHR': true}
268 })
269 })
269 .done(function(data) {
270 .done(function(data) {
270 successHandler(data);
271 successHandler(data);
271 })
272 })
272 .fail(function(data, textStatus, errorThrown){
273 .fail(function(data, textStatus, errorThrown){
273 alert(
274 alert(
274 "Error while submitting comment.\n" +
275 "Error while submitting comment.\n" +
275 "Error code {0} ({1}).".format(data.status, data.statusText));
276 "Error code {0} ({1}).".format(data.status, data.statusText));
276 failHandler()
277 failHandler()
277 });
278 });
278 return request;
279 return request;
279 };
280 };
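// Editorial usage sketch (assumes this.previewUrl is wired by the enclosing
// template, as in the preview handler further below): the same AJAX helper
// drives both comment submission and markup preview.
//
//   self.submitAjaxPOST(
//       self.previewUrl,
//       {'text': 'hello *world*',
//        'renderer': templateContext.visual.default_renderer,
//        'csrf_token': CSRF_TOKEN},
//       self.previewSuccessCallback,
//       function() { self.resetCommentFormState('hello *world*'); });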
280
281
281 // overwrite the submitHandler; we need to do it for inline comments
282 // overwrite the submitHandler; we need to do it for inline comments
282 this.setHandleFormSubmit = function(callback) {
283 this.setHandleFormSubmit = function(callback) {
283 this.handleFormSubmit = callback;
284 this.handleFormSubmit = callback;
284 };
285 };
285
286
286 // overwrite a submitSuccessHandler
287 // overwrite a submitSuccessHandler
287 this.setGlobalSubmitSuccessCallback = function(callback) {
288 this.setGlobalSubmitSuccessCallback = function(callback) {
288 this.globalSubmitSuccessCallback = callback;
289 this.globalSubmitSuccessCallback = callback;
289 };
290 };
290
291
291 // default submit handler for main comments
292 // default submit handler for main comments
292 this.handleFormSubmit = function() {
293 this.handleFormSubmit = function() {
293 var text = self.cm.getValue();
294 var text = self.cm.getValue();
294 var status = self.getCommentStatus();
295 var status = self.getCommentStatus();
295 var commentType = self.getCommentType();
296 var commentType = self.getCommentType();
296 var resolvesCommentId = self.getResolvesId();
297 var resolvesCommentId = self.getResolvesId();
297 var closePullRequest = self.getClosePr();
298 var closePullRequest = self.getClosePr();
298
299
299 if (text === "" && !status) {
300 if (text === "" && !status) {
300 return;
301 return;
301 }
302 }
302
303
303 var excludeCancelBtn = false;
304 var excludeCancelBtn = false;
304 var submitEvent = true;
305 var submitEvent = true;
305 self.setActionButtonsDisabled(true, excludeCancelBtn, submitEvent);
306 self.setActionButtonsDisabled(true, excludeCancelBtn, submitEvent);
306 self.cm.setOption("readOnly", true);
307 self.cm.setOption("readOnly", true);
307
308
308 var postData = {
309 var postData = {
309 'text': text,
310 'text': text,
310 'changeset_status': status,
311 'changeset_status': status,
311 'comment_type': commentType,
312 'comment_type': commentType,
312 'csrf_token': CSRF_TOKEN
313 'csrf_token': CSRF_TOKEN
313 };
314 };
314
315
315 if (resolvesCommentId) {
316 if (resolvesCommentId) {
316 postData['resolves_comment_id'] = resolvesCommentId;
317 postData['resolves_comment_id'] = resolvesCommentId;
317 }
318 }
318
319
319 if (closePullRequest) {
320 if (closePullRequest) {
320 postData['close_pull_request'] = true;
321 postData['close_pull_request'] = true;
321 }
322 }
322
323
323 var submitSuccessCallback = function(o) {
324 var submitSuccessCallback = function(o) {
324 // reload page if we change status for single commit.
325 // reload page if we change status for single commit.
325 if (status && self.commitId) {
326 if (status && self.commitId) {
326 location.reload(true);
327 location.reload(true);
327 } else {
328 } else {
328 $('#injected_page_comments').append(o.rendered_text);
329 $('#injected_page_comments').append(o.rendered_text);
329 self.resetCommentFormState();
330 self.resetCommentFormState();
330 timeagoActivate();
331 timeagoActivate();
331
332
332 // mark visually which comment was resolved
333 // mark visually which comment was resolved
333 if (resolvesCommentId) {
334 if (resolvesCommentId) {
334 self.markCommentResolved(resolvesCommentId);
335 self.markCommentResolved(resolvesCommentId);
335 }
336 }
336 }
337 }
337
338
338 // run global callback on submit
339 // run global callback on submit
339 self.globalSubmitSuccessCallback();
340 self.globalSubmitSuccessCallback();
340
341
341 };
342 };
342 var submitFailCallback = function(){
343 var submitFailCallback = function(){
343 self.resetCommentFormState(text);
344 self.resetCommentFormState(text);
344 };
345 };
345 self.submitAjaxPOST(
346 self.submitAjaxPOST(
346 self.submitUrl, postData, submitSuccessCallback, submitFailCallback);
347 self.submitUrl, postData, submitSuccessCallback, submitFailCallback);
347 };
348 };
348
349
349 this.previewSuccessCallback = function(o) {
350 this.previewSuccessCallback = function(o) {
350 $(self.previewBoxSelector).html(o);
351 $(self.previewBoxSelector).html(o);
351 $(self.previewBoxSelector).removeClass('unloaded');
352 $(self.previewBoxSelector).removeClass('unloaded');
352
353
353 // swap buttons, making preview active
354 // swap buttons, making preview active
354 $(self.previewButton).parent().addClass('active');
355 $(self.previewButton).parent().addClass('active');
355 $(self.editButton).parent().removeClass('active');
356 $(self.editButton).parent().removeClass('active');
356
357
357 // unlock buttons
358 // unlock buttons
358 self.setActionButtonsDisabled(false);
359 self.setActionButtonsDisabled(false);
359 };
360 };
360
361
361 this.setActionButtonsDisabled = function(state, excludeCancelBtn, submitEvent) {
362 this.setActionButtonsDisabled = function(state, excludeCancelBtn, submitEvent) {
362 excludeCancelBtn = excludeCancelBtn || false;
363 excludeCancelBtn = excludeCancelBtn || false;
363 submitEvent = submitEvent || false;
364 submitEvent = submitEvent || false;
364
365
365 $(this.editButton).prop('disabled', state);
366 $(this.editButton).prop('disabled', state);
366 $(this.previewButton).prop('disabled', state);
367 $(this.previewButton).prop('disabled', state);
367
368
368 if (!excludeCancelBtn) {
369 if (!excludeCancelBtn) {
369 $(this.cancelButton).prop('disabled', state);
370 $(this.cancelButton).prop('disabled', state);
370 }
371 }
371
372
372 var submitState = state;
373 var submitState = state;
373 if (!submitEvent && this.getCommentStatus() && !self.isInline()) {
374 if (!submitEvent && this.getCommentStatus() && !self.isInline()) {
374 // if the value of commit review status is set, we allow
375 // if the value of commit review status is set, we allow
375 // submit button, but only on Main form, isInline means inline
376 // submit button, but only on Main form, isInline means inline
376 submitState = false
377 submitState = false
377 }
378 }
378
379
379 $(this.submitButton).prop('disabled', submitState);
380 $(this.submitButton).prop('disabled', submitState);
380 if (submitEvent) {
381 if (submitEvent) {
381 $(this.submitButton).val(_gettext('Submitting...'));
382 $(this.submitButton).val(_gettext('Submitting...'));
382 } else {
383 } else {
383 $(this.submitButton).val(this.submitButtonText);
384 $(this.submitButton).val(this.submitButtonText);
384 }
385 }
385
386
386 };
387 };
387
388
388 // lock preview/edit/submit buttons on load, but exclude cancel button
389 // lock preview/edit/submit buttons on load, but exclude cancel button
389 var excludeCancelBtn = true;
390 var excludeCancelBtn = true;
390 this.setActionButtonsDisabled(true, excludeCancelBtn);
391 this.setActionButtonsDisabled(true, excludeCancelBtn);
391
392
392 // anonymous users don't have access to initialized CM instance
393 // anonymous users don't have access to initialized CM instance
393 if (this.cm !== undefined){
394 if (this.cm !== undefined){
394 this.cm.on('change', function(cMirror) {
395 this.cm.on('change', function(cMirror) {
395 if (cMirror.getValue() === "") {
396 if (cMirror.getValue() === "") {
396 self.setActionButtonsDisabled(true, excludeCancelBtn)
397 self.setActionButtonsDisabled(true, excludeCancelBtn)
397 } else {
398 } else {
398 self.setActionButtonsDisabled(false, excludeCancelBtn)
399 self.setActionButtonsDisabled(false, excludeCancelBtn)
399 }
400 }
400 });
401 });
401 }
402 }
402
403
403 $(this.editButton).on('click', function(e) {
404 $(this.editButton).on('click', function(e) {
404 e.preventDefault();
405 e.preventDefault();
405
406
406 $(self.previewButton).parent().removeClass('active');
407 $(self.previewButton).parent().removeClass('active');
407 $(self.previewContainer).hide();
408 $(self.previewContainer).hide();
408
409
409 $(self.editButton).parent().addClass('active');
410 $(self.editButton).parent().addClass('active');
410 $(self.editContainer).show();
411 $(self.editContainer).show();
411
412
412 });
413 });
413
414
414 $(this.previewButton).on('click', function(e) {
415 $(this.previewButton).on('click', function(e) {
415 e.preventDefault();
416 e.preventDefault();
416 var text = self.cm.getValue();
417 var text = self.cm.getValue();
417
418
418 if (text === "") {
419 if (text === "") {
419 return;
420 return;
420 }
421 }
421
422
422 var postData = {
423 var postData = {
423 'text': text,
424 'text': text,
424 'renderer': templateContext.visual.default_renderer,
425 'renderer': templateContext.visual.default_renderer,
425 'csrf_token': CSRF_TOKEN
426 'csrf_token': CSRF_TOKEN
426 };
427 };
427
428
428 // lock ALL buttons on preview
429 // lock ALL buttons on preview
429 self.setActionButtonsDisabled(true);
430 self.setActionButtonsDisabled(true);
430
431
431 $(self.previewBoxSelector).addClass('unloaded');
432 $(self.previewBoxSelector).addClass('unloaded');
432 $(self.previewBoxSelector).html(_gettext('Loading ...'));
433 $(self.previewBoxSelector).html(_gettext('Loading ...'));
433
434
434 $(self.editContainer).hide();
435 $(self.editContainer).hide();
435 $(self.previewContainer).show();
436 $(self.previewContainer).show();
436
437
437 // by default we reset state of comment preserving the text
438 // by default we reset state of comment preserving the text
438 var previewFailCallback = function(){
439 var previewFailCallback = function(){
439 self.resetCommentFormState(text)
440 self.resetCommentFormState(text)
440 };
441 };
441 self.submitAjaxPOST(
442 self.submitAjaxPOST(
442 self.previewUrl, postData, self.previewSuccessCallback,
443 self.previewUrl, postData, self.previewSuccessCallback,
443 previewFailCallback);
444 previewFailCallback);
444
445
445 $(self.previewButton).parent().addClass('active');
446 $(self.previewButton).parent().addClass('active');
446 $(self.editButton).parent().removeClass('active');
447 $(self.editButton).parent().removeClass('active');
447 });
448 });
448
449
449 $(this.submitForm).submit(function(e) {
450 $(this.submitForm).submit(function(e) {
450 e.preventDefault();
451 e.preventDefault();
451 var allowedToSubmit = self.isAllowedToSubmit();
452 var allowedToSubmit = self.isAllowedToSubmit();
452 if (!allowedToSubmit){
453 if (!allowedToSubmit){
453 return false;
454 return false;
454 }
455 }
455 self.handleFormSubmit();
456 self.handleFormSubmit();
456 });
457 });
457
458
458 }
459 }
459
460
460 return CommentForm;
461 return CommentForm;
461 });
462 });
462
463
463 /* comments controller */
464 /* comments controller */
464 var CommentsController = function() {
465 var CommentsController = function() {
465 var mainComment = '#text';
466 var mainComment = '#text';
466 var self = this;
467 var self = this;
467
468
468 this.cancelComment = function(node) {
469 this.cancelComment = function(node) {
469 var $node = $(node);
470 var $node = $(node);
470 var $td = $node.closest('td');
471 var $td = $node.closest('td');
471 $node.closest('.comment-inline-form').remove();
472 $node.closest('.comment-inline-form').remove();
472 return false;
473 return false;
473 };
474 };
474
475
475 this.getLineNumber = function(node) {
476 this.getLineNumber = function(node) {
476 var $node = $(node);
477 var $node = $(node);
477 return $node.closest('td').attr('data-line-number');
478 return $node.closest('td').attr('data-line-number');
478 };
479 };
479
480
480 this.scrollToComment = function(node, offset, outdated) {
481 this.scrollToComment = function(node, offset, outdated) {
481 if (offset === undefined) {
482 if (offset === undefined) {
482 offset = 0;
483 offset = 0;
483 }
484 }
484 var outdated = outdated || false;
485 var outdated = outdated || false;
485 var klass = outdated ? 'div.comment-outdated' : 'div.comment-current';
486 var klass = outdated ? 'div.comment-outdated' : 'div.comment-current';
486
487
487 if (!node) {
488 if (!node) {
488 node = $('.comment-selected');
489 node = $('.comment-selected');
489 if (!node.length) {
490 if (!node.length) {
490 node = $('.comment-current');
491 node = $('.comment-current');
491 }
492 }
492 }
493 }
493 var $wrapper = $(node).closest('div.comment');
494 var $wrapper = $(node).closest('div.comment');
494 var $comment = $(node).closest(klass);
495 var $comment = $(node).closest(klass);
495 var $comments = $(klass);
496 var $comments = $(klass);
496
497
497 // show hidden comment when referenced.
498 // show hidden comment when referenced.
498 if (!$wrapper.is(':visible')){
499 if (!$wrapper.is(':visible')){
499 $wrapper.show();
500 $wrapper.show();
500 }
501 }
501
502
502 $('.comment-selected').removeClass('comment-selected');
503 $('.comment-selected').removeClass('comment-selected');
503
504
504 var nextIdx = $(klass).index($comment) + offset;
505 var nextIdx = $(klass).index($comment) + offset;
505 if (nextIdx >= $comments.length) {
506 if (nextIdx >= $comments.length) {
506 nextIdx = 0;
507 nextIdx = 0;
507 }
508 }
508 var $next = $(klass).eq(nextIdx);
509 var $next = $(klass).eq(nextIdx);
509
510
510 var $cb = $next.closest('.cb');
511 var $cb = $next.closest('.cb');
511 $cb.removeClass('cb-collapsed');
512 $cb.removeClass('cb-collapsed');
512
513
513 var $filediffCollapseState = $cb.closest('.filediff').prev();
514 var $filediffCollapseState = $cb.closest('.filediff').prev();
514 $filediffCollapseState.prop('checked', false);
515 $filediffCollapseState.prop('checked', false);
515 $next.addClass('comment-selected');
516 $next.addClass('comment-selected');
516 scrollToElement($next);
517 scrollToElement($next);
517 return false;
518 return false;
518 };
519 };
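// Editorial note on the helper above: the offset-based index wraps around.
// With, say, three visible comments, nextComment() from the last one selects
// index 0 again, and prevComment() from the first one resolves to .eq(-1),
// i.e. the last comment.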
519
520
520 this.nextComment = function(node) {
521 this.nextComment = function(node) {
521 return self.scrollToComment(node, 1);
522 return self.scrollToComment(node, 1);
522 };
523 };
523
524
524 this.prevComment = function(node) {
525 this.prevComment = function(node) {
525 return self.scrollToComment(node, -1);
526 return self.scrollToComment(node, -1);
526 };
527 };
527
528
528 this.nextOutdatedComment = function(node) {
529 this.nextOutdatedComment = function(node) {
529 return self.scrollToComment(node, 1, true);
530 return self.scrollToComment(node, 1, true);
530 };
531 };
531
532
532 this.prevOutdatedComment = function(node) {
533 this.prevOutdatedComment = function(node) {
533 return self.scrollToComment(node, -1, true);
534 return self.scrollToComment(node, -1, true);
534 };
535 };
535
536
536 this.deleteComment = function(node) {
537 this.deleteComment = function(node) {
537 if (!confirm(_gettext('Delete this comment?'))) {
538 if (!confirm(_gettext('Delete this comment?'))) {
538 return false;
539 return false;
539 }
540 }
540 var $node = $(node);
541 var $node = $(node);
541 var $td = $node.closest('td');
542 var $td = $node.closest('td');
542 var $comment = $node.closest('.comment');
543 var $comment = $node.closest('.comment');
543 var comment_id = $comment.attr('data-comment-id');
544 var comment_id = $comment.attr('data-comment-id');
544 var url = AJAX_COMMENT_DELETE_URL.replace('__COMMENT_ID__', comment_id);
545 var url = AJAX_COMMENT_DELETE_URL.replace('__COMMENT_ID__', comment_id);
545 var postData = {
546 var postData = {
546 '_method': 'delete',
547 '_method': 'delete',
547 'csrf_token': CSRF_TOKEN
548 'csrf_token': CSRF_TOKEN
548 };
549 };
549
550
550 $comment.addClass('comment-deleting');
551 $comment.addClass('comment-deleting');
551 $comment.hide('fast');
552 $comment.hide('fast');
552
553
553 var success = function(response) {
554 var success = function(response) {
554 $comment.remove();
555 $comment.remove();
555 return false;
556 return false;
556 };
557 };
557 var failure = function(data, textStatus, xhr) {
558 var failure = function(data, textStatus, xhr) {
558 alert("error processing request: " + textStatus);
559 alert("error processing request: " + textStatus);
559 $comment.show('fast');
560 $comment.show('fast');
560 $comment.removeClass('comment-deleting');
561 $comment.removeClass('comment-deleting');
561 return false;
562 return false;
562 };
563 };
563 ajaxPOST(url, postData, success, failure);
564 ajaxPOST(url, postData, success, failure);
564 };
565 };
565
566
566 this.toggleWideMode = function (node) {
567 this.toggleWideMode = function (node) {
567 if ($('#content').hasClass('wrapper')) {
568 if ($('#content').hasClass('wrapper')) {
568 $('#content').removeClass("wrapper");
569 $('#content').removeClass("wrapper");
569 $('#content').addClass("wide-mode-wrapper");
570 $('#content').addClass("wide-mode-wrapper");
570 $(node).addClass('btn-success');
571 $(node).addClass('btn-success');
571 } else {
572 } else {
572 $('#content').removeClass("wide-mode-wrapper");
573 $('#content').removeClass("wide-mode-wrapper");
573 $('#content').addClass("wrapper");
574 $('#content').addClass("wrapper");
574 $(node).removeClass('btn-success');
575 $(node).removeClass('btn-success');
575 }
576 }
576 return false;
577 return false;
577 };
578 };
578
579
579 this.toggleComments = function(node, show) {
580 this.toggleComments = function(node, show) {
580 var $filediff = $(node).closest('.filediff');
581 var $filediff = $(node).closest('.filediff');
581 if (show === true) {
582 if (show === true) {
582 $filediff.removeClass('hide-comments');
583 $filediff.removeClass('hide-comments');
583 } else if (show === false) {
584 } else if (show === false) {
584 $filediff.find('.hide-line-comments').removeClass('hide-line-comments');
585 $filediff.find('.hide-line-comments').removeClass('hide-line-comments');
585 $filediff.addClass('hide-comments');
586 $filediff.addClass('hide-comments');
586 } else {
587 } else {
587 $filediff.find('.hide-line-comments').removeClass('hide-line-comments');
588 $filediff.find('.hide-line-comments').removeClass('hide-line-comments');
588 $filediff.toggleClass('hide-comments');
589 $filediff.toggleClass('hide-comments');
589 }
590 }
590 return false;
591 return false;
591 };
592 };
592
593
593 this.toggleLineComments = function(node) {
594 this.toggleLineComments = function(node) {
594 self.toggleComments(node, true);
595 self.toggleComments(node, true);
595 var $node = $(node);
596 var $node = $(node);
596 $node.closest('tr').toggleClass('hide-line-comments');
597 $node.closest('tr').toggleClass('hide-line-comments');
597 };
598 };
598
599
599 this.createCommentForm = function(formElement, lineno, placeholderText, initAutocompleteActions, resolvesCommentId){
600 this.createCommentForm = function(formElement, lineno, placeholderText, initAutocompleteActions, resolvesCommentId){
600 var pullRequestId = templateContext.pull_request_data.pull_request_id;
601 var pullRequestId = templateContext.pull_request_data.pull_request_id;
601 var commitId = templateContext.commit_data.commit_id;
602 var commitId = templateContext.commit_data.commit_id;
602
603
603 var commentForm = new CommentForm(
604 var commentForm = new CommentForm(
604 formElement, commitId, pullRequestId, lineno, initAutocompleteActions, resolvesCommentId);
605 formElement, commitId, pullRequestId, lineno, initAutocompleteActions, resolvesCommentId);
605 var cm = commentForm.getCmInstance();
606 var cm = commentForm.getCmInstance();
606
607
607 if (resolvesCommentId){
608 if (resolvesCommentId){
608 var placeholderText = _gettext('Leave a comment, or click resolve button to resolve TODO comment #{0}').format(resolvesCommentId);
609 var placeholderText = _gettext('Leave a comment, or click resolve button to resolve TODO comment #{0}').format(resolvesCommentId);
609 }
610 }
610
611
611 setTimeout(function() {
612 setTimeout(function() {
612 // callbacks
613 // callbacks
613 if (cm !== undefined) {
614 if (cm !== undefined) {
614 commentForm.setPlaceholder(placeholderText);
615 commentForm.setPlaceholder(placeholderText);
615 if (commentForm.isInline()) {
616 if (commentForm.isInline()) {
616 cm.focus();
617 cm.focus();
617 cm.refresh();
618 cm.refresh();
618 }
619 }
619 }
620 }
620 }, 10);
621 }, 10);
621
622
622 // trigger scroll-down to the resolved comment, since it might be far away
623 // trigger scroll-down to the resolved comment, since it might be far away
623 // from the clicked element
624 // from the clicked element
624 if (resolvesCommentId){
625 if (resolvesCommentId){
625 var actionNode = $(commentForm.resolvesActionId).offset();
626 var actionNode = $(commentForm.resolvesActionId).offset();
626
627
627 setTimeout(function() {
628 setTimeout(function() {
628 if (actionNode) {
629 if (actionNode) {
629 $('body, html').animate({scrollTop: actionNode.top}, 10);
630 $('body, html').animate({scrollTop: actionNode.top}, 10);
630 }
631 }
631 }, 100);
632 }, 100);
632 }
633 }
633
634
634 return commentForm;
635 return commentForm;
635 };
636 };
636
637
637 this.createGeneralComment = function (lineNo, placeholderText, resolvesCommentId) {
638 this.createGeneralComment = function (lineNo, placeholderText, resolvesCommentId) {
638
639
639 var tmpl = $('#cb-comment-general-form-template').html();
640 var tmpl = $('#cb-comment-general-form-template').html();
640 tmpl = tmpl.format(null, 'general');
641 tmpl = tmpl.format(null, 'general');
641 var $form = $(tmpl);
642 var $form = $(tmpl);
642
643
643 var $formPlaceholder = $('#cb-comment-general-form-placeholder');
644 var $formPlaceholder = $('#cb-comment-general-form-placeholder');
644 var curForm = $formPlaceholder.find('form');
645 var curForm = $formPlaceholder.find('form');
645 if (curForm){
646 if (curForm){
646 curForm.remove();
647 curForm.remove();
647 }
648 }
648 $formPlaceholder.append($form);
649 $formPlaceholder.append($form);
649
650
650 var _form = $($form[0]);
651 var _form = $($form[0]);
651 var autocompleteActions = ['approve', 'reject', 'as_note', 'as_todo'];
652 var autocompleteActions = ['approve', 'reject', 'as_note', 'as_todo'];
652 var commentForm = this.createCommentForm(
653 var commentForm = this.createCommentForm(
653 _form, lineNo, placeholderText, autocompleteActions, resolvesCommentId);
654 _form, lineNo, placeholderText, autocompleteActions, resolvesCommentId);
654 commentForm.initStatusChangeSelector();
655 commentForm.initStatusChangeSelector();
655
656
656 return commentForm;
657 return commentForm;
657 };
658 };
658
659
659 this.createComment = function(node, resolutionComment) {
660 this.createComment = function(node, resolutionComment) {
660 var resolvesCommentId = resolutionComment || null;
661 var resolvesCommentId = resolutionComment || null;
661 var $node = $(node);
662 var $node = $(node);
662 var $td = $node.closest('td');
663 var $td = $node.closest('td');
663 var $form = $td.find('.comment-inline-form');
664 var $form = $td.find('.comment-inline-form');
664
665
665 if (!$form.length) {
666 if (!$form.length) {
666
667
667 var $filediff = $node.closest('.filediff');
668 var $filediff = $node.closest('.filediff');
668 $filediff.removeClass('hide-comments');
669 $filediff.removeClass('hide-comments');
669 var f_path = $filediff.attr('data-f-path');
670 var f_path = $filediff.attr('data-f-path');
670 var lineno = self.getLineNumber(node);
671 var lineno = self.getLineNumber(node);
671 // create a new HTML from template
672 // create a new HTML from template
672 var tmpl = $('#cb-comment-inline-form-template').html();
673 var tmpl = $('#cb-comment-inline-form-template').html();
673 tmpl = tmpl.format(f_path, lineno);
674 tmpl = tmpl.format(f_path, lineno);
674 $form = $(tmpl);
675 $form = $(tmpl);
675
676
676 var $comments = $td.find('.inline-comments');
677 var $comments = $td.find('.inline-comments');
677 if (!$comments.length) {
678 if (!$comments.length) {
678 $comments = $(
679 $comments = $(
679 $('#cb-comments-inline-container-template').html());
680 $('#cb-comments-inline-container-template').html());
680 $td.append($comments);
681 $td.append($comments);
681 }
682 }
682
683
683 $td.find('.cb-comment-add-button').before($form);
684 $td.find('.cb-comment-add-button').before($form);
684
685
685 var placeholderText = _gettext('Leave a comment on line {0}.').format(lineno);
686 var placeholderText = _gettext('Leave a comment on line {0}.').format(lineno);
686 var _form = $($form[0]).find('form');
687 var _form = $($form[0]).find('form');
687 var autocompleteActions = ['as_note', 'as_todo'];
688 var autocompleteActions = ['as_note', 'as_todo'];
688 var commentForm = this.createCommentForm(
689 var commentForm = this.createCommentForm(
689 _form, lineno, placeholderText, autocompleteActions, resolvesCommentId);
690 _form, lineno, placeholderText, autocompleteActions, resolvesCommentId);
690
691
691 $.Topic('/ui/plugins/code/comment_form_built').prepareOrPublish({
692 $.Topic('/ui/plugins/code/comment_form_built').prepareOrPublish({
692 form: _form,
693 form: _form,
693 parent: $td[0],
694 parent: $td[0],
694 lineno: lineno,
695 lineno: lineno,
695 f_path: f_path}
696 f_path: f_path}
696 );
697 );
697
698
698 // set a CUSTOM submit handler for inline comments.
699 // set a CUSTOM submit handler for inline comments.
699 commentForm.setHandleFormSubmit(function(o) {
700 commentForm.setHandleFormSubmit(function(o) {
700 var text = commentForm.cm.getValue();
701 var text = commentForm.cm.getValue();
701 var commentType = commentForm.getCommentType();
702 var commentType = commentForm.getCommentType();
702 var resolvesCommentId = commentForm.getResolvesId();
703 var resolvesCommentId = commentForm.getResolvesId();
703
704
704 if (text === "") {
705 if (text === "") {
705 return;
706 return;
706 }
707 }
707
708
708 if (lineno === undefined) {
709 if (lineno === undefined) {
709 alert('missing line !');
710 alert('missing line !');
710 return;
711 return;
711 }
712 }
712 if (f_path === undefined) {
713 if (f_path === undefined) {
713 alert('missing file path !');
714 alert('missing file path !');
714 return;
715 return;
715 }
716 }
716
717
717 var excludeCancelBtn = false;
718 var excludeCancelBtn = false;
718 var submitEvent = true;
719 var submitEvent = true;
719 commentForm.setActionButtonsDisabled(true, excludeCancelBtn, submitEvent);
720 commentForm.setActionButtonsDisabled(true, excludeCancelBtn, submitEvent);
720 commentForm.cm.setOption("readOnly", true);
721 commentForm.cm.setOption("readOnly", true);
721 var postData = {
722 var postData = {
722 'text': text,
723 'text': text,
723 'f_path': f_path,
724 'f_path': f_path,
724 'line': lineno,
725 'line': lineno,
725 'comment_type': commentType,
726 'comment_type': commentType,
726 'csrf_token': CSRF_TOKEN
727 'csrf_token': CSRF_TOKEN
727 };
728 };
728 if (resolvesCommentId){
729 if (resolvesCommentId){
729 postData['resolves_comment_id'] = resolvesCommentId;
730 postData['resolves_comment_id'] = resolvesCommentId;
730 }
731 }
731
732
732 var submitSuccessCallback = function(json_data) {
733 var submitSuccessCallback = function(json_data) {
733 $form.remove();
734 $form.remove();
734 try {
735 try {
735 var html = json_data.rendered_text;
736 var html = json_data.rendered_text;
736 var lineno = json_data.line_no;
737 var lineno = json_data.line_no;
737 var target_id = json_data.target_id;
738 var target_id = json_data.target_id;
738
739
739 $comments.find('.cb-comment-add-button').before(html);
740 $comments.find('.cb-comment-add-button').before(html);
740
741
741 // mark visually which comment was resolved
742 // mark visually which comment was resolved
742 if (resolvesCommentId) {
743 if (resolvesCommentId) {
743 commentForm.markCommentResolved(resolvesCommentId);
744 commentForm.markCommentResolved(resolvesCommentId);
744 }
745 }
745
746
746 // run global callback on submit
747 // run global callback on submit
747 commentForm.globalSubmitSuccessCallback();
748 commentForm.globalSubmitSuccessCallback();
748
749
749 } catch (e) {
750 } catch (e) {
750 console.error(e);
751 console.error(e);
751 }
752 }
752
753
753 // re-trigger the linkification of next/prev navigation
754 // re-trigger the linkification of next/prev navigation
754 linkifyComments($('.inline-comment-injected'));
755 linkifyComments($('.inline-comment-injected'));
755 timeagoActivate();
756 timeagoActivate();
756 commentForm.setActionButtonsDisabled(false);
757 commentForm.setActionButtonsDisabled(false);
757
758
758 };
759 };
759 var submitFailCallback = function(){
760 var submitFailCallback = function(){
760 commentForm.resetCommentFormState(text)
761 commentForm.resetCommentFormState(text)
761 };
762 };
762 commentForm.submitAjaxPOST(
763 commentForm.submitAjaxPOST(
763 commentForm.submitUrl, postData, submitSuccessCallback, submitFailCallback);
764 commentForm.submitUrl, postData, submitSuccessCallback, submitFailCallback);
764 });
765 });
765 }
766 }
766
767
767 $form.addClass('comment-inline-form-open');
768 $form.addClass('comment-inline-form-open');
768 };
769 };
769
770
770 this.createResolutionComment = function(commentId){
771 this.createResolutionComment = function(commentId){
771 // hide the trigger text
772 // hide the trigger text
772 $('#resolve-comment-{0}'.format(commentId)).hide();
773 $('#resolve-comment-{0}'.format(commentId)).hide();
773
774
774 var comment = $('#comment-'+commentId);
775 var comment = $('#comment-'+commentId);
775 var commentData = comment.data();
776 var commentData = comment.data();
776 if (commentData.commentInline) {
777 if (commentData.commentInline) {
777 this.createComment(comment, commentId)
778 this.createComment(comment, commentId)
778 } else {
779 } else {
779 Rhodecode.comments.createGeneralComment('general', "$placeholder", commentId)
780 Rhodecode.comments.createGeneralComment('general', "$placeholder", commentId)
780 }
781 }
781
782
782 return false;
783 return false;
783 };
784 };
784
785
785 this.submitResolution = function(commentId){
786 this.submitResolution = function(commentId){
786 var form = $('#resolve_comment_{0}'.format(commentId)).closest('form');
787 var form = $('#resolve_comment_{0}'.format(commentId)).closest('form');
787 var commentForm = form.get(0).CommentForm;
788 var commentForm = form.get(0).CommentForm;
788
789
789 var cm = commentForm.getCmInstance();
790 var cm = commentForm.getCmInstance();
790 var renderer = templateContext.visual.default_renderer;
791 var renderer = templateContext.visual.default_renderer;
791 if (renderer == 'rst'){
792 if (renderer == 'rst'){
792 var commentUrl = '`#{0} <{1}#comment-{0}>`_'.format(commentId, commentForm.selfUrl);
793 var commentUrl = '`#{0} <{1}#comment-{0}>`_'.format(commentId, commentForm.selfUrl);
793 } else if (renderer == 'markdown') {
794 } else if (renderer == 'markdown') {
794 var commentUrl = '[#{0}]({1}#comment-{0})'.format(commentId, commentForm.selfUrl);
795 var commentUrl = '[#{0}]({1}#comment-{0})'.format(commentId, commentForm.selfUrl);
795 } else {
796 } else {
796 var commentUrl = '{1}#comment-{0}'.format(commentId, commentForm.selfUrl);
797 var commentUrl = '{1}#comment-{0}'.format(commentId, commentForm.selfUrl);
797 }
798 }
798
799
799 cm.setValue(_gettext('TODO from comment {0} was fixed.').format(commentUrl));
800 cm.setValue(_gettext('TODO from comment {0} was fixed.').format(commentUrl));
800 form.submit();
801 form.submit();
801 return false;
802 return false;
802 };
803 };
803
804
804 this.renderInlineComments = function(file_comments) {
805 this.renderInlineComments = function(file_comments) {
805 show_add_button = typeof show_add_button !== 'undefined' ? show_add_button : true;
806 show_add_button = typeof show_add_button !== 'undefined' ? show_add_button : true;
806
807
807 for (var i = 0; i < file_comments.length; i++) {
808 for (var i = 0; i < file_comments.length; i++) {
808 var box = file_comments[i];
809 var box = file_comments[i];
809
810
810 var target_id = $(box).attr('target_id');
811 var target_id = $(box).attr('target_id');
811
812
812 // the actual comments, with their line numbers
813 // the actual comments, with their line numbers
813 var comments = box.children;
814 var comments = box.children;
814
815
815 for (var j = 0; j < comments.length; j++) {
816 for (var j = 0; j < comments.length; j++) {
816 var data = {
817 var data = {
817 'rendered_text': comments[j].outerHTML,
818 'rendered_text': comments[j].outerHTML,
818 'line_no': $(comments[j]).attr('line'),
819 'line_no': $(comments[j]).attr('line'),
819 'target_id': target_id
820 'target_id': target_id
820 };
821 };
821 }
822 }
822 }
823 }
823
824
824 // since the order of injection is random, we're now re-iterating
825 // since the order of injection is random, we're now re-iterating
825 // in the correct order and filling in links
826 // in the correct order and filling in links
826 linkifyComments($('.inline-comment-injected'));
827 linkifyComments($('.inline-comment-injected'));
827 firefoxAnchorFix();
828 firefoxAnchorFix();
828 };
829 };
829
830
830 };
831 };
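// Editorial wiring sketch (the instance name and the click binding below are
// assumptions, not taken from this changeset): templates are expected to
// create one controller per page and delegate the inline buttons to it.
//
//   var commentsController = new CommentsController();
//   $(document).on('click', '.cb-comment-add-button', function(e) {
//       e.preventDefault();
//       commentsController.createComment(this);
//   });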
@@ -1,210 +1,210 b''
1 <%namespace name="base" file="/base/base.mako"/>
1 <%namespace name="base" file="/base/base.mako"/>
2
2
3 <%
3 <%
4 elems = [
4 elems = [
5 (_('Owner'), lambda:base.gravatar_with_user(c.repo_info.user.email), '', ''),
5 (_('Owner'), lambda:base.gravatar_with_user(c.repo_info.user.email), '', ''),
6 (_('Created on'), h.format_date(c.repo_info.created_on), '', ''),
6 (_('Created on'), h.format_date(c.repo_info.created_on), '', ''),
7 (_('Updated on'), h.format_date(c.repo_info.updated_on), '', ''),
7 (_('Updated on'), h.format_date(c.repo_info.updated_on), '', ''),
8 (_('Cached Commit id'), lambda: h.link_to(c.repo_info.changeset_cache.get('short_id'), h.url('changeset_home',repo_name=c.repo_name,revision=c.repo_info.changeset_cache.get('raw_id'))), '', ''),
8 (_('Cached Commit id'), lambda: h.link_to(c.repo_info.changeset_cache.get('short_id'), h.route_path('repo_commit',repo_name=c.repo_name,commit_id=c.repo_info.changeset_cache.get('raw_id'))), '', ''),
9 ]
9 ]
10 %>
10 %>
11
11
12 <div class="panel panel-default">
12 <div class="panel panel-default">
13 <div class="panel-heading" id="advanced-info" >
13 <div class="panel-heading" id="advanced-info" >
14 <h3 class="panel-title">${_('Repository: %s') % c.repo_info.repo_name} <a class="permalink" href="#advanced-info"></a></h3>
14 <h3 class="panel-title">${_('Repository: %s') % c.repo_info.repo_name} <a class="permalink" href="#advanced-info"></a></h3>
15 </div>
15 </div>
16 <div class="panel-body">
16 <div class="panel-body">
17 ${base.dt_info_panel(elems)}
17 ${base.dt_info_panel(elems)}
18 </div>
18 </div>
19 </div>
19 </div>
20
20
21
21
22 <div class="panel panel-default">
22 <div class="panel panel-default">
23 <div class="panel-heading" id="advanced-fork">
23 <div class="panel-heading" id="advanced-fork">
24 <h3 class="panel-title">${_('Fork Reference')} <a class="permalink" href="#advanced-fork"></a></h3>
24 <h3 class="panel-title">${_('Fork Reference')} <a class="permalink" href="#advanced-fork"></a></h3>
25 </div>
25 </div>
26 <div class="panel-body">
26 <div class="panel-body">
27 ${h.secure_form(h.route_path('edit_repo_advanced_fork', repo_name=c.repo_info.repo_name), method='POST', request=request)}
27 ${h.secure_form(h.route_path('edit_repo_advanced_fork', repo_name=c.repo_info.repo_name), method='POST', request=request)}
28
28
29 % if c.repo_info.fork:
29 % if c.repo_info.fork:
30 <div class="panel-body-title-text">${h.literal(_('This repository is a fork of %(repo_link)s') % {'repo_link': h.link_to_if(c.has_origin_repo_read_perm,c.repo_info.fork.repo_name, h.route_path('repo_summary', repo_name=c.repo_info.fork.repo_name))})}
30 <div class="panel-body-title-text">${h.literal(_('This repository is a fork of %(repo_link)s') % {'repo_link': h.link_to_if(c.has_origin_repo_read_perm,c.repo_info.fork.repo_name, h.route_path('repo_summary', repo_name=c.repo_info.fork.repo_name))})}
31 | <button class="btn btn-link btn-danger" type="submit">Remove fork reference</button></div>
31 | <button class="btn btn-link btn-danger" type="submit">Remove fork reference</button></div>
32 % endif
32 % endif
33
33
34 <div class="field">
34 <div class="field">
35 ${h.hidden('id_fork_of')}
35 ${h.hidden('id_fork_of')}
36 ${h.submit('set_as_fork_%s' % c.repo_info.repo_name,_('Set'),class_="btn btn-small",)}
36 ${h.submit('set_as_fork_%s' % c.repo_info.repo_name,_('Set'),class_="btn btn-small",)}
37 </div>
37 </div>
38 <div class="field">
38 <div class="field">
39 <span class="help-block">${_('Manually set this repository as a fork of another from the list')}</span>
39 <span class="help-block">${_('Manually set this repository as a fork of another from the list')}</span>
40 </div>
40 </div>
41 ${h.end_form()}
41 ${h.end_form()}
42 </div>
42 </div>
43 </div>
43 </div>
44
44
45
45
46 <div class="panel panel-default">
46 <div class="panel panel-default">
47 <div class="panel-heading" id="advanced-journal">
47 <div class="panel-heading" id="advanced-journal">
48 <h3 class="panel-title">${_('Public Journal Visibility')} <a class="permalink" href="#advanced-journal"></a></h3>
48 <h3 class="panel-title">${_('Public Journal Visibility')} <a class="permalink" href="#advanced-journal"></a></h3>
49 </div>
49 </div>
50 <div class="panel-body">
50 <div class="panel-body">
51 ${h.secure_form(h.route_path('edit_repo_advanced_journal', repo_name=c.repo_info.repo_name), method='POST', request=request)}
51 ${h.secure_form(h.route_path('edit_repo_advanced_journal', repo_name=c.repo_info.repo_name), method='POST', request=request)}
52 <div class="field">
52 <div class="field">
53 %if c.in_public_journal:
53 %if c.in_public_journal:
54 <button class="btn btn-small" type="submit">
54 <button class="btn btn-small" type="submit">
55 ${_('Remove from Public Journal')}
55 ${_('Remove from Public Journal')}
56 </button>
56 </button>
57 %else:
57 %else:
58 <button class="btn btn-small" type="submit">
58 <button class="btn btn-small" type="submit">
59 ${_('Add to Public Journal')}
59 ${_('Add to Public Journal')}
60 </button>
60 </button>
61 %endif
61 %endif
62 </div>
62 </div>
63 <div class="field" >
63 <div class="field" >
64 <span class="help-block">${_('All actions made on this repository will be visible to everyone following the public journal.')}</span>
64 <span class="help-block">${_('All actions made on this repository will be visible to everyone following the public journal.')}</span>
65 </div>
65 </div>
66 ${h.end_form()}
66 ${h.end_form()}
67 </div>
67 </div>
68 </div>
68 </div>
69
69
70
70
71 <div class="panel panel-default">
71 <div class="panel panel-default">
72 <div class="panel-heading" id="advanced-locking">
72 <div class="panel-heading" id="advanced-locking">
73 <h3 class="panel-title">${_('Locking state')} <a class="permalink" href="#advanced-locking"></a></h3>
73 <h3 class="panel-title">${_('Locking state')} <a class="permalink" href="#advanced-locking"></a></h3>
74 </div>
74 </div>
75 <div class="panel-body">
75 <div class="panel-body">
76 ${h.secure_form(h.route_path('edit_repo_advanced_locking', repo_name=c.repo_info.repo_name), method='POST', request=request)}
76 ${h.secure_form(h.route_path('edit_repo_advanced_locking', repo_name=c.repo_info.repo_name), method='POST', request=request)}
77
77
78 %if c.repo_info.locked[0]:
78 %if c.repo_info.locked[0]:
79 <div class="panel-body-title-text">${'Locked by %s on %s. Lock reason: %s' % (h.person_by_id(c.repo_info.locked[0]),
79 <div class="panel-body-title-text">${'Locked by %s on %s. Lock reason: %s' % (h.person_by_id(c.repo_info.locked[0]),
80 h.format_date(h.time_to_datetime(c.repo_info.locked[1])), c.repo_info.locked[2])}</div>
80 h.format_date(h.time_to_datetime(c.repo_info.locked[1])), c.repo_info.locked[2])}</div>
81 %else:
81 %else:
82 <div class="panel-body-title-text">${_('This Repository is not currently locked.')}</div>
82 <div class="panel-body-title-text">${_('This Repository is not currently locked.')}</div>
83 %endif
83 %endif
84
84
85 <div class="field" >
85 <div class="field" >
86 %if c.repo_info.locked[0]:
86 %if c.repo_info.locked[0]:
87 ${h.hidden('set_unlock', '1')}
87 ${h.hidden('set_unlock', '1')}
88 <button class="btn btn-small" type="submit"
88 <button class="btn btn-small" type="submit"
89 onclick="return confirm('${_('Confirm to unlock repository.')}');">
89 onclick="return confirm('${_('Confirm to unlock repository.')}');">
90 <i class="icon-unlock"></i>
90 <i class="icon-unlock"></i>
91 ${_('Unlock repository')}
91 ${_('Unlock repository')}
92 </button>
92 </button>
93 %else:
93 %else:
94 ${h.hidden('set_lock', '1')}
94 ${h.hidden('set_lock', '1')}
95 <button class="btn btn-small" type="submit"
95 <button class="btn btn-small" type="submit"
96 onclick="return confirm('${_('Confirm to lock repository.')}');">
96 onclick="return confirm('${_('Confirm to lock repository.')}');">
97 <i class="icon-lock"></i>
97 <i class="icon-lock"></i>
98 ${_('Lock Repository')}
98 ${_('Lock Repository')}
99 </button>
99 </button>
100 %endif
100 %endif
101 </div>
101 </div>
102 <div class="field" >
102 <div class="field" >
103 <span class="help-block">
103 <span class="help-block">
104 ${_('Force repository locking. This only works when anonymous access is disabled. Pulling from the repository locks the repository to that user until the same user pushes to that repository again.')}
104 ${_('Force repository locking. This only works when anonymous access is disabled. Pulling from the repository locks the repository to that user until the same user pushes to that repository again.')}
105 </span>
105 </span>
106 </div>
106 </div>
107 ${h.end_form()}
107 ${h.end_form()}
108 </div>
108 </div>
109 </div>
109 </div>
110
110
111 <div class="panel panel-danger">
111 <div class="panel panel-danger">
112 <div class="panel-heading" id="advanced-delete">
112 <div class="panel-heading" id="advanced-delete">
113 <h3 class="panel-title">${_('Delete repository')} <a class="permalink" href="#advanced-delete"></a></h3>
113 <h3 class="panel-title">${_('Delete repository')} <a class="permalink" href="#advanced-delete"></a></h3>
114 </div>
114 </div>
115 <div class="panel-body">
115 <div class="panel-body">
116 ${h.secure_form(h.route_path('edit_repo_advanced_delete', repo_name=c.repo_name), method='POST', request=request)}
116 ${h.secure_form(h.route_path('edit_repo_advanced_delete', repo_name=c.repo_name), method='POST', request=request)}
117 <table class="display">
117 <table class="display">
118 <tr>
118 <tr>
119 <td>
119 <td>
120 ${_ungettext('This repository has %s fork.', 'This repository has %s forks.', c.repo_info.forks.count()) % c.repo_info.forks.count()}
120 ${_ungettext('This repository has %s fork.', 'This repository has %s forks.', c.repo_info.forks.count()) % c.repo_info.forks.count()}
121 </td>
121 </td>
122 <td>
122 <td>
123 %if c.repo_info.forks.count():
123 %if c.repo_info.forks.count():
124 <input type="radio" name="forks" value="detach_forks" checked="checked"/> <label for="forks">${_('Detach forks')}</label>
124 <input type="radio" name="forks" value="detach_forks" checked="checked"/> <label for="forks">${_('Detach forks')}</label>
125 %endif
125 %endif
126 </td>
126 </td>
127 <td>
127 <td>
128 %if c.repo_info.forks.count():
128 %if c.repo_info.forks.count():
129 <input type="radio" name="forks" value="delete_forks"/> <label for="forks">${_('Delete forks')}</label>
129 <input type="radio" name="forks" value="delete_forks"/> <label for="forks">${_('Delete forks')}</label>
130 %endif
130 %endif
131 </td>
131 </td>
132 </tr>
132 </tr>
133 </table>
133 </table>
134 <div style="margin: 0 0 20px 0" class="fake-space"></div>
134 <div style="margin: 0 0 20px 0" class="fake-space"></div>
135
135
136 <div class="field">
136 <div class="field">
137 <button class="btn btn-small btn-danger" type="submit"
137 <button class="btn btn-small btn-danger" type="submit"
138 onclick="return confirm('${_('Confirm to delete this repository: %s') % c.repo_name}');">
138 onclick="return confirm('${_('Confirm to delete this repository: %s') % c.repo_name}');">
139 <i class="icon-remove-sign"></i>
139 <i class="icon-remove-sign"></i>
140 ${_('Delete This Repository')}
140 ${_('Delete This Repository')}
141 </button>
141 </button>
142 </div>
142 </div>
143 <div class="field">
143 <div class="field">
144 <span class="help-block">
144 <span class="help-block">
145 ${_('This repository will be renamed in a special way in order to make it inaccessible to RhodeCode Enterprise and its VCS systems. If you need to fully delete it from the file system, please do it manually, or with rhodecode-cleanup-repos command available in rhodecode-tools.')}
145 ${_('This repository will be renamed in a special way in order to make it inaccessible to RhodeCode Enterprise and its VCS systems. If you need to fully delete it from the file system, please do it manually, or with rhodecode-cleanup-repos command available in rhodecode-tools.')}
146 </span>
146 </span>
147 </div>
147 </div>
148
148
149 ${h.end_form()}
149 ${h.end_form()}
150 </div>
150 </div>
151 </div>
151 </div>
152
152
153
153
154 <script>
154 <script>
155
155
156 var currentRepoId = ${c.repo_info.repo_id};
156 var currentRepoId = ${c.repo_info.repo_id};
157
157
158 var repoTypeFilter = function(data) {
158 var repoTypeFilter = function(data) {
159 var results = [];
159 var results = [];
160
160
161 if (!data.results[0]) {
161 if (!data.results[0]) {
162 return data
162 return data
163 }
163 }
164
164
165 $.each(data.results[0].children, function() {
165 $.each(data.results[0].children, function() {
166 // filter out the SAME repo, it cannot be used as fork of itself
166 // filter out the SAME repo, it cannot be used as fork of itself
167 if (this.obj.repo_id != currentRepoId) {
167 if (this.obj.repo_id != currentRepoId) {
168 this.id = this.obj.repo_id;
168 this.id = this.obj.repo_id;
169 results.push(this)
169 results.push(this)
170 }
170 }
171 });
171 });
172 data.results[0].children = results;
172 data.results[0].children = results;
173 return data;
173 return data;
174 };
174 };
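// Editorial example (illustrative data only): with currentRepoId = 1 and a
// payload of {results: [{children: [{obj: {repo_id: 1}}, {obj: {repo_id: 2}}]}]},
// repoTypeFilter() keeps only the child with repo_id 2, because a repository
// cannot be set as a fork of itself.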
175
175
176 $("#id_fork_of").select2({
176 $("#id_fork_of").select2({
177 cachedDataSource: {},
177 cachedDataSource: {},
178 minimumInputLength: 2,
178 minimumInputLength: 2,
179 placeholder: "${_('Change repository') if c.repo_info.fork else _('Pick repository')}",
179 placeholder: "${_('Change repository') if c.repo_info.fork else _('Pick repository')}",
180 dropdownAutoWidth: true,
180 dropdownAutoWidth: true,
181 containerCssClass: "drop-menu",
181 containerCssClass: "drop-menu",
182 dropdownCssClass: "drop-menu-dropdown",
182 dropdownCssClass: "drop-menu-dropdown",
183 formatResult: formatResult,
183 formatResult: formatResult,
184 query: $.debounce(250, function(query){
184 query: $.debounce(250, function(query){
185 var self = this;
185 var self = this;
186 var cacheKey = query.term;
186 var cacheKey = query.term;
187 var cachedData = self.cachedDataSource[cacheKey];
187 var cachedData = self.cachedDataSource[cacheKey];
188
188
189 if (cachedData) {
189 if (cachedData) {
190 query.callback({results: cachedData.results});
190 query.callback({results: cachedData.results});
191 } else {
191 } else {
192 $.ajax({
192 $.ajax({
193 url: pyroutes.url('repo_list_data'),
193 url: pyroutes.url('repo_list_data'),
194 data: {'query': query.term, repo_type: '${c.repo_info.repo_type}'},
194 data: {'query': query.term, repo_type: '${c.repo_info.repo_type}'},
195 dataType: 'json',
195 dataType: 'json',
196 type: 'GET',
196 type: 'GET',
197 success: function(data) {
197 success: function(data) {
198 data = repoTypeFilter(data);
198 data = repoTypeFilter(data);
199 self.cachedDataSource[cacheKey] = data;
199 self.cachedDataSource[cacheKey] = data;
200 query.callback({results: data.results});
200 query.callback({results: data.results});
201 },
201 },
202 error: function(data, textStatus, errorThrown) {
202 error: function(data, textStatus, errorThrown) {
203 alert("Error while fetching entries.\nError code {0} ({1}).".format(data.status, data.statusText));
203 alert("Error while fetching entries.\nError code {0} ({1}).".format(data.status, data.statusText));
204 }
204 }
205 })
205 })
206 }
206 }
207 })
207 })
208 });
208 });
209 </script>
209 </script>
210
210
@@ -1,299 +1,299 b''
1 ## -*- coding: utf-8 -*-
2
3 <%inherit file="/base/base.mako"/>
4
5 <%def name="title()">
6 ${_('%s Changelog') % c.repo_name}
7 %if c.changelog_for_path:
8 /${c.changelog_for_path}
9 %endif
10 %if c.rhodecode_name:
11 &middot; ${h.branding(c.rhodecode_name)}
12 %endif
13 </%def>
14
15 <%def name="breadcrumbs_links()">
16 %if c.changelog_for_path:
17 /${c.changelog_for_path}
18 %endif
19 </%def>
20
21 <%def name="menu_bar_nav()">
22 ${self.menu_items(active='repositories')}
23 </%def>
24
25 <%def name="menu_bar_subnav()">
26 ${self.repo_menu(active='changelog')}
27 </%def>
28
29 <%def name="main()">
30
31 <div class="box">
32 <div class="title">
33 ${self.repo_page_title(c.rhodecode_db_repo)}
34 <ul class="links">
35 <li>
36 <a href="#" class="btn btn-small" id="rev_range_container" style="display:none;"></a>
37 %if c.rhodecode_db_repo.fork:
38 <span>
39 <a id="compare_fork_button"
40 title="${h.tooltip(_('Compare fork with %s' % c.rhodecode_db_repo.fork.repo_name))}"
41 class="btn btn-small"
42 href="${h.url('compare_url',
43 repo_name=c.rhodecode_db_repo.fork.repo_name,
44 source_ref_type=c.rhodecode_db_repo.landing_rev[0],
45 source_ref=c.rhodecode_db_repo.landing_rev[1],
46 target_repo=c.repo_name,
47 target_ref_type='branch' if request.GET.get('branch') else c.rhodecode_db_repo.landing_rev[0],
48 target_ref=request.GET.get('branch') or c.rhodecode_db_repo.landing_rev[1],
49 merge=1)}"
50 >
51 <i class="icon-loop"></i>
52 ${_('Compare fork with Parent (%s)' % c.rhodecode_db_repo.fork.repo_name)}
53 </a>
54 </span>
55 %endif
56
57 ## pr open link
58 %if h.is_hg(c.rhodecode_repo) or h.is_git(c.rhodecode_repo):
59 <span>
60 <a id="open_new_pull_request" class="btn btn-small btn-success" href="${h.url('pullrequest_home',repo_name=c.repo_name)}">
61 ${_('Open new pull request')}
62 </a>
63 </span>
64 %endif
65
66 ## clear selection
67 <div title="${_('Clear selection')}" class="btn" id="rev_range_clear" style="display:none">
68 ${_('Clear selection')}
69 </div>
70
71 </li>
72 </ul>
73 </div>
74
75 % if c.pagination:
76 <script type="text/javascript" src="${h.asset('js/jquery.commits-graph.js')}"></script>
77
78 <div class="graph-header">
79 <div id="filter_changelog">
80 ${h.hidden('branch_filter')}
81 %if c.selected_name:
82 <div class="btn btn-default" id="clear_filter" >
83 ${_('Clear filter')}
84 </div>
85 %endif
86 </div>
87 ${self.breadcrumbs('breadcrumbs_light')}
88 <div id="commit-counter" data-total=${c.total_cs} class="pull-right">
89 ${_ungettext('showing %d out of %d commit', 'showing %d out of %d commits', c.showing_commits) % (c.showing_commits, c.total_cs)}
90 </div>
91 </div>
92
93 <div id="graph">
94 <div class="graph-col-wrapper">
95 <div id="graph_nodes">
96 <div id="graph_canvas"></div>
97 </div>
98 <div id="graph_content" class="main-content graph_full_width">
99
100 <div class="table">
101 <table id="changesets" class="rctable">
102 <tr>
103 ## checkbox
104 <th></th>
105 <th colspan="2"></th>
106
107 <th>${_('Commit')}</th>
108 ## commit message expand arrow
109 <th></th>
110 <th>${_('Commit Message')}</th>
111
112 <th>${_('Age')}</th>
113 <th>${_('Author')}</th>
114
115 <th>${_('Refs')}</th>
116 </tr>
117
118 <tbody class="commits-range">
119 <%include file='changelog_elements.mako'/>
120 </tbody>
121 </table>
122 </div>
123 </div>
124 <div class="pagination-wh pagination-left">
125 ${c.pagination.pager('$link_previous ~2~ $link_next')}
126 </div>
127 </div>
128
129 <script type="text/javascript">
130 var cache = {};
131 $(function(){
132
133 // Create links to commit ranges when range checkboxes are selected
134 var $commitCheckboxes = $('.commit-range');
135 // cache elements
136 var $commitRangeContainer = $('#rev_range_container');
137 var $commitRangeClear = $('#rev_range_clear');
138
139 var checkboxRangeSelector = function(e){
140 var selectedCheckboxes = [];
141 for (pos in $commitCheckboxes){
142 if($commitCheckboxes[pos].checked){
143 selectedCheckboxes.push($commitCheckboxes[pos]);
144 }
145 }
146 var open_new_pull_request = $('#open_new_pull_request');
147 if(open_new_pull_request){
148 var selected_changes = selectedCheckboxes.length;
149 if (selected_changes > 1 || selected_changes == 1 && templateContext.repo_type != 'hg') {
150 open_new_pull_request.hide();
151 } else {
152 if (selected_changes == 1) {
153 open_new_pull_request.html(_gettext('Open new pull request for selected commit'));
154 } else if (selected_changes == 0) {
155 open_new_pull_request.html(_gettext('Open new pull request'));
156 }
157 open_new_pull_request.show();
158 }
159 }
160
161 if (selectedCheckboxes.length>0){
162 var revEnd = selectedCheckboxes[0].name;
163 var revStart = selectedCheckboxes[selectedCheckboxes.length-1].name;
164 var url = pyroutes.url('changeset_home',
164 var url = pyroutes.url('repo_commit',
165 {'repo_name': '${c.repo_name}',
166 'revision': revStart+'...'+revEnd});
166 'commit_id': revStart+'...'+revEnd});
167
168 var link = (revStart == revEnd)
169 ? _gettext('Show selected commit __S')
170 : _gettext('Show selected commits __S ... __E');
171
172 link = link.replace('__S', revStart.substr(0,6));
173 link = link.replace('__E', revEnd.substr(0,6));
174
175 $commitRangeContainer
176 .attr('href',url)
177 .html(link)
178 .show();
179
180 $commitRangeClear.show();
181 var _url = pyroutes.url('pullrequest_home',
182 {'repo_name': '${c.repo_name}',
183 'commit': revEnd});
184 open_new_pull_request.attr('href', _url);
185 $('#compare_fork_button').hide();
186 } else {
187 $commitRangeContainer.hide();
188 $commitRangeClear.hide();
189
190 %if c.branch_name:
191 var _url = pyroutes.url('pullrequest_home',
192 {'repo_name': '${c.repo_name}',
193 'branch':'${c.branch_name}'});
194 open_new_pull_request.attr('href', _url);
195 %else:
196 var _url = pyroutes.url('pullrequest_home',
197 {'repo_name': '${c.repo_name}'});
198 open_new_pull_request.attr('href', _url);
199 %endif
200 $('#compare_fork_button').show();
201 }
202 };
203
204 $commitCheckboxes.on('click', checkboxRangeSelector);
205
206 $commitRangeClear.on('click',function(e) {
207 $commitCheckboxes.attr('checked', false);
208 checkboxRangeSelector();
209 e.preventDefault();
210 });
211
212 // make sure the buttons are consistent when navigate back and forth
213 checkboxRangeSelector();
214
215 var msgs = $('.message');
216 // get first element height
217 var el = $('#graph_content .container')[0];
218 var row_h = el.clientHeight;
219 for (var i=0; i < msgs.length; i++) {
220 var m = msgs[i];
221
222 var h = m.clientHeight;
223 var pad = $(m).css('padding');
224 if (h > row_h) {
225 var offset = row_h - (h+12);
226 $(m.nextElementSibling).css('display','block');
227 $(m.nextElementSibling).css('margin-top',offset+'px');
228 }
229 }
230
231 $("#clear_filter").on("click", function() {
232 var filter = {'repo_name': '${c.repo_name}'};
233 window.location = pyroutes.url('repo_changelog', filter);
234 });
235
236 $("#branch_filter").select2({
237 'dropdownAutoWidth': true,
238 'width': 'resolve',
239 'placeholder': "${c.selected_name or _('Filter changelog')}",
240 containerCssClass: "drop-menu",
241 dropdownCssClass: "drop-menu-dropdown",
242 query: function(query){
243 var key = 'cache';
244 var cached = cache[key] ;
245 if(cached) {
246 var data = {results: []};
247 //filter results
248 $.each(cached.results, function(){
249 var section = this.text;
250 var children = [];
251 $.each(this.children, function(){
252 if(query.term.length == 0 || this.text.toUpperCase().indexOf(query.term.toUpperCase()) >= 0 ){
253 children.push({'id': this.id, 'text': this.text, 'type': this.type})
254 }
255 });
256 data.results.push({'text': section, 'children': children});
257 query.callback({results: data.results});
258 });
259 }else{
260 $.ajax({
261 url: pyroutes.url('repo_refs_changelog_data', {'repo_name': '${c.repo_name}'}),
262 data: {},
263 dataType: 'json',
264 type: 'GET',
265 success: function(data) {
266 cache[key] = data;
267 query.callback({results: data.results});
268 }
269 })
270 }
271 }
272 });
273 $('#branch_filter').on('change', function(e){
274 var data = $('#branch_filter').select2('data');
275 var selected = data.text;
276 var filter = {'repo_name': '${c.repo_name}'};
277 if(data.type == 'branch' || data.type == 'branch_closed'){
278 filter["branch"] = selected;
279 }
280 else if (data.type == 'book'){
281 filter["bookmark"] = selected;
282 }
283 window.location = pyroutes.url('repo_changelog', filter);
284 });
285
286 commitsController = new CommitsController();
287 % if not c.changelog_for_path:
288 commitsController.reloadGraph();
289 % endif
290
291 });
292
293 </script>
294 </div>
295 % else:
296 ${_('There are no changes yet')}
297 % endif
298 </div>
299 </%def>
@@ -1,144 +1,144 b''
1 ## small box that displays changed/added/removed details fetched by AJAX
2 <%namespace name="base" file="/base/base.mako"/>
3
4
5 % if c.prev_page:
6 <tr>
7 <td colspan="9" class="load-more-commits">
8 <a class="prev-commits" href="#loadPrevCommits" onclick="commitsController.loadPrev(this, ${c.prev_page}, '${c.branch_name}');return false">
9 ${_('load previous')}
10 </a>
11 </td>
12 </tr>
13 % endif
14
15 % for cnt,commit in enumerate(c.pagination):
16 <tr id="sha_${commit.raw_id}" class="changelogRow container ${'tablerow%s' % (cnt%2)}">
17
18 <td class="td-checkbox">
19 ${h.checkbox(commit.raw_id,class_="commit-range")}
20 </td>
21 <td class="td-status">
22
23 %if c.statuses.get(commit.raw_id):
24 <div class="changeset-status-ico">
25 %if c.statuses.get(commit.raw_id)[2]:
26 <a class="tooltip" title="${_('Commit status: %s\nClick to open associated pull request #%s') % (h.commit_status_lbl(c.statuses.get(commit.raw_id)[0]), c.statuses.get(commit.raw_id)[2])}" href="${h.route_path('pullrequest_show',repo_name=c.statuses.get(commit.raw_id)[3],pull_request_id=c.statuses.get(commit.raw_id)[2])}">
27 <div class="${'flag_status %s' % c.statuses.get(commit.raw_id)[0]}"></div>
27 <div class="${'flag_status {}'.format(c.statuses.get(commit.raw_id)[0])}"></div>
28 </a>
29 %else:
30 <a class="tooltip" title="${_('Commit status: %s') % h.commit_status_lbl(c.statuses.get(commit.raw_id)[0])}" href="${h.url('changeset_home',repo_name=c.repo_name,revision=commit.raw_id,anchor='comment-%s' % c.comments[commit.raw_id][0].comment_id)}">
30 <a class="tooltip" title="${_('Commit status: {}').format(h.commit_status_lbl(c.statuses.get(commit.raw_id)[0]))}" href="${h.route_path('repo_commit',repo_name=c.repo_name,commit_id=commit.raw_id,_anchor='comment-%s' % c.comments[commit.raw_id][0].comment_id)}">
31 <div class="${'flag_status %s' % c.statuses.get(commit.raw_id)[0]}"></div>
31 <div class="${'flag_status {}'.format(c.statuses.get(commit.raw_id)[0])}"></div>
32 </a>
33 %endif
34 </div>
35 %else:
36 <div class="tooltip flag_status not_reviewed" title="${_('Commit status: Not Reviewed')}"></div>
37 %endif
38 </td>
39 <td class="td-comments comments-col">
40 %if c.comments.get(commit.raw_id):
41 <a title="${_('Commit has comments')}" href="${h.url('changeset_home',repo_name=c.repo_name,revision=commit.raw_id,anchor='comment-%s' % c.comments[commit.raw_id][0].comment_id)}">
41 <a title="${_('Commit has comments')}" href="${h.route_path('repo_commit',repo_name=c.repo_name,commit_id=commit.raw_id,_anchor='comment-%s' % c.comments[commit.raw_id][0].comment_id)}">
42 <i class="icon-comment"></i> ${len(c.comments[commit.raw_id])}
43 </a>
44 %endif
45 </td>
46 <td class="td-hash">
47 <code>
48
49 <a href="${h.url('changeset_home',repo_name=c.repo_name,revision=commit.raw_id)}">
49 <a href="${h.route_path('repo_commit',repo_name=c.repo_name,commit_id=commit.raw_id)}">
50 <span class="${'commit_hash obsolete' if getattr(commit, 'obsolete', None) else 'commit_hash'}">${h.show_id(commit)}</span>
51 </a>
52 <i class="tooltip icon-clipboard clipboard-action" data-clipboard-text="${commit.raw_id}" title="${_('Copy the full commit id')}"></i>
53 % if hasattr(commit, 'phase'):
54 % if commit.phase != 'public':
55 <span class="tag phase-${commit.phase} tooltip" title="${_('Commit phase')}">${commit.phase}</span>
56 % endif
57 % endif
58
59 ## obsolete commits
60 % if hasattr(commit, 'obsolete'):
61 % if commit.obsolete:
62 <span class="tag obsolete-${commit.obsolete} tooltip" title="${_('Evolve State')}">${_('obsolete')}</span>
63 % endif
64 % endif
65
66 ## hidden commits
67 % if hasattr(commit, 'hidden'):
68 % if commit.hidden:
69 <span class="tag obsolete-${commit.hidden} tooltip" title="${_('Evolve State')}">${_('hidden')}</span>
70 % endif
71 % endif
72
73 </code>
74 </td>
75 <td class="td-message expand_commit" data-commit-id="${commit.raw_id}" title="${_('Expand commit message')}" onclick="commitsController.expandCommit(this); return false">
76 <div class="show_more_col">
77 <i class="show_more"></i>&nbsp;
78 </div>
79 </td>
80 <td class="td-description mid">
81 <div class="log-container truncate-wrap">
82 <div class="message truncate" id="c-${commit.raw_id}">${h.urlify_commit_message(commit.message, c.repo_name)}</div>
83 </div>
84 </td>
85
86 <td class="td-time">
87 ${h.age_component(commit.date)}
88 </td>
89 <td class="td-user">
90 ${base.gravatar_with_user(commit.author)}
91 </td>
92
93 <td class="td-tags tags-col">
94 <div id="t-${commit.raw_id}">
95
96 ## merge
97 %if commit.merge:
98 <span class="tag mergetag">
99 <i class="icon-merge"></i>${_('merge')}
100 </span>
101 %endif
102
103 ## branch
104 %if commit.branch:
105 <span class="tag branchtag" title="${h.tooltip(_('Branch %s') % commit.branch)}">
106 <a href="${h.route_path('repo_changelog',repo_name=c.repo_name,_query=dict(branch=commit.branch))}"><i class="icon-code-fork"></i>${h.shorter(commit.branch)}</a>
107 </span>
108 %endif
109
110 ## bookmarks
111 %if h.is_hg(c.rhodecode_repo):
112 %for book in commit.bookmarks:
113 <span class="tag booktag" title="${h.tooltip(_('Bookmark %s') % book)}">
114 <a href="${h.route_path('repo_files:default_path',repo_name=c.repo_name,commit_id=commit.raw_id, _query=dict(at=book))}"><i class="icon-bookmark"></i>${h.shorter(book)}</a>
115 </span>
116 %endfor
117 %endif
118
119 ## tags
120 %for tag in commit.tags:
121 <span class="tag tagtag" title="${h.tooltip(_('Tag %s') % tag)}">
122 <a href="${h.route_path('repo_files:default_path',repo_name=c.repo_name,commit_id=commit.raw_id, _query=dict(at=tag))}"><i class="icon-tag"></i>${h.shorter(tag)}</a>
123 </span>
124 %endfor
125
126 </div>
127 </td>
128 </tr>
129 % endfor
130
131 % if c.next_page:
132 <tr>
133 <td colspan="9" class="load-more-commits">
134 <a class="next-commits" href="#loadNextCommits" onclick="commitsController.loadNext(this, ${c.next_page}, '${c.branch_name}');return false">
135 ${_('load next')}
136 </a>
137 </td>
138 </tr>
139 % endif
140 <tr class="chunk-graph-data" style="display:none"
141 data-graph='${c.graph_data|n}'
142 data-node='${c.prev_page}:${c.next_page}'
143 data-commits='${c.graph_commits|n}'>
144 </tr> No newline at end of file
144 </tr>
@@ -1,39 +1,39 b''
1 <%namespace name="base" file="/base/base.mako"/>
2 <div class="table">
3
4 <table class="table rctable file_history">
5 %for cnt,cs in enumerate(c.pagination):
6 <tr id="chg_${cnt+1}" class="${'tablerow%s' % (cnt%2)}">
7 <td class="td-user">
8 ${base.gravatar_with_user(cs.author, 16)}
9 </td>
10 <td class="td-time">
11 <div class="date">
12 ${h.age_component(cs.date)}
13 </div>
14 </td>
15 <td class="td-message">
16 <div class="log-container">
17 <div class="message_history" title="${h.tooltip(cs.message)}">
18 <a href="${h.url('changeset_home',repo_name=c.repo_name,revision=cs.raw_id)}">
18 <a href="${h.route_path('repo_commit',repo_name=c.repo_name,commit_id=cs.raw_id)}">
19 ${h.shorter(cs.message, 75)}
20 </a>
21 </div>
22 </div>
23 </td>
24 <td class="td-hash">
25 <code>
26 <a href="${h.url('changeset_home',repo_name=c.repo_name,revision=cs.raw_id)}">
26 <a href="${h.route_path('repo_commit',repo_name=c.repo_name,commit_id=cs.raw_id)}">
27 <span>${h.show_id(cs)}</span>
28 </a>
29 </code>
30 </td>
31 <td class="td-actions">
32 <a href="${h.route_path('repo_files',repo_name=c.repo_name,commit_id=cs.raw_id,f_path=c.changelog_for_path)}">
33 ${_('Show File')}
34 </a>
35 </td>
36 </tr>
37 %endfor
38 </table>
39 </div>
NO CONTENT: 20 further modified files truncated (content too big to display); 1 file removed.