@@ -1,85 +1,93 @@
 # -*- coding: utf-8 -*-

 # Copyright (C) 2016-2019 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/
 from rhodecode.config import routing_links


 class VCSCallPredicate(object):
     def __init__(self, val, config):
         self.val = val

     def text(self):
         return 'vcs_call route = %s' % self.val

     phash = text

     def __call__(self, info, request):
         if hasattr(request, 'vcs_call'):
             # skip vcs calls
             return False

         return True


 def includeme(config):

     config.add_route(
         name='home',
         pattern='/')

     config.add_route(
+        name='main_page_repos_data',
+        pattern='/_home_repos')
+
+    config.add_route(
+        name='main_page_repo_groups_data',
+        pattern='/_home_repo_groups')
+
+    config.add_route(
         name='user_autocomplete_data',
         pattern='/_users')

     config.add_route(
         name='user_group_autocomplete_data',
         pattern='/_user_groups')

     config.add_route(
         name='repo_list_data',
         pattern='/_repos')

     config.add_route(
         name='repo_group_list_data',
         pattern='/_repo_groups')

     config.add_route(
         name='goto_switcher_data',
         pattern='/_goto_data')

     config.add_route(
         name='markup_preview',
         pattern='/_markup_preview')

     config.add_route(
         name='file_preview',
         pattern='/_file_preview')

     config.add_route(
         name='store_user_session_value',
         pattern='/_store_session_attr')

     # register our static links via redirection mechanism
     routing_links.connect_redirection_links(config)

     # Scan module for configuration decorators.
     config.scan('.views', ignore='.tests')

     config.add_route_predicate(
         'skip_vcs_call', VCSCallPredicate)
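
The two routes added above, `main_page_repos_data` and `main_page_repo_groups_data`, back the lazily loaded repository and repository-group grids that the reworked home views in the next file serve as JSON. As a rough sketch of what these route names resolve to, assuming only a standard Pyramid request object and the registrations above (the helper below is illustrative, not part of the change):

# Illustrative only: resolving the new grid routes from a Pyramid request object.
# The route names and URL patterns come from the config.add_route() calls above.
def example_grid_urls(request):
    repos_url = request.route_path('main_page_repos_data')  # '/_home_repos'
    repo_groups_url = request.route_path('main_page_repo_groups_data')  # '/_home_repo_groups'
    return repos_url, repo_groups_url
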
@@ -1,791 +1,823 @@
 # -*- coding: utf-8 -*-

 # Copyright (C) 2016-2019 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/

 import re
 import logging
 import collections

+from pyramid.httpexceptions import HTTPNotFound
 from pyramid.view import view_config

-from rhodecode.apps._base import BaseAppView
+from rhodecode.apps._base import BaseAppView, DataGridAppView
 from rhodecode.lib import helpers as h
 from rhodecode.lib.auth import (
-    LoginRequired, NotAnonymous, HasRepoGroupPermissionAnyDecorator, CSRFRequired)
+    LoginRequired, NotAnonymous, HasRepoGroupPermissionAnyDecorator, CSRFRequired,
+    HasRepoGroupPermissionAny)
 from rhodecode.lib.codeblocks import filenode_as_lines_tokens
 from rhodecode.lib.index import searcher_from_config
 from rhodecode.lib.utils2 import safe_unicode, str2bool, safe_int
-from rhodecode.lib.ext_json import json
 from rhodecode.lib.vcs.nodes import FileNode
 from rhodecode.model.db import (
-    func, true, or_, case, in_filter_generator, Repository, RepoGroup, User, UserGroup)
+    func, true, or_, case, in_filter_generator, Session,
+    Repository, RepoGroup, User, UserGroup)
 from rhodecode.model.repo import RepoModel
 from rhodecode.model.repo_group import RepoGroupModel
-from rhodecode.model.scm import RepoGroupList, RepoList
 from rhodecode.model.user import UserModel
 from rhodecode.model.user_group import UserGroupModel

 log = logging.getLogger(__name__)


-class HomeView(BaseAppView):
+class HomeView(BaseAppView, DataGridAppView):

     def load_default_context(self):
         c = self._get_local_tmpl_context()
         c.user = c.auth_user.get_instance()

         return c

     @LoginRequired()
     @view_config(
         route_name='user_autocomplete_data', request_method='GET',
         renderer='json_ext', xhr=True)
     def user_autocomplete_data(self):
         self.load_default_context()
         query = self.request.GET.get('query')
         active = str2bool(self.request.GET.get('active') or True)
         include_groups = str2bool(self.request.GET.get('user_groups'))
         expand_groups = str2bool(self.request.GET.get('user_groups_expand'))
         skip_default_user = str2bool(self.request.GET.get('skip_default_user'))

         log.debug('generating user list, query:%s, active:%s, with_groups:%s',
                   query, active, include_groups)

         _users = UserModel().get_users(
             name_contains=query, only_active=active)

         def maybe_skip_default_user(usr):
             if skip_default_user and usr['username'] == UserModel.cls.DEFAULT_USER:
                 return False
             return True
         _users = filter(maybe_skip_default_user, _users)

         if include_groups:
             # extend with user groups
             _user_groups = UserGroupModel().get_user_groups(
                 name_contains=query, only_active=active,
                 expand_groups=expand_groups)
             _users = _users + _user_groups

         return {'suggestions': _users}

     @LoginRequired()
     @NotAnonymous()
     @view_config(
         route_name='user_group_autocomplete_data', request_method='GET',
         renderer='json_ext', xhr=True)
     def user_group_autocomplete_data(self):
         self.load_default_context()
         query = self.request.GET.get('query')
         active = str2bool(self.request.GET.get('active') or True)
         expand_groups = str2bool(self.request.GET.get('user_groups_expand'))

         log.debug('generating user group list, query:%s, active:%s',
                   query, active)

         _user_groups = UserGroupModel().get_user_groups(
             name_contains=query, only_active=active,
             expand_groups=expand_groups)
         _user_groups = _user_groups

         return {'suggestions': _user_groups}

     def _get_repo_list(self, name_contains=None, repo_type=None, repo_group_name='', limit=20):
         org_query = name_contains
         allowed_ids = self._rhodecode_user.repo_acl_ids(
             ['repository.read', 'repository.write', 'repository.admin'],
             cache=False, name_filter=name_contains) or [-1]

         query = Repository.query()\
             .filter(Repository.archived.isnot(true()))\
             .filter(or_(
                 # generate multiple IN to fix limitation problems
                 *in_filter_generator(Repository.repo_id, allowed_ids)
             ))

         query = query.order_by(case(
             [
                 (Repository.repo_name.startswith(repo_group_name), repo_group_name+'/'),
             ],
         ))
         query = query.order_by(func.length(Repository.repo_name))
         query = query.order_by(Repository.repo_name)

         if repo_type:
             query = query.filter(Repository.repo_type == repo_type)

         if name_contains:
             ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
             query = query.filter(
                 Repository.repo_name.ilike(ilike_expression))
             query = query.limit(limit)

         acl_iter = query

         return [
             {
                 'id': obj.repo_name,
                 'value': org_query,
                 'value_display': obj.repo_name,
                 'text': obj.repo_name,
                 'type': 'repo',
                 'repo_id': obj.repo_id,
                 'repo_type': obj.repo_type,
                 'private': obj.private,
                 'url': h.route_path('repo_summary', repo_name=obj.repo_name)
             }
             for obj in acl_iter]

     def _get_repo_group_list(self, name_contains=None, repo_group_name='', limit=20):
         org_query = name_contains
         allowed_ids = self._rhodecode_user.repo_group_acl_ids(
             ['group.read', 'group.write', 'group.admin'],
             cache=False, name_filter=name_contains) or [-1]

         query = RepoGroup.query()\
             .filter(or_(
                 # generate multiple IN to fix limitation problems
                 *in_filter_generator(RepoGroup.group_id, allowed_ids)
             ))

         query = query.order_by(case(
             [
                 (RepoGroup.group_name.startswith(repo_group_name), repo_group_name+'/'),
             ],
         ))
         query = query.order_by(func.length(RepoGroup.group_name))
         query = query.order_by(RepoGroup.group_name)

         if name_contains:
             ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
             query = query.filter(
                 RepoGroup.group_name.ilike(ilike_expression))
             query = query.limit(limit)

         acl_iter = query

         return [
             {
                 'id': obj.group_name,
                 'value': org_query,
                 'value_display': obj.group_name,
                 'text': obj.group_name,
                 'type': 'repo_group',
                 'repo_group_id': obj.group_id,
                 'url': h.route_path(
                     'repo_group_home', repo_group_name=obj.group_name)
             }
             for obj in acl_iter]

     def _get_user_list(self, name_contains=None, limit=20):
         org_query = name_contains
         if not name_contains:
             return [], False

         # TODO(marcink): should all logged in users be allowed to search others?
         allowed_user_search = self._rhodecode_user.username != User.DEFAULT_USER
         if not allowed_user_search:
             return [], False

         name_contains = re.compile('(?:user:[ ]?)(.+)').findall(name_contains)
         if len(name_contains) != 1:
             return [], False

         name_contains = name_contains[0]

         query = User.query()\
             .order_by(func.length(User.username))\
             .order_by(User.username) \
             .filter(User.username != User.DEFAULT_USER)

         if name_contains:
             ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
             query = query.filter(
                 User.username.ilike(ilike_expression))
             query = query.limit(limit)

         acl_iter = query

         return [
             {
                 'id': obj.user_id,
                 'value': org_query,
                 'value_display': 'user: `{}`'.format(obj.username),
                 'type': 'user',
                 'icon_link': h.gravatar_url(obj.email, 30),
                 'url': h.route_path(
                     'user_profile', username=obj.username)
             }
             for obj in acl_iter], True

     def _get_user_groups_list(self, name_contains=None, limit=20):
         org_query = name_contains
         if not name_contains:
             return [], False

         # TODO(marcink): should all logged in users be allowed to search others?
         allowed_user_search = self._rhodecode_user.username != User.DEFAULT_USER
         if not allowed_user_search:
             return [], False

         name_contains = re.compile('(?:user_group:[ ]?)(.+)').findall(name_contains)
         if len(name_contains) != 1:
             return [], False

         name_contains = name_contains[0]

         query = UserGroup.query()\
             .order_by(func.length(UserGroup.users_group_name))\
             .order_by(UserGroup.users_group_name)

         if name_contains:
             ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
             query = query.filter(
                 UserGroup.users_group_name.ilike(ilike_expression))
             query = query.limit(limit)

         acl_iter = query

         return [
             {
                 'id': obj.users_group_id,
                 'value': org_query,
                 'value_display': 'user_group: `{}`'.format(obj.users_group_name),
                 'type': 'user_group',
                 'url': h.route_path(
                     'user_group_profile', user_group_name=obj.users_group_name)
             }
             for obj in acl_iter], True

     def _get_hash_commit_list(self, auth_user, searcher, query, repo=None, repo_group=None):
         repo_name = repo_group_name = None
         if repo:
             repo_name = repo.repo_name
         if repo_group:
             repo_group_name = repo_group.group_name

         org_query = query
         if not query or len(query) < 3 or not searcher:
             return [], False

         commit_hashes = re.compile('(?:commit:[ ]?)([0-9a-f]{2,40})').findall(query)

         if len(commit_hashes) != 1:
             return [], False

         commit_hash = commit_hashes[0]

         result = searcher.search(
             'commit_id:{}*'.format(commit_hash), 'commit', auth_user,
             repo_name, repo_group_name, raise_on_exc=False)

         commits = []
         for entry in result['results']:
             repo_data = {
                 'repository_id': entry.get('repository_id'),
                 'repository_type': entry.get('repo_type'),
                 'repository_name': entry.get('repository'),
             }

             commit_entry = {
                 'id': entry['commit_id'],
                 'value': org_query,
                 'value_display': '`{}` commit: {}'.format(
                     entry['repository'], entry['commit_id']),
                 'type': 'commit',
                 'repo': entry['repository'],
                 'repo_data': repo_data,

                 'url': h.route_path(
                     'repo_commit',
                     repo_name=entry['repository'], commit_id=entry['commit_id'])
             }

             commits.append(commit_entry)
         return commits, True

     def _get_path_list(self, auth_user, searcher, query, repo=None, repo_group=None):
         repo_name = repo_group_name = None
         if repo:
             repo_name = repo.repo_name
         if repo_group:
             repo_group_name = repo_group.group_name

         org_query = query
         if not query or len(query) < 3 or not searcher:
             return [], False

         paths_re = re.compile('(?:file:[ ]?)(.+)').findall(query)
         if len(paths_re) != 1:
             return [], False

         file_path = paths_re[0]

         search_path = searcher.escape_specials(file_path)
         result = searcher.search(
             'file.raw:*{}*'.format(search_path), 'path', auth_user,
             repo_name, repo_group_name, raise_on_exc=False)

         files = []
         for entry in result['results']:
             repo_data = {
                 'repository_id': entry.get('repository_id'),
                 'repository_type': entry.get('repo_type'),
                 'repository_name': entry.get('repository'),
             }

             file_entry = {
                 'id': entry['commit_id'],
                 'value': org_query,
                 'value_display': '`{}` file: {}'.format(
                     entry['repository'], entry['file']),
                 'type': 'file',
                 'repo': entry['repository'],
                 'repo_data': repo_data,

                 'url': h.route_path(
                     'repo_files',
                     repo_name=entry['repository'], commit_id=entry['commit_id'],
                     f_path=entry['file'])
             }

             files.append(file_entry)
         return files, True

     @LoginRequired()
     @view_config(
         route_name='repo_list_data', request_method='GET',
         renderer='json_ext', xhr=True)
     def repo_list_data(self):
         _ = self.request.translate
         self.load_default_context()

         query = self.request.GET.get('query')
         repo_type = self.request.GET.get('repo_type')
         log.debug('generating repo list, query:%s, repo_type:%s',
                   query, repo_type)

         res = []
         repos = self._get_repo_list(query, repo_type=repo_type)
         if repos:
             res.append({
                 'text': _('Repositories'),
                 'children': repos
             })

         data = {
             'more': False,
             'results': res
         }
         return data

     @LoginRequired()
     @view_config(
         route_name='repo_group_list_data', request_method='GET',
         renderer='json_ext', xhr=True)
     def repo_group_list_data(self):
         _ = self.request.translate
         self.load_default_context()

         query = self.request.GET.get('query')

         log.debug('generating repo group list, query:%s',
                   query)

         res = []
         repo_groups = self._get_repo_group_list(query)
         if repo_groups:
             res.append({
                 'text': _('Repository Groups'),
                 'children': repo_groups
             })

         data = {
             'more': False,
             'results': res
         }
         return data

     def _get_default_search_queries(self, search_context, searcher, query):
         if not searcher:
             return []

         is_es_6 = searcher.is_es_6

         queries = []
         repo_group_name, repo_name, repo_context = None, None, None

         # repo group context
         if search_context.get('search_context[repo_group_name]'):
             repo_group_name = search_context.get('search_context[repo_group_name]')
         if search_context.get('search_context[repo_name]'):
             repo_name = search_context.get('search_context[repo_name]')
             repo_context = search_context.get('search_context[repo_view_type]')

         if is_es_6 and repo_name:
             # files
             def query_modifier():
                 qry = query
                 return {'q': qry, 'type': 'content'}

             label = u'File search for `{}`'.format(h.escape(query))
             file_qry = {
                 'id': -10,
                 'value': query,
                 'value_display': label,
                 'value_icon': '<i class="icon-code"></i>',
                 'type': 'search',
                 'subtype': 'repo',
                 'url': h.route_path('search_repo',
                                     repo_name=repo_name,
                                     _query=query_modifier())
             }

             # commits
             def query_modifier():
                 qry = query
                 return {'q': qry, 'type': 'commit'}

             label = u'Commit search for `{}`'.format(h.escape(query))
             commit_qry = {
                 'id': -20,
                 'value': query,
                 'value_display': label,
                 'value_icon': '<i class="icon-history"></i>',
                 'type': 'search',
                 'subtype': 'repo',
                 'url': h.route_path('search_repo',
                                     repo_name=repo_name,
                                     _query=query_modifier())
             }

             if repo_context in ['commit', 'commits']:
                 queries.extend([commit_qry, file_qry])
             elif repo_context in ['files', 'summary']:
                 queries.extend([file_qry, commit_qry])
             else:
                 queries.extend([commit_qry, file_qry])

         elif is_es_6 and repo_group_name:
             # files
             def query_modifier():
                 qry = query
                 return {'q': qry, 'type': 'content'}

             label = u'File search for `{}`'.format(query)
             file_qry = {
                 'id': -30,
                 'value': query,
                 'value_display': label,
                 'value_icon': '<i class="icon-code"></i>',
                 'type': 'search',
                 'subtype': 'repo_group',
                 'url': h.route_path('search_repo_group',
                                     repo_group_name=repo_group_name,
                                     _query=query_modifier())
             }

             # commits
             def query_modifier():
                 qry = query
                 return {'q': qry, 'type': 'commit'}

             label = u'Commit search for `{}`'.format(query)
             commit_qry = {
                 'id': -40,
                 'value': query,
                 'value_display': label,
                 'value_icon': '<i class="icon-history"></i>',
                 'type': 'search',
                 'subtype': 'repo_group',
                 'url': h.route_path('search_repo_group',
                                     repo_group_name=repo_group_name,
                                     _query=query_modifier())
             }

             if repo_context in ['commit', 'commits']:
                 queries.extend([commit_qry, file_qry])
             elif repo_context in ['files', 'summary']:
                 queries.extend([file_qry, commit_qry])
             else:
                 queries.extend([commit_qry, file_qry])

         # Global, not scoped
         if not queries:
             queries.append(
                 {
                     'id': -1,
                     'value': query,
                     'value_display': u'File search for: `{}`'.format(query),
                     'value_icon': '<i class="icon-code"></i>',
                     'type': 'search',
                     'subtype': 'global',
                     'url': h.route_path('search',
                                         _query={'q': query, 'type': 'content'})
                 })
             queries.append(
                 {
                     'id': -2,
                     'value': query,
                     'value_display': u'Commit search for: `{}`'.format(query),
                     'value_icon': '<i class="icon-history"></i>',
                     'type': 'search',
                     'subtype': 'global',
                     'url': h.route_path('search',
                                         _query={'q': query, 'type': 'commit'})
                 })

         return queries

     @LoginRequired()
     @view_config(
         route_name='goto_switcher_data', request_method='GET',
         renderer='json_ext', xhr=True)
     def goto_switcher_data(self):
         c = self.load_default_context()

         _ = self.request.translate

         query = self.request.GET.get('query')
         log.debug('generating main filter data, query %s', query)

         res = []
         if not query:
             return {'suggestions': res}

         def no_match(name):
             return {
                 'id': -1,
                 'value': "",
                 'value_display': name,
                 'type': 'text',
                 'url': ""
             }
         searcher = searcher_from_config(self.request.registry.settings)
         has_specialized_search = False

         # set repo context
         repo = None
         repo_id = safe_int(self.request.GET.get('search_context[repo_id]'))
         if repo_id:
             repo = Repository.get(repo_id)

         # set group context
         repo_group = None
         repo_group_id = safe_int(self.request.GET.get('search_context[repo_group_id]'))
         if repo_group_id:
             repo_group = RepoGroup.get(repo_group_id)
         prefix_match = False

         # user: type search
         if not prefix_match:
             users, prefix_match = self._get_user_list(query)
             if users:
                 has_specialized_search = True
                 for serialized_user in users:
                     res.append(serialized_user)
             elif prefix_match:
                 has_specialized_search = True
                 res.append(no_match('No matching users found'))

         # user_group: type search
         if not prefix_match:
             user_groups, prefix_match = self._get_user_groups_list(query)
             if user_groups:
                 has_specialized_search = True
                 for serialized_user_group in user_groups:
                     res.append(serialized_user_group)
             elif prefix_match:
                 has_specialized_search = True
                 res.append(no_match('No matching user groups found'))

         # FTS commit: type search
         if not prefix_match:
             commits, prefix_match = self._get_hash_commit_list(
                 c.auth_user, searcher, query, repo, repo_group)
             if commits:
                 has_specialized_search = True
                 unique_repos = collections.OrderedDict()
                 for commit in commits:
                     repo_name = commit['repo']
                     unique_repos.setdefault(repo_name, []).append(commit)

                 for _repo, commits in unique_repos.items():
                     for commit in commits:
                         res.append(commit)
             elif prefix_match:
                 has_specialized_search = True
                 res.append(no_match('No matching commits found'))

         # FTS file: type search
         if not prefix_match:
             paths, prefix_match = self._get_path_list(
                 c.auth_user, searcher, query, repo, repo_group)
             if paths:
                 has_specialized_search = True
                 unique_repos = collections.OrderedDict()
                 for path in paths:
                     repo_name = path['repo']
                     unique_repos.setdefault(repo_name, []).append(path)

                 for repo, paths in unique_repos.items():
                     for path in paths:
                         res.append(path)
             elif prefix_match:
                 has_specialized_search = True
                 res.append(no_match('No matching files found'))

         # main suggestions
         if not has_specialized_search:
             repo_group_name = ''
             if repo_group:
                 repo_group_name = repo_group.group_name

             for _q in self._get_default_search_queries(self.request.GET, searcher, query):
                 res.append(_q)

             repo_groups = self._get_repo_group_list(query, repo_group_name=repo_group_name)
             for serialized_repo_group in repo_groups:
                 res.append(serialized_repo_group)

             repos = self._get_repo_list(query, repo_group_name=repo_group_name)
             for serialized_repo in repos:
                 res.append(serialized_repo)

             if not repos and not repo_groups:
                 res.append(no_match('No matches found'))

         return {'suggestions': res}

676 | def _get_groups_and_repos(self, repo_group_id=None): |
|
|||
677 | # repo groups groups |
|
|||
678 | repo_group_list = RepoGroup.get_all_repo_groups(group_id=repo_group_id) |
|
|||
679 | _perms = ['group.read', 'group.write', 'group.admin'] |
|
|||
680 | repo_group_list_acl = RepoGroupList(repo_group_list, perm_set=_perms) |
|
|||
681 | repo_group_data = RepoGroupModel().get_repo_groups_as_dict( |
|
|||
682 | repo_group_list=repo_group_list_acl, admin=False) |
|
|||
683 |
|
||||
684 | # repositories |
|
|||
685 | repo_list = Repository.get_all_repos(group_id=repo_group_id) |
|
|||
686 | _perms = ['repository.read', 'repository.write', 'repository.admin'] |
|
|||
687 | repo_list_acl = RepoList(repo_list, perm_set=_perms) |
|
|||
688 | repo_data = RepoModel().get_repos_as_dict( |
|
|||
689 | repo_list=repo_list_acl, admin=False) |
|
|||
690 |
|
||||
691 | return repo_data, repo_group_data |
|
|||
692 |
|
||||
693 | @LoginRequired() |
|
677 | @LoginRequired() | |
694 | @view_config( |
|
678 | @view_config( | |
695 | route_name='home', request_method='GET', |
|
679 | route_name='home', request_method='GET', | |
696 | renderer='rhodecode:templates/index.mako') |
|
680 | renderer='rhodecode:templates/index.mako') | |
697 | def main_page(self): |
|
681 | def main_page(self): | |
698 | c = self.load_default_context() |
|
682 | c = self.load_default_context() | |
699 | c.repo_group = None |
|
683 | c.repo_group = None | |
|
684 | return self._get_template_context(c) | |||
700 |
|
685 | |||
701 | repo_data, repo_group_data = self._get_groups_and_repos() |
|
686 | def _main_page_repo_groups_data(self, repo_group_id): | |
702 | # json used to render the grids |
|
687 | column_map = { | |
703 | c.repos_data = json.dumps(repo_data) |
|
688 | 'name_raw': 'group_name_hash', | |
704 | c.repo_groups_data = json.dumps(repo_group_data) |
|
689 | 'desc': 'group_description', | |
|
690 | 'last_change_raw': 'updated_on', | |||
|
691 | 'owner': 'user_username', | |||
|
692 | } | |||
|
693 | draw, start, limit = self._extract_chunk(self.request) | |||
|
694 | search_q, order_by, order_dir = self._extract_ordering( | |||
|
695 | self.request, column_map=column_map) | |||
|
696 | return RepoGroupModel().get_repo_groups_data_table( | |||
|
697 | draw, start, limit, | |||
|
698 | search_q, order_by, order_dir, | |||
|
699 | self._rhodecode_user, repo_group_id) | |||
|
700 | ||||
|
701 | def _main_page_repos_data(self, repo_group_id): | |||
|
702 | column_map = { | |||
|
703 | 'name_raw': 'repo_name', | |||
|
704 | 'desc': 'description', | |||
|
705 | 'last_change_raw': 'updated_on', | |||
|
706 | 'owner': 'user_username', | |||
|
707 | } | |||
|
708 | draw, start, limit = self._extract_chunk(self.request) | |||
|
709 | search_q, order_by, order_dir = self._extract_ordering( | |||
|
710 | self.request, column_map=column_map) | |||
|
711 | return RepoModel().get_repos_data_table( | |||
|
712 | draw, start, limit, | |||
|
713 | search_q, order_by, order_dir, | |||
|
714 | self._rhodecode_user, repo_group_id) | |||
705 |
|
715 | |||
706 | return self._get_template_context(c) |
|
716 | @LoginRequired() | |
|
717 | @view_config( | |||
|
718 | route_name='main_page_repo_groups_data', | |||
|
719 | request_method='GET', renderer='json_ext', xhr=True) | |||
|
720 | def main_page_repo_groups_data(self): | |||
|
721 | self.load_default_context() | |||
|
722 | repo_group_id = safe_int(self.request.GET.get('repo_group_id')) | |||
|
723 | ||||
|
724 | if repo_group_id: | |||
|
725 | group = RepoGroup.get_or_404(repo_group_id) | |||
|
726 | _perms = ['group.read', 'group.write', 'group.admin'] | |||
|
727 | if not HasRepoGroupPermissionAny(*_perms)( | |||
|
728 | group.group_name, 'user is allowed to list repo group children'): | |||
|
729 | raise HTTPNotFound() | |||
|
730 | ||||
|
731 | return self._main_page_repo_groups_data(repo_group_id) | |||
|
732 | ||||
|
733 | @LoginRequired() | |||
|
734 | @view_config( | |||
|
735 | route_name='main_page_repos_data', | |||
|
736 | request_method='GET', renderer='json_ext', xhr=True) | |||
|
737 | def main_page_repos_data(self): | |||
|
738 | self.load_default_context() | |||
|
739 | repo_group_id = safe_int(self.request.GET.get('repo_group_id')) | |||
|
740 | ||||
|
741 | if repo_group_id: | |||
|
742 | group = RepoGroup.get_or_404(repo_group_id) | |||
|
743 | _perms = ['group.read', 'group.write', 'group.admin'] | |||
|
744 | if not HasRepoGroupPermissionAny(*_perms)( | |||
|
745 | group.group_name, 'user is allowed to list repo group children'): | |||
|
746 | raise HTTPNotFound() | |||
|
747 | ||||
|
748 | return self._main_page_repos_data(repo_group_id) | |||
707 |
|
749 | |||
708 | @LoginRequired() |
|
750 | @LoginRequired() | |
709 | @HasRepoGroupPermissionAnyDecorator( |
|
751 | @HasRepoGroupPermissionAnyDecorator( | |
710 | 'group.read', 'group.write', 'group.admin') |
|
752 | 'group.read', 'group.write', 'group.admin') | |
711 | @view_config( |
|
753 | @view_config( | |
712 | route_name='repo_group_home', request_method='GET', |
|
754 | route_name='repo_group_home', request_method='GET', | |
713 | renderer='rhodecode:templates/index_repo_group.mako') |
|
755 | renderer='rhodecode:templates/index_repo_group.mako') | |
714 | @view_config( |
|
756 | @view_config( | |
715 | route_name='repo_group_home_slash', request_method='GET', |
|
757 | route_name='repo_group_home_slash', request_method='GET', | |
716 | renderer='rhodecode:templates/index_repo_group.mako') |
|
758 | renderer='rhodecode:templates/index_repo_group.mako') | |
717 | def repo_group_main_page(self): |
|
759 | def repo_group_main_page(self): | |
718 | c = self.load_default_context() |
|
760 | c = self.load_default_context() | |
719 | c.repo_group = self.request.db_repo_group |
|
761 | c.repo_group = self.request.db_repo_group | |
720 | repo_data, repo_group_data = self._get_groups_and_repos(c.repo_group.group_id) |    |||
721 |    ||||
722 | # update every 5 min |    |||
723 | if self.request.db_repo_group.last_commit_cache_update_diff > 60 * 5: |    |||
724 | self.request.db_repo_group.update_commit_cache() |    |||
725 |    ||||
726 | # json used to render the grids |    |||
727 | c.repos_data = json.dumps(repo_data) |    |||
728 | c.repo_groups_data = json.dumps(repo_group_data) |    |||
729 |    ||||
730 | return self._get_template_context(c) |
|
762 | return self._get_template_context(c) | |
731 |
|
763 | |||
732 | @LoginRequired() |
|
764 | @LoginRequired() | |
733 | @CSRFRequired() |
|
765 | @CSRFRequired() | |
734 | @view_config( |
|
766 | @view_config( | |
735 | route_name='markup_preview', request_method='POST', |
|
767 | route_name='markup_preview', request_method='POST', | |
736 | renderer='string', xhr=True) |
|
768 | renderer='string', xhr=True) | |
737 | def markup_preview(self): |
|
769 | def markup_preview(self): | |
738 | # Technically a CSRF token is not needed as no state changes with this |
|
770 | # Technically a CSRF token is not needed as no state changes with this | |
739 | # call. However, as this is a POST, it is better to have it, so automated |
|
771 | # call. However, as this is a POST, it is better to have it, so automated | |
740 | # tools don't flag it as potential CSRF. |
|
772 | # tools don't flag it as potential CSRF. | |
741 | # Post is required because the payload could be bigger than the maximum |
|
773 | # Post is required because the payload could be bigger than the maximum | |
742 | # allowed by GET. |
|
774 | # allowed by GET. | |
743 |
|
775 | |||
744 | text = self.request.POST.get('text') |
|
776 | text = self.request.POST.get('text') | |
745 | renderer = self.request.POST.get('renderer') or 'rst' |
|
777 | renderer = self.request.POST.get('renderer') or 'rst' | |
746 | if text: |
|
778 | if text: | |
747 | return h.render(text, renderer=renderer, mentions=True) |
|
779 | return h.render(text, renderer=renderer, mentions=True) | |
748 | return '' |
|
780 | return '' | |
749 |
|
781 | |||
750 | @LoginRequired() |
|
782 | @LoginRequired() | |
751 | @CSRFRequired() |
|
783 | @CSRFRequired() | |
752 | @view_config( |
|
784 | @view_config( | |
753 | route_name='file_preview', request_method='POST', |
|
785 | route_name='file_preview', request_method='POST', | |
754 | renderer='string', xhr=True) |
|
786 | renderer='string', xhr=True) | |
755 | def file_preview(self): |
|
787 | def file_preview(self): | |
756 | # Technically a CSRF token is not needed as no state changes with this |
|
788 | # Technically a CSRF token is not needed as no state changes with this | |
757 | # call. However, as this is a POST, it is better to have it, so automated |
|
789 | # call. However, as this is a POST, it is better to have it, so automated | |
758 | # tools don't flag it as potential CSRF. |
|
790 | # tools don't flag it as potential CSRF. | |
759 | # Post is required because the payload could be bigger than the maximum |
|
791 | # Post is required because the payload could be bigger than the maximum | |
760 | # allowed by GET. |
|
792 | # allowed by GET. | |
761 |
|
793 | |||
762 | text = self.request.POST.get('text') |
|
794 | text = self.request.POST.get('text') | |
763 | file_path = self.request.POST.get('file_path') |
|
795 | file_path = self.request.POST.get('file_path') | |
764 |
|
796 | |||
765 | renderer = h.renderer_from_filename(file_path) |
|
797 | renderer = h.renderer_from_filename(file_path) | |
766 |
|
798 | |||
767 | if renderer: |
|
799 | if renderer: | |
768 | return h.render(text, renderer=renderer, mentions=True) |
|
800 | return h.render(text, renderer=renderer, mentions=True) | |
769 | else: |
|
801 | else: | |
770 | self.load_default_context() |
|
802 | self.load_default_context() | |
771 | _render = self.request.get_partial_renderer( |
|
803 | _render = self.request.get_partial_renderer( | |
772 | 'rhodecode:templates/files/file_content.mako') |
|
804 | 'rhodecode:templates/files/file_content.mako') | |
773 |
|
805 | |||
774 | lines = filenode_as_lines_tokens(FileNode(file_path, text)) |
|
806 | lines = filenode_as_lines_tokens(FileNode(file_path, text)) | |
775 |
|
807 | |||
776 | return _render('render_lines', lines) |
|
808 | return _render('render_lines', lines) | |
777 |
|
809 | |||
778 | @LoginRequired() |
|
810 | @LoginRequired() | |
779 | @CSRFRequired() |
|
811 | @CSRFRequired() | |
780 | @view_config( |
|
812 | @view_config( | |
781 | route_name='store_user_session_value', request_method='POST', |
|
813 | route_name='store_user_session_value', request_method='POST', | |
782 | renderer='string', xhr=True) |
|
814 | renderer='string', xhr=True) | |
783 | def store_user_session_attr(self): |
|
815 | def store_user_session_attr(self): | |
784 | key = self.request.POST.get('key') |
|
816 | key = self.request.POST.get('key') | |
785 | val = self.request.POST.get('val') |
|
817 | val = self.request.POST.get('val') | |
786 |
|
818 | |||
787 | existing_value = self.request.session.get(key) |
|
819 | existing_value = self.request.session.get(key) | |
788 | if existing_value != val: |
|
820 | if existing_value != val: | |
789 | self.request.session[key] = val |
|
821 | self.request.session[key] = val | |
790 |
|
822 | |||
791 | return 'stored:{}:{}'.format(key, val) |
|
823 | return 'stored:{}:{}'.format(key, val) |
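The hunk above replaces the server-rendered dashboard grids with two XHR endpoints, main_page_repos_data (/_home_repos) and main_page_repo_groups_data (/_home_repo_groups), whose JSON is produced by RepoModel().get_repos_data_table() and RepoGroupModel().get_repo_groups_data_table(). Below is a minimal client-side sketch of calling one of them; the DataTables-style paging parameter names and the XHR header are assumptions inferred from the _extract_chunk/_extract_ordering helpers and the xhr=True view predicate, and the base URL is hypothetical.

import requests

BASE_URL = 'https://rhodecode.example.com'  # hypothetical instance
session = requests.Session()                # assumed to be already authenticated (@LoginRequired)

def fetch_home_repos(repo_group_id=None, start=0, length=100):
    # DataTables-style paging parameters (assumed names, read by _extract_chunk/_extract_ordering)
    params = {'draw': 1, 'start': start, 'length': length}
    if repo_group_id is not None:
        # optional filter; the view raises HTTPNotFound when the user lacks
        # group.read/write/admin permission on that repo group
        params['repo_group_id'] = repo_group_id
    response = session.get(
        BASE_URL + '/_home_repos',
        params=params,
        # xhr=True on the view config means the request must look like an XMLHttpRequest
        headers={'X-Requested-With': 'XMLHttpRequest'},
    )
    response.raise_for_status()
    return response.json()  # serialized by the json_ext renderer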
@@ -1,613 +1,615 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2019 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2019 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | The base Controller API |
|
22 | The base Controller API | |
23 | Provides the BaseController class for subclassing. And usage in different |
|
23 | Provides the BaseController class for subclassing. And usage in different | |
24 | controllers |
|
24 | controllers | |
25 | """ |
|
25 | """ | |
26 |
|
26 | |||
27 | import logging |
|
27 | import logging | |
28 | import socket |
|
28 | import socket | |
29 |
|
29 | |||
30 | import markupsafe |
|
30 | import markupsafe | |
31 | import ipaddress |
|
31 | import ipaddress | |
32 |
|
32 | |||
33 | from paste.auth.basic import AuthBasicAuthenticator |
|
33 | from paste.auth.basic import AuthBasicAuthenticator | |
34 | from paste.httpexceptions import HTTPUnauthorized, HTTPForbidden, get_exception |
|
34 | from paste.httpexceptions import HTTPUnauthorized, HTTPForbidden, get_exception | |
35 | from paste.httpheaders import WWW_AUTHENTICATE, AUTHORIZATION |
|
35 | from paste.httpheaders import WWW_AUTHENTICATE, AUTHORIZATION | |
36 |
|
36 | |||
37 | import rhodecode |
|
37 | import rhodecode | |
38 | from rhodecode.apps._base import TemplateArgs |
|
38 | from rhodecode.apps._base import TemplateArgs | |
39 | from rhodecode.authentication.base import VCS_TYPE |
|
39 | from rhodecode.authentication.base import VCS_TYPE | |
40 | from rhodecode.lib import auth, utils2 |
|
40 | from rhodecode.lib import auth, utils2 | |
41 | from rhodecode.lib import helpers as h |
|
41 | from rhodecode.lib import helpers as h | |
42 | from rhodecode.lib.auth import AuthUser, CookieStoreWrapper |
|
42 | from rhodecode.lib.auth import AuthUser, CookieStoreWrapper | |
43 | from rhodecode.lib.exceptions import UserCreationError |
|
43 | from rhodecode.lib.exceptions import UserCreationError | |
44 | from rhodecode.lib.utils import (password_changed, get_enabled_hook_classes) |
|
44 | from rhodecode.lib.utils import (password_changed, get_enabled_hook_classes) | |
45 | from rhodecode.lib.utils2 import ( |
|
45 | from rhodecode.lib.utils2 import ( | |
46 | str2bool, safe_unicode, AttributeDict, safe_int, sha1, aslist, safe_str) |
|
46 | str2bool, safe_unicode, AttributeDict, safe_int, sha1, aslist, safe_str) | |
47 | from rhodecode.model.db import Repository, User, ChangesetComment, UserBookmark |
|
47 | from rhodecode.model.db import Repository, User, ChangesetComment, UserBookmark | |
48 | from rhodecode.model.notification import NotificationModel |
|
48 | from rhodecode.model.notification import NotificationModel | |
49 | from rhodecode.model.settings import VcsSettingsModel, SettingsModel |
|
49 | from rhodecode.model.settings import VcsSettingsModel, SettingsModel | |
50 |
|
50 | |||
51 | log = logging.getLogger(__name__) |
|
51 | log = logging.getLogger(__name__) | |
52 |
|
52 | |||
53 |
|
53 | |||
54 | def _filter_proxy(ip): |
|
54 | def _filter_proxy(ip): | |
55 | """ |
|
55 | """ | |
56 | Passed in IP addresses in HEADERS can be in a special format of multiple |
|
56 | Passed in IP addresses in HEADERS can be in a special format of multiple | |
57 | ips. Those comma separated IPs are passed from various proxies in the |
|
57 | ips. Those comma separated IPs are passed from various proxies in the | |
58 | chain of request processing. The left-most being the original client. |
|
58 | chain of request processing. The left-most being the original client. | |
59 | We only care about the first IP which came from the org. client. |
|
59 | We only care about the first IP which came from the org. client. | |
60 |
|
60 | |||
61 | :param ip: ip string from headers |
|
61 | :param ip: ip string from headers | |
62 | """ |
|
62 | """ | |
63 | if ',' in ip: |
|
63 | if ',' in ip: | |
64 | _ips = ip.split(',') |
|
64 | _ips = ip.split(',') | |
65 | _first_ip = _ips[0].strip() |
|
65 | _first_ip = _ips[0].strip() | |
66 | log.debug('Got multiple IPs %s, using %s', ','.join(_ips), _first_ip) |
|
66 | log.debug('Got multiple IPs %s, using %s', ','.join(_ips), _first_ip) | |
67 | return _first_ip |
|
67 | return _first_ip | |
68 | return ip |
|
68 | return ip | |
69 |
|
69 | |||
70 |
|
70 | |||
71 | def _filter_port(ip): |
|
71 | def _filter_port(ip): | |
72 | """ |
|
72 | """ | |
73 | Removes a port from ip, there are 4 main cases to handle here. |
|
73 | Removes a port from ip, there are 4 main cases to handle here. | |
74 | - ipv4 eg. 127.0.0.1 |
|
74 | - ipv4 eg. 127.0.0.1 | |
75 | - ipv6 eg. ::1 |
|
75 | - ipv6 eg. ::1 | |
76 | - ipv4+port eg. 127.0.0.1:8080 |
|
76 | - ipv4+port eg. 127.0.0.1:8080 | |
77 | - ipv6+port eg. [::1]:8080 |
|
77 | - ipv6+port eg. [::1]:8080 | |
78 |
|
78 | |||
79 | :param ip: |
|
79 | :param ip: | |
80 | """ |
|
80 | """ | |
81 | def is_ipv6(ip_addr): |
|
81 | def is_ipv6(ip_addr): | |
82 | if hasattr(socket, 'inet_pton'): |
|
82 | if hasattr(socket, 'inet_pton'): | |
83 | try: |
|
83 | try: | |
84 | socket.inet_pton(socket.AF_INET6, ip_addr) |
|
84 | socket.inet_pton(socket.AF_INET6, ip_addr) | |
85 | except socket.error: |
|
85 | except socket.error: | |
86 | return False |
|
86 | return False | |
87 | else: |
|
87 | else: | |
88 | # fallback to ipaddress |
|
88 | # fallback to ipaddress | |
89 | try: |
|
89 | try: | |
90 | ipaddress.IPv6Address(safe_unicode(ip_addr)) |
|
90 | ipaddress.IPv6Address(safe_unicode(ip_addr)) | |
91 | except Exception: |
|
91 | except Exception: | |
92 | return False |
|
92 | return False | |
93 | return True |
|
93 | return True | |
94 |
|
94 | |||
95 | if ':' not in ip: # must be ipv4 pure ip |
|
95 | if ':' not in ip: # must be ipv4 pure ip | |
96 | return ip |
|
96 | return ip | |
97 |
|
97 | |||
98 | if '[' in ip and ']' in ip: # ipv6 with port |
|
98 | if '[' in ip and ']' in ip: # ipv6 with port | |
99 | return ip.split(']')[0][1:].lower() |
|
99 | return ip.split(']')[0][1:].lower() | |
100 |
|
100 | |||
101 | # must be ipv6 or ipv4 with port |
|
101 | # must be ipv6 or ipv4 with port | |
102 | if is_ipv6(ip): |
|
102 | if is_ipv6(ip): | |
103 | return ip |
|
103 | return ip | |
104 | else: |
|
104 | else: | |
105 | ip, _port = ip.split(':')[:2] # means ipv4+port |
|
105 | ip, _port = ip.split(':')[:2] # means ipv4+port | |
106 | return ip |
|
106 | return ip | |
107 |
|
107 | |||
108 |
|
108 | |||
109 | def get_ip_addr(environ): |
|
109 | def get_ip_addr(environ): | |
110 | proxy_key = 'HTTP_X_REAL_IP' |
|
110 | proxy_key = 'HTTP_X_REAL_IP' | |
111 | proxy_key2 = 'HTTP_X_FORWARDED_FOR' |
|
111 | proxy_key2 = 'HTTP_X_FORWARDED_FOR' | |
112 | def_key = 'REMOTE_ADDR' |
|
112 | def_key = 'REMOTE_ADDR' | |
113 | _filters = lambda x: _filter_port(_filter_proxy(x)) |
|
113 | _filters = lambda x: _filter_port(_filter_proxy(x)) | |
114 |
|
114 | |||
115 | ip = environ.get(proxy_key) |
|
115 | ip = environ.get(proxy_key) | |
116 | if ip: |
|
116 | if ip: | |
117 | return _filters(ip) |
|
117 | return _filters(ip) | |
118 |
|
118 | |||
119 | ip = environ.get(proxy_key2) |
|
119 | ip = environ.get(proxy_key2) | |
120 | if ip: |
|
120 | if ip: | |
121 | return _filters(ip) |
|
121 | return _filters(ip) | |
122 |
|
122 | |||
123 | ip = environ.get(def_key, '0.0.0.0') |
|
123 | ip = environ.get(def_key, '0.0.0.0') | |
124 | return _filters(ip) |
|
124 | return _filters(ip) | |
125 |
|
125 | |||
126 |
|
126 | |||
127 | def get_server_ip_addr(environ, log_errors=True): |
|
127 | def get_server_ip_addr(environ, log_errors=True): | |
128 | hostname = environ.get('SERVER_NAME') |
|
128 | hostname = environ.get('SERVER_NAME') | |
129 | try: |
|
129 | try: | |
130 | return socket.gethostbyname(hostname) |
|
130 | return socket.gethostbyname(hostname) | |
131 | except Exception as e: |
|
131 | except Exception as e: | |
132 | if log_errors: |
|
132 | if log_errors: | |
133 | # in some cases this lookup is not possible, and we don't want to |
|
133 | # in some cases this lookup is not possible, and we don't want to | |
134 | # make it an exception in logs |
|
134 | # make it an exception in logs | |
135 | log.exception('Could not retrieve server ip address: %s', e) |
|
135 | log.exception('Could not retrieve server ip address: %s', e) | |
136 | return hostname |
|
136 | return hostname | |
137 |
|
137 | |||
138 |
|
138 | |||
139 | def get_server_port(environ): |
|
139 | def get_server_port(environ): | |
140 | return environ.get('SERVER_PORT') |
|
140 | return environ.get('SERVER_PORT') | |
141 |
|
141 | |||
142 |
|
142 | |||
143 | def get_access_path(environ): |
|
143 | def get_access_path(environ): | |
144 | path = environ.get('PATH_INFO') |
|
144 | path = environ.get('PATH_INFO') | |
145 | org_req = environ.get('pylons.original_request') |
|
145 | org_req = environ.get('pylons.original_request') | |
146 | if org_req: |
|
146 | if org_req: | |
147 | path = org_req.environ.get('PATH_INFO') |
|
147 | path = org_req.environ.get('PATH_INFO') | |
148 | return path |
|
148 | return path | |
149 |
|
149 | |||
150 |
|
150 | |||
151 | def get_user_agent(environ): |
|
151 | def get_user_agent(environ): | |
152 | return environ.get('HTTP_USER_AGENT') |
|
152 | return environ.get('HTTP_USER_AGENT') | |
153 |
|
153 | |||
154 |
|
154 | |||
155 | def vcs_operation_context( |
|
155 | def vcs_operation_context( | |
156 | environ, repo_name, username, action, scm, check_locking=True, |
|
156 | environ, repo_name, username, action, scm, check_locking=True, | |
157 | is_shadow_repo=False, check_branch_perms=False, detect_force_push=False): |
|
157 | is_shadow_repo=False, check_branch_perms=False, detect_force_push=False): | |
158 | """ |
|
158 | """ | |
159 | Generate the context for a vcs operation, e.g. push or pull. |
|
159 | Generate the context for a vcs operation, e.g. push or pull. | |
160 |
|
160 | |||
161 | This context is passed over the layers so that hooks triggered by the |
|
161 | This context is passed over the layers so that hooks triggered by the | |
162 | vcs operation know details like the user, the user's IP address etc. |
|
162 | vcs operation know details like the user, the user's IP address etc. | |
163 |
|
163 | |||
164 | :param check_locking: Allows switching off the computation of the locking |
|
164 | :param check_locking: Allows switching off the computation of the locking | |
165 | data. This serves mainly the need of the simplevcs middleware to be |
|
165 | data. This serves mainly the need of the simplevcs middleware to be | |
166 | able to disable this for certain operations. |
|
166 | able to disable this for certain operations. | |
167 |
|
167 | |||
168 | """ |
|
168 | """ | |
169 | # Tri-state value: False: unlock, None: nothing, True: lock |
|
169 | # Tri-state value: False: unlock, None: nothing, True: lock | |
170 | make_lock = None |
|
170 | make_lock = None | |
171 | locked_by = [None, None, None] |
|
171 | locked_by = [None, None, None] | |
172 | is_anonymous = username == User.DEFAULT_USER |
|
172 | is_anonymous = username == User.DEFAULT_USER | |
173 | user = User.get_by_username(username) |
|
173 | user = User.get_by_username(username) | |
174 | if not is_anonymous and check_locking: |
|
174 | if not is_anonymous and check_locking: | |
175 | log.debug('Checking locking on repository "%s"', repo_name) |
|
175 | log.debug('Checking locking on repository "%s"', repo_name) | |
176 | repo = Repository.get_by_repo_name(repo_name) |
|
176 | repo = Repository.get_by_repo_name(repo_name) | |
177 | make_lock, __, locked_by = repo.get_locking_state( |
|
177 | make_lock, __, locked_by = repo.get_locking_state( | |
178 | action, user.user_id) |
|
178 | action, user.user_id) | |
179 | user_id = user.user_id |
|
179 | user_id = user.user_id | |
180 | settings_model = VcsSettingsModel(repo=repo_name) |
|
180 | settings_model = VcsSettingsModel(repo=repo_name) | |
181 | ui_settings = settings_model.get_ui_settings() |
|
181 | ui_settings = settings_model.get_ui_settings() | |
182 |
|
182 | |||
183 | # NOTE(marcink): This should be also in sync with |
|
183 | # NOTE(marcink): This should be also in sync with | |
184 | # rhodecode/apps/ssh_support/lib/backends/base.py:update_environment scm_data |
|
184 | # rhodecode/apps/ssh_support/lib/backends/base.py:update_environment scm_data | |
185 | store = [x for x in ui_settings if x.key == '/'] |
|
185 | store = [x for x in ui_settings if x.key == '/'] | |
186 | repo_store = '' |
|
186 | repo_store = '' | |
187 | if store: |
|
187 | if store: | |
188 | repo_store = store[0].value |
|
188 | repo_store = store[0].value | |
189 |
|
189 | |||
190 | scm_data = { |
|
190 | scm_data = { | |
191 | 'ip': get_ip_addr(environ), |
|
191 | 'ip': get_ip_addr(environ), | |
192 | 'username': username, |
|
192 | 'username': username, | |
193 | 'user_id': user_id, |
|
193 | 'user_id': user_id, | |
194 | 'action': action, |
|
194 | 'action': action, | |
195 | 'repository': repo_name, |
|
195 | 'repository': repo_name, | |
196 | 'scm': scm, |
|
196 | 'scm': scm, | |
197 | 'config': rhodecode.CONFIG['__file__'], |
|
197 | 'config': rhodecode.CONFIG['__file__'], | |
198 | 'repo_store': repo_store, |
|
198 | 'repo_store': repo_store, | |
199 | 'make_lock': make_lock, |
|
199 | 'make_lock': make_lock, | |
200 | 'locked_by': locked_by, |
|
200 | 'locked_by': locked_by, | |
201 | 'server_url': utils2.get_server_url(environ), |
|
201 | 'server_url': utils2.get_server_url(environ), | |
202 | 'user_agent': get_user_agent(environ), |
|
202 | 'user_agent': get_user_agent(environ), | |
203 | 'hooks': get_enabled_hook_classes(ui_settings), |
|
203 | 'hooks': get_enabled_hook_classes(ui_settings), | |
204 | 'is_shadow_repo': is_shadow_repo, |
|
204 | 'is_shadow_repo': is_shadow_repo, | |
205 | 'detect_force_push': detect_force_push, |
|
205 | 'detect_force_push': detect_force_push, | |
206 | 'check_branch_perms': check_branch_perms, |
|
206 | 'check_branch_perms': check_branch_perms, | |
207 | } |
|
207 | } | |
208 | return scm_data |
|
208 | return scm_data | |
209 |
|
209 | |||
210 |
|
210 | |||
211 | class BasicAuth(AuthBasicAuthenticator): |
|
211 | class BasicAuth(AuthBasicAuthenticator): | |
212 |
|
212 | |||
213 | def __init__(self, realm, authfunc, registry, auth_http_code=None, |
|
213 | def __init__(self, realm, authfunc, registry, auth_http_code=None, | |
214 | initial_call_detection=False, acl_repo_name=None): |
|
214 | initial_call_detection=False, acl_repo_name=None): | |
215 | self.realm = realm |
|
215 | self.realm = realm | |
216 | self.initial_call = initial_call_detection |
|
216 | self.initial_call = initial_call_detection | |
217 | self.authfunc = authfunc |
|
217 | self.authfunc = authfunc | |
218 | self.registry = registry |
|
218 | self.registry = registry | |
219 | self.acl_repo_name = acl_repo_name |
|
219 | self.acl_repo_name = acl_repo_name | |
220 | self._rc_auth_http_code = auth_http_code |
|
220 | self._rc_auth_http_code = auth_http_code | |
221 |
|
221 | |||
222 | def _get_response_from_code(self, http_code): |
|
222 | def _get_response_from_code(self, http_code): | |
223 | try: |
|
223 | try: | |
224 | return get_exception(safe_int(http_code)) |
|
224 | return get_exception(safe_int(http_code)) | |
225 | except Exception: |
|
225 | except Exception: | |
226 | log.exception('Failed to fetch response for code %s', http_code) |
|
226 | log.exception('Failed to fetch response for code %s', http_code) | |
227 | return HTTPForbidden |
|
227 | return HTTPForbidden | |
228 |
|
228 | |||
229 | def get_rc_realm(self): |
|
229 | def get_rc_realm(self): | |
230 | return safe_str(self.registry.rhodecode_settings.get('rhodecode_realm')) |
|
230 | return safe_str(self.registry.rhodecode_settings.get('rhodecode_realm')) | |
231 |
|
231 | |||
232 | def build_authentication(self): |
|
232 | def build_authentication(self): | |
233 | head = WWW_AUTHENTICATE.tuples('Basic realm="%s"' % self.realm) |
|
233 | head = WWW_AUTHENTICATE.tuples('Basic realm="%s"' % self.realm) | |
234 | if self._rc_auth_http_code and not self.initial_call: |
|
234 | if self._rc_auth_http_code and not self.initial_call: | |
235 | # return alternative HTTP code if alternative http return code |
|
235 | # return alternative HTTP code if alternative http return code | |
236 | # is specified in RhodeCode config, but ONLY if it's not the |
|
236 | # is specified in RhodeCode config, but ONLY if it's not the | |
237 | # FIRST call |
|
237 | # FIRST call | |
238 | custom_response_klass = self._get_response_from_code( |
|
238 | custom_response_klass = self._get_response_from_code( | |
239 | self._rc_auth_http_code) |
|
239 | self._rc_auth_http_code) | |
240 | return custom_response_klass(headers=head) |
|
240 | return custom_response_klass(headers=head) | |
241 | return HTTPUnauthorized(headers=head) |
|
241 | return HTTPUnauthorized(headers=head) | |
242 |
|
242 | |||
243 | def authenticate(self, environ): |
|
243 | def authenticate(self, environ): | |
244 | authorization = AUTHORIZATION(environ) |
|
244 | authorization = AUTHORIZATION(environ) | |
245 | if not authorization: |
|
245 | if not authorization: | |
246 | return self.build_authentication() |
|
246 | return self.build_authentication() | |
247 | (authmeth, auth) = authorization.split(' ', 1) |
|
247 | (authmeth, auth) = authorization.split(' ', 1) | |
248 | if 'basic' != authmeth.lower(): |
|
248 | if 'basic' != authmeth.lower(): | |
249 | return self.build_authentication() |
|
249 | return self.build_authentication() | |
250 | auth = auth.strip().decode('base64') |
|
250 | auth = auth.strip().decode('base64') | |
251 | _parts = auth.split(':', 1) |
|
251 | _parts = auth.split(':', 1) | |
252 | if len(_parts) == 2: |
|
252 | if len(_parts) == 2: | |
253 | username, password = _parts |
|
253 | username, password = _parts | |
254 | auth_data = self.authfunc( |
|
254 | auth_data = self.authfunc( | |
255 | username, password, environ, VCS_TYPE, |
|
255 | username, password, environ, VCS_TYPE, | |
256 | registry=self.registry, acl_repo_name=self.acl_repo_name) |
|
256 | registry=self.registry, acl_repo_name=self.acl_repo_name) | |
257 | if auth_data: |
|
257 | if auth_data: | |
258 | return {'username': username, 'auth_data': auth_data} |
|
258 | return {'username': username, 'auth_data': auth_data} | |
259 | if username and password: |
|
259 | if username and password: | |
260 | # we mark that we actually executed authentication once, at |
|
260 | # we mark that we actually executed authentication once, at | |
261 | # that point we can use the alternative auth code |
|
261 | # that point we can use the alternative auth code | |
262 | self.initial_call = False |
|
262 | self.initial_call = False | |
263 |
|
263 | |||
264 | return self.build_authentication() |
|
264 | return self.build_authentication() | |
265 |
|
265 | |||
266 | __call__ = authenticate |
|
266 | __call__ = authenticate | |
267 |
|
267 | |||
268 |
|
268 | |||
269 | def calculate_version_hash(config): |
|
269 | def calculate_version_hash(config): | |
270 | return sha1( |
|
270 | return sha1( | |
271 | config.get('beaker.session.secret', '') + |
|
271 | config.get('beaker.session.secret', '') + | |
272 | rhodecode.__version__)[:8] |
|
272 | rhodecode.__version__)[:8] | |
273 |
|
273 | |||
274 |
|
274 | |||
275 | def get_current_lang(request): |
|
275 | def get_current_lang(request): | |
276 | # NOTE(marcink): remove after pyramid move |
|
276 | # NOTE(marcink): remove after pyramid move | |
277 | try: |
|
277 | try: | |
278 | return translation.get_lang()[0] |
|
278 | return translation.get_lang()[0] | |
279 | except: |
|
279 | except: | |
280 | pass |
|
280 | pass | |
281 |
|
281 | |||
282 | return getattr(request, '_LOCALE_', request.locale_name) |
|
282 | return getattr(request, '_LOCALE_', request.locale_name) | |
283 |
|
283 | |||
284 |
|
284 | |||
285 | def attach_context_attributes(context, request, user_id=None): |
|
285 | def attach_context_attributes(context, request, user_id=None): | |
286 | """ |
|
286 | """ | |
287 | Attach variables into template context called `c`. |
|
287 | Attach variables into template context called `c`. | |
288 | """ |
|
288 | """ | |
289 | config = request.registry.settings |
|
289 | config = request.registry.settings | |
290 |
|
290 | |||
291 | rc_config = SettingsModel().get_all_settings(cache=True) |
|
291 | rc_config = SettingsModel().get_all_settings(cache=True) | |
292 | context.rc_config = rc_config |
|
292 | context.rc_config = rc_config | |
293 | context.rhodecode_version = rhodecode.__version__ |
|
293 | context.rhodecode_version = rhodecode.__version__ | |
294 | context.rhodecode_edition = config.get('rhodecode.edition') |
|
294 | context.rhodecode_edition = config.get('rhodecode.edition') | |
295 | # unique secret + version does not leak the version but keeps consistency |
|
295 | # unique secret + version does not leak the version but keeps consistency | |
296 | context.rhodecode_version_hash = calculate_version_hash(config) |
|
296 | context.rhodecode_version_hash = calculate_version_hash(config) | |
297 |
|
297 | |||
298 | # Default language set for the incoming request |
|
298 | # Default language set for the incoming request | |
299 | context.language = get_current_lang(request) |
|
299 | context.language = get_current_lang(request) | |
300 |
|
300 | |||
301 | # Visual options |
|
301 | # Visual options | |
302 | context.visual = AttributeDict({}) |
|
302 | context.visual = AttributeDict({}) | |
303 |
|
303 | |||
304 | # DB stored Visual Items |
|
304 | # DB stored Visual Items | |
305 | context.visual.show_public_icon = str2bool( |
|
305 | context.visual.show_public_icon = str2bool( | |
306 | rc_config.get('rhodecode_show_public_icon')) |
|
306 | rc_config.get('rhodecode_show_public_icon')) | |
307 | context.visual.show_private_icon = str2bool( |
|
307 | context.visual.show_private_icon = str2bool( | |
308 | rc_config.get('rhodecode_show_private_icon')) |
|
308 | rc_config.get('rhodecode_show_private_icon')) | |
309 | context.visual.stylify_metatags = str2bool( |
|
309 | context.visual.stylify_metatags = str2bool( | |
310 | rc_config.get('rhodecode_stylify_metatags')) |
|
310 | rc_config.get('rhodecode_stylify_metatags')) | |
311 | context.visual.dashboard_items = safe_int( |
|
311 | context.visual.dashboard_items = safe_int( | |
312 | rc_config.get('rhodecode_dashboard_items', 100)) |
|
312 | rc_config.get('rhodecode_dashboard_items', 100)) | |
313 | context.visual.admin_grid_items = safe_int( |
|
313 | context.visual.admin_grid_items = safe_int( | |
314 | rc_config.get('rhodecode_admin_grid_items', 100)) |
|
314 | rc_config.get('rhodecode_admin_grid_items', 100)) | |
315 | context.visual.show_revision_number = str2bool( |
|
315 | context.visual.show_revision_number = str2bool( | |
316 | rc_config.get('rhodecode_show_revision_number', True)) |
|
316 | rc_config.get('rhodecode_show_revision_number', True)) | |
317 | context.visual.show_sha_length = safe_int( |
|
317 | context.visual.show_sha_length = safe_int( | |
318 | rc_config.get('rhodecode_show_sha_length', 100)) |
|
318 | rc_config.get('rhodecode_show_sha_length', 100)) | |
319 | context.visual.repository_fields = str2bool( |
|
319 | context.visual.repository_fields = str2bool( | |
320 | rc_config.get('rhodecode_repository_fields')) |
|
320 | rc_config.get('rhodecode_repository_fields')) | |
321 | context.visual.show_version = str2bool( |
|
321 | context.visual.show_version = str2bool( | |
322 | rc_config.get('rhodecode_show_version')) |
|
322 | rc_config.get('rhodecode_show_version')) | |
323 | context.visual.use_gravatar = str2bool( |
|
323 | context.visual.use_gravatar = str2bool( | |
324 | rc_config.get('rhodecode_use_gravatar')) |
|
324 | rc_config.get('rhodecode_use_gravatar')) | |
325 | context.visual.gravatar_url = rc_config.get('rhodecode_gravatar_url') |
|
325 | context.visual.gravatar_url = rc_config.get('rhodecode_gravatar_url') | |
326 | context.visual.default_renderer = rc_config.get( |
|
326 | context.visual.default_renderer = rc_config.get( | |
327 | 'rhodecode_markup_renderer', 'rst') |
|
327 | 'rhodecode_markup_renderer', 'rst') | |
328 | context.visual.comment_types = ChangesetComment.COMMENT_TYPES |
|
328 | context.visual.comment_types = ChangesetComment.COMMENT_TYPES | |
329 | context.visual.rhodecode_support_url = \ |
|
329 | context.visual.rhodecode_support_url = \ | |
330 | rc_config.get('rhodecode_support_url') or h.route_url('rhodecode_support') |
|
330 | rc_config.get('rhodecode_support_url') or h.route_url('rhodecode_support') | |
331 |
|
331 | |||
332 | context.visual.affected_files_cut_off = 60 |
|
332 | context.visual.affected_files_cut_off = 60 | |
333 |
|
333 | |||
334 | context.pre_code = rc_config.get('rhodecode_pre_code') |
|
334 | context.pre_code = rc_config.get('rhodecode_pre_code') | |
335 | context.post_code = rc_config.get('rhodecode_post_code') |
|
335 | context.post_code = rc_config.get('rhodecode_post_code') | |
336 | context.rhodecode_name = rc_config.get('rhodecode_title') |
|
336 | context.rhodecode_name = rc_config.get('rhodecode_title') | |
337 | context.default_encodings = aslist(config.get('default_encoding'), sep=',') |
|
337 | context.default_encodings = aslist(config.get('default_encoding'), sep=',') | |
338 | # if we have specified default_encoding in the request, it has more |
|
338 | # if we have specified default_encoding in the request, it has more | |
339 | # priority |
|
339 | # priority | |
340 | if request.GET.get('default_encoding'): |
|
340 | if request.GET.get('default_encoding'): | |
341 | context.default_encodings.insert(0, request.GET.get('default_encoding')) |
|
341 | context.default_encodings.insert(0, request.GET.get('default_encoding')) | |
342 | context.clone_uri_tmpl = rc_config.get('rhodecode_clone_uri_tmpl') |
|
342 | context.clone_uri_tmpl = rc_config.get('rhodecode_clone_uri_tmpl') | |
343 | context.clone_uri_ssh_tmpl = rc_config.get('rhodecode_clone_uri_ssh_tmpl') |
|
343 | context.clone_uri_ssh_tmpl = rc_config.get('rhodecode_clone_uri_ssh_tmpl') | |
344 |
|
344 | |||
345 | # INI stored |
|
345 | # INI stored | |
346 | context.labs_active = str2bool( |
|
346 | context.labs_active = str2bool( | |
347 | config.get('labs_settings_active', 'false')) |
|
347 | config.get('labs_settings_active', 'false')) | |
348 | context.ssh_enabled = str2bool( |
|
348 | context.ssh_enabled = str2bool( | |
349 | config.get('ssh.generate_authorized_keyfile', 'false')) |
|
349 | config.get('ssh.generate_authorized_keyfile', 'false')) | |
350 | context.ssh_key_generator_enabled = str2bool( |
|
350 | context.ssh_key_generator_enabled = str2bool( | |
351 | config.get('ssh.enable_ui_key_generator', 'true')) |
|
351 | config.get('ssh.enable_ui_key_generator', 'true')) | |
352 |
|
352 | |||
353 | context.visual.allow_repo_location_change = str2bool( |
|
353 | context.visual.allow_repo_location_change = str2bool( | |
354 | config.get('allow_repo_location_change', True)) |
|
354 | config.get('allow_repo_location_change', True)) | |
355 | context.visual.allow_custom_hooks_settings = str2bool( |
|
355 | context.visual.allow_custom_hooks_settings = str2bool( | |
356 | config.get('allow_custom_hooks_settings', True)) |
|
356 | config.get('allow_custom_hooks_settings', True)) | |
357 | context.debug_style = str2bool(config.get('debug_style', False)) |
|
357 | context.debug_style = str2bool(config.get('debug_style', False)) | |
358 |
|
358 | |||
359 | context.rhodecode_instanceid = config.get('instance_id') |
|
359 | context.rhodecode_instanceid = config.get('instance_id') | |
360 |
|
360 | |||
361 | context.visual.cut_off_limit_diff = safe_int( |
|
361 | context.visual.cut_off_limit_diff = safe_int( | |
362 | config.get('cut_off_limit_diff')) |
|
362 | config.get('cut_off_limit_diff')) | |
363 | context.visual.cut_off_limit_file = safe_int( |
|
363 | context.visual.cut_off_limit_file = safe_int( | |
364 | config.get('cut_off_limit_file')) |
|
364 | config.get('cut_off_limit_file')) | |
365 |
|
365 | |||
366 | context.license = AttributeDict({}) |
|
366 | context.license = AttributeDict({}) | |
367 | context.license.hide_license_info = str2bool( |
|
367 | context.license.hide_license_info = str2bool( | |
368 | config.get('license.hide_license_info', False)) |
|
368 | config.get('license.hide_license_info', False)) | |
369 |
|
369 | |||
370 | # AppEnlight |
|
370 | # AppEnlight | |
371 | context.appenlight_enabled = str2bool(config.get('appenlight', 'false')) |
|
371 | context.appenlight_enabled = str2bool(config.get('appenlight', 'false')) | |
372 | context.appenlight_api_public_key = config.get( |
|
372 | context.appenlight_api_public_key = config.get( | |
373 | 'appenlight.api_public_key', '') |
|
373 | 'appenlight.api_public_key', '') | |
374 | context.appenlight_server_url = config.get('appenlight.server_url', '') |
|
374 | context.appenlight_server_url = config.get('appenlight.server_url', '') | |
375 |
|
375 | |||
376 | diffmode = { |
|
376 | diffmode = { | |
377 | "unified": "unified", |
|
377 | "unified": "unified", | |
378 | "sideside": "sideside" |
|
378 | "sideside": "sideside" | |
379 | }.get(request.GET.get('diffmode')) |
|
379 | }.get(request.GET.get('diffmode')) | |
380 |
|
380 | |||
381 | is_api = hasattr(request, 'rpc_user') |
|
381 | is_api = hasattr(request, 'rpc_user') | |
382 | session_attrs = { |
|
382 | session_attrs = { | |
383 | # defaults |
|
383 | # defaults | |
384 | "clone_url_format": "http", |
|
384 | "clone_url_format": "http", | |
385 | "diffmode": "sideside" |
|
385 | "diffmode": "sideside" | |
386 | } |
|
386 | } | |
387 |
|
387 | |||
388 | if not is_api: |
|
388 | if not is_api: | |
389 | # don't access pyramid session for API calls |
|
389 | # don't access pyramid session for API calls | |
390 | if diffmode and diffmode != request.session.get('rc_user_session_attr.diffmode'): |
|
390 | if diffmode and diffmode != request.session.get('rc_user_session_attr.diffmode'): | |
391 | request.session['rc_user_session_attr.diffmode'] = diffmode |
|
391 | request.session['rc_user_session_attr.diffmode'] = diffmode | |
392 |
|
392 | |||
393 | # session settings per user |
|
393 | # session settings per user | |
394 |
|
394 | |||
395 | for k, v in request.session.items(): |
|
395 | for k, v in request.session.items(): | |
396 | pref = 'rc_user_session_attr.' |
|
396 | pref = 'rc_user_session_attr.' | |
397 | if k and k.startswith(pref): |
|
397 | if k and k.startswith(pref): | |
398 | k = k[len(pref):] |
|
398 | k = k[len(pref):] | |
399 | session_attrs[k] = v |
|
399 | session_attrs[k] = v | |
400 |
|
400 | |||
401 | context.user_session_attrs = session_attrs |
|
401 | context.user_session_attrs = session_attrs | |
402 |
|
402 | |||
403 | # JS template context |
|
403 | # JS template context | |
404 | context.template_context = { |
|
404 | context.template_context = { | |
405 | 'repo_name': None, |
|
405 | 'repo_name': None, | |
406 | 'repo_type': None, |
|
406 | 'repo_type': None, | |
407 | 'repo_landing_commit': None, |
|
407 | 'repo_landing_commit': None, | |
408 | 'rhodecode_user': { |
|
408 | 'rhodecode_user': { | |
409 | 'username': None, |
|
409 | 'username': None, | |
410 | 'email': None, |
|
410 | 'email': None, | |
411 | 'notification_status': False |
|
411 | 'notification_status': False | |
412 | }, |
|
412 | }, | |
413 | 'session_attrs': session_attrs, |
|
413 | 'session_attrs': session_attrs, | |
414 | 'visual': { |
|
414 | 'visual': { | |
415 | 'default_renderer': None |
|
415 | 'default_renderer': None | |
416 | }, |
|
416 | }, | |
417 | 'commit_data': { |
|
417 | 'commit_data': { | |
418 | 'commit_id': None |
|
418 | 'commit_id': None | |
419 | }, |
|
419 | }, | |
420 | 'pull_request_data': {'pull_request_id': None}, |
|
420 | 'pull_request_data': {'pull_request_id': None}, | |
421 | 'timeago': { |
|
421 | 'timeago': { | |
422 | 'refresh_time': 120 * 1000, |
|
422 | 'refresh_time': 120 * 1000, | |
423 | 'cutoff_limit': 1000 * 60 * 60 * 24 * 7 |
|
423 | 'cutoff_limit': 1000 * 60 * 60 * 24 * 7 | |
424 | }, |
|
424 | }, | |
425 | 'pyramid_dispatch': { |
|
425 | 'pyramid_dispatch': { | |
426 |
|
426 | |||
427 | }, |
|
427 | }, | |
428 | 'extra': {'plugins': {}} |
|
428 | 'extra': {'plugins': {}} | |
429 | } |
|
429 | } | |
430 | # END CONFIG VARS |
|
430 | # END CONFIG VARS | |
431 | if is_api: |
|
431 | if is_api: | |
432 | csrf_token = None |
|
432 | csrf_token = None | |
433 | else: |
|
433 | else: | |
434 | csrf_token = auth.get_csrf_token(session=request.session) |
|
434 | csrf_token = auth.get_csrf_token(session=request.session) | |
435 |
|
435 | |||
436 | context.csrf_token = csrf_token |
|
436 | context.csrf_token = csrf_token | |
437 | context.backends = rhodecode.BACKENDS.keys() |
|
437 | context.backends = rhodecode.BACKENDS.keys() | |
438 | context.backends.sort() |
|
438 | context.backends.sort() | |
439 | unread_count = 0 |
|
439 | unread_count = 0 | |
440 | user_bookmark_list = [] |
|
440 | user_bookmark_list = [] | |
441 | if user_id: |
|
441 | if user_id: | |
442 | unread_count = NotificationModel().get_unread_cnt_for_user(user_id) |
|
442 | unread_count = NotificationModel().get_unread_cnt_for_user(user_id) | |
443 | user_bookmark_list = UserBookmark.get_bookmarks_for_user(user_id) |
|
443 | user_bookmark_list = UserBookmark.get_bookmarks_for_user(user_id) | |
444 | context.unread_notifications = unread_count |
|
444 | context.unread_notifications = unread_count | |
445 | context.bookmark_items = user_bookmark_list |
|
445 | context.bookmark_items = user_bookmark_list | |
446 |
|
446 | |||
447 | # web case |
|
447 | # web case | |
448 | if hasattr(request, 'user'): |
|
448 | if hasattr(request, 'user'): | |
449 | context.auth_user = request.user |
|
449 | context.auth_user = request.user | |
450 | context.rhodecode_user = request.user |
|
450 | context.rhodecode_user = request.user | |
451 |
|
451 | |||
452 | # api case |
|
452 | # api case | |
453 | if hasattr(request, 'rpc_user'): |
|
453 | if hasattr(request, 'rpc_user'): | |
454 | context.auth_user = request.rpc_user |
|
454 | context.auth_user = request.rpc_user | |
455 | context.rhodecode_user = request.rpc_user |
|
455 | context.rhodecode_user = request.rpc_user | |
456 |
|
456 | |||
457 | # attach the whole call context to the request |
|
457 | # attach the whole call context to the request | |
458 | request.call_context = context |
|
458 | request.call_context = context | |
459 |
|
459 | |||
460 |
|
460 | |||
461 | def get_auth_user(request): |
|
461 | def get_auth_user(request): | |
462 | environ = request.environ |
|
462 | environ = request.environ | |
463 | session = request.session |
|
463 | session = request.session | |
464 |
|
464 | |||
465 | ip_addr = get_ip_addr(environ) |
|
465 | ip_addr = get_ip_addr(environ) | |
466 |
|
466 | |||
467 | # make sure that we update permissions each time we call controller |
|
467 | # make sure that we update permissions each time we call controller | |
468 | _auth_token = (request.GET.get('auth_token', '') or request.GET.get('api_key', '')) |
|
468 | _auth_token = (request.GET.get('auth_token', '') or request.GET.get('api_key', '')) | |
469 | if not _auth_token and request.matchdict: |
|
469 | if not _auth_token and request.matchdict: | |
470 | url_auth_token = request.matchdict.get('_auth_token') |
|
470 | url_auth_token = request.matchdict.get('_auth_token') | |
471 | _auth_token = url_auth_token |
|
471 | _auth_token = url_auth_token | |
472 | if _auth_token: |
|
472 | if _auth_token: | |
473 | log.debug('Using URL extracted auth token `...%s`', _auth_token[-4:]) |
|
473 | log.debug('Using URL extracted auth token `...%s`', _auth_token[-4:]) | |
474 |
|
474 | |||
475 | if _auth_token: |
|
475 | if _auth_token: | |
476 | # when using API_KEY we assume user exists, and |
|
476 | # when using API_KEY we assume user exists, and | |
477 | # doesn't need auth based on cookies. |
|
477 | # doesn't need auth based on cookies. | |
478 | auth_user = AuthUser(api_key=_auth_token, ip_addr=ip_addr) |
|
478 | auth_user = AuthUser(api_key=_auth_token, ip_addr=ip_addr) | |
479 | authenticated = False |
|
479 | authenticated = False | |
480 | else: |
|
480 | else: | |
481 | cookie_store = CookieStoreWrapper(session.get('rhodecode_user')) |
|
481 | cookie_store = CookieStoreWrapper(session.get('rhodecode_user')) | |
482 | try: |
|
482 | try: | |
483 | auth_user = AuthUser(user_id=cookie_store.get('user_id', None), |
|
483 | auth_user = AuthUser(user_id=cookie_store.get('user_id', None), | |
484 | ip_addr=ip_addr) |
|
484 | ip_addr=ip_addr) | |
485 | except UserCreationError as e: |
|
485 | except UserCreationError as e: | |
486 | h.flash(e, 'error') |
|
486 | h.flash(e, 'error') | |
487 | # container auth or other auth functions that create users |
|
487 | # container auth or other auth functions that create users | |
488 | # on the fly can throw this exception signaling that there's |
|
488 | # on the fly can throw this exception signaling that there's | |
489 | # issue with user creation, explanation should be provided |
|
489 | # issue with user creation, explanation should be provided | |
490 | # in Exception itself. We then create a simple blank |
|
490 | # in Exception itself. We then create a simple blank | |
491 | # AuthUser |
|
491 | # AuthUser | |
492 | auth_user = AuthUser(ip_addr=ip_addr) |
|
492 | auth_user = AuthUser(ip_addr=ip_addr) | |
493 |
|
493 | |||
494 | # in case someone changes a password for user it triggers session |
|
494 | # in case someone changes a password for user it triggers session | |
495 | # flush and forces a re-login |
|
495 | # flush and forces a re-login | |
496 | if password_changed(auth_user, session): |
|
496 | if password_changed(auth_user, session): | |
497 | session.invalidate() |
|
497 | session.invalidate() | |
498 | cookie_store = CookieStoreWrapper(session.get('rhodecode_user')) |
|
498 | cookie_store = CookieStoreWrapper(session.get('rhodecode_user')) | |
499 | auth_user = AuthUser(ip_addr=ip_addr) |
|
499 | auth_user = AuthUser(ip_addr=ip_addr) | |
500 |
|
500 | |||
501 | authenticated = cookie_store.get('is_authenticated') |
|
501 | authenticated = cookie_store.get('is_authenticated') | |
502 |
|
502 | |||
503 | if not auth_user.is_authenticated and auth_user.is_user_object: |
|
503 | if not auth_user.is_authenticated and auth_user.is_user_object: | |
504 | # user is not authenticated and not empty |
|
504 | # user is not authenticated and not empty | |
505 | auth_user.set_authenticated(authenticated) |
|
505 | auth_user.set_authenticated(authenticated) | |
506 |
|
506 | |||
507 | return auth_user, _auth_token |
|
507 | return auth_user, _auth_token | |
508 |
|
508 | |||
509 |
|
509 | |||
510 | def h_filter(s): |
|
510 | def h_filter(s): | |
511 | """ |
|
511 | """ | |
512 | Custom filter for Mako templates. Mako by standard uses `markupsafe.escape` |
|
512 | Custom filter for Mako templates. Mako by standard uses `markupsafe.escape` | |
513 | we wrap this with additional functionality that converts None to empty |
|
513 | we wrap this with additional functionality that converts None to empty | |
514 | strings |
|
514 | strings | |
515 | """ |
|
515 | """ | |
516 | if s is None: |
|
516 | if s is None: | |
517 | return markupsafe.Markup() |
|
517 | return markupsafe.Markup() | |
518 | return markupsafe.escape(s) |
|
518 | return markupsafe.escape(s) | |
519 |
|
519 | |||
520 |
|
520 | |||
521 | def add_events_routes(config): |
|
521 | def add_events_routes(config): | |
522 | """ |
|
522 | """ | |
523 | Adds routing that can be used in events. Because some events are triggered |
|
523 | Adds routing that can be used in events. Because some events are triggered | |
524 | outside of pyramid context, we need to bootstrap request with some |
|
524 | outside of pyramid context, we need to bootstrap request with some | |
525 | routing registered |
|
525 | routing registered | |
526 | """ |
|
526 | """ | |
527 |
|
527 | |||
528 | from rhodecode.apps._base import ADMIN_PREFIX |
|
528 | from rhodecode.apps._base import ADMIN_PREFIX | |
529 |
|
529 | |||
530 | config.add_route(name='home', pattern='/') |
|
530 | config.add_route(name='home', pattern='/') | |
|
531 | config.add_route(name='main_page_repos_data', pattern='/_home_repos') | |||
|
532 | config.add_route(name='main_page_repo_groups_data', pattern='/_home_repo_groups') | |||
531 |
|
533 | |||
532 | config.add_route(name='login', pattern=ADMIN_PREFIX + '/login') |
|
534 | config.add_route(name='login', pattern=ADMIN_PREFIX + '/login') | |
533 | config.add_route(name='logout', pattern=ADMIN_PREFIX + '/logout') |
|
535 | config.add_route(name='logout', pattern=ADMIN_PREFIX + '/logout') | |
534 | config.add_route(name='repo_summary', pattern='/{repo_name}') |
|
536 | config.add_route(name='repo_summary', pattern='/{repo_name}') | |
535 | config.add_route(name='repo_summary_explicit', pattern='/{repo_name}/summary') |
|
537 | config.add_route(name='repo_summary_explicit', pattern='/{repo_name}/summary') | |
536 | config.add_route(name='repo_group_home', pattern='/{repo_group_name}') |
|
538 | config.add_route(name='repo_group_home', pattern='/{repo_group_name}') | |
537 |
|
539 | |||
538 | config.add_route(name='pullrequest_show', |
|
540 | config.add_route(name='pullrequest_show', | |
539 | pattern='/{repo_name}/pull-request/{pull_request_id}') |
|
541 | pattern='/{repo_name}/pull-request/{pull_request_id}') | |
540 | config.add_route(name='pull_requests_global', |
|
542 | config.add_route(name='pull_requests_global', | |
541 | pattern='/pull-request/{pull_request_id}') |
|
543 | pattern='/pull-request/{pull_request_id}') | |
542 |
|
544 | |||
543 | config.add_route(name='repo_commit', |
|
545 | config.add_route(name='repo_commit', | |
544 | pattern='/{repo_name}/changeset/{commit_id}') |
|
546 | pattern='/{repo_name}/changeset/{commit_id}') | |
545 | config.add_route(name='repo_files', |
|
547 | config.add_route(name='repo_files', | |
546 | pattern='/{repo_name}/files/{commit_id}/{f_path}') |
|
548 | pattern='/{repo_name}/files/{commit_id}/{f_path}') | |
547 |
|
549 | |||
548 | config.add_route(name='hovercard_user', |
|
550 | config.add_route(name='hovercard_user', | |
549 | pattern='/_hovercard/user/{user_id}') |
|
551 | pattern='/_hovercard/user/{user_id}') | |
550 |
|
552 | |||
551 | config.add_route(name='hovercard_user_group', |
|
553 | config.add_route(name='hovercard_user_group', | |
552 | pattern='/_hovercard/user_group/{user_group_id}') |
|
554 | pattern='/_hovercard/user_group/{user_group_id}') | |
553 |
|
555 | |||
554 | config.add_route(name='hovercard_pull_request', |
|
556 | config.add_route(name='hovercard_pull_request', | |
555 | pattern='/_hovercard/pull_request/{pull_request_id}') |
|
557 | pattern='/_hovercard/pull_request/{pull_request_id}') | |
556 |
|
558 | |||
557 | config.add_route(name='hovercard_repo_commit', |
|
559 | config.add_route(name='hovercard_repo_commit', | |
558 | pattern='/_hovercard/commit/{repo_name}/{commit_id}') |
|
560 | pattern='/_hovercard/commit/{repo_name}/{commit_id}') | |
559 |
|
561 | |||
560 |
|
562 | |||
561 | def bootstrap_config(request): |
|
563 | def bootstrap_config(request): | |
562 | import pyramid.testing |
|
564 | import pyramid.testing | |
563 | registry = pyramid.testing.Registry('RcTestRegistry') |
|
565 | registry = pyramid.testing.Registry('RcTestRegistry') | |
564 |
|
566 | |||
565 | config = pyramid.testing.setUp(registry=registry, request=request) |
|
567 | config = pyramid.testing.setUp(registry=registry, request=request) | |
566 |
|
568 | |||
567 | # allow pyramid lookup in testing |
|
569 | # allow pyramid lookup in testing | |
568 | config.include('pyramid_mako') |
|
570 | config.include('pyramid_mako') | |
569 | config.include('rhodecode.lib.rc_beaker') |
|
571 | config.include('rhodecode.lib.rc_beaker') | |
570 | config.include('rhodecode.lib.rc_cache') |
|
572 | config.include('rhodecode.lib.rc_cache') | |
571 |
|
573 | |||
572 | add_events_routes(config) |
|
574 | add_events_routes(config) | |
573 |
|
575 | |||
574 | return config |
|
576 | return config | |
575 |
|
577 | |||
576 |
|
578 | |||
577 | def bootstrap_request(**kwargs): |
|
579 | def bootstrap_request(**kwargs): | |
578 | import pyramid.testing |
|
580 | import pyramid.testing | |
579 |
|
581 | |||
580 | class TestRequest(pyramid.testing.DummyRequest): |
|
582 | class TestRequest(pyramid.testing.DummyRequest): | |
581 | application_url = kwargs.pop('application_url', 'http://example.com') |
|
583 | application_url = kwargs.pop('application_url', 'http://example.com') | |
582 | host = kwargs.pop('host', 'example.com:80') |
|
584 | host = kwargs.pop('host', 'example.com:80') | |
583 | domain = kwargs.pop('domain', 'example.com') |
|
585 | domain = kwargs.pop('domain', 'example.com') | |
584 |
|
586 | |||
585 | def translate(self, msg): |
|
587 | def translate(self, msg): | |
586 | return msg |
|
588 | return msg | |
587 |
|
589 | |||
588 | def plularize(self, singular, plural, n): |
|
590 | def plularize(self, singular, plural, n): | |
589 | return singular |
|
591 | return singular | |
590 |
|
592 | |||
591 | def get_partial_renderer(self, tmpl_name): |
|
593 | def get_partial_renderer(self, tmpl_name): | |
592 |
|
594 | |||
593 | from rhodecode.lib.partial_renderer import get_partial_renderer |
|
595 | from rhodecode.lib.partial_renderer import get_partial_renderer | |
594 | return get_partial_renderer(request=self, tmpl_name=tmpl_name) |
|
596 | return get_partial_renderer(request=self, tmpl_name=tmpl_name) | |
595 |
|
597 | |||
596 | _call_context = TemplateArgs() |
|
598 | _call_context = TemplateArgs() | |
597 | _call_context.visual = TemplateArgs() |
|
599 | _call_context.visual = TemplateArgs() | |
598 | _call_context.visual.show_sha_length = 12 |
|
600 | _call_context.visual.show_sha_length = 12 | |
599 | _call_context.visual.show_revision_number = True |
|
601 | _call_context.visual.show_revision_number = True | |
600 |
|
602 | |||
601 | @property |
|
603 | @property | |
602 | def call_context(self): |
|
604 | def call_context(self): | |
603 | return self._call_context |
|
605 | return self._call_context | |
604 |
|
606 | |||
605 | class TestDummySession(pyramid.testing.DummySession): |
|
607 | class TestDummySession(pyramid.testing.DummySession): | |
606 | def save(*arg, **kw): |
|
608 | def save(*arg, **kw): | |
607 | pass |
|
609 | pass | |
608 |
|
610 | |||
609 | request = TestRequest(**kwargs) |
|
611 | request = TestRequest(**kwargs) | |
610 | request.session = TestDummySession() |
|
612 | request.session = TestDummySession() | |
611 |
|
613 | |||
612 | return request |
|
614 | return request | |
613 |
|
615 |
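And a sketch of what bootstrap_request() hands back; the attribute values follow the defaults defined above, and the assertions are illustrative, not part of the diff:

    request = bootstrap_request(host='rc.example.com:80', domain='rc.example.com')
    assert request.translate('hello') == 'hello'          # no-op translation
    assert request.call_context.visual.show_sha_length == 12
    request.session.save()                                 # TestDummySession.save() does nothing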
@@ -1,1079 +1,1160 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2019 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2019 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import os |
|
21 | import os | |
22 | import re |
|
22 | import re | |
23 | import shutil |
|
23 | import shutil | |
24 | import time |
|
24 | import time | |
25 | import logging |
|
25 | import logging | |
26 | import traceback |
|
26 | import traceback | |
27 | import datetime |
|
27 | import datetime | |
28 |
|
28 | |||
29 | from pyramid.threadlocal import get_current_request |
|
29 | from pyramid.threadlocal import get_current_request | |
30 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
30 | from zope.cachedescriptors.property import Lazy as LazyProperty | |
31 |
|
31 | |||
32 | from rhodecode import events |
|
32 | from rhodecode import events | |
33 | from rhodecode.lib.auth import HasUserGroupPermissionAny |
|
33 | from rhodecode.lib.auth import HasUserGroupPermissionAny | |
34 | from rhodecode.lib.caching_query import FromCache |
|
34 | from rhodecode.lib.caching_query import FromCache | |
35 | from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError |
|
35 | from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError | |
36 | from rhodecode.lib.hooks_base import log_delete_repository |
|
36 | from rhodecode.lib.hooks_base import log_delete_repository | |
37 | from rhodecode.lib.user_log_filter import user_log_filter |
|
37 | from rhodecode.lib.user_log_filter import user_log_filter | |
38 | from rhodecode.lib.utils import make_db_config |
|
38 | from rhodecode.lib.utils import make_db_config | |
39 | from rhodecode.lib.utils2 import ( |
|
39 | from rhodecode.lib.utils2 import ( | |
40 | safe_str, safe_unicode, remove_prefix, obfuscate_url_pw, |
|
40 | safe_str, safe_unicode, remove_prefix, obfuscate_url_pw, | |
41 | get_current_rhodecode_user, safe_int, datetime_to_time, |
|
41 | get_current_rhodecode_user, safe_int, datetime_to_time, | |
42 | action_logger_generic) |
|
42 | action_logger_generic) | |
43 | from rhodecode.lib.vcs.backends import get_backend |
|
43 | from rhodecode.lib.vcs.backends import get_backend | |
44 | from rhodecode.model import BaseModel |
|
44 | from rhodecode.model import BaseModel | |
45 | from rhodecode.model.db import ( |
|
45 | from rhodecode.model.db import ( | |
46 | _hash_key, func, case, joinedload, or_, in_filter_generator, |
|
46 | _hash_key, func, case, joinedload, or_, in_filter_generator, | |
47 | Session, Repository, UserRepoToPerm, UserGroupRepoToPerm, |
|
47 | Session, Repository, UserRepoToPerm, UserGroupRepoToPerm, | |
48 | UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission, |
|
48 | UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission, | |
49 | Statistics, UserGroup, RepoGroup, RepositoryField, UserLog) |
|
49 | Statistics, UserGroup, RepoGroup, RepositoryField, UserLog) | |
50 | from rhodecode.model.settings import VcsSettingsModel |
|
50 | from rhodecode.model.settings import VcsSettingsModel | |
51 |
|
51 | |||
52 | log = logging.getLogger(__name__) |
|
52 | log = logging.getLogger(__name__) | |
53 |
|
53 | |||
54 |
|
54 | |||
55 | class RepoModel(BaseModel): |
|
55 | class RepoModel(BaseModel): | |
56 |
|
56 | |||
57 | cls = Repository |
|
57 | cls = Repository | |
58 |
|
58 | |||
59 | def _get_user_group(self, users_group): |
|
59 | def _get_user_group(self, users_group): | |
60 | return self._get_instance(UserGroup, users_group, |
|
60 | return self._get_instance(UserGroup, users_group, | |
61 | callback=UserGroup.get_by_group_name) |
|
61 | callback=UserGroup.get_by_group_name) | |
62 |
|
62 | |||
63 | def _get_repo_group(self, repo_group): |
|
63 | def _get_repo_group(self, repo_group): | |
64 | return self._get_instance(RepoGroup, repo_group, |
|
64 | return self._get_instance(RepoGroup, repo_group, | |
65 | callback=RepoGroup.get_by_group_name) |
|
65 | callback=RepoGroup.get_by_group_name) | |
66 |
|
66 | |||
67 | def _create_default_perms(self, repository, private): |
|
67 | def _create_default_perms(self, repository, private): | |
68 | # create default permission |
|
68 | # create default permission | |
69 | default = 'repository.read' |
|
69 | default = 'repository.read' | |
70 | def_user = User.get_default_user() |
|
70 | def_user = User.get_default_user() | |
71 | for p in def_user.user_perms: |
|
71 | for p in def_user.user_perms: | |
72 | if p.permission.permission_name.startswith('repository.'): |
|
72 | if p.permission.permission_name.startswith('repository.'): | |
73 | default = p.permission.permission_name |
|
73 | default = p.permission.permission_name | |
74 | break |
|
74 | break | |
75 |
|
75 | |||
76 | default_perm = 'repository.none' if private else default |
|
76 | default_perm = 'repository.none' if private else default | |
77 |
|
77 | |||
78 | repo_to_perm = UserRepoToPerm() |
|
78 | repo_to_perm = UserRepoToPerm() | |
79 | repo_to_perm.permission = Permission.get_by_key(default_perm) |
|
79 | repo_to_perm.permission = Permission.get_by_key(default_perm) | |
80 |
|
80 | |||
81 | repo_to_perm.repository = repository |
|
81 | repo_to_perm.repository = repository | |
82 | repo_to_perm.user_id = def_user.user_id |
|
82 | repo_to_perm.user_id = def_user.user_id | |
83 |
|
83 | |||
84 | return repo_to_perm |
|
84 | return repo_to_perm | |
85 |
|
85 | |||
86 | @LazyProperty |
|
86 | @LazyProperty | |
87 | def repos_path(self): |
|
87 | def repos_path(self): | |
88 | """ |
|
88 | """ | |
89 | Gets the repositories root path from database |
|
89 | Gets the repositories root path from database | |
90 | """ |
|
90 | """ | |
91 | settings_model = VcsSettingsModel(sa=self.sa) |
|
91 | settings_model = VcsSettingsModel(sa=self.sa) | |
92 | return settings_model.get_repos_location() |
|
92 | return settings_model.get_repos_location() | |
93 |
|
93 | |||
94 | def get(self, repo_id): |
|
94 | def get(self, repo_id): | |
95 | repo = self.sa.query(Repository) \ |
|
95 | repo = self.sa.query(Repository) \ | |
96 | .filter(Repository.repo_id == repo_id) |
|
96 | .filter(Repository.repo_id == repo_id) | |
97 |
|
97 | |||
98 | return repo.scalar() |
|
98 | return repo.scalar() | |
99 |
|
99 | |||
100 | def get_repo(self, repository): |
|
100 | def get_repo(self, repository): | |
101 | return self._get_repo(repository) |
|
101 | return self._get_repo(repository) | |
102 |
|
102 | |||
103 | def get_by_repo_name(self, repo_name, cache=False): |
|
103 | def get_by_repo_name(self, repo_name, cache=False): | |
104 | repo = self.sa.query(Repository) \ |
|
104 | repo = self.sa.query(Repository) \ | |
105 | .filter(Repository.repo_name == repo_name) |
|
105 | .filter(Repository.repo_name == repo_name) | |
106 |
|
106 | |||
107 | if cache: |
|
107 | if cache: | |
108 | name_key = _hash_key(repo_name) |
|
108 | name_key = _hash_key(repo_name) | |
109 | repo = repo.options( |
|
109 | repo = repo.options( | |
110 | FromCache("sql_cache_short", "get_repo_%s" % name_key)) |
|
110 | FromCache("sql_cache_short", "get_repo_%s" % name_key)) | |
111 | return repo.scalar() |
|
111 | return repo.scalar() | |
112 |
|
112 | |||
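A hedged usage sketch for get_by_repo_name(); the repository name is hypothetical, and cache=True only adds the short-lived "sql_cache_short" region shown above:

    model = RepoModel()
    repo = model.get_by_repo_name('some-group/some-repo', cache=True)  # example name
    if repo is None:
        pass  # nothing in the database under that name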
113 | def _extract_id_from_repo_name(self, repo_name): |
|
113 | def _extract_id_from_repo_name(self, repo_name): | |
114 | if repo_name.startswith('/'): |
|
114 | if repo_name.startswith('/'): | |
115 | repo_name = repo_name.lstrip('/') |
|
115 | repo_name = repo_name.lstrip('/') | |
116 | by_id_match = re.match(r'^_(\d{1,})', repo_name) |
|
116 | by_id_match = re.match(r'^_(\d{1,})', repo_name) | |
117 | if by_id_match: |
|
117 | if by_id_match: | |
118 | return by_id_match.groups()[0] |
|
118 | return by_id_match.groups()[0] | |
119 |
|
119 | |||
120 | def get_repo_by_id(self, repo_name): |
|
120 | def get_repo_by_id(self, repo_name): | |
121 | """ |
|
121 | """ | |
122 | Extracts a repository by its numeric id from special URLs. |
|
122 | Extracts a repository by its numeric id from special URLs. | |
123 | An example URL is _11/repo_name |
|
123 | An example URL is _11/repo_name | |
124 |
|
124 | |||
125 | :param repo_name: |
|
125 | :param repo_name: | |
126 | :return: repo object if matched else None |
|
126 | :return: repo object if matched else None | |
127 | """ |
|
127 | """ | |
128 |
|
128 | |||
129 | try: |
|
129 | try: | |
130 | _repo_id = self._extract_id_from_repo_name(repo_name) |
|
130 | _repo_id = self._extract_id_from_repo_name(repo_name) | |
131 | if _repo_id: |
|
131 | if _repo_id: | |
132 | return self.get(_repo_id) |
|
132 | return self.get(_repo_id) | |
133 | except Exception: |
|
133 | except Exception: | |
134 | log.exception('Failed to extract repo_name from URL') |
|
134 | log.exception('Failed to extract repo_name from URL') | |
135 |
|
135 | |||
136 | return None |
|
136 | return None | |
137 |
|
137 | |||
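For the `_<id>` URL form handled above, a small sketch (the id 11 is made up):

    # '_11/some-repo' resolves through _extract_id_from_repo_name() to repo_id == 11
    repo = RepoModel().get_repo_by_id('_11/some-repo')
    # returns the matching Repository, or None when the prefix does not match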
138 | def get_repos_for_root(self, root, traverse=False): |
|
138 | def get_repos_for_root(self, root, traverse=False): | |
139 | if traverse: |
|
139 | if traverse: | |
140 | like_expression = u'{}%'.format(safe_unicode(root)) |
|
140 | like_expression = u'{}%'.format(safe_unicode(root)) | |
141 | repos = Repository.query().filter( |
|
141 | repos = Repository.query().filter( | |
142 | Repository.repo_name.like(like_expression)).all() |
|
142 | Repository.repo_name.like(like_expression)).all() | |
143 | else: |
|
143 | else: | |
144 | if root and not isinstance(root, RepoGroup): |
|
144 | if root and not isinstance(root, RepoGroup): | |
145 | raise ValueError( |
|
145 | raise ValueError( | |
146 | 'Root must be an instance ' |
|
146 | 'Root must be an instance ' | |
147 | 'of RepoGroup, got:{} instead'.format(type(root))) |
|
147 | 'of RepoGroup, got:{} instead'.format(type(root))) | |
148 | repos = Repository.query().filter(Repository.group == root).all() |
|
148 | repos = Repository.query().filter(Repository.group == root).all() | |
149 | return repos |
|
149 | return repos | |
150 |
|
150 | |||
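A sketch of the two modes of get_repos_for_root(); the group name is hypothetical:

    group = RepoGroup.get_by_group_name('some-group')
    direct = RepoModel().get_repos_for_root(group)            # direct children only
    # traverse=True treats root as a name prefix and matches nested repos via LIKE
    nested = RepoModel().get_repos_for_root(u'some-group/', traverse=True)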
151 | def get_url(self, repo, request=None, permalink=False): |
|
151 | def get_url(self, repo, request=None, permalink=False): | |
152 | if not request: |
|
152 | if not request: | |
153 | request = get_current_request() |
|
153 | request = get_current_request() | |
154 |
|
154 | |||
155 | if not request: |
|
155 | if not request: | |
156 | return |
|
156 | return | |
157 |
|
157 | |||
158 | if permalink: |
|
158 | if permalink: | |
159 | return request.route_url( |
|
159 | return request.route_url( | |
160 | 'repo_summary', repo_name='_{}'.format(safe_str(repo.repo_id))) |
|
160 | 'repo_summary', repo_name='_{}'.format(safe_str(repo.repo_id))) | |
161 | else: |
|
161 | else: | |
162 | return request.route_url( |
|
162 | return request.route_url( | |
163 | 'repo_summary', repo_name=safe_str(repo.repo_name)) |
|
163 | 'repo_summary', repo_name=safe_str(repo.repo_name)) | |
164 |
|
164 | |||
165 | def get_commit_url(self, repo, commit_id, request=None, permalink=False): |
|
165 | def get_commit_url(self, repo, commit_id, request=None, permalink=False): | |
166 | if not request: |
|
166 | if not request: | |
167 | request = get_current_request() |
|
167 | request = get_current_request() | |
168 |
|
168 | |||
169 | if not request: |
|
169 | if not request: | |
170 | return |
|
170 | return | |
171 |
|
171 | |||
172 | if permalink: |
|
172 | if permalink: | |
173 | return request.route_url( |
|
173 | return request.route_url( | |
174 | 'repo_commit', repo_name=safe_str(repo.repo_id), |
|
174 | 'repo_commit', repo_name=safe_str(repo.repo_id), | |
175 | commit_id=commit_id) |
|
175 | commit_id=commit_id) | |
176 |
|
176 | |||
177 | else: |
|
177 | else: | |
178 | return request.route_url( |
|
178 | return request.route_url( | |
179 | 'repo_commit', repo_name=safe_str(repo.repo_name), |
|
179 | 'repo_commit', repo_name=safe_str(repo.repo_name), | |
180 | commit_id=commit_id) |
|
180 | commit_id=commit_id) | |
181 |
|
181 | |||
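Both URL helpers above need an active Pyramid request and return None otherwise. A sketch, with a hypothetical Repository instance `repo` and a placeholder commit id:

    model = RepoModel()
    summary_url = model.get_url(repo)                        # /<repo_name> summary page
    permalink = model.get_url(repo, permalink=True)          # /_<repo_id> permanent form
    commit_url = model.get_commit_url(repo, 'f' * 40)        # placeholder 40-char sha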
182 | def get_repo_log(self, repo, filter_term): |
|
182 | def get_repo_log(self, repo, filter_term): | |
183 | repo_log = UserLog.query()\ |
|
183 | repo_log = UserLog.query()\ | |
184 | .filter(or_(UserLog.repository_id == repo.repo_id, |
|
184 | .filter(or_(UserLog.repository_id == repo.repo_id, | |
185 | UserLog.repository_name == repo.repo_name))\ |
|
185 | UserLog.repository_name == repo.repo_name))\ | |
186 | .options(joinedload(UserLog.user))\ |
|
186 | .options(joinedload(UserLog.user))\ | |
187 | .options(joinedload(UserLog.repository))\ |
|
187 | .options(joinedload(UserLog.repository))\ | |
188 | .order_by(UserLog.action_date.desc()) |
|
188 | .order_by(UserLog.action_date.desc()) | |
189 |
|
189 | |||
190 | repo_log = user_log_filter(repo_log, filter_term) |
|
190 | repo_log = user_log_filter(repo_log, filter_term) | |
191 | return repo_log |
|
191 | return repo_log | |
192 |
|
192 | |||
193 | @classmethod |
|
193 | @classmethod | |
194 | def update_commit_cache(cls, repositories=None): |
|
194 | def update_commit_cache(cls, repositories=None): | |
195 | if not repositories: |
|
195 | if not repositories: | |
196 | repositories = Repository.getAll() |
|
196 | repositories = Repository.getAll() | |
197 | for repo in repositories: |
|
197 | for repo in repositories: | |
198 | repo.update_commit_cache() |
|
198 | repo.update_commit_cache() | |
199 |
|
199 | |||
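update_commit_cache() is a classmethod, so it can be driven from a maintenance script; a sketch (the repository selection is illustrative):

    # refresh cached commit metadata for every repository
    RepoModel.update_commit_cache()

    # or restrict it to a known subset
    repo = Repository.get_by_repo_name('some-repo')
    RepoModel.update_commit_cache(repositories=[repo])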
200 | def get_repos_as_dict(self, repo_list=None, admin=False, |
|
200 | def get_repos_as_dict(self, repo_list=None, admin=False, | |
201 | super_user_actions=False, short_name=None): |
|
201 | super_user_actions=False, short_name=None): | |
202 | _render = get_current_request().get_partial_renderer( |
|
202 | _render = get_current_request().get_partial_renderer( | |
203 | 'rhodecode:templates/data_table/_dt_elements.mako') |
|
203 | 'rhodecode:templates/data_table/_dt_elements.mako') | |
204 | c = _render.get_call_context() |
|
204 | c = _render.get_call_context() | |
205 |
|
205 | |||
206 | def quick_menu(repo_name): |
|
206 | def quick_menu(repo_name): | |
207 | return _render('quick_menu', repo_name) |
|
207 | return _render('quick_menu', repo_name) | |
208 |
|
208 | |||
209 | def repo_lnk(name, rtype, rstate, private, archived, fork_of): |
|
209 | def repo_lnk(name, rtype, rstate, private, archived, fork_of): | |
210 | if short_name is not None: |
|
210 | if short_name is not None: | |
211 | short_name_var = short_name |
|
211 | short_name_var = short_name | |
212 | else: |
|
212 | else: | |
213 | short_name_var = not admin |
|
213 | short_name_var = not admin | |
214 | return _render('repo_name', name, rtype, rstate, private, archived, fork_of, |
|
214 | return _render('repo_name', name, rtype, rstate, private, archived, fork_of, | |
215 | short_name=short_name_var, admin=False) |
|
215 | short_name=short_name_var, admin=False) | |
216 |
|
216 | |||
217 | def last_change(last_change): |
|
217 | def last_change(last_change): | |
218 | if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo: |
|
218 | if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo: | |
219 | ts = time.time() |
|
219 | ts = time.time() | |
220 | utc_offset = (datetime.datetime.fromtimestamp(ts) |
|
220 | utc_offset = (datetime.datetime.fromtimestamp(ts) | |
221 | - datetime.datetime.utcfromtimestamp(ts)).total_seconds() |
|
221 | - datetime.datetime.utcfromtimestamp(ts)).total_seconds() | |
222 | last_change = last_change + datetime.timedelta(seconds=utc_offset) |
|
222 | last_change = last_change + datetime.timedelta(seconds=utc_offset) | |
223 |
|
223 | |||
224 | return _render("last_change", last_change) |
|
224 | return _render("last_change", last_change) | |
225 |
|
225 | |||
226 | def rss_lnk(repo_name): |
|
226 | def rss_lnk(repo_name): | |
227 | return _render("rss", repo_name) |
|
227 | return _render("rss", repo_name) | |
228 |
|
228 | |||
229 | def atom_lnk(repo_name): |
|
229 | def atom_lnk(repo_name): | |
230 | return _render("atom", repo_name) |
|
230 | return _render("atom", repo_name) | |
231 |
|
231 | |||
232 | def last_rev(repo_name, cs_cache): |
|
232 | def last_rev(repo_name, cs_cache): | |
233 | return _render('revision', repo_name, cs_cache.get('revision'), |
|
233 | return _render('revision', repo_name, cs_cache.get('revision'), | |
234 | cs_cache.get('raw_id'), cs_cache.get('author'), |
|
234 | cs_cache.get('raw_id'), cs_cache.get('author'), | |
235 | cs_cache.get('message'), cs_cache.get('date')) |
|
235 | cs_cache.get('message'), cs_cache.get('date')) | |
236 |
|
236 | |||
237 | def desc(desc): |
|
237 | def desc(desc): | |
238 | return _render('repo_desc', desc, c.visual.stylify_metatags) |
|
238 | return _render('repo_desc', desc, c.visual.stylify_metatags) | |
239 |
|
239 | |||
240 | def state(repo_state): |
|
240 | def state(repo_state): | |
241 | return _render("repo_state", repo_state) |
|
241 | return _render("repo_state", repo_state) | |
242 |
|
242 | |||
243 | def repo_actions(repo_name): |
|
243 | def repo_actions(repo_name): | |
244 | return _render('repo_actions', repo_name, super_user_actions) |
|
244 | return _render('repo_actions', repo_name, super_user_actions) | |
245 |
|
245 | |||
246 | def user_profile(username): |
|
246 | def user_profile(username): | |
247 | return _render('user_profile', username) |
|
247 | return _render('user_profile', username) | |
248 |
|
248 | |||
249 | repos_data = [] |
|
249 | repos_data = [] | |
250 | for repo in repo_list: |
|
250 | for repo in repo_list: | |
251 | # NOTE(marcink): because we use only raw column we need to load it like that |
|
251 | # NOTE(marcink): because we use only raw column we need to load it like that | |
252 | changeset_cache = Repository._load_changeset_cache( |
|
252 | changeset_cache = Repository._load_changeset_cache( | |
253 | repo.repo_id, repo._changeset_cache) |
|
253 | repo.repo_id, repo._changeset_cache) | |
254 | last_commit_change = Repository._load_commit_change(changeset_cache) |
|
254 | last_commit_change = Repository._load_commit_change(changeset_cache) | |
255 |
|
255 | |||
256 | row = { |
|
256 | row = { | |
257 | "menu": quick_menu(repo.repo_name), |
|
257 | "menu": quick_menu(repo.repo_name), | |
258 |
|
258 | |||
259 | "name": repo_lnk(repo.repo_name, repo.repo_type, repo.repo_state, |
|
259 | "name": repo_lnk(repo.repo_name, repo.repo_type, repo.repo_state, | |
260 | repo.private, repo.archived, repo.fork), |
|
260 | repo.private, repo.archived, repo.fork), | |
261 | "name_raw": repo.repo_name.lower(), |
|
261 | "name_raw": repo.repo_name.lower(), | |
262 | "desc": desc(repo.description), |
|
262 | "desc": desc(repo.description), | |
263 |
|
263 | |||
264 | "last_change": last_change(last_commit_change), |
|
264 | "last_change": last_change(last_commit_change), | |
265 | "last_change_raw": datetime_to_time(last_commit_change), |
|
265 | "last_change_raw": datetime_to_time(last_commit_change), | |
266 |
|
266 | |||
267 | "last_changeset": last_rev(repo.repo_name, changeset_cache), |
|
267 | "last_changeset": last_rev(repo.repo_name, changeset_cache), | |
268 | "last_changeset_raw": changeset_cache.get('revision'), |
|
268 | "last_changeset_raw": changeset_cache.get('revision'), | |
269 |
|
269 | |||
270 | "owner": user_profile(repo.User.username), |
|
270 | "owner": user_profile(repo.User.username), | |
271 |
|
271 | |||
272 | "state": state(repo.repo_state), |
|
272 | "state": state(repo.repo_state), | |
273 | "rss": rss_lnk(repo.repo_name), |
|
273 | "rss": rss_lnk(repo.repo_name), | |
274 | "atom": atom_lnk(repo.repo_name), |
|
274 | "atom": atom_lnk(repo.repo_name), | |
275 | } |
|
275 | } | |
276 | if admin: |
|
276 | if admin: | |
277 | row.update({ |
|
277 | row.update({ | |
278 | "action": repo_actions(repo.repo_name), |
|
278 | "action": repo_actions(repo.repo_name), | |
279 | }) |
|
279 | }) | |
280 | repos_data.append(row) |
|
280 | repos_data.append(row) | |
281 |
|
281 | |||
282 | return repos_data |
|
282 | return repos_data | |
283 |
|
283 | |||
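get_repos_as_dict() renders its cells through the partial renderer, so it has to run with a current Pyramid request (the test helpers earlier in this diff can provide one). A sketch with an illustrative query:

    repos = Repository.query().filter(Repository.group_id == None).all()  # top-level repos
    rows = RepoModel().get_repos_as_dict(
        repo_list=repos, admin=True, super_user_actions=True)
    # each row carries pre-rendered cells: 'menu', 'name', 'desc', 'last_change',
    # 'last_changeset', 'owner', 'state', 'rss', 'atom' (+ 'action' when admin=True)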
|
284 | def get_repos_data_table( | |||
|
285 | self, draw, start, limit, | |||
|
286 | search_q, order_by, order_dir, | |||
|
287 | auth_user, repo_group_id): | |||
|
288 | from rhodecode.model.scm import RepoList | |||
|
289 | ||||
|
290 | _perms = ['repository.read', 'repository.write', 'repository.admin'] | |||
|
291 | ||||
|
292 | repos = Repository.query() \ | |||
|
293 | .filter(Repository.group_id == repo_group_id) \ | |||
|
294 | .all() | |||
|
295 | auth_repo_list = RepoList( | |||
|
296 | repos, perm_set=_perms, | |||
|
297 | extra_kwargs=dict(user=auth_user)) | |||
|
298 | ||||
|
299 | allowed_ids = [-1] | |||
|
300 | for repo in auth_repo_list: | |||
|
301 | allowed_ids.append(repo.repo_id) | |||
|
302 | ||||
|
303 | repos_data_total_count = Repository.query() \ | |||
|
304 | .filter(Repository.group_id == repo_group_id) \ | |||
|
305 | .filter(or_( | |||
|
306 | # generate multiple IN to fix limitation problems | |||
|
307 | *in_filter_generator(Repository.repo_id, allowed_ids)) | |||
|
308 | ) \ | |||
|
309 | .count() | |||
|
310 | ||||
|
311 | base_q = Session.query( | |||
|
312 | Repository.repo_id, | |||
|
313 | Repository.repo_name, | |||
|
314 | Repository.description, | |||
|
315 | Repository.repo_type, | |||
|
316 | Repository.repo_state, | |||
|
317 | Repository.private, | |||
|
318 | Repository.archived, | |||
|
319 | Repository.fork, | |||
|
320 | Repository.updated_on, | |||
|
321 | Repository._changeset_cache, | |||
|
322 | User, | |||
|
323 | ) \ | |||
|
324 | .filter(Repository.group_id == repo_group_id) \ | |||
|
325 | .filter(or_( | |||
|
326 | # generate multiple IN to fix limitation problems | |||
|
327 | *in_filter_generator(Repository.repo_id, allowed_ids)) | |||
|
328 | ) \ | |||
|
329 | .join(User, User.user_id == Repository.user_id) \ | |||
|
330 | .group_by(Repository, User) | |||
|
331 | ||||
|
332 | repos_data_total_filtered_count = base_q.count() | |||
|
333 | ||||
|
334 | sort_defined = False | |||
|
335 | if order_by == 'repo_name': | |||
|
336 | sort_col = func.lower(Repository.repo_name) | |||
|
337 | sort_defined = True | |||
|
338 | elif order_by == 'user_username': | |||
|
339 | sort_col = User.username | |||
|
340 | else: | |||
|
341 | sort_col = getattr(Repository, order_by, None) | |||
|
342 | ||||
|
343 | if sort_defined or sort_col: | |||
|
344 | if order_dir == 'asc': | |||
|
345 | sort_col = sort_col.asc() | |||
|
346 | else: | |||
|
347 | sort_col = sort_col.desc() | |||
|
348 | ||||
|
349 | base_q = base_q.order_by(sort_col) | |||
|
350 | base_q = base_q.offset(start).limit(limit) | |||
|
351 | ||||
|
352 | repos_list = base_q.all() | |||
|
353 | ||||
|
354 | repos_data = RepoModel().get_repos_as_dict( | |||
|
355 | repo_list=repos_list, admin=False) | |||
|
356 | ||||
|
357 | data = ({ | |||
|
358 | 'draw': draw, | |||
|
359 | 'data': repos_data, | |||
|
360 | 'recordsTotal': repos_data_total_count, | |||
|
361 | 'recordsFiltered': repos_data_total_filtered_count, | |||
|
362 | }) | |||
|
363 | return data | |||
|
364 | ||||
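The new get_repos_data_table() follows the DataTables server-side protocol (draw / recordsTotal / recordsFiltered). A hedged call sketch; auth_user and the group id are assumptions, and note that search_q is accepted but not yet applied in the version above:

    data = RepoModel().get_repos_data_table(
        draw=1, start=0, limit=25,
        search_q='', order_by='repo_name', order_dir='asc',
        auth_user=auth_user, repo_group_id=42)
    # data['data'] holds the rendered rows from get_repos_as_dict(),
    # data['recordsTotal'] counts all permitted repos in the group,
    # data['recordsFiltered'] counts rows matching the base query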
284 | def _get_defaults(self, repo_name): |
|
365 | def _get_defaults(self, repo_name): | |
285 | """ |
|
366 | """ | |
286 | Gets information about repository, and returns a dict for |
|
367 | Gets information about repository, and returns a dict for | |
287 | usage in forms |
|
368 | usage in forms | |
288 |
|
369 | |||
289 | :param repo_name: |
|
370 | :param repo_name: | |
290 | """ |
|
371 | """ | |
291 |
|
372 | |||
292 | repo_info = Repository.get_by_repo_name(repo_name) |
|
373 | repo_info = Repository.get_by_repo_name(repo_name) | |
293 |
|
374 | |||
294 | if repo_info is None: |
|
375 | if repo_info is None: | |
295 | return None |
|
376 | return None | |
296 |
|
377 | |||
297 | defaults = repo_info.get_dict() |
|
378 | defaults = repo_info.get_dict() | |
298 | defaults['repo_name'] = repo_info.just_name |
|
379 | defaults['repo_name'] = repo_info.just_name | |
299 |
|
380 | |||
300 | groups = repo_info.groups_with_parents |
|
381 | groups = repo_info.groups_with_parents | |
301 | parent_group = groups[-1] if groups else None |
|
382 | parent_group = groups[-1] if groups else None | |
302 |
|
383 | |||
303 | # we use -1 as this is how in HTML, we mark an empty group |
|
384 | # we use -1 as this is how in HTML, we mark an empty group | |
304 | defaults['repo_group'] = getattr(parent_group, 'group_id', -1) |
|
385 | defaults['repo_group'] = getattr(parent_group, 'group_id', -1) | |
305 |
|
386 | |||
306 | keys_to_process = ( |
|
387 | keys_to_process = ( | |
307 | {'k': 'repo_type', 'strip': False}, |
|
388 | {'k': 'repo_type', 'strip': False}, | |
308 | {'k': 'repo_enable_downloads', 'strip': True}, |
|
389 | {'k': 'repo_enable_downloads', 'strip': True}, | |
309 | {'k': 'repo_description', 'strip': True}, |
|
390 | {'k': 'repo_description', 'strip': True}, | |
310 | {'k': 'repo_enable_locking', 'strip': True}, |
|
391 | {'k': 'repo_enable_locking', 'strip': True}, | |
311 | {'k': 'repo_landing_rev', 'strip': True}, |
|
392 | {'k': 'repo_landing_rev', 'strip': True}, | |
312 | {'k': 'clone_uri', 'strip': False}, |
|
393 | {'k': 'clone_uri', 'strip': False}, | |
313 | {'k': 'push_uri', 'strip': False}, |
|
394 | {'k': 'push_uri', 'strip': False}, | |
314 | {'k': 'repo_private', 'strip': True}, |
|
395 | {'k': 'repo_private', 'strip': True}, | |
315 | {'k': 'repo_enable_statistics', 'strip': True} |
|
396 | {'k': 'repo_enable_statistics', 'strip': True} | |
316 | ) |
|
397 | ) | |
317 |
|
398 | |||
318 | for item in keys_to_process: |
|
399 | for item in keys_to_process: | |
319 | attr = item['k'] |
|
400 | attr = item['k'] | |
320 | if item['strip']: |
|
401 | if item['strip']: | |
321 | attr = remove_prefix(item['k'], 'repo_') |
|
402 | attr = remove_prefix(item['k'], 'repo_') | |
322 |
|
403 | |||
323 | val = defaults[attr] |
|
404 | val = defaults[attr] | |
324 | if item['k'] == 'repo_landing_rev': |
|
405 | if item['k'] == 'repo_landing_rev': | |
325 | val = ':'.join(defaults[attr]) |
|
406 | val = ':'.join(defaults[attr]) | |
326 | defaults[item['k']] = val |
|
407 | defaults[item['k']] = val | |
327 | if item['k'] == 'clone_uri': |
|
408 | if item['k'] == 'clone_uri': | |
328 | defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden |
|
409 | defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden | |
329 | if item['k'] == 'push_uri': |
|
410 | if item['k'] == 'push_uri': | |
330 | defaults['push_uri_hidden'] = repo_info.push_uri_hidden |
|
411 | defaults['push_uri_hidden'] = repo_info.push_uri_hidden | |
331 |
|
412 | |||
332 | # fill owner |
|
413 | # fill owner | |
333 | if repo_info.user: |
|
414 | if repo_info.user: | |
334 | defaults.update({'user': repo_info.user.username}) |
|
415 | defaults.update({'user': repo_info.user.username}) | |
335 | else: |
|
416 | else: | |
336 | replacement_user = User.get_first_super_admin().username |
|
417 | replacement_user = User.get_first_super_admin().username | |
337 | defaults.update({'user': replacement_user}) |
|
418 | defaults.update({'user': replacement_user}) | |
338 |
|
419 | |||
339 | return defaults |
|
420 | return defaults | |
340 |
|
421 | |||
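A sketch of what _get_defaults() hands back for the settings form (the repository name is hypothetical):

    defaults = RepoModel()._get_defaults('some-group/some-repo')
    # defaults['repo_name'] is the short name ('some-repo'),
    # defaults['repo_group'] is the parent group id, or -1 for repos at the root,
    # defaults['repo_landing_rev'] is re-joined as '<type>:<ref>', e.g. 'rev:tip'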
341 | def update(self, repo, **kwargs): |
|
422 | def update(self, repo, **kwargs): | |
342 | try: |
|
423 | try: | |
343 | cur_repo = self._get_repo(repo) |
|
424 | cur_repo = self._get_repo(repo) | |
344 | source_repo_name = cur_repo.repo_name |
|
425 | source_repo_name = cur_repo.repo_name | |
345 | if 'user' in kwargs: |
|
426 | if 'user' in kwargs: | |
346 | cur_repo.user = User.get_by_username(kwargs['user']) |
|
427 | cur_repo.user = User.get_by_username(kwargs['user']) | |
347 |
|
428 | |||
348 | if 'repo_group' in kwargs: |
|
429 | if 'repo_group' in kwargs: | |
349 | cur_repo.group = RepoGroup.get(kwargs['repo_group']) |
|
430 | cur_repo.group = RepoGroup.get(kwargs['repo_group']) | |
350 | log.debug('Updating repo %s with params:%s', cur_repo, kwargs) |
|
431 | log.debug('Updating repo %s with params:%s', cur_repo, kwargs) | |
351 |
|
432 | |||
352 | update_keys = [ |
|
433 | update_keys = [ | |
353 | (1, 'repo_description'), |
|
434 | (1, 'repo_description'), | |
354 | (1, 'repo_landing_rev'), |
|
435 | (1, 'repo_landing_rev'), | |
355 | (1, 'repo_private'), |
|
436 | (1, 'repo_private'), | |
356 | (1, 'repo_enable_downloads'), |
|
437 | (1, 'repo_enable_downloads'), | |
357 | (1, 'repo_enable_locking'), |
|
438 | (1, 'repo_enable_locking'), | |
358 | (1, 'repo_enable_statistics'), |
|
439 | (1, 'repo_enable_statistics'), | |
359 | (0, 'clone_uri'), |
|
440 | (0, 'clone_uri'), | |
360 | (0, 'push_uri'), |
|
441 | (0, 'push_uri'), | |
361 | (0, 'fork_id') |
|
442 | (0, 'fork_id') | |
362 | ] |
|
443 | ] | |
363 | for strip, k in update_keys: |
|
444 | for strip, k in update_keys: | |
364 | if k in kwargs: |
|
445 | if k in kwargs: | |
365 | val = kwargs[k] |
|
446 | val = kwargs[k] | |
366 | if strip: |
|
447 | if strip: | |
367 | k = remove_prefix(k, 'repo_') |
|
448 | k = remove_prefix(k, 'repo_') | |
368 |
|
449 | |||
369 | setattr(cur_repo, k, val) |
|
450 | setattr(cur_repo, k, val) | |
370 |
|
451 | |||
371 | new_name = cur_repo.get_new_name(kwargs['repo_name']) |
|
452 | new_name = cur_repo.get_new_name(kwargs['repo_name']) | |
372 | cur_repo.repo_name = new_name |
|
453 | cur_repo.repo_name = new_name | |
373 |
|
454 | |||
374 | # if private flag is set, reset default permission to NONE |
|
455 | # if private flag is set, reset default permission to NONE | |
375 | if kwargs.get('repo_private'): |
|
456 | if kwargs.get('repo_private'): | |
376 | EMPTY_PERM = 'repository.none' |
|
457 | EMPTY_PERM = 'repository.none' | |
377 | RepoModel().grant_user_permission( |
|
458 | RepoModel().grant_user_permission( | |
378 | repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM |
|
459 | repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM | |
379 | ) |
|
460 | ) | |
380 |
|
461 | |||
381 | # handle extra fields |
|
462 | # handle extra fields | |
382 | for field in filter(lambda k: k.startswith(RepositoryField.PREFIX), kwargs): |
|
463 | for field in filter(lambda k: k.startswith(RepositoryField.PREFIX), kwargs): | |
383 | k = RepositoryField.un_prefix_key(field) |
|
464 | k = RepositoryField.un_prefix_key(field) | |
384 | ex_field = RepositoryField.get_by_key_name( |
|
465 | ex_field = RepositoryField.get_by_key_name( | |
385 | key=k, repo=cur_repo) |
|
466 | key=k, repo=cur_repo) | |
386 | if ex_field: |
|
467 | if ex_field: | |
387 | ex_field.field_value = kwargs[field] |
|
468 | ex_field.field_value = kwargs[field] | |
388 | self.sa.add(ex_field) |
|
469 | self.sa.add(ex_field) | |
389 |
|
470 | |||
390 | self.sa.add(cur_repo) |
|
471 | self.sa.add(cur_repo) | |
391 |
|
472 | |||
392 | if source_repo_name != new_name: |
|
473 | if source_repo_name != new_name: | |
393 | # rename repository |
|
474 | # rename repository | |
394 | self._rename_filesystem_repo( |
|
475 | self._rename_filesystem_repo( | |
395 | old=source_repo_name, new=new_name) |
|
476 | old=source_repo_name, new=new_name) | |
396 |
|
477 | |||
397 | return cur_repo |
|
478 | return cur_repo | |
398 | except Exception: |
|
479 | except Exception: | |
399 | log.error(traceback.format_exc()) |
|
480 | log.error(traceback.format_exc()) | |
400 | raise |
|
481 | raise | |
401 |
|
482 | |||
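update() always reads kwargs['repo_name'] to compute the new full name, so pass it even when not renaming. A hedged sketch, assuming the usual Session() scoped-session commit pattern:

    RepoModel().update(
        'some-repo',                       # current name, id or Repository instance
        repo_name='some-repo-renamed',     # required key; triggers a filesystem rename
        repo_private=True,                 # also resets the default user perm to 'repository.none'
        repo_description='illustrative description')
    Session().commit()                     # the model only adds objects to the session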
402 | def _create_repo(self, repo_name, repo_type, description, owner, |
|
483 | def _create_repo(self, repo_name, repo_type, description, owner, | |
403 | private=False, clone_uri=None, repo_group=None, |
|
484 | private=False, clone_uri=None, repo_group=None, | |
404 | landing_rev='rev:tip', fork_of=None, |
|
485 | landing_rev='rev:tip', fork_of=None, | |
405 | copy_fork_permissions=False, enable_statistics=False, |
|
486 | copy_fork_permissions=False, enable_statistics=False, | |
406 | enable_locking=False, enable_downloads=False, |
|
487 | enable_locking=False, enable_downloads=False, | |
407 | copy_group_permissions=False, |
|
488 | copy_group_permissions=False, | |
408 | state=Repository.STATE_PENDING): |
|
489 | state=Repository.STATE_PENDING): | |
409 | """ |
|
490 | """ | |
410 | Create repository inside the database with PENDING state; this should |
|
491 | Create repository inside the database with PENDING state; this should | |
411 | only be executed by create(), with the exception of importing existing |
|
492 | only be executed by create(), with the exception of importing existing | |
412 | repos |
|
493 | repos | |
413 | """ |
|
494 | """ | |
414 | from rhodecode.model.scm import ScmModel |
|
495 | from rhodecode.model.scm import ScmModel | |
415 |
|
496 | |||
416 | owner = self._get_user(owner) |
|
497 | owner = self._get_user(owner) | |
417 | fork_of = self._get_repo(fork_of) |
|
498 | fork_of = self._get_repo(fork_of) | |
418 | repo_group = self._get_repo_group(safe_int(repo_group)) |
|
499 | repo_group = self._get_repo_group(safe_int(repo_group)) | |
419 |
|
500 | |||
420 | try: |
|
501 | try: | |
421 | repo_name = safe_unicode(repo_name) |
|
502 | repo_name = safe_unicode(repo_name) | |
422 | description = safe_unicode(description) |
|
503 | description = safe_unicode(description) | |
423 | # repo name is just a name of repository |
|
504 | # repo name is just a name of repository | |
424 | # while repo_name_full is a full qualified name that is combined |
|
505 | # while repo_name_full is a full qualified name that is combined | |
425 | # with name and path of group |
|
506 | # with name and path of group | |
426 | repo_name_full = repo_name |
|
507 | repo_name_full = repo_name | |
427 | repo_name = repo_name.split(Repository.NAME_SEP)[-1] |
|
508 | repo_name = repo_name.split(Repository.NAME_SEP)[-1] | |
428 |
|
509 | |||
429 | new_repo = Repository() |
|
510 | new_repo = Repository() | |
430 | new_repo.repo_state = state |
|
511 | new_repo.repo_state = state | |
431 | new_repo.enable_statistics = False |
|
512 | new_repo.enable_statistics = False | |
432 | new_repo.repo_name = repo_name_full |
|
513 | new_repo.repo_name = repo_name_full | |
433 | new_repo.repo_type = repo_type |
|
514 | new_repo.repo_type = repo_type | |
434 | new_repo.user = owner |
|
515 | new_repo.user = owner | |
435 | new_repo.group = repo_group |
|
516 | new_repo.group = repo_group | |
436 | new_repo.description = description or repo_name |
|
517 | new_repo.description = description or repo_name | |
437 | new_repo.private = private |
|
518 | new_repo.private = private | |
438 | new_repo.archived = False |
|
519 | new_repo.archived = False | |
439 | new_repo.clone_uri = clone_uri |
|
520 | new_repo.clone_uri = clone_uri | |
440 | new_repo.landing_rev = landing_rev |
|
521 | new_repo.landing_rev = landing_rev | |
441 |
|
522 | |||
442 | new_repo.enable_statistics = enable_statistics |
|
523 | new_repo.enable_statistics = enable_statistics | |
443 | new_repo.enable_locking = enable_locking |
|
524 | new_repo.enable_locking = enable_locking | |
444 | new_repo.enable_downloads = enable_downloads |
|
525 | new_repo.enable_downloads = enable_downloads | |
445 |
|
526 | |||
446 | if repo_group: |
|
527 | if repo_group: | |
447 | new_repo.enable_locking = repo_group.enable_locking |
|
528 | new_repo.enable_locking = repo_group.enable_locking | |
448 |
|
529 | |||
449 | if fork_of: |
|
530 | if fork_of: | |
450 | parent_repo = fork_of |
|
531 | parent_repo = fork_of | |
451 | new_repo.fork = parent_repo |
|
532 | new_repo.fork = parent_repo | |
452 |
|
533 | |||
453 | events.trigger(events.RepoPreCreateEvent(new_repo)) |
|
534 | events.trigger(events.RepoPreCreateEvent(new_repo)) | |
454 |
|
535 | |||
455 | self.sa.add(new_repo) |
|
536 | self.sa.add(new_repo) | |
456 |
|
537 | |||
457 | EMPTY_PERM = 'repository.none' |
|
538 | EMPTY_PERM = 'repository.none' | |
458 | if fork_of and copy_fork_permissions: |
|
539 | if fork_of and copy_fork_permissions: | |
459 | repo = fork_of |
|
540 | repo = fork_of | |
460 | user_perms = UserRepoToPerm.query() \ |
|
541 | user_perms = UserRepoToPerm.query() \ | |
461 | .filter(UserRepoToPerm.repository == repo).all() |
|
542 | .filter(UserRepoToPerm.repository == repo).all() | |
462 | group_perms = UserGroupRepoToPerm.query() \ |
|
543 | group_perms = UserGroupRepoToPerm.query() \ | |
463 | .filter(UserGroupRepoToPerm.repository == repo).all() |
|
544 | .filter(UserGroupRepoToPerm.repository == repo).all() | |
464 |
|
545 | |||
465 | for perm in user_perms: |
|
546 | for perm in user_perms: | |
466 | UserRepoToPerm.create( |
|
547 | UserRepoToPerm.create( | |
467 | perm.user, new_repo, perm.permission) |
|
548 | perm.user, new_repo, perm.permission) | |
468 |
|
549 | |||
469 | for perm in group_perms: |
|
550 | for perm in group_perms: | |
470 | UserGroupRepoToPerm.create( |
|
551 | UserGroupRepoToPerm.create( | |
471 | perm.users_group, new_repo, perm.permission) |
|
552 | perm.users_group, new_repo, perm.permission) | |
472 | # in case we copy permissions and also set this repo to private |
|
553 | # in case we copy permissions and also set this repo to private | |
473 | # override the default user permission to make it a private repo |
|
554 | # override the default user permission to make it a private repo | |
474 | if private: |
|
555 | if private: | |
475 | RepoModel(self.sa).grant_user_permission( |
|
556 | RepoModel(self.sa).grant_user_permission( | |
476 | repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM) |
|
557 | repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM) | |
477 |
|
558 | |||
478 | elif repo_group and copy_group_permissions: |
|
559 | elif repo_group and copy_group_permissions: | |
479 | user_perms = UserRepoGroupToPerm.query() \ |
|
560 | user_perms = UserRepoGroupToPerm.query() \ | |
480 | .filter(UserRepoGroupToPerm.group == repo_group).all() |
|
561 | .filter(UserRepoGroupToPerm.group == repo_group).all() | |
481 |
|
562 | |||
482 | group_perms = UserGroupRepoGroupToPerm.query() \ |
|
563 | group_perms = UserGroupRepoGroupToPerm.query() \ | |
483 | .filter(UserGroupRepoGroupToPerm.group == repo_group).all() |
|
564 | .filter(UserGroupRepoGroupToPerm.group == repo_group).all() | |
484 |
|
565 | |||
485 | for perm in user_perms: |
|
566 | for perm in user_perms: | |
486 | perm_name = perm.permission.permission_name.replace( |
|
567 | perm_name = perm.permission.permission_name.replace( | |
487 | 'group.', 'repository.') |
|
568 | 'group.', 'repository.') | |
488 | perm_obj = Permission.get_by_key(perm_name) |
|
569 | perm_obj = Permission.get_by_key(perm_name) | |
489 | UserRepoToPerm.create(perm.user, new_repo, perm_obj) |
|
570 | UserRepoToPerm.create(perm.user, new_repo, perm_obj) | |
490 |
|
571 | |||
491 | for perm in group_perms: |
|
572 | for perm in group_perms: | |
492 | perm_name = perm.permission.permission_name.replace( |
|
573 | perm_name = perm.permission.permission_name.replace( | |
493 | 'group.', 'repository.') |
|
574 | 'group.', 'repository.') | |
494 | perm_obj = Permission.get_by_key(perm_name) |
|
575 | perm_obj = Permission.get_by_key(perm_name) | |
495 | UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj) |
|
576 | UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj) | |
496 |
|
577 | |||
497 | if private: |
|
578 | if private: | |
498 | RepoModel(self.sa).grant_user_permission( |
|
579 | RepoModel(self.sa).grant_user_permission( | |
499 | repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM) |
|
580 | repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM) | |
500 |
|
581 | |||
501 | else: |
|
582 | else: | |
502 | perm_obj = self._create_default_perms(new_repo, private) |
|
583 | perm_obj = self._create_default_perms(new_repo, private) | |
503 | self.sa.add(perm_obj) |
|
584 | self.sa.add(perm_obj) | |
504 |
|
585 | |||
505 | # now automatically start following this repository as owner |
|
586 | # now automatically start following this repository as owner | |
506 | ScmModel(self.sa).toggle_following_repo(new_repo.repo_id, owner.user_id) |
|
587 | ScmModel(self.sa).toggle_following_repo(new_repo.repo_id, owner.user_id) | |
507 |
|
588 | |||
508 | # we need to flush here, in order to check if database won't |
|
589 | # we need to flush here, in order to check if database won't | |
509 | # throw any exceptions, create filesystem dirs at the very end |
|
590 | # throw any exceptions, create filesystem dirs at the very end | |
510 | self.sa.flush() |
|
591 | self.sa.flush() | |
511 | events.trigger(events.RepoCreateEvent(new_repo)) |
|
592 | events.trigger(events.RepoCreateEvent(new_repo)) | |
512 | return new_repo |
|
593 | return new_repo | |
513 |
|
594 | |||
514 | except Exception: |
|
595 | except Exception: | |
515 | log.error(traceback.format_exc()) |
|
596 | log.error(traceback.format_exc()) | |
516 | raise |
|
597 | raise | |
517 |
|
598 | |||
518 | def create(self, form_data, cur_user): |
|
599 | def create(self, form_data, cur_user): | |
519 | """ |
|
600 | """ | |
520 | Create repository using celery tasks |
|
601 | Create repository using celery tasks | |
521 |
|
602 | |||
522 | :param form_data: |
|
603 | :param form_data: | |
523 | :param cur_user: |
|
604 | :param cur_user: | |
524 | """ |
|
605 | """ | |
525 | from rhodecode.lib.celerylib import tasks, run_task |
|
606 | from rhodecode.lib.celerylib import tasks, run_task | |
526 | return run_task(tasks.create_repo, form_data, cur_user) |
|
607 | return run_task(tasks.create_repo, form_data, cur_user) | |
527 |
|
608 | |||
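create() itself does no work beyond scheduling: run_task() hands form_data to the create_repo celery task, which in turn calls _create_repo() above and creates the repository on disk. A deliberately vague sketch; form_data mirrors the repo-creation form and is validated elsewhere:

    # form_data and cur_user are assumed to be prepared by the calling view
    task_result = RepoModel().create(form_data, cur_user)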
528 | def update_permissions(self, repo, perm_additions=None, perm_updates=None, |
|
609 | def update_permissions(self, repo, perm_additions=None, perm_updates=None, | |
529 | perm_deletions=None, check_perms=True, |
|
610 | perm_deletions=None, check_perms=True, | |
530 | cur_user=None): |
|
611 | cur_user=None): | |
531 | if not perm_additions: |
|
612 | if not perm_additions: | |
532 | perm_additions = [] |
|
613 | perm_additions = [] | |
533 | if not perm_updates: |
|
614 | if not perm_updates: | |
534 | perm_updates = [] |
|
615 | perm_updates = [] | |
535 | if not perm_deletions: |
|
616 | if not perm_deletions: | |
536 | perm_deletions = [] |
|
617 | perm_deletions = [] | |
537 |
|
618 | |||
538 | req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin') |
|
619 | req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin') | |
539 |
|
620 | |||
540 | changes = { |
|
621 | changes = { | |
541 | 'added': [], |
|
622 | 'added': [], | |
542 | 'updated': [], |
|
623 | 'updated': [], | |
543 | 'deleted': [] |
|
624 | 'deleted': [] | |
544 | } |
|
625 | } | |
545 | # update permissions |
|
626 | # update permissions | |
546 | for member_id, perm, member_type in perm_updates: |
|
627 | for member_id, perm, member_type in perm_updates: | |
547 | member_id = int(member_id) |
|
628 | member_id = int(member_id) | |
548 | if member_type == 'user': |
|
629 | if member_type == 'user': | |
549 | member_name = User.get(member_id).username |
|
630 | member_name = User.get(member_id).username | |
550 | # this updates also current one if found |
|
631 | # this updates also current one if found | |
551 | self.grant_user_permission( |
|
632 | self.grant_user_permission( | |
552 | repo=repo, user=member_id, perm=perm) |
|
633 | repo=repo, user=member_id, perm=perm) | |
553 | elif member_type == 'user_group': |
|
634 | elif member_type == 'user_group': | |
554 | # check if we have permissions to alter this usergroup |
|
635 | # check if we have permissions to alter this usergroup | |
555 | member_name = UserGroup.get(member_id).users_group_name |
|
636 | member_name = UserGroup.get(member_id).users_group_name | |
556 | if not check_perms or HasUserGroupPermissionAny( |
|
637 | if not check_perms or HasUserGroupPermissionAny( | |
557 | *req_perms)(member_name, user=cur_user): |
|
638 | *req_perms)(member_name, user=cur_user): | |
558 | self.grant_user_group_permission( |
|
639 | self.grant_user_group_permission( | |
559 | repo=repo, group_name=member_id, perm=perm) |
|
640 | repo=repo, group_name=member_id, perm=perm) | |
560 | else: |
|
641 | else: | |
561 | raise ValueError("member_type must be 'user' or 'user_group' " |
|
642 | raise ValueError("member_type must be 'user' or 'user_group' " | |
562 | "got {} instead".format(member_type)) |
|
643 | "got {} instead".format(member_type)) | |
563 | changes['updated'].append({'type': member_type, 'id': member_id, |
|
644 | changes['updated'].append({'type': member_type, 'id': member_id, | |
564 | 'name': member_name, 'new_perm': perm}) |
|
645 | 'name': member_name, 'new_perm': perm}) | |
565 |
|
646 | |||
566 | # set new permissions |
|
647 | # set new permissions | |
567 | for member_id, perm, member_type in perm_additions: |
|
648 | for member_id, perm, member_type in perm_additions: | |
568 | member_id = int(member_id) |
|
649 | member_id = int(member_id) | |
569 | if member_type == 'user': |
|
650 | if member_type == 'user': | |
570 | member_name = User.get(member_id).username |
|
651 | member_name = User.get(member_id).username | |
571 | self.grant_user_permission( |
|
652 | self.grant_user_permission( | |
572 | repo=repo, user=member_id, perm=perm) |
|
653 | repo=repo, user=member_id, perm=perm) | |
573 | elif member_type == 'user_group': |
|
654 | elif member_type == 'user_group': | |
574 | # check if we have permissions to alter this usergroup |
|
655 | # check if we have permissions to alter this usergroup | |
575 | member_name = UserGroup.get(member_id).users_group_name |
|
656 | member_name = UserGroup.get(member_id).users_group_name | |
576 | if not check_perms or HasUserGroupPermissionAny( |
|
657 | if not check_perms or HasUserGroupPermissionAny( | |
577 | *req_perms)(member_name, user=cur_user): |
|
658 | *req_perms)(member_name, user=cur_user): | |
578 | self.grant_user_group_permission( |
|
659 | self.grant_user_group_permission( | |
579 | repo=repo, group_name=member_id, perm=perm) |
|
660 | repo=repo, group_name=member_id, perm=perm) | |
580 | else: |
|
661 | else: | |
581 | raise ValueError("member_type must be 'user' or 'user_group' " |
|
662 | raise ValueError("member_type must be 'user' or 'user_group' " | |
582 | "got {} instead".format(member_type)) |
|
663 | "got {} instead".format(member_type)) | |
583 |
|
664 | |||
584 | changes['added'].append({'type': member_type, 'id': member_id, |
|
665 | changes['added'].append({'type': member_type, 'id': member_id, | |
585 | 'name': member_name, 'new_perm': perm}) |
|
666 | 'name': member_name, 'new_perm': perm}) | |
586 | # delete permissions |
|
667 | # delete permissions | |
587 | for member_id, perm, member_type in perm_deletions: |
|
668 | for member_id, perm, member_type in perm_deletions: | |
588 | member_id = int(member_id) |
|
669 | member_id = int(member_id) | |
589 | if member_type == 'user': |
|
670 | if member_type == 'user': | |
590 | member_name = User.get(member_id).username |
|
671 | member_name = User.get(member_id).username | |
591 | self.revoke_user_permission(repo=repo, user=member_id) |
|
672 | self.revoke_user_permission(repo=repo, user=member_id) | |
592 | elif member_type == 'user_group': |
|
673 | elif member_type == 'user_group': | |
593 | # check if we have permissions to alter this usergroup |
|
674 | # check if we have permissions to alter this usergroup | |
594 | member_name = UserGroup.get(member_id).users_group_name |
|
675 | member_name = UserGroup.get(member_id).users_group_name | |
595 | if not check_perms or HasUserGroupPermissionAny( |
|
676 | if not check_perms or HasUserGroupPermissionAny( | |
596 | *req_perms)(member_name, user=cur_user): |
|
677 | *req_perms)(member_name, user=cur_user): | |
597 | self.revoke_user_group_permission( |
|
678 | self.revoke_user_group_permission( | |
598 | repo=repo, group_name=member_id) |
|
679 | repo=repo, group_name=member_id) | |
599 | else: |
|
680 | else: | |
600 | raise ValueError("member_type must be 'user' or 'user_group' " |
|
681 | raise ValueError("member_type must be 'user' or 'user_group' " | |
601 | "got {} instead".format(member_type)) |
|
682 | "got {} instead".format(member_type)) | |
602 |
|
683 | |||
603 | changes['deleted'].append({'type': member_type, 'id': member_id, |
|
684 | changes['deleted'].append({'type': member_type, 'id': member_id, | |
604 | 'name': member_name, 'new_perm': perm}) |
|
685 | 'name': member_name, 'new_perm': perm}) | |
605 | return changes |
|
686 | return changes | |
606 |
|
687 | |||
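update_permissions() takes (member_id, permission, member_type) triples; a sketch using only 'user' entries so the user-group permission check above is not involved (the ids are made up):

    changes = RepoModel().update_permissions(
        repo='some-repo',
        perm_additions=[(2, 'repository.write', 'user')],
        perm_updates=[(3, 'repository.read', 'user')],
        perm_deletions=[(4, 'repository.none', 'user')],
        cur_user='admin')
    # changes == {'added': [...], 'updated': [...], 'deleted': [...]} for audit use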
607 | def create_fork(self, form_data, cur_user): |
|
688 | def create_fork(self, form_data, cur_user): | |
608 | """ |
|
689 | """ | |
609 | Simple wrapper into executing celery task for fork creation |
|
690 | Simple wrapper into executing celery task for fork creation | |
610 |
|
691 | |||
611 | :param form_data: |
|
692 | :param form_data: | |
612 | :param cur_user: |
|
693 | :param cur_user: | |
613 | """ |
|
694 | """ | |
614 | from rhodecode.lib.celerylib import tasks, run_task |
|
695 | from rhodecode.lib.celerylib import tasks, run_task | |
615 | return run_task(tasks.create_repo_fork, form_data, cur_user) |
|
696 | return run_task(tasks.create_repo_fork, form_data, cur_user) | |
616 |
|
697 | |||
617 | def archive(self, repo): |
|
698 | def archive(self, repo): | |
618 | """ |
|
699 | """ | |
619 | Archive given repository. Set archive flag. |
|
700 | Archive given repository. Set archive flag. | |
620 |
|
701 | |||
621 | :param repo: |
|
702 | :param repo: | |
622 | """ |
|
703 | """ | |
623 | repo = self._get_repo(repo) |
|
704 | repo = self._get_repo(repo) | |
624 | if repo: |
|
705 | if repo: | |
625 |
|
706 | |||
626 | try: |
|
707 | try: | |
627 | repo.archived = True |
|
708 | repo.archived = True | |
628 | self.sa.add(repo) |
|
709 | self.sa.add(repo) | |
629 | self.sa.commit() |
|
710 | self.sa.commit() | |
630 | except Exception: |
|
711 | except Exception: | |
631 | log.error(traceback.format_exc()) |
|
712 | log.error(traceback.format_exc()) | |
632 | raise |
|
713 | raise | |
633 |
|
714 | |||
634 | def delete(self, repo, forks=None, pull_requests=None, fs_remove=True, cur_user=None): |
|
715 | def delete(self, repo, forks=None, pull_requests=None, fs_remove=True, cur_user=None): | |
635 | """ |
|
716 | """ | |
636 | Delete given repository, forks parameter defines what to do with |
|
717 | Delete given repository, forks parameter defines what to do with | |
637 | attached forks. Throws AttachedForksError if deleted repo has attached |
|
718 | attached forks. Throws AttachedForksError if deleted repo has attached | |
638 | forks |
|
719 | forks | |
639 |
|
720 | |||
640 | :param repo: |
|
721 | :param repo: | |
641 | :param forks: str 'delete' or 'detach' |
|
722 | :param forks: str 'delete' or 'detach' | |
642 | :param pull_requests: str 'delete' or None |
|
723 | :param pull_requests: str 'delete' or None | |
643 | :param fs_remove: remove(archive) repo from filesystem |
|
724 | :param fs_remove: remove(archive) repo from filesystem | |
644 | """ |
|
725 | """ | |
645 | if not cur_user: |
|
726 | if not cur_user: | |
646 | cur_user = getattr(get_current_rhodecode_user(), 'username', None) |
|
727 | cur_user = getattr(get_current_rhodecode_user(), 'username', None) | |
647 | repo = self._get_repo(repo) |
|
728 | repo = self._get_repo(repo) | |
648 | if repo: |
|
729 | if repo: | |
649 | if forks == 'detach': |
|
730 | if forks == 'detach': | |
650 | for r in repo.forks: |
|
731 | for r in repo.forks: | |
651 | r.fork = None |
|
732 | r.fork = None | |
652 | self.sa.add(r) |
|
733 | self.sa.add(r) | |
653 | elif forks == 'delete': |
|
734 | elif forks == 'delete': | |
654 | for r in repo.forks: |
|
735 | for r in repo.forks: | |
655 | self.delete(r, forks='delete') |
|
736 | self.delete(r, forks='delete') | |
656 | elif [f for f in repo.forks]: |
|
737 | elif [f for f in repo.forks]: | |
657 | raise AttachedForksError() |
|
738 | raise AttachedForksError() | |
658 |
|
739 | |||
659 | # check for pull requests |
|
740 | # check for pull requests | |
660 | pr_sources = repo.pull_requests_source |
|
741 | pr_sources = repo.pull_requests_source | |
661 | pr_targets = repo.pull_requests_target |
|
742 | pr_targets = repo.pull_requests_target | |
662 | if pull_requests != 'delete' and (pr_sources or pr_targets): |
|
743 | if pull_requests != 'delete' and (pr_sources or pr_targets): | |
663 | raise AttachedPullRequestsError() |
|
744 | raise AttachedPullRequestsError() | |
664 |
|
745 | |||
665 | old_repo_dict = repo.get_dict() |
|
746 | old_repo_dict = repo.get_dict() | |
666 | events.trigger(events.RepoPreDeleteEvent(repo)) |
|
747 | events.trigger(events.RepoPreDeleteEvent(repo)) | |
667 | try: |
|
748 | try: | |
668 | self.sa.delete(repo) |
|
749 | self.sa.delete(repo) | |
669 | if fs_remove: |
|
750 | if fs_remove: | |
670 | self._delete_filesystem_repo(repo) |
|
751 | self._delete_filesystem_repo(repo) | |
671 | else: |
|
752 | else: | |
672 | log.debug('skipping removal from filesystem') |
|
753 | log.debug('skipping removal from filesystem') | |
673 | old_repo_dict.update({ |
|
754 | old_repo_dict.update({ | |
674 | 'deleted_by': cur_user, |
|
755 | 'deleted_by': cur_user, | |
675 | 'deleted_on': time.time(), |
|
756 | 'deleted_on': time.time(), | |
676 | }) |
|
757 | }) | |
677 | log_delete_repository(**old_repo_dict) |
|
758 | log_delete_repository(**old_repo_dict) | |
678 | events.trigger(events.RepoDeleteEvent(repo)) |
|
759 | events.trigger(events.RepoDeleteEvent(repo)) | |
679 | except Exception: |
|
760 | except Exception: | |
680 | log.error(traceback.format_exc()) |
|
761 | log.error(traceback.format_exc()) | |
681 | raise |
|
762 | raise | |
682 |
|
763 | |||
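delete() refuses to run while forks or pull requests are still attached unless told what to do with them; a sketch with illustrative names:

    # forks='detach' detaches forks; pull_requests='delete' skips the attached-PR guard;
    # fs_remove=True also archives the repository directory on disk
    RepoModel().delete('some-repo', forks='detach', pull_requests='delete', fs_remove=True)
    Session().commit()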
683 | def grant_user_permission(self, repo, user, perm): |
|
764 | def grant_user_permission(self, repo, user, perm): | |
684 | """ |
|
765 | """ | |
685 | Grant permission for user on given repository, or update existing one |
|
766 | Grant permission for user on given repository, or update existing one | |
686 | if found |
|
767 | if found | |
687 |
|
768 | |||
688 | :param repo: Instance of Repository, repository_id, or repository name |
|
769 | :param repo: Instance of Repository, repository_id, or repository name | |
689 | :param user: Instance of User, user_id or username |
|
770 | :param user: Instance of User, user_id or username | |
690 | :param perm: Instance of Permission, or permission_name |
|
771 | :param perm: Instance of Permission, or permission_name | |
691 | """ |
|
772 | """ | |
692 | user = self._get_user(user) |
|
773 | user = self._get_user(user) | |
693 | repo = self._get_repo(repo) |
|
774 | repo = self._get_repo(repo) | |
694 | permission = self._get_perm(perm) |
|
775 | permission = self._get_perm(perm) | |
695 |
|
776 | |||
696 | # check if we have that permission already |
|
777 | # check if we have that permission already | |
697 | obj = self.sa.query(UserRepoToPerm) \ |
|
778 | obj = self.sa.query(UserRepoToPerm) \ | |
698 | .filter(UserRepoToPerm.user == user) \ |
|
779 | .filter(UserRepoToPerm.user == user) \ | |
699 | .filter(UserRepoToPerm.repository == repo) \ |
|
780 | .filter(UserRepoToPerm.repository == repo) \ | |
700 | .scalar() |
|
781 | .scalar() | |
701 | if obj is None: |
|
782 | if obj is None: | |
702 | # create new ! |
|
783 | # create new ! | |
703 | obj = UserRepoToPerm() |
|
784 | obj = UserRepoToPerm() | |
704 | obj.repository = repo |
|
785 | obj.repository = repo | |
705 | obj.user = user |
|
786 | obj.user = user | |
706 | obj.permission = permission |
|
787 | obj.permission = permission | |
707 | self.sa.add(obj) |
|
788 | self.sa.add(obj) | |
708 | log.debug('Granted perm %s to %s on %s', perm, user, repo) |
|
789 | log.debug('Granted perm %s to %s on %s', perm, user, repo) | |
709 | action_logger_generic( |
|
790 | action_logger_generic( | |
710 | 'granted permission: {} to user: {} on repo: {}'.format( |
|
791 | 'granted permission: {} to user: {} on repo: {}'.format( | |
711 | perm, user, repo), namespace='security.repo') |
|
792 | perm, user, repo), namespace='security.repo') | |
712 | return obj |
|
793 | return obj | |
713 |
|
794 | |||
714 | def revoke_user_permission(self, repo, user): |
|
795 | def revoke_user_permission(self, repo, user): | |
715 | """ |
|
796 | """ | |
716 | Revoke permission for user on given repository |
|
797 | Revoke permission for user on given repository | |
717 |
|
798 | |||
718 | :param repo: Instance of Repository, repository_id, or repository name |
|
799 | :param repo: Instance of Repository, repository_id, or repository name | |
719 | :param user: Instance of User, user_id or username |
|
800 | :param user: Instance of User, user_id or username | |
720 | """ |
|
801 | """ | |
721 |
|
802 | |||
722 | user = self._get_user(user) |
|
803 | user = self._get_user(user) | |
723 | repo = self._get_repo(repo) |
|
804 | repo = self._get_repo(repo) | |
724 |
|
805 | |||
725 | obj = self.sa.query(UserRepoToPerm) \ |
|
806 | obj = self.sa.query(UserRepoToPerm) \ | |
726 | .filter(UserRepoToPerm.repository == repo) \ |
|
807 | .filter(UserRepoToPerm.repository == repo) \ | |
727 | .filter(UserRepoToPerm.user == user) \ |
|
808 | .filter(UserRepoToPerm.user == user) \ | |
728 | .scalar() |
|
809 | .scalar() | |
729 | if obj: |
|
810 | if obj: | |
730 | self.sa.delete(obj) |
|
811 | self.sa.delete(obj) | |
731 | log.debug('Revoked perm on %s from user %s', repo, user) |
|
812 | log.debug('Revoked perm on %s from user %s', repo, user) | |
732 | action_logger_generic( |
|
813 | action_logger_generic( | |
733 | 'revoked permission from user: {} on repo: {}'.format( |
|
814 | 'revoked permission from user: {} on repo: {}'.format( | |
734 | user, repo), namespace='security.repo') |
|
815 | user, repo), namespace='security.repo') | |
735 |
|
816 | |||
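
Both calls resolve their arguments through _get_repo, _get_user and _get_perm, so a Repository/User/Permission instance, a numeric id, or a plain name all work. A short round-trip sketch with placeholder names; 'repository.write' stands in for one of the usual repository.* permission names:

    # grant, then revoke, a direct user permission on a repository
    from rhodecode.model.db import Session
    from rhodecode.model.repo import RepoModel

    model = RepoModel()
    model.grant_user_permission(
        repo='some-repo', user='some-user', perm='repository.write')
    Session().commit()

    model.revoke_user_permission(repo='some-repo', user='some-user')
    Session().commit()
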
736 | def grant_user_group_permission(self, repo, group_name, perm): |
|
817 | def grant_user_group_permission(self, repo, group_name, perm): | |
737 | """ |
|
818 | """ | |
738 | Grant permission for user group on given repository, or update |
|
819 | Grant permission for user group on given repository, or update | |
739 | existing one if found |
|
820 | existing one if found | |
740 |
|
821 | |||
741 | :param repo: Instance of Repository, repository_id, or repository name |
|
822 | :param repo: Instance of Repository, repository_id, or repository name | |
742 | :param group_name: Instance of UserGroup, users_group_id, |
|
823 | :param group_name: Instance of UserGroup, users_group_id, | |
743 | or user group name |
|
824 | or user group name | |
744 | :param perm: Instance of Permission, or permission_name |
|
825 | :param perm: Instance of Permission, or permission_name | |
745 | """ |
|
826 | """ | |
746 | repo = self._get_repo(repo) |
|
827 | repo = self._get_repo(repo) | |
747 | group_name = self._get_user_group(group_name) |
|
828 | group_name = self._get_user_group(group_name) | |
748 | permission = self._get_perm(perm) |
|
829 | permission = self._get_perm(perm) | |
749 |
|
830 | |||
750 | # check if we have that permission already |
|
831 | # check if we have that permission already | |
751 | obj = self.sa.query(UserGroupRepoToPerm) \ |
|
832 | obj = self.sa.query(UserGroupRepoToPerm) \ | |
752 | .filter(UserGroupRepoToPerm.users_group == group_name) \ |
|
833 | .filter(UserGroupRepoToPerm.users_group == group_name) \ | |
753 | .filter(UserGroupRepoToPerm.repository == repo) \ |
|
834 | .filter(UserGroupRepoToPerm.repository == repo) \ | |
754 | .scalar() |
|
835 | .scalar() | |
755 |
|
836 | |||
756 | if obj is None: |
|
837 | if obj is None: | |
757 | # create new |
|
838 | # create new | |
758 | obj = UserGroupRepoToPerm() |
|
839 | obj = UserGroupRepoToPerm() | |
759 |
|
840 | |||
760 | obj.repository = repo |
|
841 | obj.repository = repo | |
761 | obj.users_group = group_name |
|
842 | obj.users_group = group_name | |
762 | obj.permission = permission |
|
843 | obj.permission = permission | |
763 | self.sa.add(obj) |
|
844 | self.sa.add(obj) | |
764 | log.debug('Granted perm %s to %s on %s', perm, group_name, repo) |
|
845 | log.debug('Granted perm %s to %s on %s', perm, group_name, repo) | |
765 | action_logger_generic( |
|
846 | action_logger_generic( | |
766 | 'granted permission: {} to usergroup: {} on repo: {}'.format( |
|
847 | 'granted permission: {} to usergroup: {} on repo: {}'.format( | |
767 | perm, group_name, repo), namespace='security.repo') |
|
848 | perm, group_name, repo), namespace='security.repo') | |
768 |
|
849 | |||
769 | return obj |
|
850 | return obj | |
770 |
|
851 | |||
771 | def revoke_user_group_permission(self, repo, group_name): |
|
852 | def revoke_user_group_permission(self, repo, group_name): | |
772 | """ |
|
853 | """ | |
773 | Revoke permission for user group on given repository |
|
854 | Revoke permission for user group on given repository | |
774 |
|
855 | |||
775 | :param repo: Instance of Repository, repository_id, or repository name |
|
856 | :param repo: Instance of Repository, repository_id, or repository name | |
776 | :param group_name: Instance of UserGroup, users_group_id, |
|
857 | :param group_name: Instance of UserGroup, users_group_id, | |
777 | or user group name |
|
858 | or user group name | |
778 | """ |
|
859 | """ | |
779 | repo = self._get_repo(repo) |
|
860 | repo = self._get_repo(repo) | |
780 | group_name = self._get_user_group(group_name) |
|
861 | group_name = self._get_user_group(group_name) | |
781 |
|
862 | |||
782 | obj = self.sa.query(UserGroupRepoToPerm) \ |
|
863 | obj = self.sa.query(UserGroupRepoToPerm) \ | |
783 | .filter(UserGroupRepoToPerm.repository == repo) \ |
|
864 | .filter(UserGroupRepoToPerm.repository == repo) \ | |
784 | .filter(UserGroupRepoToPerm.users_group == group_name) \ |
|
865 | .filter(UserGroupRepoToPerm.users_group == group_name) \ | |
785 | .scalar() |
|
866 | .scalar() | |
786 | if obj: |
|
867 | if obj: | |
787 | self.sa.delete(obj) |
|
868 | self.sa.delete(obj) | |
788 | log.debug('Revoked perm on %s from usergroup %s', repo, group_name) |
|
869 | log.debug('Revoked perm on %s from usergroup %s', repo, group_name) | |
789 | action_logger_generic( |
|
870 | action_logger_generic( | |
790 | 'revoked permission from usergroup: {} on repo: {}'.format( |
|
871 | 'revoked permission from usergroup: {} on repo: {}'.format( | |
791 | group_name, repo), namespace='security.repo') |
|
872 | group_name, repo), namespace='security.repo') | |
792 |
|
873 | |||
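
The user-group variants mirror the user-level calls, with group_name accepting a UserGroup instance, a users_group_id or the group name. A short sketch, again with placeholder names:

    # same round trip for a user group instead of a single user
    from rhodecode.model.db import Session
    from rhodecode.model.repo import RepoModel

    model = RepoModel()
    model.grant_user_group_permission(
        repo='some-repo', group_name='developers', perm='repository.read')
    model.revoke_user_group_permission(repo='some-repo', group_name='developers')
    Session().commit()
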
793 | def delete_stats(self, repo_name): |
|
874 | def delete_stats(self, repo_name): | |
794 | """ |
|
875 | """ | |
795 | removes stats for given repo |
|
876 | removes stats for given repo | |
796 |
|
877 | |||
797 | :param repo_name: |
|
878 | :param repo_name: | |
798 | """ |
|
879 | """ | |
799 | repo = self._get_repo(repo_name) |
|
880 | repo = self._get_repo(repo_name) | |
800 | try: |
|
881 | try: | |
801 | obj = self.sa.query(Statistics) \ |
|
882 | obj = self.sa.query(Statistics) \ | |
802 | .filter(Statistics.repository == repo).scalar() |
|
883 | .filter(Statistics.repository == repo).scalar() | |
803 | if obj: |
|
884 | if obj: | |
804 | self.sa.delete(obj) |
|
885 | self.sa.delete(obj) | |
805 | except Exception: |
|
886 | except Exception: | |
806 | log.error(traceback.format_exc()) |
|
887 | log.error(traceback.format_exc()) | |
807 | raise |
|
888 | raise | |
808 |
|
889 | |||
809 | def add_repo_field(self, repo_name, field_key, field_label, field_value='', |
|
890 | def add_repo_field(self, repo_name, field_key, field_label, field_value='', | |
810 | field_type='str', field_desc=''): |
|
891 | field_type='str', field_desc=''): | |
811 |
|
892 | |||
812 | repo = self._get_repo(repo_name) |
|
893 | repo = self._get_repo(repo_name) | |
813 |
|
894 | |||
814 | new_field = RepositoryField() |
|
895 | new_field = RepositoryField() | |
815 | new_field.repository = repo |
|
896 | new_field.repository = repo | |
816 | new_field.field_key = field_key |
|
897 | new_field.field_key = field_key | |
817 | new_field.field_type = field_type # python type |
|
898 | new_field.field_type = field_type # python type | |
818 | new_field.field_value = field_value |
|
899 | new_field.field_value = field_value | |
819 | new_field.field_desc = field_desc |
|
900 | new_field.field_desc = field_desc | |
820 | new_field.field_label = field_label |
|
901 | new_field.field_label = field_label | |
821 | self.sa.add(new_field) |
|
902 | self.sa.add(new_field) | |
822 | return new_field |
|
903 | return new_field | |
823 |
|
904 | |||
824 | def delete_repo_field(self, repo_name, field_key): |
|
905 | def delete_repo_field(self, repo_name, field_key): | |
825 | repo = self._get_repo(repo_name) |
|
906 | repo = self._get_repo(repo_name) | |
826 | field = RepositoryField.get_by_key_name(field_key, repo) |
|
907 | field = RepositoryField.get_by_key_name(field_key, repo) | |
827 | if field: |
|
908 | if field: | |
828 | self.sa.delete(field) |
|
909 | self.sa.delete(field) | |
829 |
|
910 | |||
830 | def _create_filesystem_repo(self, repo_name, repo_type, repo_group, |
|
911 | def _create_filesystem_repo(self, repo_name, repo_type, repo_group, | |
831 | clone_uri=None, repo_store_location=None, |
|
912 | clone_uri=None, repo_store_location=None, | |
832 | use_global_config=False, install_hooks=True): |
|
913 | use_global_config=False, install_hooks=True): | |
833 | """ |
|
914 | """ | |
834 | makes repository on filesystem. It is group aware, meaning it will create |
|
915 | makes repository on filesystem. It is group aware, meaning it will create | |
835 | a repository within a group and alter the paths according to the |
|
916 | a repository within a group and alter the paths according to the | |
836 | group location |
|
917 | group location | |
837 |
|
918 | |||
838 | :param repo_name: |
|
919 | :param repo_name: | |
839 | :param alias: |
|
920 | :param alias: | |
840 | :param parent: |
|
921 | :param parent: | |
841 | :param clone_uri: |
|
922 | :param clone_uri: | |
842 | :param repo_store_location: |
|
923 | :param repo_store_location: | |
843 | """ |
|
924 | """ | |
844 | from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group |
|
925 | from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group | |
845 | from rhodecode.model.scm import ScmModel |
|
926 | from rhodecode.model.scm import ScmModel | |
846 |
|
927 | |||
847 | if Repository.NAME_SEP in repo_name: |
|
928 | if Repository.NAME_SEP in repo_name: | |
848 | raise ValueError( |
|
929 | raise ValueError( | |
849 | 'repo_name must not contain groups, got `%s`' % repo_name) |
|
930 | 'repo_name must not contain groups, got `%s`' % repo_name) | |
850 |
|
931 | |||
851 | if isinstance(repo_group, RepoGroup): |
|
932 | if isinstance(repo_group, RepoGroup): | |
852 | new_parent_path = os.sep.join(repo_group.full_path_splitted) |
|
933 | new_parent_path = os.sep.join(repo_group.full_path_splitted) | |
853 | else: |
|
934 | else: | |
854 | new_parent_path = repo_group or '' |
|
935 | new_parent_path = repo_group or '' | |
855 |
|
936 | |||
856 | if repo_store_location: |
|
937 | if repo_store_location: | |
857 | _paths = [repo_store_location] |
|
938 | _paths = [repo_store_location] | |
858 | else: |
|
939 | else: | |
859 | _paths = [self.repos_path, new_parent_path, repo_name] |
|
940 | _paths = [self.repos_path, new_parent_path, repo_name] | |
860 | # we need to make it str for mercurial |
|
941 | # we need to make it str for mercurial | |
861 | repo_path = os.path.join(*map(lambda x: safe_str(x), _paths)) |
|
942 | repo_path = os.path.join(*map(lambda x: safe_str(x), _paths)) | |
862 |
|
943 | |||
863 | # check if this path is not a repository |
|
944 | # check if this path is not a repository | |
864 | if is_valid_repo(repo_path, self.repos_path): |
|
945 | if is_valid_repo(repo_path, self.repos_path): | |
865 | raise Exception('This path %s is a valid repository' % repo_path) |
|
946 | raise Exception('This path %s is a valid repository' % repo_path) | |
866 |
|
947 | |||
867 | # check if this path is a group |
|
948 | # check if this path is a group | |
868 | if is_valid_repo_group(repo_path, self.repos_path): |
|
949 | if is_valid_repo_group(repo_path, self.repos_path): | |
869 | raise Exception('This path %s is a valid group' % repo_path) |
|
950 | raise Exception('This path %s is a valid group' % repo_path) | |
870 |
|
951 | |||
871 | log.info('creating repo %s in %s from url: `%s`', |
|
952 | log.info('creating repo %s in %s from url: `%s`', | |
872 | repo_name, safe_unicode(repo_path), |
|
953 | repo_name, safe_unicode(repo_path), | |
873 | obfuscate_url_pw(clone_uri)) |
|
954 | obfuscate_url_pw(clone_uri)) | |
874 |
|
955 | |||
875 | backend = get_backend(repo_type) |
|
956 | backend = get_backend(repo_type) | |
876 |
|
957 | |||
877 | config_repo = None if use_global_config else repo_name |
|
958 | config_repo = None if use_global_config else repo_name | |
878 | if config_repo and new_parent_path: |
|
959 | if config_repo and new_parent_path: | |
879 | config_repo = Repository.NAME_SEP.join( |
|
960 | config_repo = Repository.NAME_SEP.join( | |
880 | (new_parent_path, config_repo)) |
|
961 | (new_parent_path, config_repo)) | |
881 | config = make_db_config(clear_session=False, repo=config_repo) |
|
962 | config = make_db_config(clear_session=False, repo=config_repo) | |
882 | config.set('extensions', 'largefiles', '') |
|
963 | config.set('extensions', 'largefiles', '') | |
883 |
|
964 | |||
884 | # patch and reset hooks section of UI config to not run any |
|
965 | # patch and reset hooks section of UI config to not run any | |
885 | # hooks on creating remote repo |
|
966 | # hooks on creating remote repo | |
886 | config.clear_section('hooks') |
|
967 | config.clear_section('hooks') | |
887 |
|
968 | |||
888 | # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice |
|
969 | # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice | |
889 | if repo_type == 'git': |
|
970 | if repo_type == 'git': | |
890 | repo = backend( |
|
971 | repo = backend( | |
891 | repo_path, config=config, create=True, src_url=clone_uri, bare=True, |
|
972 | repo_path, config=config, create=True, src_url=clone_uri, bare=True, | |
892 | with_wire={"cache": False}) |
|
973 | with_wire={"cache": False}) | |
893 | else: |
|
974 | else: | |
894 | repo = backend( |
|
975 | repo = backend( | |
895 | repo_path, config=config, create=True, src_url=clone_uri, |
|
976 | repo_path, config=config, create=True, src_url=clone_uri, | |
896 | with_wire={"cache": False}) |
|
977 | with_wire={"cache": False}) | |
897 |
|
978 | |||
898 | if install_hooks: |
|
979 | if install_hooks: | |
899 | repo.install_hooks() |
|
980 | repo.install_hooks() | |
900 |
|
981 | |||
901 | log.debug('Created repo %s with %s backend', |
|
982 | log.debug('Created repo %s with %s backend', | |
902 | safe_unicode(repo_name), safe_unicode(repo_type)) |
|
983 | safe_unicode(repo_name), safe_unicode(repo_type)) | |
903 | return repo |
|
984 | return repo | |
904 |
|
985 | |||
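
Unless repo_store_location overrides everything, the on-disk path is composed from the repository store, the flattened group path and the repository name. A standalone illustration of that composition, assuming a store at /srv/repos and made-up names:

    # mirrors the _paths/os.path.join logic above
    import os

    repos_path = '/srv/repos'                              # assumed store location
    new_parent_path = os.sep.join(['company', 'backend'])  # group 'company/backend'
    repo_name = 'api-server'

    repo_path = os.path.join(repos_path, new_parent_path, repo_name)
    print(repo_path)  # /srv/repos/company/backend/api-server on POSIX
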
905 | def _rename_filesystem_repo(self, old, new): |
|
986 | def _rename_filesystem_repo(self, old, new): | |
906 | """ |
|
987 | """ | |
907 | renames repository on filesystem |
|
988 | renames repository on filesystem | |
908 |
|
989 | |||
909 | :param old: old name |
|
990 | :param old: old name | |
910 | :param new: new name |
|
991 | :param new: new name | |
911 | """ |
|
992 | """ | |
912 | log.info('renaming repo from %s to %s', old, new) |
|
993 | log.info('renaming repo from %s to %s', old, new) | |
913 |
|
994 | |||
914 | old_path = os.path.join(self.repos_path, old) |
|
995 | old_path = os.path.join(self.repos_path, old) | |
915 | new_path = os.path.join(self.repos_path, new) |
|
996 | new_path = os.path.join(self.repos_path, new) | |
916 | if os.path.isdir(new_path): |
|
997 | if os.path.isdir(new_path): | |
917 | raise Exception( |
|
998 | raise Exception( | |
918 | 'Was trying to rename to already existing dir %s' % new_path |
|
999 | 'Was trying to rename to already existing dir %s' % new_path | |
919 | ) |
|
1000 | ) | |
920 | shutil.move(old_path, new_path) |
|
1001 | shutil.move(old_path, new_path) | |
921 |
|
1002 | |||
922 | def _delete_filesystem_repo(self, repo): |
|
1003 | def _delete_filesystem_repo(self, repo): | |
923 | """ |
|
1004 | """ | |
924 | removes repo from filesystem; the removal is actually made by |
|
1005 | removes repo from filesystem; the removal is actually made by | |
925 | adding an rm__ prefix to the dir and renaming the internal .hg/.git dirs, so this |
|
1006 | adding an rm__ prefix to the dir and renaming the internal .hg/.git dirs, so this | |
926 | repository is no longer valid for rhodecode; it can be undeleted later on |
|
1007 | repository is no longer valid for rhodecode; it can be undeleted later on | |
927 | by reverting the renames on this repository |
|
1008 | by reverting the renames on this repository | |
928 |
|
1009 | |||
929 | :param repo: repo object |
|
1010 | :param repo: repo object | |
930 | """ |
|
1011 | """ | |
931 | rm_path = os.path.join(self.repos_path, repo.repo_name) |
|
1012 | rm_path = os.path.join(self.repos_path, repo.repo_name) | |
932 | repo_group = repo.group |
|
1013 | repo_group = repo.group | |
933 | log.info("Removing repository %s", rm_path) |
|
1014 | log.info("Removing repository %s", rm_path) | |
934 | # disable hg/git internals so that it doesn't get detected as a repo |
|
1015 | # disable hg/git internals so that it doesn't get detected as a repo | |
935 | alias = repo.repo_type |
|
1016 | alias = repo.repo_type | |
936 |
|
1017 | |||
937 | config = make_db_config(clear_session=False) |
|
1018 | config = make_db_config(clear_session=False) | |
938 | config.set('extensions', 'largefiles', '') |
|
1019 | config.set('extensions', 'largefiles', '') | |
939 | bare = getattr(repo.scm_instance(config=config), 'bare', False) |
|
1020 | bare = getattr(repo.scm_instance(config=config), 'bare', False) | |
940 |
|
1021 | |||
941 | # skip this for bare git repos |
|
1022 | # skip this for bare git repos | |
942 | if not bare: |
|
1023 | if not bare: | |
943 | # disable VCS repo |
|
1024 | # disable VCS repo | |
944 | vcs_path = os.path.join(rm_path, '.%s' % alias) |
|
1025 | vcs_path = os.path.join(rm_path, '.%s' % alias) | |
945 | if os.path.exists(vcs_path): |
|
1026 | if os.path.exists(vcs_path): | |
946 | shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias)) |
|
1027 | shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias)) | |
947 |
|
1028 | |||
948 | _now = datetime.datetime.now() |
|
1029 | _now = datetime.datetime.now() | |
949 | _ms = str(_now.microsecond).rjust(6, '0') |
|
1030 | _ms = str(_now.microsecond).rjust(6, '0') | |
950 | _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms), |
|
1031 | _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms), | |
951 | repo.just_name) |
|
1032 | repo.just_name) | |
952 | if repo_group: |
|
1033 | if repo_group: | |
953 | # if repository is in group, prefix the removal path with the group |
|
1034 | # if repository is in group, prefix the removal path with the group | |
954 | args = repo_group.full_path_splitted + [_d] |
|
1035 | args = repo_group.full_path_splitted + [_d] | |
955 | _d = os.path.join(*args) |
|
1036 | _d = os.path.join(*args) | |
956 |
|
1037 | |||
957 | if os.path.isdir(rm_path): |
|
1038 | if os.path.isdir(rm_path): | |
958 | shutil.move(rm_path, os.path.join(self.repos_path, _d)) |
|
1039 | shutil.move(rm_path, os.path.join(self.repos_path, _d)) | |
959 |
|
1040 | |||
960 | # finally cleanup diff-cache if it exists |
|
1041 | # finally cleanup diff-cache if it exists | |
961 | cached_diffs_dir = repo.cached_diffs_dir |
|
1042 | cached_diffs_dir = repo.cached_diffs_dir | |
962 | if os.path.isdir(cached_diffs_dir): |
|
1043 | if os.path.isdir(cached_diffs_dir): | |
963 | shutil.rmtree(cached_diffs_dir) |
|
1044 | shutil.rmtree(cached_diffs_dir) | |
964 |
|
1045 | |||
965 |
|
1046 | |||
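
As the docstring explains, deletion is a rename rather than an unlink: the internal .hg/.git directory gets an rm__ prefix and the whole repository directory is moved to a timestamped rm__ name, so the repository can later be undeleted by reverting both renames. The archive name is built like this (a standalone sketch of the same pattern):

    # reproduce the archive directory name used in _delete_filesystem_repo
    import datetime

    _now = datetime.datetime.now()
    _ms = str(_now.microsecond).rjust(6, '0')
    archived = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms), 'api-server')
    print(archived)  # e.g. rm__20190614_101530_000042__api-server
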
966 | class ReadmeFinder: |
|
1047 | class ReadmeFinder: | |
967 | """ |
|
1048 | """ | |
968 | Utility which knows how to find a readme for a specific commit. |
|
1049 | Utility which knows how to find a readme for a specific commit. | |
969 |
|
1050 | |||
970 | The main idea is that this is a configurable algorithm. When creating an |
|
1051 | The main idea is that this is a configurable algorithm. When creating an | |
971 | instance you can define parameters, currently only the `default_renderer`. |
|
1052 | instance you can define parameters, currently only the `default_renderer`. | |
972 | Based on this configuration the method :meth:`search` behaves slightly |
|
1053 | Based on this configuration the method :meth:`search` behaves slightly | |
973 | differently. |
|
1054 | differently. | |
974 | """ |
|
1055 | """ | |
975 |
|
1056 | |||
976 | readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE) |
|
1057 | readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE) | |
977 | path_re = re.compile(r'^docs?', re.IGNORECASE) |
|
1058 | path_re = re.compile(r'^docs?', re.IGNORECASE) | |
978 |
|
1059 | |||
979 | default_priorities = { |
|
1060 | default_priorities = { | |
980 | None: 0, |
|
1061 | None: 0, | |
981 | '.text': 2, |
|
1062 | '.text': 2, | |
982 | '.txt': 3, |
|
1063 | '.txt': 3, | |
983 | '.rst': 1, |
|
1064 | '.rst': 1, | |
984 | '.rest': 2, |
|
1065 | '.rest': 2, | |
985 | '.md': 1, |
|
1066 | '.md': 1, | |
986 | '.mkdn': 2, |
|
1067 | '.mkdn': 2, | |
987 | '.mdown': 3, |
|
1068 | '.mdown': 3, | |
988 | '.markdown': 4, |
|
1069 | '.markdown': 4, | |
989 | } |
|
1070 | } | |
990 |
|
1071 | |||
991 | path_priority = { |
|
1072 | path_priority = { | |
992 | 'doc': 0, |
|
1073 | 'doc': 0, | |
993 | 'docs': 1, |
|
1074 | 'docs': 1, | |
994 | } |
|
1075 | } | |
995 |
|
1076 | |||
996 | FALLBACK_PRIORITY = 99 |
|
1077 | FALLBACK_PRIORITY = 99 | |
997 |
|
1078 | |||
998 | RENDERER_TO_EXTENSION = { |
|
1079 | RENDERER_TO_EXTENSION = { | |
999 | 'rst': ['.rst', '.rest'], |
|
1080 | 'rst': ['.rst', '.rest'], | |
1000 | 'markdown': ['.md', '.mkdn', '.mdown', '.markdown'], |
|
1081 | 'markdown': ['.md', '.mkdn', '.mdown', '.markdown'], | |
1001 | } |
|
1082 | } | |
1002 |
|
1083 | |||
1003 | def __init__(self, default_renderer=None): |
|
1084 | def __init__(self, default_renderer=None): | |
1004 | self._default_renderer = default_renderer |
|
1085 | self._default_renderer = default_renderer | |
1005 | self._renderer_extensions = self.RENDERER_TO_EXTENSION.get( |
|
1086 | self._renderer_extensions = self.RENDERER_TO_EXTENSION.get( | |
1006 | default_renderer, []) |
|
1087 | default_renderer, []) | |
1007 |
|
1088 | |||
1008 | def search(self, commit, path='/'): |
|
1089 | def search(self, commit, path='/'): | |
1009 | """ |
|
1090 | """ | |
1010 | Find a readme in the given `commit`. |
|
1091 | Find a readme in the given `commit`. | |
1011 | """ |
|
1092 | """ | |
1012 | nodes = commit.get_nodes(path) |
|
1093 | nodes = commit.get_nodes(path) | |
1013 | matches = self._match_readmes(nodes) |
|
1094 | matches = self._match_readmes(nodes) | |
1014 | matches = self._sort_according_to_priority(matches) |
|
1095 | matches = self._sort_according_to_priority(matches) | |
1015 | if matches: |
|
1096 | if matches: | |
1016 | return matches[0].node |
|
1097 | return matches[0].node | |
1017 |
|
1098 | |||
1018 | paths = self._match_paths(nodes) |
|
1099 | paths = self._match_paths(nodes) | |
1019 | paths = self._sort_paths_according_to_priority(paths) |
|
1100 | paths = self._sort_paths_according_to_priority(paths) | |
1020 | for path in paths: |
|
1101 | for path in paths: | |
1021 | match = self.search(commit, path=path) |
|
1102 | match = self.search(commit, path=path) | |
1022 | if match: |
|
1103 | if match: | |
1023 | return match |
|
1104 | return match | |
1024 |
|
1105 | |||
1025 | return None |
|
1106 | return None | |
1026 |
|
1107 | |||
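
search() therefore prefers a root-level readme whose extension matches the configured renderer, and only recurses into doc/docs style directories when the root has nothing usable. A hedged usage sketch of ReadmeFinder as defined above; obtaining the commit via repo.scm_instance().get_commit() is an assumption about the surrounding API:

    # locate the best readme node for a commit (sketch with placeholder names)
    from rhodecode.model.db import Repository

    repo = Repository.get_by_repo_name('some-repo')  # assumed lookup helper
    commit = repo.scm_instance().get_commit()        # assumed commit accessor
    finder = ReadmeFinder(default_renderer='markdown')
    readme_node = finder.search(commit, path='/')
    if readme_node is not None:
        print(readme_node.path)  # e.g. README.md
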
1027 | def _match_readmes(self, nodes): |
|
1108 | def _match_readmes(self, nodes): | |
1028 | for node in nodes: |
|
1109 | for node in nodes: | |
1029 | if not node.is_file(): |
|
1110 | if not node.is_file(): | |
1030 | continue |
|
1111 | continue | |
1031 | path = node.path.rsplit('/', 1)[-1] |
|
1112 | path = node.path.rsplit('/', 1)[-1] | |
1032 | match = self.readme_re.match(path) |
|
1113 | match = self.readme_re.match(path) | |
1033 | if match: |
|
1114 | if match: | |
1034 | extension = match.group(1) |
|
1115 | extension = match.group(1) | |
1035 | yield ReadmeMatch(node, match, self._priority(extension)) |
|
1116 | yield ReadmeMatch(node, match, self._priority(extension)) | |
1036 |
|
1117 | |||
1037 | def _match_paths(self, nodes): |
|
1118 | def _match_paths(self, nodes): | |
1038 | for node in nodes: |
|
1119 | for node in nodes: | |
1039 | if not node.is_dir(): |
|
1120 | if not node.is_dir(): | |
1040 | continue |
|
1121 | continue | |
1041 | match = self.path_re.match(node.path) |
|
1122 | match = self.path_re.match(node.path) | |
1042 | if match: |
|
1123 | if match: | |
1043 | yield node.path |
|
1124 | yield node.path | |
1044 |
|
1125 | |||
1045 | def _priority(self, extension): |
|
1126 | def _priority(self, extension): | |
1046 | renderer_priority = ( |
|
1127 | renderer_priority = ( | |
1047 | 0 if extension in self._renderer_extensions else 1) |
|
1128 | 0 if extension in self._renderer_extensions else 1) | |
1048 | extension_priority = self.default_priorities.get( |
|
1129 | extension_priority = self.default_priorities.get( | |
1049 | extension, self.FALLBACK_PRIORITY) |
|
1130 | extension, self.FALLBACK_PRIORITY) | |
1050 | return (renderer_priority, extension_priority) |
|
1131 | return (renderer_priority, extension_priority) | |
1051 |
|
1132 | |||
1052 | def _sort_according_to_priority(self, matches): |
|
1133 | def _sort_according_to_priority(self, matches): | |
1053 |
|
1134 | |||
1054 | def priority_and_path(match): |
|
1135 | def priority_and_path(match): | |
1055 | return (match.priority, match.path) |
|
1136 | return (match.priority, match.path) | |
1056 |
|
1137 | |||
1057 | return sorted(matches, key=priority_and_path) |
|
1138 | return sorted(matches, key=priority_and_path) | |
1058 |
|
1139 | |||
1059 | def _sort_paths_according_to_priority(self, paths): |
|
1140 | def _sort_paths_according_to_priority(self, paths): | |
1060 |
|
1141 | |||
1061 | def priority_and_path(path): |
|
1142 | def priority_and_path(path): | |
1062 | return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path) |
|
1143 | return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path) | |
1063 |
|
1144 | |||
1064 | return sorted(paths, key=priority_and_path) |
|
1145 | return sorted(paths, key=priority_and_path) | |
1065 |
|
1146 | |||
1066 |
|
1147 | |||
1067 | class ReadmeMatch: |
|
1148 | class ReadmeMatch: | |
1068 |
|
1149 | |||
1069 | def __init__(self, node, match, priority): |
|
1150 | def __init__(self, node, match, priority): | |
1070 | self.node = node |
|
1151 | self.node = node | |
1071 | self._match = match |
|
1152 | self._match = match | |
1072 | self.priority = priority |
|
1153 | self.priority = priority | |
1073 |
|
1154 | |||
1074 | @property |
|
1155 | @property | |
1075 | def path(self): |
|
1156 | def path(self): | |
1076 | return self.node.path |
|
1157 | return self.node.path | |
1077 |
|
1158 | |||
1078 | def __repr__(self): |
|
1159 | def __repr__(self): | |
1079 | return '<ReadmeMatch {} priority={}>'.format(self.path, self.priority) |
|
1160 | return '<ReadmeMatch {} priority={}>'.format(self.path, self.priority) |
@@ -1,799 +1,877 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2011-2019 RhodeCode GmbH |
|
3 | # Copyright (C) 2011-2019 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 |
|
21 | |||
22 | """ |
|
22 | """ | |
23 | repo group model for RhodeCode |
|
23 | repo group model for RhodeCode | |
24 | """ |
|
24 | """ | |
25 |
|
25 | |||
26 | import os |
|
26 | import os | |
27 | import datetime |
|
27 | import datetime | |
28 | import itertools |
|
28 | import itertools | |
29 | import logging |
|
29 | import logging | |
30 | import shutil |
|
30 | import shutil | |
31 | import time |
|
31 | import time | |
32 | import traceback |
|
32 | import traceback | |
33 | import string |
|
33 | import string | |
34 |
|
34 | |||
35 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
35 | from zope.cachedescriptors.property import Lazy as LazyProperty | |
36 |
|
36 | |||
37 | from rhodecode import events |
|
37 | from rhodecode import events | |
38 | from rhodecode.model import BaseModel |
|
38 | from rhodecode.model import BaseModel | |
39 | from rhodecode.model.db import (_hash_key, func, or_, in_filter_generator, |
|
39 | from rhodecode.model.db import (_hash_key, func, or_, in_filter_generator, | |
40 | Session, RepoGroup, UserRepoGroupToPerm, User, Permission, UserGroupRepoGroupToPerm, |
|
40 | Session, RepoGroup, UserRepoGroupToPerm, User, Permission, UserGroupRepoGroupToPerm, | |
41 | UserGroup, Repository) |
|
41 | UserGroup, Repository) | |
42 | from rhodecode.model.settings import VcsSettingsModel, SettingsModel |
|
42 | from rhodecode.model.settings import VcsSettingsModel, SettingsModel | |
43 | from rhodecode.lib.caching_query import FromCache |
|
43 | from rhodecode.lib.caching_query import FromCache | |
44 | from rhodecode.lib.utils2 import action_logger_generic, datetime_to_time |
|
44 | from rhodecode.lib.utils2 import action_logger_generic, datetime_to_time | |
45 |
|
45 | |||
46 | log = logging.getLogger(__name__) |
|
46 | log = logging.getLogger(__name__) | |
47 |
|
47 | |||
48 |
|
48 | |||
49 | class RepoGroupModel(BaseModel): |
|
49 | class RepoGroupModel(BaseModel): | |
50 |
|
50 | |||
51 | cls = RepoGroup |
|
51 | cls = RepoGroup | |
52 | PERSONAL_GROUP_DESC = 'personal repo group of user `%(username)s`' |
|
52 | PERSONAL_GROUP_DESC = 'personal repo group of user `%(username)s`' | |
53 | PERSONAL_GROUP_PATTERN = '${username}' # default |
|
53 | PERSONAL_GROUP_PATTERN = '${username}' # default | |
54 |
|
54 | |||
55 | def _get_user_group(self, users_group): |
|
55 | def _get_user_group(self, users_group): | |
56 | return self._get_instance(UserGroup, users_group, |
|
56 | return self._get_instance(UserGroup, users_group, | |
57 | callback=UserGroup.get_by_group_name) |
|
57 | callback=UserGroup.get_by_group_name) | |
58 |
|
58 | |||
59 | def _get_repo_group(self, repo_group): |
|
59 | def _get_repo_group(self, repo_group): | |
60 | return self._get_instance(RepoGroup, repo_group, |
|
60 | return self._get_instance(RepoGroup, repo_group, | |
61 | callback=RepoGroup.get_by_group_name) |
|
61 | callback=RepoGroup.get_by_group_name) | |
62 |
|
62 | |||
63 | @LazyProperty |
|
63 | @LazyProperty | |
64 | def repos_path(self): |
|
64 | def repos_path(self): | |
65 | """ |
|
65 | """ | |
66 | Gets the repositories root path from database |
|
66 | Gets the repositories root path from database | |
67 | """ |
|
67 | """ | |
68 |
|
68 | |||
69 | settings_model = VcsSettingsModel(sa=self.sa) |
|
69 | settings_model = VcsSettingsModel(sa=self.sa) | |
70 | return settings_model.get_repos_location() |
|
70 | return settings_model.get_repos_location() | |
71 |
|
71 | |||
72 | def get_by_group_name(self, repo_group_name, cache=None): |
|
72 | def get_by_group_name(self, repo_group_name, cache=None): | |
73 | repo = self.sa.query(RepoGroup) \ |
|
73 | repo = self.sa.query(RepoGroup) \ | |
74 | .filter(RepoGroup.group_name == repo_group_name) |
|
74 | .filter(RepoGroup.group_name == repo_group_name) | |
75 |
|
75 | |||
76 | if cache: |
|
76 | if cache: | |
77 | name_key = _hash_key(repo_group_name) |
|
77 | name_key = _hash_key(repo_group_name) | |
78 | repo = repo.options( |
|
78 | repo = repo.options( | |
79 | FromCache("sql_cache_short", "get_repo_group_%s" % name_key)) |
|
79 | FromCache("sql_cache_short", "get_repo_group_%s" % name_key)) | |
80 | return repo.scalar() |
|
80 | return repo.scalar() | |
81 |
|
81 | |||
82 | def get_default_create_personal_repo_group(self): |
|
82 | def get_default_create_personal_repo_group(self): | |
83 | value = SettingsModel().get_setting_by_name( |
|
83 | value = SettingsModel().get_setting_by_name( | |
84 | 'create_personal_repo_group') |
|
84 | 'create_personal_repo_group') | |
85 | return value.app_settings_value if value else False |
|
85 | return value.app_settings_value if value else False | |
86 |
|
86 | |||
87 | def get_personal_group_name_pattern(self): |
|
87 | def get_personal_group_name_pattern(self): | |
88 | value = SettingsModel().get_setting_by_name( |
|
88 | value = SettingsModel().get_setting_by_name( | |
89 | 'personal_repo_group_pattern') |
|
89 | 'personal_repo_group_pattern') | |
90 | val = value.app_settings_value if value else None |
|
90 | val = value.app_settings_value if value else None | |
91 | group_template = val or self.PERSONAL_GROUP_PATTERN |
|
91 | group_template = val or self.PERSONAL_GROUP_PATTERN | |
92 |
|
92 | |||
93 | group_template = group_template.lstrip('/') |
|
93 | group_template = group_template.lstrip('/') | |
94 | return group_template |
|
94 | return group_template | |
95 |
|
95 | |||
96 | def get_personal_group_name(self, user): |
|
96 | def get_personal_group_name(self, user): | |
97 | template = self.get_personal_group_name_pattern() |
|
97 | template = self.get_personal_group_name_pattern() | |
98 | return string.Template(template).safe_substitute( |
|
98 | return string.Template(template).safe_substitute( | |
99 | username=user.username, |
|
99 | username=user.username, | |
100 | user_id=user.user_id, |
|
100 | user_id=user.user_id, | |
101 | first_name=user.first_name, |
|
101 | first_name=user.first_name, | |
102 | last_name=user.last_name, |
|
102 | last_name=user.last_name, | |
103 | ) |
|
103 | ) | |
104 |
|
104 | |||
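
The pattern is a plain string.Template, so a site-wide setting can reference ${username}, ${user_id}, ${first_name} and ${last_name}; safe_substitute leaves unknown placeholders untouched instead of raising. A standalone illustration with a made-up pattern and user:

    # how the personal group name is derived from the pattern
    import string

    template = 'people/${username}'  # hypothetical personal_repo_group_pattern value
    name = string.Template(template).safe_substitute(
        username='jdoe', user_id=42, first_name='John', last_name='Doe')
    print(name)  # people/jdoe
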
105 | def create_personal_repo_group(self, user, commit_early=True): |
|
105 | def create_personal_repo_group(self, user, commit_early=True): | |
106 | desc = self.PERSONAL_GROUP_DESC % {'username': user.username} |
|
106 | desc = self.PERSONAL_GROUP_DESC % {'username': user.username} | |
107 | personal_repo_group_name = self.get_personal_group_name(user) |
|
107 | personal_repo_group_name = self.get_personal_group_name(user) | |
108 |
|
108 | |||
109 | # create a new one |
|
109 | # create a new one | |
110 | RepoGroupModel().create( |
|
110 | RepoGroupModel().create( | |
111 | group_name=personal_repo_group_name, |
|
111 | group_name=personal_repo_group_name, | |
112 | group_description=desc, |
|
112 | group_description=desc, | |
113 | owner=user.username, |
|
113 | owner=user.username, | |
114 | personal=True, |
|
114 | personal=True, | |
115 | commit_early=commit_early) |
|
115 | commit_early=commit_early) | |
116 |
|
116 | |||
117 | def _create_default_perms(self, new_group): |
|
117 | def _create_default_perms(self, new_group): | |
118 | # create default permission |
|
118 | # create default permission | |
119 | default_perm = 'group.read' |
|
119 | default_perm = 'group.read' | |
120 | def_user = User.get_default_user() |
|
120 | def_user = User.get_default_user() | |
121 | for p in def_user.user_perms: |
|
121 | for p in def_user.user_perms: | |
122 | if p.permission.permission_name.startswith('group.'): |
|
122 | if p.permission.permission_name.startswith('group.'): | |
123 | default_perm = p.permission.permission_name |
|
123 | default_perm = p.permission.permission_name | |
124 | break |
|
124 | break | |
125 |
|
125 | |||
126 | repo_group_to_perm = UserRepoGroupToPerm() |
|
126 | repo_group_to_perm = UserRepoGroupToPerm() | |
127 | repo_group_to_perm.permission = Permission.get_by_key(default_perm) |
|
127 | repo_group_to_perm.permission = Permission.get_by_key(default_perm) | |
128 |
|
128 | |||
129 | repo_group_to_perm.group = new_group |
|
129 | repo_group_to_perm.group = new_group | |
130 | repo_group_to_perm.user_id = def_user.user_id |
|
130 | repo_group_to_perm.user_id = def_user.user_id | |
131 | return repo_group_to_perm |
|
131 | return repo_group_to_perm | |
132 |
|
132 | |||
133 | def _get_group_name_and_parent(self, group_name_full, repo_in_path=False, |
|
133 | def _get_group_name_and_parent(self, group_name_full, repo_in_path=False, | |
134 | get_object=False): |
|
134 | get_object=False): | |
135 | """ |
|
135 | """ | |
136 | Gets the group name and a parent group name from the given group name. |
|
136 | Gets the group name and a parent group name from the given group name. | |
137 | If repo_in_path is set to True, we assume the full path also includes |
|
137 | If repo_in_path is set to True, we assume the full path also includes | |
138 | the repo name; in such a case we strip the last element. |
|
138 | the repo name; in such a case we strip the last element. | |
139 |
|
139 | |||
140 | :param group_name_full: |
|
140 | :param group_name_full: | |
141 | """ |
|
141 | """ | |
142 | split_paths = 1 |
|
142 | split_paths = 1 | |
143 | if repo_in_path: |
|
143 | if repo_in_path: | |
144 | split_paths = 2 |
|
144 | split_paths = 2 | |
145 | _parts = group_name_full.rsplit(RepoGroup.url_sep(), split_paths) |
|
145 | _parts = group_name_full.rsplit(RepoGroup.url_sep(), split_paths) | |
146 |
|
146 | |||
147 | if repo_in_path and len(_parts) > 1: |
|
147 | if repo_in_path and len(_parts) > 1: | |
148 | # such case last element is the repo_name |
|
148 | # such case last element is the repo_name | |
149 | _parts.pop(-1) |
|
149 | _parts.pop(-1) | |
150 | group_name_cleaned = _parts[-1] # just the group name |
|
150 | group_name_cleaned = _parts[-1] # just the group name | |
151 | parent_repo_group_name = None |
|
151 | parent_repo_group_name = None | |
152 |
|
152 | |||
153 | if len(_parts) > 1: |
|
153 | if len(_parts) > 1: | |
154 | parent_repo_group_name = _parts[0] |
|
154 | parent_repo_group_name = _parts[0] | |
155 |
|
155 | |||
156 | parent_group = None |
|
156 | parent_group = None | |
157 | if parent_repo_group_name: |
|
157 | if parent_repo_group_name: | |
158 | parent_group = RepoGroup.get_by_group_name(parent_repo_group_name) |
|
158 | parent_group = RepoGroup.get_by_group_name(parent_repo_group_name) | |
159 |
|
159 | |||
160 | if get_object: |
|
160 | if get_object: | |
161 | return group_name_cleaned, parent_repo_group_name, parent_group |
|
161 | return group_name_cleaned, parent_repo_group_name, parent_group | |
162 |
|
162 | |||
163 | return group_name_cleaned, parent_repo_group_name |
|
163 | return group_name_cleaned, parent_repo_group_name | |
164 |
|
164 | |||
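
The split is done from the right on the URL separator, taking one extra element off when the path also carries a repository name. A worked example of the returned values, using '/' as the separator and a made-up nested path:

    # sketch of the split performed in _get_group_name_and_parent
    group_name_full = 'company/backend/api-server'

    # repo_in_path=False -> ('api-server', 'company/backend')
    _parts = group_name_full.rsplit('/', 1)
    print(_parts[-1], _parts[0])

    # repo_in_path=True -> trailing 'api-server' is treated as the repo name
    # and dropped, giving ('backend', 'company')
    _parts = group_name_full.rsplit('/', 2)
    _parts.pop(-1)
    print(_parts[-1], _parts[0])
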
165 | def check_exist_filesystem(self, group_name, exc_on_failure=True): |
|
165 | def check_exist_filesystem(self, group_name, exc_on_failure=True): | |
166 | create_path = os.path.join(self.repos_path, group_name) |
|
166 | create_path = os.path.join(self.repos_path, group_name) | |
167 | log.debug('creating new group in %s', create_path) |
|
167 | log.debug('creating new group in %s', create_path) | |
168 |
|
168 | |||
169 | if os.path.isdir(create_path): |
|
169 | if os.path.isdir(create_path): | |
170 | if exc_on_failure: |
|
170 | if exc_on_failure: | |
171 | abs_create_path = os.path.abspath(create_path) |
|
171 | abs_create_path = os.path.abspath(create_path) | |
172 | raise Exception('Directory `{}` already exists !'.format(abs_create_path)) |
|
172 | raise Exception('Directory `{}` already exists !'.format(abs_create_path)) | |
173 | return False |
|
173 | return False | |
174 | return True |
|
174 | return True | |
175 |
|
175 | |||
176 | def _create_group(self, group_name): |
|
176 | def _create_group(self, group_name): | |
177 | """ |
|
177 | """ | |
178 | makes repository group on filesystem |
|
178 | makes repository group on filesystem | |
179 |
|
179 | |||
180 | :param repo_name: |
|
180 | :param repo_name: | |
181 | :param parent_id: |
|
181 | :param parent_id: | |
182 | """ |
|
182 | """ | |
183 |
|
183 | |||
184 | self.check_exist_filesystem(group_name) |
|
184 | self.check_exist_filesystem(group_name) | |
185 | create_path = os.path.join(self.repos_path, group_name) |
|
185 | create_path = os.path.join(self.repos_path, group_name) | |
186 | log.debug('creating new group in %s', create_path) |
|
186 | log.debug('creating new group in %s', create_path) | |
187 | os.makedirs(create_path, mode=0o755) |
|
187 | os.makedirs(create_path, mode=0o755) | |
188 | log.debug('created group in %s', create_path) |
|
188 | log.debug('created group in %s', create_path) | |
189 |
|
189 | |||
190 | def _rename_group(self, old, new): |
|
190 | def _rename_group(self, old, new): | |
191 | """ |
|
191 | """ | |
192 | Renames a group on filesystem |
|
192 | Renames a group on filesystem | |
193 |
|
193 | |||
194 | :param group_name: |
|
194 | :param group_name: | |
195 | """ |
|
195 | """ | |
196 |
|
196 | |||
197 | if old == new: |
|
197 | if old == new: | |
198 | log.debug('skipping group rename') |
|
198 | log.debug('skipping group rename') | |
199 | return |
|
199 | return | |
200 |
|
200 | |||
201 | log.debug('renaming repository group from %s to %s', old, new) |
|
201 | log.debug('renaming repository group from %s to %s', old, new) | |
202 |
|
202 | |||
203 | old_path = os.path.join(self.repos_path, old) |
|
203 | old_path = os.path.join(self.repos_path, old) | |
204 | new_path = os.path.join(self.repos_path, new) |
|
204 | new_path = os.path.join(self.repos_path, new) | |
205 |
|
205 | |||
206 | log.debug('renaming repos paths from %s to %s', old_path, new_path) |
|
206 | log.debug('renaming repos paths from %s to %s', old_path, new_path) | |
207 |
|
207 | |||
208 | if os.path.isdir(new_path): |
|
208 | if os.path.isdir(new_path): | |
209 | raise Exception('Was trying to rename to already ' |
|
209 | raise Exception('Was trying to rename to already ' | |
210 | 'existing dir %s' % new_path) |
|
210 | 'existing dir %s' % new_path) | |
211 | shutil.move(old_path, new_path) |
|
211 | shutil.move(old_path, new_path) | |
212 |
|
212 | |||
213 | def _delete_filesystem_group(self, group, force_delete=False): |
|
213 | def _delete_filesystem_group(self, group, force_delete=False): | |
214 | """ |
|
214 | """ | |
215 | Deletes a group from a filesystem |
|
215 | Deletes a group from a filesystem | |
216 |
|
216 | |||
217 | :param group: instance of group from database |
|
217 | :param group: instance of group from database | |
218 | :param force_delete: use shutil rmtree to remove all objects |
|
218 | :param force_delete: use shutil rmtree to remove all objects | |
219 | """ |
|
219 | """ | |
220 | paths = group.full_path.split(RepoGroup.url_sep()) |
|
220 | paths = group.full_path.split(RepoGroup.url_sep()) | |
221 | paths = os.sep.join(paths) |
|
221 | paths = os.sep.join(paths) | |
222 |
|
222 | |||
223 | rm_path = os.path.join(self.repos_path, paths) |
|
223 | rm_path = os.path.join(self.repos_path, paths) | |
224 | log.info("Removing group %s", rm_path) |
|
224 | log.info("Removing group %s", rm_path) | |
225 | # delete only if that path really exists |
|
225 | # delete only if that path really exists | |
226 | if os.path.isdir(rm_path): |
|
226 | if os.path.isdir(rm_path): | |
227 | if force_delete: |
|
227 | if force_delete: | |
228 | shutil.rmtree(rm_path) |
|
228 | shutil.rmtree(rm_path) | |
229 | else: |
|
229 | else: | |
230 | # archive that group |
|
230 | # archive that group | |
231 | _now = datetime.datetime.now() |
|
231 | _now = datetime.datetime.now() | |
232 | _ms = str(_now.microsecond).rjust(6, '0') |
|
232 | _ms = str(_now.microsecond).rjust(6, '0') | |
233 | _d = 'rm__%s_GROUP_%s' % ( |
|
233 | _d = 'rm__%s_GROUP_%s' % ( | |
234 | _now.strftime('%Y%m%d_%H%M%S_' + _ms), group.name) |
|
234 | _now.strftime('%Y%m%d_%H%M%S_' + _ms), group.name) | |
235 | shutil.move(rm_path, os.path.join(self.repos_path, _d)) |
|
235 | shutil.move(rm_path, os.path.join(self.repos_path, _d)) | |
236 |
|
236 | |||
237 | def create(self, group_name, group_description, owner, just_db=False, |
|
237 | def create(self, group_name, group_description, owner, just_db=False, | |
238 | copy_permissions=False, personal=None, commit_early=True): |
|
238 | copy_permissions=False, personal=None, commit_early=True): | |
239 |
|
239 | |||
240 | (group_name_cleaned, |
|
240 | (group_name_cleaned, | |
241 | parent_group_name) = RepoGroupModel()._get_group_name_and_parent(group_name) |
|
241 | parent_group_name) = RepoGroupModel()._get_group_name_and_parent(group_name) | |
242 |
|
242 | |||
243 | parent_group = None |
|
243 | parent_group = None | |
244 | if parent_group_name: |
|
244 | if parent_group_name: | |
245 | parent_group = self._get_repo_group(parent_group_name) |
|
245 | parent_group = self._get_repo_group(parent_group_name) | |
246 | if not parent_group: |
|
246 | if not parent_group: | |
247 | # we tried to create a nested group, but the parent does not |
|
247 | # we tried to create a nested group, but the parent does not | |
248 | # exist yet |
|
248 | # exist yet | |
249 | raise ValueError( |
|
249 | raise ValueError( | |
250 | 'Parent group `%s` given in `%s` group name ' |
|
250 | 'Parent group `%s` given in `%s` group name ' | |
251 | 'is not yet existing.' % (parent_group_name, group_name)) |
|
251 | 'is not yet existing.' % (parent_group_name, group_name)) | |
252 |
|
252 | |||
253 | # because we are doing a cleanup, we need to check if such directory |
|
253 | # because we are doing a cleanup, we need to check if such directory | |
254 | # already exists. If we don't do that we can accidentally delete |
|
254 | # already exists. If we don't do that we can accidentally delete | |
255 | # existing directory via cleanup that can cause data issues, since |
|
255 | # existing directory via cleanup that can cause data issues, since | |
256 | # delete does a folder rename to special syntax later cleanup |
|
256 | # delete does a folder rename to special syntax later cleanup | |
257 | # functions can delete this |
|
257 | # functions can delete this | |
258 | cleanup_group = self.check_exist_filesystem(group_name, |
|
258 | cleanup_group = self.check_exist_filesystem(group_name, | |
259 | exc_on_failure=False) |
|
259 | exc_on_failure=False) | |
260 | user = self._get_user(owner) |
|
260 | user = self._get_user(owner) | |
261 | if not user: |
|
261 | if not user: | |
262 | raise ValueError('Owner %s not found as rhodecode user' % owner) |
|
262 | raise ValueError('Owner %s not found as rhodecode user' % owner) | |
263 |
|
263 | |||
264 | try: |
|
264 | try: | |
265 | new_repo_group = RepoGroup() |
|
265 | new_repo_group = RepoGroup() | |
266 | new_repo_group.user = user |
|
266 | new_repo_group.user = user | |
267 | new_repo_group.group_description = group_description or group_name |
|
267 | new_repo_group.group_description = group_description or group_name | |
268 | new_repo_group.parent_group = parent_group |
|
268 | new_repo_group.parent_group = parent_group | |
269 | new_repo_group.group_name = group_name |
|
269 | new_repo_group.group_name = group_name | |
270 | new_repo_group.personal = personal |
|
270 | new_repo_group.personal = personal | |
271 |
|
271 | |||
272 | self.sa.add(new_repo_group) |
|
272 | self.sa.add(new_repo_group) | |
273 |
|
273 | |||
274 | # create an ADMIN permission for owner except if we're super admin, |
|
274 | # create an ADMIN permission for owner except if we're super admin, | |
275 | # later owner should go into the owner field of groups |
|
275 | # later owner should go into the owner field of groups | |
276 | if not user.is_admin: |
|
276 | if not user.is_admin: | |
277 | self.grant_user_permission(repo_group=new_repo_group, |
|
277 | self.grant_user_permission(repo_group=new_repo_group, | |
278 | user=owner, perm='group.admin') |
|
278 | user=owner, perm='group.admin') | |
279 |
|
279 | |||
280 | if parent_group and copy_permissions: |
|
280 | if parent_group and copy_permissions: | |
281 | # copy permissions from parent |
|
281 | # copy permissions from parent | |
282 | user_perms = UserRepoGroupToPerm.query() \ |
|
282 | user_perms = UserRepoGroupToPerm.query() \ | |
283 | .filter(UserRepoGroupToPerm.group == parent_group).all() |
|
283 | .filter(UserRepoGroupToPerm.group == parent_group).all() | |
284 |
|
284 | |||
285 | group_perms = UserGroupRepoGroupToPerm.query() \ |
|
285 | group_perms = UserGroupRepoGroupToPerm.query() \ | |
286 | .filter(UserGroupRepoGroupToPerm.group == parent_group).all() |
|
286 | .filter(UserGroupRepoGroupToPerm.group == parent_group).all() | |
287 |
|
287 | |||
288 | for perm in user_perms: |
|
288 | for perm in user_perms: | |
289 | # don't copy over the permission for the user who is creating |
|
289 | # don't copy over the permission for the user who is creating | |
290 | # this group; if they are not a super admin they get the admin |
|
290 | # this group; if they are not a super admin they get the admin | |
291 | # permission set above |
|
291 | # permission set above | |
292 | if perm.user != user or user.is_admin: |
|
292 | if perm.user != user or user.is_admin: | |
293 | UserRepoGroupToPerm.create( |
|
293 | UserRepoGroupToPerm.create( | |
294 | perm.user, new_repo_group, perm.permission) |
|
294 | perm.user, new_repo_group, perm.permission) | |
295 |
|
295 | |||
296 | for perm in group_perms: |
|
296 | for perm in group_perms: | |
297 | UserGroupRepoGroupToPerm.create( |
|
297 | UserGroupRepoGroupToPerm.create( | |
298 | perm.users_group, new_repo_group, perm.permission) |
|
298 | perm.users_group, new_repo_group, perm.permission) | |
299 | else: |
|
299 | else: | |
300 | perm_obj = self._create_default_perms(new_repo_group) |
|
300 | perm_obj = self._create_default_perms(new_repo_group) | |
301 | self.sa.add(perm_obj) |
|
301 | self.sa.add(perm_obj) | |
302 |
|
302 | |||
303 | # now commit the changes early, so we are sure everything is in |
|
303 | # now commit the changes early, so we are sure everything is in | |
304 | # the database. |
|
304 | # the database. | |
305 | if commit_early: |
|
305 | if commit_early: | |
306 | self.sa.commit() |
|
306 | self.sa.commit() | |
307 | if not just_db: |
|
307 | if not just_db: | |
308 | self._create_group(new_repo_group.group_name) |
|
308 | self._create_group(new_repo_group.group_name) | |
309 |
|
309 | |||
310 | # trigger the post hook |
|
310 | # trigger the post hook | |
311 | from rhodecode.lib.hooks_base import log_create_repository_group |
|
311 | from rhodecode.lib.hooks_base import log_create_repository_group | |
312 | repo_group = RepoGroup.get_by_group_name(group_name) |
|
312 | repo_group = RepoGroup.get_by_group_name(group_name) | |
313 |
|
313 | |||
314 | # update repo group commit caches initially |
|
314 | # update repo group commit caches initially | |
315 | repo_group.update_commit_cache() |
|
315 | repo_group.update_commit_cache() | |
316 |
|
316 | |||
317 | log_create_repository_group( |
|
317 | log_create_repository_group( | |
318 | created_by=user.username, **repo_group.get_dict()) |
|
318 | created_by=user.username, **repo_group.get_dict()) | |
319 |
|
319 | |||
320 | # Trigger create event. |
|
320 | # Trigger create event. | |
321 | events.trigger(events.RepoGroupCreateEvent(repo_group)) |
|
321 | events.trigger(events.RepoGroupCreateEvent(repo_group)) | |
322 |
|
322 | |||
323 | return new_repo_group |
|
323 | return new_repo_group | |
324 | except Exception: |
|
324 | except Exception: | |
325 | self.sa.rollback() |
|
325 | self.sa.rollback() | |
326 | log.exception('Exception occurred when creating repository group, ' |
|
326 | log.exception('Exception occurred when creating repository group, ' | |
327 | 'doing cleanup...') |
|
327 | 'doing cleanup...') | |
328 | # rollback things manually ! |
|
328 | # rollback things manually ! | |
329 | repo_group = RepoGroup.get_by_group_name(group_name) |
|
329 | repo_group = RepoGroup.get_by_group_name(group_name) | |
330 | if repo_group: |
|
330 | if repo_group: | |
331 | RepoGroup.delete(repo_group.group_id) |
|
331 | RepoGroup.delete(repo_group.group_id) | |
332 | self.sa.commit() |
|
332 | self.sa.commit() | |
333 | if cleanup_group: |
|
333 | if cleanup_group: | |
334 | RepoGroupModel()._delete_filesystem_group(repo_group) |
|
334 | RepoGroupModel()._delete_filesystem_group(repo_group) | |
335 | raise |
|
335 | raise | |
336 |
|
336 | |||
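
Taken together, create() resolves the parent group from the slash-separated name, writes the RepoGroup row and its permission rows (either copied from the parent or the default group.read), optionally commits early, then creates the directory and fires the create event, rolling everything back on failure. A minimal call sketch; the module path rhodecode.model.repo_group is assumed from the import convention used above, and the names are placeholders:

    # create a nested repository group; the parent 'company' must already exist
    from rhodecode.model.repo_group import RepoGroupModel

    RepoGroupModel().create(
        group_name='company/backend',
        group_description='Backend projects',
        owner='admin',              # resolved via _get_user, as for personal groups
        copy_permissions=True)      # inherit the parent group's permissions
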
337 | def update_permissions( |
|
337 | def update_permissions( | |
338 | self, repo_group, perm_additions=None, perm_updates=None, |
|
338 | self, repo_group, perm_additions=None, perm_updates=None, | |
339 | perm_deletions=None, recursive=None, check_perms=True, |
|
339 | perm_deletions=None, recursive=None, check_perms=True, | |
340 | cur_user=None): |
|
340 | cur_user=None): | |
341 | from rhodecode.model.repo import RepoModel |
|
341 | from rhodecode.model.repo import RepoModel | |
342 | from rhodecode.lib.auth import HasUserGroupPermissionAny |
|
342 | from rhodecode.lib.auth import HasUserGroupPermissionAny | |
343 |
|
343 | |||
344 | if not perm_additions: |
|
344 | if not perm_additions: | |
345 | perm_additions = [] |
|
345 | perm_additions = [] | |
346 | if not perm_updates: |
|
346 | if not perm_updates: | |
347 | perm_updates = [] |
|
347 | perm_updates = [] | |
348 | if not perm_deletions: |
|
348 | if not perm_deletions: | |
349 | perm_deletions = [] |
|
349 | perm_deletions = [] | |
350 |
|
350 | |||
351 | req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin') |
|
351 | req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin') | |
352 |
|
352 | |||
353 | changes = { |
|
353 | changes = { | |
354 | 'added': [], |
|
354 | 'added': [], | |
355 | 'updated': [], |
|
355 | 'updated': [], | |
356 | 'deleted': [] |
|
356 | 'deleted': [] | |
357 | } |
|
357 | } | |
358 |
|
358 | |||
359 | def _set_perm_user(obj, user, perm): |
|
359 | def _set_perm_user(obj, user, perm): | |
360 | if isinstance(obj, RepoGroup): |
|
360 | if isinstance(obj, RepoGroup): | |
361 | self.grant_user_permission( |
|
361 | self.grant_user_permission( | |
362 | repo_group=obj, user=user, perm=perm) |
|
362 | repo_group=obj, user=user, perm=perm) | |
363 | elif isinstance(obj, Repository): |
|
363 | elif isinstance(obj, Repository): | |
364 | # private repos will not allow to change the default |
|
364 | # private repos will not allow to change the default | |
365 | # permissions using recursive mode |
|
365 | # permissions using recursive mode | |
366 | if obj.private and user == User.DEFAULT_USER: |
|
366 | if obj.private and user == User.DEFAULT_USER: | |
367 | return |
|
367 | return | |
368 |
|
368 | |||
369 | # a group permission was set, but we have to switch it to a repo
|
369 | # a group permission was set, but we have to switch it to a repo | |
370 | # permission
|
370 | # permission | |
371 | perm = perm.replace('group.', 'repository.') |
|
371 | perm = perm.replace('group.', 'repository.') | |
372 | RepoModel().grant_user_permission( |
|
372 | RepoModel().grant_user_permission( | |
373 | repo=obj, user=user, perm=perm) |
|
373 | repo=obj, user=user, perm=perm) | |
374 |
|
374 | |||
375 | def _set_perm_group(obj, users_group, perm): |
|
375 | def _set_perm_group(obj, users_group, perm): | |
376 | if isinstance(obj, RepoGroup): |
|
376 | if isinstance(obj, RepoGroup): | |
377 | self.grant_user_group_permission( |
|
377 | self.grant_user_group_permission( | |
378 | repo_group=obj, group_name=users_group, perm=perm) |
|
378 | repo_group=obj, group_name=users_group, perm=perm) | |
379 | elif isinstance(obj, Repository): |
|
379 | elif isinstance(obj, Repository): | |
380 | # a group permission was set, but we have to switch it to a repo
|
380 | # a group permission was set, but we have to switch it to a repo | |
381 | # permission
|
381 | # permission | |
382 | perm = perm.replace('group.', 'repository.') |
|
382 | perm = perm.replace('group.', 'repository.') | |
383 | RepoModel().grant_user_group_permission( |
|
383 | RepoModel().grant_user_group_permission( | |
384 | repo=obj, group_name=users_group, perm=perm) |
|
384 | repo=obj, group_name=users_group, perm=perm) | |
385 |
|
385 | |||
386 | def _revoke_perm_user(obj, user): |
|
386 | def _revoke_perm_user(obj, user): | |
387 | if isinstance(obj, RepoGroup): |
|
387 | if isinstance(obj, RepoGroup): | |
388 | self.revoke_user_permission(repo_group=obj, user=user) |
|
388 | self.revoke_user_permission(repo_group=obj, user=user) | |
389 | elif isinstance(obj, Repository): |
|
389 | elif isinstance(obj, Repository): | |
390 | RepoModel().revoke_user_permission(repo=obj, user=user) |
|
390 | RepoModel().revoke_user_permission(repo=obj, user=user) | |
391 |
|
391 | |||
392 | def _revoke_perm_group(obj, user_group): |
|
392 | def _revoke_perm_group(obj, user_group): | |
393 | if isinstance(obj, RepoGroup): |
|
393 | if isinstance(obj, RepoGroup): | |
394 | self.revoke_user_group_permission( |
|
394 | self.revoke_user_group_permission( | |
395 | repo_group=obj, group_name=user_group) |
|
395 | repo_group=obj, group_name=user_group) | |
396 | elif isinstance(obj, Repository): |
|
396 | elif isinstance(obj, Repository): | |
397 | RepoModel().revoke_user_group_permission( |
|
397 | RepoModel().revoke_user_group_permission( | |
398 | repo=obj, group_name=user_group) |
|
398 | repo=obj, group_name=user_group) | |
399 |
|
399 | |||
400 | # start updates |
|
400 | # start updates | |
401 | log.debug('Now updating permissions for %s in recursive mode:%s', |
|
401 | log.debug('Now updating permissions for %s in recursive mode:%s', | |
402 | repo_group, recursive) |
|
402 | repo_group, recursive) | |
403 |
|
403 | |||
404 | # initialize check function, we'll call that multiple times |
|
404 | # initialize check function, we'll call that multiple times | |
405 | has_group_perm = HasUserGroupPermissionAny(*req_perms) |
|
405 | has_group_perm = HasUserGroupPermissionAny(*req_perms) | |
406 |
|
406 | |||
407 | for obj in repo_group.recursive_groups_and_repos(): |
|
407 | for obj in repo_group.recursive_groups_and_repos(): | |
408 | # the iterated obj is an instance of a repo group or a repository in
|
408 | # the iterated obj is an instance of a repo group or a repository in | |
409 | # that group; the recursive option can be: none, repos, groups, all
|
409 | # that group; the recursive option can be: none, repos, groups, all | |
410 | if recursive == 'all': |
|
410 | if recursive == 'all': | |
411 | obj = obj |
|
411 | obj = obj | |
412 | elif recursive == 'repos': |
|
412 | elif recursive == 'repos': | |
413 | # skip groups, other than this one |
|
413 | # skip groups, other than this one | |
414 | if isinstance(obj, RepoGroup) and not obj == repo_group: |
|
414 | if isinstance(obj, RepoGroup) and not obj == repo_group: | |
415 | continue |
|
415 | continue | |
416 | elif recursive == 'groups': |
|
416 | elif recursive == 'groups': | |
417 | # skip repos |
|
417 | # skip repos | |
418 | if isinstance(obj, Repository): |
|
418 | if isinstance(obj, Repository): | |
419 | continue |
|
419 | continue | |
420 | else: # recursive == 'none': |
|
420 | else: # recursive == 'none': | |
421 | # DEFAULT option - don't apply to the iterated objects;
|
421 | # DEFAULT option - don't apply to the iterated objects; | |
422 | # we also break at the end of this loop if we are not
|
422 | # we also break at the end of this loop if we are not | |
423 | # in recursive mode
|
423 | # in recursive mode | |
424 | obj = repo_group |
|
424 | obj = repo_group | |
425 |
|
425 | |||
426 | change_obj = obj.get_api_data() |
|
426 | change_obj = obj.get_api_data() | |
427 |
|
427 | |||
428 | # update permissions |
|
428 | # update permissions | |
429 | for member_id, perm, member_type in perm_updates: |
|
429 | for member_id, perm, member_type in perm_updates: | |
430 | member_id = int(member_id) |
|
430 | member_id = int(member_id) | |
431 | if member_type == 'user': |
|
431 | if member_type == 'user': | |
432 | member_name = User.get(member_id).username |
|
432 | member_name = User.get(member_id).username | |
433 | # this also updates the current one, if found
|
433 | # this also updates the current one, if found | |
434 | _set_perm_user(obj, user=member_id, perm=perm) |
|
434 | _set_perm_user(obj, user=member_id, perm=perm) | |
435 | elif member_type == 'user_group': |
|
435 | elif member_type == 'user_group': | |
436 | member_name = UserGroup.get(member_id).users_group_name |
|
436 | member_name = UserGroup.get(member_id).users_group_name | |
437 | if not check_perms or has_group_perm(member_name, |
|
437 | if not check_perms or has_group_perm(member_name, | |
438 | user=cur_user): |
|
438 | user=cur_user): | |
439 | _set_perm_group(obj, users_group=member_id, perm=perm) |
|
439 | _set_perm_group(obj, users_group=member_id, perm=perm) | |
440 | else: |
|
440 | else: | |
441 | raise ValueError("member_type must be 'user' or 'user_group' " |
|
441 | raise ValueError("member_type must be 'user' or 'user_group' " | |
442 | "got {} instead".format(member_type)) |
|
442 | "got {} instead".format(member_type)) | |
443 |
|
443 | |||
444 | changes['updated'].append( |
|
444 | changes['updated'].append( | |
445 | {'change_obj': change_obj, 'type': member_type, |
|
445 | {'change_obj': change_obj, 'type': member_type, | |
446 | 'id': member_id, 'name': member_name, 'new_perm': perm}) |
|
446 | 'id': member_id, 'name': member_name, 'new_perm': perm}) | |
447 |
|
447 | |||
448 | # set new permissions |
|
448 | # set new permissions | |
449 | for member_id, perm, member_type in perm_additions: |
|
449 | for member_id, perm, member_type in perm_additions: | |
450 | member_id = int(member_id) |
|
450 | member_id = int(member_id) | |
451 | if member_type == 'user': |
|
451 | if member_type == 'user': | |
452 | member_name = User.get(member_id).username |
|
452 | member_name = User.get(member_id).username | |
453 | _set_perm_user(obj, user=member_id, perm=perm) |
|
453 | _set_perm_user(obj, user=member_id, perm=perm) | |
454 | elif member_type == 'user_group': |
|
454 | elif member_type == 'user_group': | |
455 | # check if we have permissions to alter this usergroup |
|
455 | # check if we have permissions to alter this usergroup | |
456 | member_name = UserGroup.get(member_id).users_group_name |
|
456 | member_name = UserGroup.get(member_id).users_group_name | |
457 | if not check_perms or has_group_perm(member_name, |
|
457 | if not check_perms or has_group_perm(member_name, | |
458 | user=cur_user): |
|
458 | user=cur_user): | |
459 | _set_perm_group(obj, users_group=member_id, perm=perm) |
|
459 | _set_perm_group(obj, users_group=member_id, perm=perm) | |
460 | else: |
|
460 | else: | |
461 | raise ValueError("member_type must be 'user' or 'user_group' " |
|
461 | raise ValueError("member_type must be 'user' or 'user_group' " | |
462 | "got {} instead".format(member_type)) |
|
462 | "got {} instead".format(member_type)) | |
463 |
|
463 | |||
464 | changes['added'].append( |
|
464 | changes['added'].append( | |
465 | {'change_obj': change_obj, 'type': member_type, |
|
465 | {'change_obj': change_obj, 'type': member_type, | |
466 | 'id': member_id, 'name': member_name, 'new_perm': perm}) |
|
466 | 'id': member_id, 'name': member_name, 'new_perm': perm}) | |
467 |
|
467 | |||
468 | # delete permissions |
|
468 | # delete permissions | |
469 | for member_id, perm, member_type in perm_deletions: |
|
469 | for member_id, perm, member_type in perm_deletions: | |
470 | member_id = int(member_id) |
|
470 | member_id = int(member_id) | |
471 | if member_type == 'user': |
|
471 | if member_type == 'user': | |
472 | member_name = User.get(member_id).username |
|
472 | member_name = User.get(member_id).username | |
473 | _revoke_perm_user(obj, user=member_id) |
|
473 | _revoke_perm_user(obj, user=member_id) | |
474 | elif member_type == 'user_group': |
|
474 | elif member_type == 'user_group': | |
475 | # check if we have permissions to alter this usergroup |
|
475 | # check if we have permissions to alter this usergroup | |
476 | member_name = UserGroup.get(member_id).users_group_name |
|
476 | member_name = UserGroup.get(member_id).users_group_name | |
477 | if not check_perms or has_group_perm(member_name, |
|
477 | if not check_perms or has_group_perm(member_name, | |
478 | user=cur_user): |
|
478 | user=cur_user): | |
479 | _revoke_perm_group(obj, user_group=member_id) |
|
479 | _revoke_perm_group(obj, user_group=member_id) | |
480 | else: |
|
480 | else: | |
481 | raise ValueError("member_type must be 'user' or 'user_group' " |
|
481 | raise ValueError("member_type must be 'user' or 'user_group' " | |
482 | "got {} instead".format(member_type)) |
|
482 | "got {} instead".format(member_type)) | |
483 |
|
483 | |||
484 | changes['deleted'].append( |
|
484 | changes['deleted'].append( | |
485 | {'change_obj': change_obj, 'type': member_type, |
|
485 | {'change_obj': change_obj, 'type': member_type, | |
486 | 'id': member_id, 'name': member_name, 'new_perm': perm}) |
|
486 | 'id': member_id, 'name': member_name, 'new_perm': perm}) | |
487 |
|
487 | |||
488 | # if it's not a recursive call for all, repos or groups,
|
488 | # if it's not a recursive call for all, repos or groups, | |
489 | # break the loop and don't proceed with other changes
|
489 | # break the loop and don't proceed with other changes | |
490 | if recursive not in ['all', 'repos', 'groups']: |
|
490 | if recursive not in ['all', 'repos', 'groups']: | |
491 | break |
|
491 | break | |
492 |
|
492 | |||
493 | return changes |
|
493 | return changes | |
494 |
|
494 | |||
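# --- usage sketch, not part of this changeset -----------------------------
# Illustrative call to update_permissions(); the group name, member ids and
# permission names below are assumptions made for the example, and the
# import paths assume this module lives at rhodecode.model.repo_group.
from rhodecode.model.db import RepoGroup
from rhodecode.model.repo_group import RepoGroupModel

group = RepoGroup.get_by_group_name('some-group')  # hypothetical group name
changes = RepoGroupModel().update_permissions(
    repo_group=group,
    # each entry is a (member_id, permission_name, member_type) tuple
    perm_additions=[(3, 'group.read', 'user_group')],
    perm_updates=[(5, 'group.write', 'user')],
    perm_deletions=[(7, 'group.none', 'user')],
    recursive='all',       # one of: 'none', 'repos', 'groups', 'all'
    cur_user='admin')      # usergroup permission checks run against this user
# 'changes' maps 'added' / 'updated' / 'deleted' to per-object change records
# ---------------------------------------------------------------------------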
495 | def update(self, repo_group, form_data): |
|
495 | def update(self, repo_group, form_data): | |
496 | try: |
|
496 | try: | |
497 | repo_group = self._get_repo_group(repo_group) |
|
497 | repo_group = self._get_repo_group(repo_group) | |
498 | old_path = repo_group.full_path |
|
498 | old_path = repo_group.full_path | |
499 |
|
499 | |||
500 | # change properties |
|
500 | # change properties | |
501 | if 'group_description' in form_data: |
|
501 | if 'group_description' in form_data: | |
502 | repo_group.group_description = form_data['group_description'] |
|
502 | repo_group.group_description = form_data['group_description'] | |
503 |
|
503 | |||
504 | if 'enable_locking' in form_data: |
|
504 | if 'enable_locking' in form_data: | |
505 | repo_group.enable_locking = form_data['enable_locking'] |
|
505 | repo_group.enable_locking = form_data['enable_locking'] | |
506 |
|
506 | |||
507 | if 'group_parent_id' in form_data: |
|
507 | if 'group_parent_id' in form_data: | |
508 | parent_group = ( |
|
508 | parent_group = ( | |
509 | self._get_repo_group(form_data['group_parent_id'])) |
|
509 | self._get_repo_group(form_data['group_parent_id'])) | |
510 | repo_group.group_parent_id = ( |
|
510 | repo_group.group_parent_id = ( | |
511 | parent_group.group_id if parent_group else None) |
|
511 | parent_group.group_id if parent_group else None) | |
512 | repo_group.parent_group = parent_group |
|
512 | repo_group.parent_group = parent_group | |
513 |
|
513 | |||
514 | # mikhail: to update the full_path, we have to explicitly |
|
514 | # mikhail: to update the full_path, we have to explicitly | |
515 | # update group_name |
|
515 | # update group_name | |
516 | group_name = form_data.get('group_name', repo_group.name) |
|
516 | group_name = form_data.get('group_name', repo_group.name) | |
517 | repo_group.group_name = repo_group.get_new_name(group_name) |
|
517 | repo_group.group_name = repo_group.get_new_name(group_name) | |
518 |
|
518 | |||
519 | new_path = repo_group.full_path |
|
519 | new_path = repo_group.full_path | |
520 |
|
520 | |||
521 | if 'user' in form_data: |
|
521 | if 'user' in form_data: | |
522 | repo_group.user = User.get_by_username(form_data['user']) |
|
522 | repo_group.user = User.get_by_username(form_data['user']) | |
523 |
|
523 | |||
524 | self.sa.add(repo_group) |
|
524 | self.sa.add(repo_group) | |
525 |
|
525 | |||
526 | # iterate over all members of this group and do fixes:
|
526 | # iterate over all members of this group and do fixes: | |
527 | # set locking if given
|
527 | # set locking if given | |
528 | # if obj is a RepoGroup, also fix the name of the group according
|
528 | # if obj is a RepoGroup, also fix the name of the group according | |
529 | # to the parent
|
529 | # to the parent | |
530 | # if obj is a Repository, fix its name
|
530 | # if obj is a Repository, fix its name | |
531 | # this can be a potentially heavy operation
|
531 | # this can be a potentially heavy operation | |
532 | for obj in repo_group.recursive_groups_and_repos(): |
|
532 | for obj in repo_group.recursive_groups_and_repos(): | |
533 | # set the value from its parent
|
533 | # set the value from its parent | |
534 | obj.enable_locking = repo_group.enable_locking |
|
534 | obj.enable_locking = repo_group.enable_locking | |
535 | if isinstance(obj, RepoGroup): |
|
535 | if isinstance(obj, RepoGroup): | |
536 | new_name = obj.get_new_name(obj.name) |
|
536 | new_name = obj.get_new_name(obj.name) | |
537 | log.debug('Fixing group %s to new name %s', |
|
537 | log.debug('Fixing group %s to new name %s', | |
538 | obj.group_name, new_name) |
|
538 | obj.group_name, new_name) | |
539 | obj.group_name = new_name |
|
539 | obj.group_name = new_name | |
540 |
|
540 | |||
541 | elif isinstance(obj, Repository): |
|
541 | elif isinstance(obj, Repository): | |
542 | # we need to get all repositories from this new group and
|
542 | # we need to get all repositories from this new group and | |
543 | # rename them according to the new group path
|
543 | # rename them according to the new group path | |
544 | new_name = obj.get_new_name(obj.just_name) |
|
544 | new_name = obj.get_new_name(obj.just_name) | |
545 | log.debug('Fixing repo %s to new name %s', |
|
545 | log.debug('Fixing repo %s to new name %s', | |
546 | obj.repo_name, new_name) |
|
546 | obj.repo_name, new_name) | |
547 | obj.repo_name = new_name |
|
547 | obj.repo_name = new_name | |
548 |
|
548 | |||
549 | self.sa.add(obj) |
|
549 | self.sa.add(obj) | |
550 |
|
550 | |||
551 | self._rename_group(old_path, new_path) |
|
551 | self._rename_group(old_path, new_path) | |
552 |
|
552 | |||
553 | # Trigger update event. |
|
553 | # Trigger update event. | |
554 | events.trigger(events.RepoGroupUpdateEvent(repo_group)) |
|
554 | events.trigger(events.RepoGroupUpdateEvent(repo_group)) | |
555 |
|
555 | |||
556 | return repo_group |
|
556 | return repo_group | |
557 | except Exception: |
|
557 | except Exception: | |
558 | log.error(traceback.format_exc()) |
|
558 | log.error(traceback.format_exc()) | |
559 | raise |
|
559 | raise | |
560 |
|
560 | |||
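# --- usage sketch, not part of this changeset -----------------------------
# Illustrative form_data for update(); only the keys handled above have an
# effect, and every value here is an assumption made for the example.
from rhodecode.model.repo_group import RepoGroupModel

form_data = {
    'group_name': 'docs',                 # triggers the rename/move handling
    'group_description': 'Documentation repository group',
    'enable_locking': False,              # propagated to all children
    'group_parent_id': None,              # keep (or move to) the top level
    'user': 'admin',                      # new owner, looked up by username
}
repo_group = RepoGroupModel().update('old-docs', form_data)
# ---------------------------------------------------------------------------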
561 | def delete(self, repo_group, force_delete=False, fs_remove=True): |
|
561 | def delete(self, repo_group, force_delete=False, fs_remove=True): | |
562 | repo_group = self._get_repo_group(repo_group) |
|
562 | repo_group = self._get_repo_group(repo_group) | |
563 | if not repo_group: |
|
563 | if not repo_group: | |
564 | return False |
|
564 | return False | |
565 | try: |
|
565 | try: | |
566 | self.sa.delete(repo_group) |
|
566 | self.sa.delete(repo_group) | |
567 | if fs_remove: |
|
567 | if fs_remove: | |
568 | self._delete_filesystem_group(repo_group, force_delete) |
|
568 | self._delete_filesystem_group(repo_group, force_delete) | |
569 | else: |
|
569 | else: | |
570 | log.debug('skipping removal from filesystem') |
|
570 | log.debug('skipping removal from filesystem') | |
571 |
|
571 | |||
572 | # Trigger delete event. |
|
572 | # Trigger delete event. | |
573 | events.trigger(events.RepoGroupDeleteEvent(repo_group)) |
|
573 | events.trigger(events.RepoGroupDeleteEvent(repo_group)) | |
574 | return True |
|
574 | return True | |
575 |
|
575 | |||
576 | except Exception: |
|
576 | except Exception: | |
577 | log.error('Error removing repo_group %s', repo_group) |
|
577 | log.error('Error removing repo_group %s', repo_group) | |
578 | raise |
|
578 | raise | |
579 |
|
579 | |||
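# --- usage sketch, not part of this changeset -----------------------------
# delete() removes the group from the database and, with fs_remove=True,
# from the filesystem as well; the group name is illustrative and the final
# Session().commit() follows the usual caller-commits pattern (assumed).
from rhodecode.model.meta import Session
from rhodecode.model.repo_group import RepoGroupModel

RepoGroupModel().delete('some-group/old-subgroup',
                        force_delete=False, fs_remove=True)
Session().commit()
# ---------------------------------------------------------------------------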
580 | def grant_user_permission(self, repo_group, user, perm): |
|
580 | def grant_user_permission(self, repo_group, user, perm): | |
581 | """ |
|
581 | """ | |
582 | Grant permission for a user on the given repository group, or update
|
582 | Grant permission for a user on the given repository group, or update | |
583 | the existing one if found
|
583 | the existing one if found | |
584 |
|
584 | |||
585 | :param repo_group: Instance of RepoGroup, repositories_group_id, |
|
585 | :param repo_group: Instance of RepoGroup, repositories_group_id, | |
586 | or repositories_group name |
|
586 | or repositories_group name | |
587 | :param user: Instance of User, user_id or username |
|
587 | :param user: Instance of User, user_id or username | |
588 | :param perm: Instance of Permission, or permission_name |
|
588 | :param perm: Instance of Permission, or permission_name | |
589 | """ |
|
589 | """ | |
590 |
|
590 | |||
591 | repo_group = self._get_repo_group(repo_group) |
|
591 | repo_group = self._get_repo_group(repo_group) | |
592 | user = self._get_user(user) |
|
592 | user = self._get_user(user) | |
593 | permission = self._get_perm(perm) |
|
593 | permission = self._get_perm(perm) | |
594 |
|
594 | |||
595 | # check if we have that permission already |
|
595 | # check if we have that permission already | |
596 | obj = self.sa.query(UserRepoGroupToPerm)\ |
|
596 | obj = self.sa.query(UserRepoGroupToPerm)\ | |
597 | .filter(UserRepoGroupToPerm.user == user)\ |
|
597 | .filter(UserRepoGroupToPerm.user == user)\ | |
598 | .filter(UserRepoGroupToPerm.group == repo_group)\ |
|
598 | .filter(UserRepoGroupToPerm.group == repo_group)\ | |
599 | .scalar() |
|
599 | .scalar() | |
600 | if obj is None: |
|
600 | if obj is None: | |
601 | # create a new one!
|
601 | # create a new one! | |
602 | obj = UserRepoGroupToPerm() |
|
602 | obj = UserRepoGroupToPerm() | |
603 | obj.group = repo_group |
|
603 | obj.group = repo_group | |
604 | obj.user = user |
|
604 | obj.user = user | |
605 | obj.permission = permission |
|
605 | obj.permission = permission | |
606 | self.sa.add(obj) |
|
606 | self.sa.add(obj) | |
607 | log.debug('Granted perm %s to %s on %s', perm, user, repo_group) |
|
607 | log.debug('Granted perm %s to %s on %s', perm, user, repo_group) | |
608 | action_logger_generic( |
|
608 | action_logger_generic( | |
609 | 'granted permission: {} to user: {} on repogroup: {}'.format( |
|
609 | 'granted permission: {} to user: {} on repogroup: {}'.format( | |
610 | perm, user, repo_group), namespace='security.repogroup') |
|
610 | perm, user, repo_group), namespace='security.repogroup') | |
611 | return obj |
|
611 | return obj | |
612 |
|
612 | |||
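# --- usage sketch, not part of this changeset -----------------------------
# Granting and revoking a user permission on a repository group; per the
# docstring, repo_group/user/perm may be instances, ids or names. The
# identifiers below are illustrative.
from rhodecode.model.meta import Session
from rhodecode.model.repo_group import RepoGroupModel

model = RepoGroupModel()
model.grant_user_permission(repo_group='some-group', user='bob',
                            perm='group.write')
model.revoke_user_permission(repo_group='some-group', user='bob')
Session().commit()
# ---------------------------------------------------------------------------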
613 | def revoke_user_permission(self, repo_group, user): |
|
613 | def revoke_user_permission(self, repo_group, user): | |
614 | """ |
|
614 | """ | |
615 | Revoke permission for a user on the given repository group
|
615 | Revoke permission for a user on the given repository group | |
616 |
|
616 | |||
617 | :param repo_group: Instance of RepoGroup, repositories_group_id, |
|
617 | :param repo_group: Instance of RepoGroup, repositories_group_id, | |
618 | or repositories_group name |
|
618 | or repositories_group name | |
619 | :param user: Instance of User, user_id or username |
|
619 | :param user: Instance of User, user_id or username | |
620 | """ |
|
620 | """ | |
621 |
|
621 | |||
622 | repo_group = self._get_repo_group(repo_group) |
|
622 | repo_group = self._get_repo_group(repo_group) | |
623 | user = self._get_user(user) |
|
623 | user = self._get_user(user) | |
624 |
|
624 | |||
625 | obj = self.sa.query(UserRepoGroupToPerm)\ |
|
625 | obj = self.sa.query(UserRepoGroupToPerm)\ | |
626 | .filter(UserRepoGroupToPerm.user == user)\ |
|
626 | .filter(UserRepoGroupToPerm.user == user)\ | |
627 | .filter(UserRepoGroupToPerm.group == repo_group)\ |
|
627 | .filter(UserRepoGroupToPerm.group == repo_group)\ | |
628 | .scalar() |
|
628 | .scalar() | |
629 | if obj: |
|
629 | if obj: | |
630 | self.sa.delete(obj) |
|
630 | self.sa.delete(obj) | |
631 | log.debug('Revoked perm on %s on %s', repo_group, user) |
|
631 | log.debug('Revoked perm on %s on %s', repo_group, user) | |
632 | action_logger_generic( |
|
632 | action_logger_generic( | |
633 | 'revoked permission from user: {} on repogroup: {}'.format( |
|
633 | 'revoked permission from user: {} on repogroup: {}'.format( | |
634 | user, repo_group), namespace='security.repogroup') |
|
634 | user, repo_group), namespace='security.repogroup') | |
635 |
|
635 | |||
636 | def grant_user_group_permission(self, repo_group, group_name, perm): |
|
636 | def grant_user_group_permission(self, repo_group, group_name, perm): | |
637 | """ |
|
637 | """ | |
638 | Grant permission for a user group on the given repository group, or
|
638 | Grant permission for a user group on the given repository group, or | |
639 | update the existing one if found
|
639 | update the existing one if found | |
640 |
|
640 | |||
641 | :param repo_group: Instance of RepoGroup, repositories_group_id, |
|
641 | :param repo_group: Instance of RepoGroup, repositories_group_id, | |
642 | or repositories_group name |
|
642 | or repositories_group name | |
643 | :param group_name: Instance of UserGroup, users_group_id, |
|
643 | :param group_name: Instance of UserGroup, users_group_id, | |
644 | or user group name |
|
644 | or user group name | |
645 | :param perm: Instance of Permission, or permission_name |
|
645 | :param perm: Instance of Permission, or permission_name | |
646 | """ |
|
646 | """ | |
647 | repo_group = self._get_repo_group(repo_group) |
|
647 | repo_group = self._get_repo_group(repo_group) | |
648 | group_name = self._get_user_group(group_name) |
|
648 | group_name = self._get_user_group(group_name) | |
649 | permission = self._get_perm(perm) |
|
649 | permission = self._get_perm(perm) | |
650 |
|
650 | |||
651 | # check if we have that permission already |
|
651 | # check if we have that permission already | |
652 | obj = self.sa.query(UserGroupRepoGroupToPerm)\ |
|
652 | obj = self.sa.query(UserGroupRepoGroupToPerm)\ | |
653 | .filter(UserGroupRepoGroupToPerm.group == repo_group)\ |
|
653 | .filter(UserGroupRepoGroupToPerm.group == repo_group)\ | |
654 | .filter(UserGroupRepoGroupToPerm.users_group == group_name)\ |
|
654 | .filter(UserGroupRepoGroupToPerm.users_group == group_name)\ | |
655 | .scalar() |
|
655 | .scalar() | |
656 |
|
656 | |||
657 | if obj is None: |
|
657 | if obj is None: | |
658 | # create new |
|
658 | # create new | |
659 | obj = UserGroupRepoGroupToPerm() |
|
659 | obj = UserGroupRepoGroupToPerm() | |
660 |
|
660 | |||
661 | obj.group = repo_group |
|
661 | obj.group = repo_group | |
662 | obj.users_group = group_name |
|
662 | obj.users_group = group_name | |
663 | obj.permission = permission |
|
663 | obj.permission = permission | |
664 | self.sa.add(obj) |
|
664 | self.sa.add(obj) | |
665 | log.debug('Granted perm %s to %s on %s', perm, group_name, repo_group) |
|
665 | log.debug('Granted perm %s to %s on %s', perm, group_name, repo_group) | |
666 | action_logger_generic( |
|
666 | action_logger_generic( | |
667 | 'granted permission: {} to usergroup: {} on repogroup: {}'.format( |
|
667 | 'granted permission: {} to usergroup: {} on repogroup: {}'.format( | |
668 | perm, group_name, repo_group), namespace='security.repogroup') |
|
668 | perm, group_name, repo_group), namespace='security.repogroup') | |
669 | return obj |
|
669 | return obj | |
670 |
|
670 | |||
671 | def revoke_user_group_permission(self, repo_group, group_name): |
|
671 | def revoke_user_group_permission(self, repo_group, group_name): | |
672 | """ |
|
672 | """ | |
673 | Revoke permission for a user group on the given repository group
|
673 | Revoke permission for a user group on the given repository group | |
674 |
|
674 | |||
675 | :param repo_group: Instance of RepoGroup, repositories_group_id, |
|
675 | :param repo_group: Instance of RepoGroup, repositories_group_id, | |
676 | or repositories_group name |
|
676 | or repositories_group name | |
677 | :param group_name: Instance of UserGroup, users_group_id, |
|
677 | :param group_name: Instance of UserGroup, users_group_id, | |
678 | or user group name |
|
678 | or user group name | |
679 | """ |
|
679 | """ | |
680 | repo_group = self._get_repo_group(repo_group) |
|
680 | repo_group = self._get_repo_group(repo_group) | |
681 | group_name = self._get_user_group(group_name) |
|
681 | group_name = self._get_user_group(group_name) | |
682 |
|
682 | |||
683 | obj = self.sa.query(UserGroupRepoGroupToPerm)\ |
|
683 | obj = self.sa.query(UserGroupRepoGroupToPerm)\ | |
684 | .filter(UserGroupRepoGroupToPerm.group == repo_group)\ |
|
684 | .filter(UserGroupRepoGroupToPerm.group == repo_group)\ | |
685 | .filter(UserGroupRepoGroupToPerm.users_group == group_name)\ |
|
685 | .filter(UserGroupRepoGroupToPerm.users_group == group_name)\ | |
686 | .scalar() |
|
686 | .scalar() | |
687 | if obj: |
|
687 | if obj: | |
688 | self.sa.delete(obj) |
|
688 | self.sa.delete(obj) | |
689 | log.debug('Revoked perm to %s on %s', repo_group, group_name) |
|
689 | log.debug('Revoked perm to %s on %s', repo_group, group_name) | |
690 | action_logger_generic( |
|
690 | action_logger_generic( | |
691 | 'revoked permission from usergroup: {} on repogroup: {}'.format( |
|
691 | 'revoked permission from usergroup: {} on repogroup: {}'.format( | |
692 | group_name, repo_group), namespace='security.repogroup') |
|
692 | group_name, repo_group), namespace='security.repogroup') | |
693 |
|
693 | |||
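# --- usage sketch, not part of this changeset -----------------------------
# The user-group variants mirror the user ones; group_name accepts a
# UserGroup instance, a users_group_id or a user group name. The names
# below are illustrative.
from rhodecode.model.meta import Session
from rhodecode.model.repo_group import RepoGroupModel

model = RepoGroupModel()
model.grant_user_group_permission(
    repo_group='some-group', group_name='developers', perm='group.read')
model.revoke_user_group_permission(
    repo_group='some-group', group_name='developers')
Session().commit()
# ---------------------------------------------------------------------------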
694 | @classmethod |
|
694 | @classmethod | |
695 | def update_commit_cache(cls, repo_groups=None): |
|
695 | def update_commit_cache(cls, repo_groups=None): | |
696 | if not repo_groups: |
|
696 | if not repo_groups: | |
697 | repo_groups = RepoGroup.getAll() |
|
697 | repo_groups = RepoGroup.getAll() | |
698 | for repo_group in repo_groups: |
|
698 | for repo_group in repo_groups: | |
699 | repo_group.update_commit_cache() |
|
699 | repo_group.update_commit_cache() | |
700 |
|
700 | |||
701 |
|
701 | |||
702 |
|
702 | |||
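# --- usage sketch, not part of this changeset -----------------------------
# update_commit_cache() is a classmethod: called without arguments it
# refreshes the cached commit data of every repository group, otherwise
# only of the groups passed in. How it gets scheduled (cron, celery, ...)
# is an assumption left to the deployment.
from rhodecode.model.db import RepoGroup
from rhodecode.model.repo_group import RepoGroupModel

RepoGroupModel.update_commit_cache()                      # refresh all groups
one_group = RepoGroup.get_by_group_name('some-group')     # hypothetical name
RepoGroupModel.update_commit_cache(repo_groups=[one_group])
# ---------------------------------------------------------------------------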
703 | def get_repo_groups_as_dict(self, repo_group_list=None, admin=False, |
|
703 | def get_repo_groups_as_dict(self, repo_group_list=None, admin=False, | |
704 | super_user_actions=False): |
|
704 | super_user_actions=False): | |
705 |
|
705 | |||
706 | from pyramid.threadlocal import get_current_request |
|
706 | from pyramid.threadlocal import get_current_request | |
707 | _render = get_current_request().get_partial_renderer( |
|
707 | _render = get_current_request().get_partial_renderer( | |
708 | 'rhodecode:templates/data_table/_dt_elements.mako') |
|
708 | 'rhodecode:templates/data_table/_dt_elements.mako') | |
709 | c = _render.get_call_context() |
|
709 | c = _render.get_call_context() | |
710 | h = _render.get_helpers() |
|
710 | h = _render.get_helpers() | |
711 |
|
711 | |||
712 | def quick_menu(repo_group_name): |
|
712 | def quick_menu(repo_group_name): | |
713 | return _render('quick_repo_group_menu', repo_group_name) |
|
713 | return _render('quick_repo_group_menu', repo_group_name) | |
714 |
|
714 | |||
715 | def repo_group_lnk(repo_group_name): |
|
715 | def repo_group_lnk(repo_group_name): | |
716 | return _render('repo_group_name', repo_group_name) |
|
716 | return _render('repo_group_name', repo_group_name) | |
717 |
|
717 | |||
718 | def last_change(last_change): |
|
718 | def last_change(last_change): | |
719 | if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo: |
|
719 | if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo: | |
720 | ts = time.time() |
|
720 | ts = time.time() | |
721 | utc_offset = (datetime.datetime.fromtimestamp(ts) |
|
721 | utc_offset = (datetime.datetime.fromtimestamp(ts) | |
722 | - datetime.datetime.utcfromtimestamp(ts)).total_seconds() |
|
722 | - datetime.datetime.utcfromtimestamp(ts)).total_seconds() | |
723 | last_change = last_change + datetime.timedelta(seconds=utc_offset) |
|
723 | last_change = last_change + datetime.timedelta(seconds=utc_offset) | |
724 | return _render("last_change", last_change) |
|
724 | return _render("last_change", last_change) | |
725 |
|
725 | |||
726 | def desc(desc, personal): |
|
726 | def desc(desc, personal): | |
727 | return _render( |
|
727 | return _render( | |
728 | 'repo_group_desc', desc, personal, c.visual.stylify_metatags) |
|
728 | 'repo_group_desc', desc, personal, c.visual.stylify_metatags) | |
729 |
|
729 | |||
730 | def repo_group_actions(repo_group_id, repo_group_name, gr_count): |
|
730 | def repo_group_actions(repo_group_id, repo_group_name, gr_count): | |
731 | return _render( |
|
731 | return _render( | |
732 | 'repo_group_actions', repo_group_id, repo_group_name, gr_count) |
|
732 | 'repo_group_actions', repo_group_id, repo_group_name, gr_count) | |
733 |
|
733 | |||
734 | def repo_group_name(repo_group_name, children_groups): |
|
734 | def repo_group_name(repo_group_name, children_groups): | |
735 | return _render("repo_group_name", repo_group_name, children_groups) |
|
735 | return _render("repo_group_name", repo_group_name, children_groups) | |
736 |
|
736 | |||
737 | def user_profile(username): |
|
737 | def user_profile(username): | |
738 | return _render('user_profile', username) |
|
738 | return _render('user_profile', username) | |
739 |
|
739 | |||
740 | repo_group_data = [] |
|
740 | repo_group_data = [] | |
741 | for group in repo_group_list: |
|
741 | for group in repo_group_list: | |
742 | # NOTE(marcink): because we use only the raw column we need to load it like that
|
742 | # NOTE(marcink): because we use only the raw column we need to load it like that | |
743 | changeset_cache = RepoGroup._load_changeset_cache( |
|
743 | changeset_cache = RepoGroup._load_changeset_cache( | |
744 | '', group._changeset_cache) |
|
744 | '', group._changeset_cache) | |
745 | last_commit_change = RepoGroup._load_commit_change(changeset_cache) |
|
745 | last_commit_change = RepoGroup._load_commit_change(changeset_cache) | |
746 | row = { |
|
746 | row = { | |
747 | "menu": quick_menu(group.group_name), |
|
747 | "menu": quick_menu(group.group_name), | |
748 | "name": repo_group_lnk(group.group_name), |
|
748 | "name": repo_group_lnk(group.group_name), | |
749 | "name_raw": group.group_name, |
|
749 | "name_raw": group.group_name, | |
750 |
|
750 | |||
751 | "last_change": last_change(last_commit_change), |
|
751 | "last_change": last_change(last_commit_change), | |
752 | "last_change_raw": datetime_to_time(last_commit_change), |
|
752 | "last_change_raw": datetime_to_time(last_commit_change), | |
753 |
|
753 | |||
754 | "last_changeset": "", |
|
754 | "last_changeset": "", | |
755 | "last_changeset_raw": "", |
|
755 | "last_changeset_raw": "", | |
756 |
|
756 | |||
757 | "desc": desc(group.group_description, group.personal), |
|
757 | "desc": desc(group.group_description, group.personal), | |
758 | "top_level_repos": 0, |
|
758 | "top_level_repos": 0, | |
759 | "owner": user_profile(group.User.username) |
|
759 | "owner": user_profile(group.User.username) | |
760 | } |
|
760 | } | |
761 | if admin: |
|
761 | if admin: | |
762 | repo_count = group.repositories.count() |
|
762 | repo_count = group.repositories.count() | |
763 | children_groups = map( |
|
763 | children_groups = map( | |
764 | h.safe_unicode, |
|
764 | h.safe_unicode, | |
765 | itertools.chain((g.name for g in group.parents), |
|
765 | itertools.chain((g.name for g in group.parents), | |
766 | (x.name for x in [group]))) |
|
766 | (x.name for x in [group]))) | |
767 | row.update({ |
|
767 | row.update({ | |
768 | "action": repo_group_actions( |
|
768 | "action": repo_group_actions( | |
769 | group.group_id, group.group_name, repo_count), |
|
769 | group.group_id, group.group_name, repo_count), | |
770 | "top_level_repos": repo_count, |
|
770 | "top_level_repos": repo_count, | |
771 | "name": repo_group_name(group.group_name, children_groups), |
|
771 | "name": repo_group_name(group.group_name, children_groups), | |
772 |
|
772 | |||
773 | }) |
|
773 | }) | |
774 | repo_group_data.append(row) |
|
774 | repo_group_data.append(row) | |
775 |
|
775 | |||
776 | return repo_group_data |
|
776 | return repo_group_data | |
777 |
|
777 | |||
|
778 | def get_repo_groups_data_table( | |||
|
779 | self, draw, start, limit, | |||
|
780 | search_q, order_by, order_dir, | |||
|
781 | auth_user, repo_group_id): | |||
|
782 | from rhodecode.model.scm import RepoGroupList | |||
|
783 | ||||
|
784 | _perms = ['group.read', 'group.write', 'group.admin'] | |||
|
785 | repo_groups = RepoGroup.query() \ | |||
|
786 | .filter(RepoGroup.group_parent_id == repo_group_id) \ | |||
|
787 | .all() | |||
|
788 | auth_repo_group_list = RepoGroupList( | |||
|
789 | repo_groups, perm_set=_perms, | |||
|
790 | extra_kwargs=dict(user=auth_user)) | |||
|
791 | ||||
|
792 | allowed_ids = [-1] | |||
|
793 | for repo_group in auth_repo_group_list: | |||
|
794 | allowed_ids.append(repo_group.group_id) | |||
|
795 | ||||
|
796 | repo_groups_data_total_count = RepoGroup.query() \ | |||
|
797 | .filter(RepoGroup.group_parent_id == repo_group_id) \ | |||
|
798 | .filter(or_( | |||
|
799 | # generate multiple IN clauses to work around IN-clause size limitations | |||
|
800 | *in_filter_generator(RepoGroup.group_id, allowed_ids)) | |||
|
801 | ) \ | |||
|
802 | .count() | |||
|
803 | ||||
|
804 | base_q = Session.query( | |||
|
805 | RepoGroup.group_name, | |||
|
806 | RepoGroup.group_name_hash, | |||
|
807 | RepoGroup.group_description, | |||
|
808 | RepoGroup.group_id, | |||
|
809 | RepoGroup.personal, | |||
|
810 | RepoGroup.updated_on, | |||
|
811 | RepoGroup._changeset_cache, | |||
|
812 | User, | |||
|
813 | ) \ | |||
|
814 | .filter(RepoGroup.group_parent_id == repo_group_id) \ | |||
|
815 | .filter(or_( | |||
|
816 | # generate multiple IN clauses to work around IN-clause size limitations | |||
|
817 | *in_filter_generator(RepoGroup.group_id, allowed_ids)) | |||
|
818 | ) \ | |||
|
819 | .join(User, User.user_id == RepoGroup.user_id) \ | |||
|
820 | .group_by(RepoGroup, User) | |||
|
821 | ||||
|
822 | repo_groups_data_total_filtered_count = base_q.count() | |||
|
823 | ||||
|
824 | sort_defined = False | |||
|
825 | ||||
|
826 | if order_by == 'group_name': | |||
|
827 | sort_col = func.lower(RepoGroup.group_name) | |||
|
828 | sort_defined = True | |||
|
829 | elif order_by == 'user_username': | |||
|
830 | sort_col = User.username | |||
|
831 | else: | |||
|
832 | sort_col = getattr(RepoGroup, order_by, None) | |||
|
833 | ||||
|
834 | if sort_defined or sort_col: | |||
|
835 | if order_dir == 'asc': | |||
|
836 | sort_col = sort_col.asc() | |||
|
837 | else: | |||
|
838 | sort_col = sort_col.desc() | |||
|
839 | ||||
|
840 | base_q = base_q.order_by(sort_col) | |||
|
841 | base_q = base_q.offset(start).limit(limit) | |||
|
842 | ||||
|
843 | repo_group_list = base_q.all() | |||
|
844 | ||||
|
845 | repo_groups_data = RepoGroupModel().get_repo_groups_as_dict( | |||
|
846 | repo_group_list=repo_group_list, admin=False) | |||
|
847 | ||||
|
848 | data = ({ | |||
|
849 | 'draw': draw, | |||
|
850 | 'data': repo_groups_data, | |||
|
851 | 'recordsTotal': repo_groups_data_total_count, | |||
|
852 | 'recordsFiltered': repo_groups_data_total_filtered_count, | |||
|
853 | }) | |||
|
854 | return data | |||
|
855 | ||||
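# --- usage sketch, not part of this changeset -----------------------------
# get_repo_groups_data_table() returns a DataTables-style payload with
# draw / data / recordsTotal / recordsFiltered keys. A hypothetical view
# could feed it straight from request parameters; the parameter names below
# follow common DataTables conventions and are assumptions, not part of the
# changeset.
from rhodecode.model.repo_group import RepoGroupModel

def repo_groups_data_view(request, auth_user):
    draw = int(request.GET.get('draw', 1))
    start = int(request.GET.get('start', 0))
    limit = int(request.GET.get('length', 25))
    data = RepoGroupModel().get_repo_groups_data_table(
        draw, start, limit,
        search_q=None, order_by='group_name', order_dir='asc',
        auth_user=auth_user, repo_group_id=None)
    return data   # the caller renders this dict as JSON
# ---------------------------------------------------------------------------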
778 | def _get_defaults(self, repo_group_name): |
|
856 | def _get_defaults(self, repo_group_name): | |
779 | repo_group = RepoGroup.get_by_group_name(repo_group_name) |
|
857 | repo_group = RepoGroup.get_by_group_name(repo_group_name) | |
780 |
|
858 | |||
781 | if repo_group is None: |
|
859 | if repo_group is None: | |
782 | return None |
|
860 | return None | |
783 |
|
861 | |||
784 | defaults = repo_group.get_dict() |
|
862 | defaults = repo_group.get_dict() | |
785 | defaults['repo_group_name'] = repo_group.name |
|
863 | defaults['repo_group_name'] = repo_group.name | |
786 | defaults['repo_group_description'] = repo_group.group_description |
|
864 | defaults['repo_group_description'] = repo_group.group_description | |
787 | defaults['repo_group_enable_locking'] = repo_group.enable_locking |
|
865 | defaults['repo_group_enable_locking'] = repo_group.enable_locking | |
788 |
|
866 | |||
789 | # we use -1 because that is how we mark an empty group in the HTML form
|
867 | # we use -1 because that is how we mark an empty group in the HTML form | |
790 | defaults['repo_group'] = defaults['group_parent_id'] or -1 |
|
868 | defaults['repo_group'] = defaults['group_parent_id'] or -1 | |
791 |
|
869 | |||
792 | # fill owner |
|
870 | # fill owner | |
793 | if repo_group.user: |
|
871 | if repo_group.user: | |
794 | defaults.update({'user': repo_group.user.username}) |
|
872 | defaults.update({'user': repo_group.user.username}) | |
795 | else: |
|
873 | else: | |
796 | replacement_user = User.get_first_super_admin().username |
|
874 | replacement_user = User.get_first_super_admin().username | |
797 | defaults.update({'user': replacement_user}) |
|
875 | defaults.update({'user': replacement_user}) | |
798 |
|
876 | |||
799 | return defaults |
|
877 | return defaults |
@@ -1,390 +1,392 b'' | |||||
1 |
|
1 | |||
2 | /****************************************************************************** |
|
2 | /****************************************************************************** | |
3 | * * |
|
3 | * * | |
4 | * DO NOT CHANGE THIS FILE MANUALLY * |
|
4 | * DO NOT CHANGE THIS FILE MANUALLY * | |
5 | * * |
|
5 | * * | |
6 | * * |
|
6 | * * | |
7 | * This file is automatically generated when the app starts up with * |
|
7 | * This file is automatically generated when the app starts up with * | |
8 | * generate_js_files = true * |
|
8 | * generate_js_files = true * | |
9 | * * |
|
9 | * * | |
10 | * To add a route here pass jsroute=True to the route definition in the app * |
|
10 | * To add a route here pass jsroute=True to the route definition in the app * | |
11 | * * |
|
11 | * * | |
12 | ******************************************************************************/ |
|
12 | ******************************************************************************/ | |
13 | function registerRCRoutes() { |
|
13 | function registerRCRoutes() { | |
14 | // routes registration |
|
14 | // routes registration | |
15 | pyroutes.register('favicon', '/favicon.ico', []); |
|
15 | pyroutes.register('favicon', '/favicon.ico', []); | |
16 | pyroutes.register('robots', '/robots.txt', []); |
|
16 | pyroutes.register('robots', '/robots.txt', []); | |
17 | pyroutes.register('auth_home', '/_admin/auth*traverse', []); |
|
17 | pyroutes.register('auth_home', '/_admin/auth*traverse', []); | |
18 | pyroutes.register('global_integrations_new', '/_admin/integrations/new', []); |
|
18 | pyroutes.register('global_integrations_new', '/_admin/integrations/new', []); | |
19 | pyroutes.register('global_integrations_home', '/_admin/integrations', []); |
|
19 | pyroutes.register('global_integrations_home', '/_admin/integrations', []); | |
20 | pyroutes.register('global_integrations_list', '/_admin/integrations/%(integration)s', ['integration']); |
|
20 | pyroutes.register('global_integrations_list', '/_admin/integrations/%(integration)s', ['integration']); | |
21 | pyroutes.register('global_integrations_create', '/_admin/integrations/%(integration)s/new', ['integration']); |
|
21 | pyroutes.register('global_integrations_create', '/_admin/integrations/%(integration)s/new', ['integration']); | |
22 | pyroutes.register('global_integrations_edit', '/_admin/integrations/%(integration)s/%(integration_id)s', ['integration', 'integration_id']); |
|
22 | pyroutes.register('global_integrations_edit', '/_admin/integrations/%(integration)s/%(integration_id)s', ['integration', 'integration_id']); | |
23 | pyroutes.register('repo_group_integrations_home', '/%(repo_group_name)s/_settings/integrations', ['repo_group_name']); |
|
23 | pyroutes.register('repo_group_integrations_home', '/%(repo_group_name)s/_settings/integrations', ['repo_group_name']); | |
24 | pyroutes.register('repo_group_integrations_new', '/%(repo_group_name)s/_settings/integrations/new', ['repo_group_name']); |
|
24 | pyroutes.register('repo_group_integrations_new', '/%(repo_group_name)s/_settings/integrations/new', ['repo_group_name']); | |
25 | pyroutes.register('repo_group_integrations_list', '/%(repo_group_name)s/_settings/integrations/%(integration)s', ['repo_group_name', 'integration']); |
|
25 | pyroutes.register('repo_group_integrations_list', '/%(repo_group_name)s/_settings/integrations/%(integration)s', ['repo_group_name', 'integration']); | |
26 | pyroutes.register('repo_group_integrations_create', '/%(repo_group_name)s/_settings/integrations/%(integration)s/new', ['repo_group_name', 'integration']); |
|
26 | pyroutes.register('repo_group_integrations_create', '/%(repo_group_name)s/_settings/integrations/%(integration)s/new', ['repo_group_name', 'integration']); | |
27 | pyroutes.register('repo_group_integrations_edit', '/%(repo_group_name)s/_settings/integrations/%(integration)s/%(integration_id)s', ['repo_group_name', 'integration', 'integration_id']); |
|
27 | pyroutes.register('repo_group_integrations_edit', '/%(repo_group_name)s/_settings/integrations/%(integration)s/%(integration_id)s', ['repo_group_name', 'integration', 'integration_id']); | |
28 | pyroutes.register('repo_integrations_home', '/%(repo_name)s/settings/integrations', ['repo_name']); |
|
28 | pyroutes.register('repo_integrations_home', '/%(repo_name)s/settings/integrations', ['repo_name']); | |
29 | pyroutes.register('repo_integrations_new', '/%(repo_name)s/settings/integrations/new', ['repo_name']); |
|
29 | pyroutes.register('repo_integrations_new', '/%(repo_name)s/settings/integrations/new', ['repo_name']); | |
30 | pyroutes.register('repo_integrations_list', '/%(repo_name)s/settings/integrations/%(integration)s', ['repo_name', 'integration']); |
|
30 | pyroutes.register('repo_integrations_list', '/%(repo_name)s/settings/integrations/%(integration)s', ['repo_name', 'integration']); | |
31 | pyroutes.register('repo_integrations_create', '/%(repo_name)s/settings/integrations/%(integration)s/new', ['repo_name', 'integration']); |
|
31 | pyroutes.register('repo_integrations_create', '/%(repo_name)s/settings/integrations/%(integration)s/new', ['repo_name', 'integration']); | |
32 | pyroutes.register('repo_integrations_edit', '/%(repo_name)s/settings/integrations/%(integration)s/%(integration_id)s', ['repo_name', 'integration', 'integration_id']); |
|
32 | pyroutes.register('repo_integrations_edit', '/%(repo_name)s/settings/integrations/%(integration)s/%(integration_id)s', ['repo_name', 'integration', 'integration_id']); | |
33 | pyroutes.register('hovercard_user', '/_hovercard/user/%(user_id)s', ['user_id']); |
|
33 | pyroutes.register('hovercard_user', '/_hovercard/user/%(user_id)s', ['user_id']); | |
34 | pyroutes.register('hovercard_user_group', '/_hovercard/user_group/%(user_group_id)s', ['user_group_id']); |
|
34 | pyroutes.register('hovercard_user_group', '/_hovercard/user_group/%(user_group_id)s', ['user_group_id']); | |
35 | pyroutes.register('hovercard_pull_request', '/_hovercard/pull_request/%(pull_request_id)s', ['pull_request_id']); |
|
35 | pyroutes.register('hovercard_pull_request', '/_hovercard/pull_request/%(pull_request_id)s', ['pull_request_id']); | |
36 | pyroutes.register('hovercard_repo_commit', '/_hovercard/commit/%(repo_name)s/%(commit_id)s', ['repo_name', 'commit_id']); |
|
36 | pyroutes.register('hovercard_repo_commit', '/_hovercard/commit/%(repo_name)s/%(commit_id)s', ['repo_name', 'commit_id']); | |
37 | pyroutes.register('ops_ping', '/_admin/ops/ping', []); |
|
37 | pyroutes.register('ops_ping', '/_admin/ops/ping', []); | |
38 | pyroutes.register('ops_error_test', '/_admin/ops/error', []); |
|
38 | pyroutes.register('ops_error_test', '/_admin/ops/error', []); | |
39 | pyroutes.register('ops_redirect_test', '/_admin/ops/redirect', []); |
|
39 | pyroutes.register('ops_redirect_test', '/_admin/ops/redirect', []); | |
40 | pyroutes.register('ops_ping_legacy', '/_admin/ping', []); |
|
40 | pyroutes.register('ops_ping_legacy', '/_admin/ping', []); | |
41 | pyroutes.register('ops_error_test_legacy', '/_admin/error_test', []); |
|
41 | pyroutes.register('ops_error_test_legacy', '/_admin/error_test', []); | |
42 | pyroutes.register('admin_home', '/_admin', []); |
|
42 | pyroutes.register('admin_home', '/_admin', []); | |
43 | pyroutes.register('admin_audit_logs', '/_admin/audit_logs', []); |
|
43 | pyroutes.register('admin_audit_logs', '/_admin/audit_logs', []); | |
44 | pyroutes.register('admin_audit_log_entry', '/_admin/audit_logs/%(audit_log_id)s', ['audit_log_id']); |
|
44 | pyroutes.register('admin_audit_log_entry', '/_admin/audit_logs/%(audit_log_id)s', ['audit_log_id']); | |
45 | pyroutes.register('pull_requests_global_0', '/_admin/pull_requests/%(pull_request_id)s', ['pull_request_id']); |
|
45 | pyroutes.register('pull_requests_global_0', '/_admin/pull_requests/%(pull_request_id)s', ['pull_request_id']); | |
46 | pyroutes.register('pull_requests_global_1', '/_admin/pull-requests/%(pull_request_id)s', ['pull_request_id']); |
|
46 | pyroutes.register('pull_requests_global_1', '/_admin/pull-requests/%(pull_request_id)s', ['pull_request_id']); | |
47 | pyroutes.register('pull_requests_global', '/_admin/pull-request/%(pull_request_id)s', ['pull_request_id']); |
|
47 | pyroutes.register('pull_requests_global', '/_admin/pull-request/%(pull_request_id)s', ['pull_request_id']); | |
48 | pyroutes.register('admin_settings_open_source', '/_admin/settings/open_source', []); |
|
48 | pyroutes.register('admin_settings_open_source', '/_admin/settings/open_source', []); | |
49 | pyroutes.register('admin_settings_vcs_svn_generate_cfg', '/_admin/settings/vcs/svn_generate_cfg', []); |
|
49 | pyroutes.register('admin_settings_vcs_svn_generate_cfg', '/_admin/settings/vcs/svn_generate_cfg', []); | |
50 | pyroutes.register('admin_settings_system', '/_admin/settings/system', []); |
|
50 | pyroutes.register('admin_settings_system', '/_admin/settings/system', []); | |
51 | pyroutes.register('admin_settings_system_update', '/_admin/settings/system/updates', []); |
|
51 | pyroutes.register('admin_settings_system_update', '/_admin/settings/system/updates', []); | |
52 | pyroutes.register('admin_settings_exception_tracker', '/_admin/settings/exceptions', []); |
|
52 | pyroutes.register('admin_settings_exception_tracker', '/_admin/settings/exceptions', []); | |
53 | pyroutes.register('admin_settings_exception_tracker_delete_all', '/_admin/settings/exceptions/delete', []); |
|
53 | pyroutes.register('admin_settings_exception_tracker_delete_all', '/_admin/settings/exceptions/delete', []); | |
54 | pyroutes.register('admin_settings_exception_tracker_show', '/_admin/settings/exceptions/%(exception_id)s', ['exception_id']); |
|
54 | pyroutes.register('admin_settings_exception_tracker_show', '/_admin/settings/exceptions/%(exception_id)s', ['exception_id']); | |
55 | pyroutes.register('admin_settings_exception_tracker_delete', '/_admin/settings/exceptions/%(exception_id)s/delete', ['exception_id']); |
|
55 | pyroutes.register('admin_settings_exception_tracker_delete', '/_admin/settings/exceptions/%(exception_id)s/delete', ['exception_id']); | |
56 | pyroutes.register('admin_settings_sessions', '/_admin/settings/sessions', []); |
|
56 | pyroutes.register('admin_settings_sessions', '/_admin/settings/sessions', []); | |
57 | pyroutes.register('admin_settings_sessions_cleanup', '/_admin/settings/sessions/cleanup', []); |
|
57 | pyroutes.register('admin_settings_sessions_cleanup', '/_admin/settings/sessions/cleanup', []); | |
58 | pyroutes.register('admin_settings_process_management', '/_admin/settings/process_management', []); |
|
58 | pyroutes.register('admin_settings_process_management', '/_admin/settings/process_management', []); | |
59 | pyroutes.register('admin_settings_process_management_data', '/_admin/settings/process_management/data', []); |
|
59 | pyroutes.register('admin_settings_process_management_data', '/_admin/settings/process_management/data', []); | |
60 | pyroutes.register('admin_settings_process_management_signal', '/_admin/settings/process_management/signal', []); |
|
60 | pyroutes.register('admin_settings_process_management_signal', '/_admin/settings/process_management/signal', []); | |
61 | pyroutes.register('admin_settings_process_management_master_signal', '/_admin/settings/process_management/master_signal', []); |
|
61 | pyroutes.register('admin_settings_process_management_master_signal', '/_admin/settings/process_management/master_signal', []); | |
62 | pyroutes.register('admin_defaults_repositories', '/_admin/defaults/repositories', []); |
|
62 | pyroutes.register('admin_defaults_repositories', '/_admin/defaults/repositories', []); | |
63 | pyroutes.register('admin_defaults_repositories_update', '/_admin/defaults/repositories/update', []); |
|
63 | pyroutes.register('admin_defaults_repositories_update', '/_admin/defaults/repositories/update', []); | |
64 | pyroutes.register('admin_settings', '/_admin/settings', []); |
|
64 | pyroutes.register('admin_settings', '/_admin/settings', []); | |
65 | pyroutes.register('admin_settings_update', '/_admin/settings/update', []); |
|
65 | pyroutes.register('admin_settings_update', '/_admin/settings/update', []); | |
66 | pyroutes.register('admin_settings_global', '/_admin/settings/global', []); |
|
66 | pyroutes.register('admin_settings_global', '/_admin/settings/global', []); | |
67 | pyroutes.register('admin_settings_global_update', '/_admin/settings/global/update', []); |
|
67 | pyroutes.register('admin_settings_global_update', '/_admin/settings/global/update', []); | |
68 | pyroutes.register('admin_settings_vcs', '/_admin/settings/vcs', []); |
|
68 | pyroutes.register('admin_settings_vcs', '/_admin/settings/vcs', []); | |
69 | pyroutes.register('admin_settings_vcs_update', '/_admin/settings/vcs/update', []); |
|
69 | pyroutes.register('admin_settings_vcs_update', '/_admin/settings/vcs/update', []); | |
70 | pyroutes.register('admin_settings_vcs_svn_pattern_delete', '/_admin/settings/vcs/svn_pattern_delete', []); |
|
70 | pyroutes.register('admin_settings_vcs_svn_pattern_delete', '/_admin/settings/vcs/svn_pattern_delete', []); | |
71 | pyroutes.register('admin_settings_mapping', '/_admin/settings/mapping', []); |
|
71 | pyroutes.register('admin_settings_mapping', '/_admin/settings/mapping', []); | |
72 | pyroutes.register('admin_settings_mapping_update', '/_admin/settings/mapping/update', []); |
|
72 | pyroutes.register('admin_settings_mapping_update', '/_admin/settings/mapping/update', []); | |
73 | pyroutes.register('admin_settings_visual', '/_admin/settings/visual', []); |
|
73 | pyroutes.register('admin_settings_visual', '/_admin/settings/visual', []); | |
74 | pyroutes.register('admin_settings_visual_update', '/_admin/settings/visual/update', []); |
|
74 | pyroutes.register('admin_settings_visual_update', '/_admin/settings/visual/update', []); | |
75 | pyroutes.register('admin_settings_issuetracker', '/_admin/settings/issue-tracker', []); |
|
75 | pyroutes.register('admin_settings_issuetracker', '/_admin/settings/issue-tracker', []); | |
76 | pyroutes.register('admin_settings_issuetracker_update', '/_admin/settings/issue-tracker/update', []); |
|
76 | pyroutes.register('admin_settings_issuetracker_update', '/_admin/settings/issue-tracker/update', []); | |
77 | pyroutes.register('admin_settings_issuetracker_test', '/_admin/settings/issue-tracker/test', []); |
|
77 | pyroutes.register('admin_settings_issuetracker_test', '/_admin/settings/issue-tracker/test', []); | |
78 | pyroutes.register('admin_settings_issuetracker_delete', '/_admin/settings/issue-tracker/delete', []); |
|
78 | pyroutes.register('admin_settings_issuetracker_delete', '/_admin/settings/issue-tracker/delete', []); | |
79 | pyroutes.register('admin_settings_email', '/_admin/settings/email', []); |
|
79 | pyroutes.register('admin_settings_email', '/_admin/settings/email', []); | |
80 | pyroutes.register('admin_settings_email_update', '/_admin/settings/email/update', []); |
|
80 | pyroutes.register('admin_settings_email_update', '/_admin/settings/email/update', []); | |
81 | pyroutes.register('admin_settings_hooks', '/_admin/settings/hooks', []); |
|
81 | pyroutes.register('admin_settings_hooks', '/_admin/settings/hooks', []); | |
82 | pyroutes.register('admin_settings_hooks_update', '/_admin/settings/hooks/update', []); |
|
82 | pyroutes.register('admin_settings_hooks_update', '/_admin/settings/hooks/update', []); | |
83 | pyroutes.register('admin_settings_hooks_delete', '/_admin/settings/hooks/delete', []); |
|
83 | pyroutes.register('admin_settings_hooks_delete', '/_admin/settings/hooks/delete', []); | |
84 | pyroutes.register('admin_settings_search', '/_admin/settings/search', []); |
|
84 | pyroutes.register('admin_settings_search', '/_admin/settings/search', []); | |
85 | pyroutes.register('admin_settings_labs', '/_admin/settings/labs', []); |
|
85 | pyroutes.register('admin_settings_labs', '/_admin/settings/labs', []); | |
86 | pyroutes.register('admin_settings_labs_update', '/_admin/settings/labs/update', []); |
|
86 | pyroutes.register('admin_settings_labs_update', '/_admin/settings/labs/update', []); | |
87 | pyroutes.register('admin_permissions_application', '/_admin/permissions/application', []); |
|
87 | pyroutes.register('admin_permissions_application', '/_admin/permissions/application', []); | |
88 | pyroutes.register('admin_permissions_application_update', '/_admin/permissions/application/update', []); |
|
88 | pyroutes.register('admin_permissions_application_update', '/_admin/permissions/application/update', []); | |
89 | pyroutes.register('admin_permissions_global', '/_admin/permissions/global', []); |
|
89 | pyroutes.register('admin_permissions_global', '/_admin/permissions/global', []); | |
90 | pyroutes.register('admin_permissions_global_update', '/_admin/permissions/global/update', []); |
|
90 | pyroutes.register('admin_permissions_global_update', '/_admin/permissions/global/update', []); | |
91 | pyroutes.register('admin_permissions_object', '/_admin/permissions/object', []); |
|
91 | pyroutes.register('admin_permissions_object', '/_admin/permissions/object', []); | |
92 | pyroutes.register('admin_permissions_object_update', '/_admin/permissions/object/update', []); |
|
92 | pyroutes.register('admin_permissions_object_update', '/_admin/permissions/object/update', []); | |
93 | pyroutes.register('admin_permissions_ips', '/_admin/permissions/ips', []); |
|
93 | pyroutes.register('admin_permissions_ips', '/_admin/permissions/ips', []); | |
94 | pyroutes.register('admin_permissions_overview', '/_admin/permissions/overview', []); |
|
94 | pyroutes.register('admin_permissions_overview', '/_admin/permissions/overview', []); | |
95 | pyroutes.register('admin_permissions_auth_token_access', '/_admin/permissions/auth_token_access', []); |
|
95 | pyroutes.register('admin_permissions_auth_token_access', '/_admin/permissions/auth_token_access', []); | |
96 | pyroutes.register('admin_permissions_ssh_keys', '/_admin/permissions/ssh_keys', []); |
|
96 | pyroutes.register('admin_permissions_ssh_keys', '/_admin/permissions/ssh_keys', []); | |
97 | pyroutes.register('admin_permissions_ssh_keys_data', '/_admin/permissions/ssh_keys/data', []); |
|
97 | pyroutes.register('admin_permissions_ssh_keys_data', '/_admin/permissions/ssh_keys/data', []); | |
98 | pyroutes.register('admin_permissions_ssh_keys_update', '/_admin/permissions/ssh_keys/update', []); |
|
98 | pyroutes.register('admin_permissions_ssh_keys_update', '/_admin/permissions/ssh_keys/update', []); | |
99 | pyroutes.register('users', '/_admin/users', []); |
|
99 | pyroutes.register('users', '/_admin/users', []); | |
100 | pyroutes.register('users_data', '/_admin/users_data', []); |
|
100 | pyroutes.register('users_data', '/_admin/users_data', []); | |
101 | pyroutes.register('users_create', '/_admin/users/create', []); |
|
101 | pyroutes.register('users_create', '/_admin/users/create', []); | |
102 | pyroutes.register('users_new', '/_admin/users/new', []); |
|
102 | pyroutes.register('users_new', '/_admin/users/new', []); | |
103 | pyroutes.register('user_edit', '/_admin/users/%(user_id)s/edit', ['user_id']); |
|
103 | pyroutes.register('user_edit', '/_admin/users/%(user_id)s/edit', ['user_id']); | |
104 | pyroutes.register('user_edit_advanced', '/_admin/users/%(user_id)s/edit/advanced', ['user_id']); |
|
104 | pyroutes.register('user_edit_advanced', '/_admin/users/%(user_id)s/edit/advanced', ['user_id']); | |
105 | pyroutes.register('user_edit_global_perms', '/_admin/users/%(user_id)s/edit/global_permissions', ['user_id']); |
|
105 | pyroutes.register('user_edit_global_perms', '/_admin/users/%(user_id)s/edit/global_permissions', ['user_id']); | |
106 | pyroutes.register('user_edit_global_perms_update', '/_admin/users/%(user_id)s/edit/global_permissions/update', ['user_id']); |
|
106 | pyroutes.register('user_edit_global_perms_update', '/_admin/users/%(user_id)s/edit/global_permissions/update', ['user_id']); | |
107 | pyroutes.register('user_update', '/_admin/users/%(user_id)s/update', ['user_id']); |
|
107 | pyroutes.register('user_update', '/_admin/users/%(user_id)s/update', ['user_id']); | |
108 | pyroutes.register('user_delete', '/_admin/users/%(user_id)s/delete', ['user_id']); |
|
108 | pyroutes.register('user_delete', '/_admin/users/%(user_id)s/delete', ['user_id']); | |
109 | pyroutes.register('user_enable_force_password_reset', '/_admin/users/%(user_id)s/password_reset_enable', ['user_id']); |
|
109 | pyroutes.register('user_enable_force_password_reset', '/_admin/users/%(user_id)s/password_reset_enable', ['user_id']); | |
110 | pyroutes.register('user_disable_force_password_reset', '/_admin/users/%(user_id)s/password_reset_disable', ['user_id']); |
|
110 | pyroutes.register('user_disable_force_password_reset', '/_admin/users/%(user_id)s/password_reset_disable', ['user_id']); | |
111 | pyroutes.register('user_create_personal_repo_group', '/_admin/users/%(user_id)s/create_repo_group', ['user_id']); |
|
111 | pyroutes.register('user_create_personal_repo_group', '/_admin/users/%(user_id)s/create_repo_group', ['user_id']); | |
112 | pyroutes.register('edit_user_auth_tokens_delete', '/_admin/users/%(user_id)s/edit/auth_tokens/delete', ['user_id']); |
|
112 | pyroutes.register('edit_user_auth_tokens_delete', '/_admin/users/%(user_id)s/edit/auth_tokens/delete', ['user_id']); | |
113 | pyroutes.register('edit_user_ssh_keys', '/_admin/users/%(user_id)s/edit/ssh_keys', ['user_id']); |
|
113 | pyroutes.register('edit_user_ssh_keys', '/_admin/users/%(user_id)s/edit/ssh_keys', ['user_id']); | |
114 | pyroutes.register('edit_user_ssh_keys_generate_keypair', '/_admin/users/%(user_id)s/edit/ssh_keys/generate', ['user_id']); |
|
114 | pyroutes.register('edit_user_ssh_keys_generate_keypair', '/_admin/users/%(user_id)s/edit/ssh_keys/generate', ['user_id']); | |
115 | pyroutes.register('edit_user_ssh_keys_add', '/_admin/users/%(user_id)s/edit/ssh_keys/new', ['user_id']); |
|
115 | pyroutes.register('edit_user_ssh_keys_add', '/_admin/users/%(user_id)s/edit/ssh_keys/new', ['user_id']); | |
116 | pyroutes.register('edit_user_ssh_keys_delete', '/_admin/users/%(user_id)s/edit/ssh_keys/delete', ['user_id']); |
|
116 | pyroutes.register('edit_user_ssh_keys_delete', '/_admin/users/%(user_id)s/edit/ssh_keys/delete', ['user_id']); | |
117 | pyroutes.register('edit_user_emails', '/_admin/users/%(user_id)s/edit/emails', ['user_id']); |
|
117 | pyroutes.register('edit_user_emails', '/_admin/users/%(user_id)s/edit/emails', ['user_id']); | |
118 | pyroutes.register('edit_user_emails_add', '/_admin/users/%(user_id)s/edit/emails/new', ['user_id']); |
|
118 | pyroutes.register('edit_user_emails_add', '/_admin/users/%(user_id)s/edit/emails/new', ['user_id']); | |
119 | pyroutes.register('edit_user_emails_delete', '/_admin/users/%(user_id)s/edit/emails/delete', ['user_id']); |
|
119 | pyroutes.register('edit_user_emails_delete', '/_admin/users/%(user_id)s/edit/emails/delete', ['user_id']); | |
120 | pyroutes.register('edit_user_ips', '/_admin/users/%(user_id)s/edit/ips', ['user_id']); |
|
120 | pyroutes.register('edit_user_ips', '/_admin/users/%(user_id)s/edit/ips', ['user_id']); | |
121 | pyroutes.register('edit_user_ips_add', '/_admin/users/%(user_id)s/edit/ips/new', ['user_id']); |
|
121 | pyroutes.register('edit_user_ips_add', '/_admin/users/%(user_id)s/edit/ips/new', ['user_id']); | |
122 | pyroutes.register('edit_user_ips_delete', '/_admin/users/%(user_id)s/edit/ips/delete', ['user_id']); |
|
122 | pyroutes.register('edit_user_ips_delete', '/_admin/users/%(user_id)s/edit/ips/delete', ['user_id']); | |
123 | pyroutes.register('edit_user_perms_summary', '/_admin/users/%(user_id)s/edit/permissions_summary', ['user_id']); |
|
123 | pyroutes.register('edit_user_perms_summary', '/_admin/users/%(user_id)s/edit/permissions_summary', ['user_id']); | |
124 | pyroutes.register('edit_user_perms_summary_json', '/_admin/users/%(user_id)s/edit/permissions_summary/json', ['user_id']); |
|
124 | pyroutes.register('edit_user_perms_summary_json', '/_admin/users/%(user_id)s/edit/permissions_summary/json', ['user_id']); | |
125 | pyroutes.register('edit_user_groups_management', '/_admin/users/%(user_id)s/edit/groups_management', ['user_id']); |
|
125 | pyroutes.register('edit_user_groups_management', '/_admin/users/%(user_id)s/edit/groups_management', ['user_id']); | |
126 | pyroutes.register('edit_user_groups_management_updates', '/_admin/users/%(user_id)s/edit/edit_user_groups_management/updates', ['user_id']); |
|
126 | pyroutes.register('edit_user_groups_management_updates', '/_admin/users/%(user_id)s/edit/edit_user_groups_management/updates', ['user_id']); | |
127 | pyroutes.register('edit_user_audit_logs', '/_admin/users/%(user_id)s/edit/audit', ['user_id']); |
|
127 | pyroutes.register('edit_user_audit_logs', '/_admin/users/%(user_id)s/edit/audit', ['user_id']); | |
128 | pyroutes.register('edit_user_audit_logs_download', '/_admin/users/%(user_id)s/edit/audit/download', ['user_id']); |
|
128 | pyroutes.register('edit_user_audit_logs_download', '/_admin/users/%(user_id)s/edit/audit/download', ['user_id']); | |
129 | pyroutes.register('edit_user_caches', '/_admin/users/%(user_id)s/edit/caches', ['user_id']); |
|
129 | pyroutes.register('edit_user_caches', '/_admin/users/%(user_id)s/edit/caches', ['user_id']); | |
130 | pyroutes.register('edit_user_caches_update', '/_admin/users/%(user_id)s/edit/caches/update', ['user_id']); |
|
130 | pyroutes.register('edit_user_caches_update', '/_admin/users/%(user_id)s/edit/caches/update', ['user_id']); | |
131 | pyroutes.register('user_groups', '/_admin/user_groups', []); |
|
131 | pyroutes.register('user_groups', '/_admin/user_groups', []); | |
132 | pyroutes.register('user_groups_data', '/_admin/user_groups_data', []); |
|
132 | pyroutes.register('user_groups_data', '/_admin/user_groups_data', []); | |
133 | pyroutes.register('user_groups_new', '/_admin/user_groups/new', []); |
|
133 | pyroutes.register('user_groups_new', '/_admin/user_groups/new', []); | |
134 | pyroutes.register('user_groups_create', '/_admin/user_groups/create', []); |
|
134 | pyroutes.register('user_groups_create', '/_admin/user_groups/create', []); | |
135 | pyroutes.register('repos', '/_admin/repos', []); |
|
135 | pyroutes.register('repos', '/_admin/repos', []); | |
136 | pyroutes.register('repos_data', '/_admin/repos_data', []); |
|
136 | pyroutes.register('repos_data', '/_admin/repos_data', []); | |
137 | pyroutes.register('repo_new', '/_admin/repos/new', []); |
|
137 | pyroutes.register('repo_new', '/_admin/repos/new', []); | |
138 | pyroutes.register('repo_create', '/_admin/repos/create', []); |
|
138 | pyroutes.register('repo_create', '/_admin/repos/create', []); | |
139 | pyroutes.register('repo_groups', '/_admin/repo_groups', []); |
|
139 | pyroutes.register('repo_groups', '/_admin/repo_groups', []); | |
140 | pyroutes.register('repo_groups_data', '/_admin/repo_groups_data', []); |
|
140 | pyroutes.register('repo_groups_data', '/_admin/repo_groups_data', []); | |
141 | pyroutes.register('repo_group_new', '/_admin/repo_group/new', []); |
|
141 | pyroutes.register('repo_group_new', '/_admin/repo_group/new', []); | |
142 | pyroutes.register('repo_group_create', '/_admin/repo_group/create', []); |
|
142 | pyroutes.register('repo_group_create', '/_admin/repo_group/create', []); | |
143 | pyroutes.register('channelstream_connect', '/_admin/channelstream/connect', []); |
|
143 | pyroutes.register('channelstream_connect', '/_admin/channelstream/connect', []); | |
144 | pyroutes.register('channelstream_subscribe', '/_admin/channelstream/subscribe', []); |
|
144 | pyroutes.register('channelstream_subscribe', '/_admin/channelstream/subscribe', []); | |
145 | pyroutes.register('channelstream_proxy', '/_channelstream', []); |
|
145 | pyroutes.register('channelstream_proxy', '/_channelstream', []); | |
146 | pyroutes.register('upload_file', '/_file_store/upload', []); |
|
146 | pyroutes.register('upload_file', '/_file_store/upload', []); | |
147 | pyroutes.register('download_file', '/_file_store/download/%(fid)s', ['fid']); |
|
147 | pyroutes.register('download_file', '/_file_store/download/%(fid)s', ['fid']); | |
148 | pyroutes.register('download_file_by_token', '/_file_store/token-download/%(_auth_token)s/%(fid)s', ['_auth_token', 'fid']); |
|
148 | pyroutes.register('download_file_by_token', '/_file_store/token-download/%(_auth_token)s/%(fid)s', ['_auth_token', 'fid']); | |
149 | pyroutes.register('logout', '/_admin/logout', []); |
|
149 | pyroutes.register('logout', '/_admin/logout', []); | |
150 | pyroutes.register('reset_password', '/_admin/password_reset', []); |
|
150 | pyroutes.register('reset_password', '/_admin/password_reset', []); | |
151 | pyroutes.register('reset_password_confirmation', '/_admin/password_reset_confirmation', []); |
|
151 | pyroutes.register('reset_password_confirmation', '/_admin/password_reset_confirmation', []); | |
152 | pyroutes.register('home', '/', []); | 152 | pyroutes.register('home', '/', []); | |
| 153 | pyroutes.register('main_page_repos_data', '/_home_repos', []); |||
| 154 | pyroutes.register('main_page_repo_groups_data', '/_home_repo_groups', []); |||
153 | pyroutes.register('user_autocomplete_data', '/_users', []); | 155 | pyroutes.register('user_autocomplete_data', '/_users', []); | |
154 | pyroutes.register('user_group_autocomplete_data', '/_user_groups', []); | 156 | pyroutes.register('user_group_autocomplete_data', '/_user_groups', []); | |
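The two added rows above ('main_page_repos_data' and 'main_page_repo_groups_data') are the only changes in this hunk; every registration that follows is unchanged and simply shifts down by two lines. As a rough sketch of how such an entry is consumed on the client, assuming the pyroutes.url() helper that normally accompanies this generated route map (the jQuery call is illustrative only):

    // Resolve a registered route name to a concrete URL in the browser.
    var reposDataUrl = pyroutes.url('main_page_repos_data');        // '/_home_repos'
    var groupsDataUrl = pyroutes.url('main_page_repo_groups_data'); // '/_home_repo_groups'

    // Parameterized routes substitute the declared arguments, e.g.:
    var userEditUrl = pyroutes.url('user_edit', {'user_id': 1});    // '/_admin/users/1/edit'

    // Illustrative only: fetch the dashboard repository list as JSON.
    $.getJSON(reposDataUrl, function (data) {
        console.log('loaded', data);
    });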
155 | pyroutes.register('repo_list_data', '/_repos', []); |
|
157 | pyroutes.register('repo_list_data', '/_repos', []); | |
156 | pyroutes.register('repo_group_list_data', '/_repo_groups', []); |
|
158 | pyroutes.register('repo_group_list_data', '/_repo_groups', []); | |
157 | pyroutes.register('goto_switcher_data', '/_goto_data', []); |
|
159 | pyroutes.register('goto_switcher_data', '/_goto_data', []); | |
158 | pyroutes.register('markup_preview', '/_markup_preview', []); |
|
160 | pyroutes.register('markup_preview', '/_markup_preview', []); | |
159 | pyroutes.register('file_preview', '/_file_preview', []); |
|
161 | pyroutes.register('file_preview', '/_file_preview', []); | |
160 | pyroutes.register('store_user_session_value', '/_store_session_attr', []); |
|
162 | pyroutes.register('store_user_session_value', '/_store_session_attr', []); | |
161 | pyroutes.register('journal', '/_admin/journal', []); |
|
163 | pyroutes.register('journal', '/_admin/journal', []); | |
162 | pyroutes.register('journal_rss', '/_admin/journal/rss', []); |
|
164 | pyroutes.register('journal_rss', '/_admin/journal/rss', []); | |
163 | pyroutes.register('journal_atom', '/_admin/journal/atom', []); |
|
165 | pyroutes.register('journal_atom', '/_admin/journal/atom', []); | |
164 | pyroutes.register('journal_public', '/_admin/public_journal', []); |
|
166 | pyroutes.register('journal_public', '/_admin/public_journal', []); | |
165 | pyroutes.register('journal_public_atom', '/_admin/public_journal/atom', []); |
|
167 | pyroutes.register('journal_public_atom', '/_admin/public_journal/atom', []); | |
166 | pyroutes.register('journal_public_atom_old', '/_admin/public_journal_atom', []); |
|
168 | pyroutes.register('journal_public_atom_old', '/_admin/public_journal_atom', []); | |
167 | pyroutes.register('journal_public_rss', '/_admin/public_journal/rss', []); |
|
169 | pyroutes.register('journal_public_rss', '/_admin/public_journal/rss', []); | |
168 | pyroutes.register('journal_public_rss_old', '/_admin/public_journal_rss', []); |
|
170 | pyroutes.register('journal_public_rss_old', '/_admin/public_journal_rss', []); | |
169 | pyroutes.register('toggle_following', '/_admin/toggle_following', []); |
|
171 | pyroutes.register('toggle_following', '/_admin/toggle_following', []); | |
170 | pyroutes.register('repo_creating', '/%(repo_name)s/repo_creating', ['repo_name']); |
|
172 | pyroutes.register('repo_creating', '/%(repo_name)s/repo_creating', ['repo_name']); | |
171 | pyroutes.register('repo_creating_check', '/%(repo_name)s/repo_creating_check', ['repo_name']); |
|
173 | pyroutes.register('repo_creating_check', '/%(repo_name)s/repo_creating_check', ['repo_name']); | |
172 | pyroutes.register('repo_summary_explicit', '/%(repo_name)s/summary', ['repo_name']); |
|
174 | pyroutes.register('repo_summary_explicit', '/%(repo_name)s/summary', ['repo_name']); | |
173 | pyroutes.register('repo_summary_commits', '/%(repo_name)s/summary-commits', ['repo_name']); |
|
175 | pyroutes.register('repo_summary_commits', '/%(repo_name)s/summary-commits', ['repo_name']); | |
174 | pyroutes.register('repo_commit', '/%(repo_name)s/changeset/%(commit_id)s', ['repo_name', 'commit_id']); |
|
176 | pyroutes.register('repo_commit', '/%(repo_name)s/changeset/%(commit_id)s', ['repo_name', 'commit_id']); | |
175 | pyroutes.register('repo_commit_children', '/%(repo_name)s/changeset_children/%(commit_id)s', ['repo_name', 'commit_id']); |
|
177 | pyroutes.register('repo_commit_children', '/%(repo_name)s/changeset_children/%(commit_id)s', ['repo_name', 'commit_id']); | |
176 | pyroutes.register('repo_commit_parents', '/%(repo_name)s/changeset_parents/%(commit_id)s', ['repo_name', 'commit_id']); |
|
178 | pyroutes.register('repo_commit_parents', '/%(repo_name)s/changeset_parents/%(commit_id)s', ['repo_name', 'commit_id']); | |
177 | pyroutes.register('repo_commit_raw', '/%(repo_name)s/changeset-diff/%(commit_id)s', ['repo_name', 'commit_id']); |
|
179 | pyroutes.register('repo_commit_raw', '/%(repo_name)s/changeset-diff/%(commit_id)s', ['repo_name', 'commit_id']); | |
178 | pyroutes.register('repo_commit_patch', '/%(repo_name)s/changeset-patch/%(commit_id)s', ['repo_name', 'commit_id']); |
|
180 | pyroutes.register('repo_commit_patch', '/%(repo_name)s/changeset-patch/%(commit_id)s', ['repo_name', 'commit_id']); | |
179 | pyroutes.register('repo_commit_download', '/%(repo_name)s/changeset-download/%(commit_id)s', ['repo_name', 'commit_id']); |
|
181 | pyroutes.register('repo_commit_download', '/%(repo_name)s/changeset-download/%(commit_id)s', ['repo_name', 'commit_id']); | |
180 | pyroutes.register('repo_commit_data', '/%(repo_name)s/changeset-data/%(commit_id)s', ['repo_name', 'commit_id']); |
|
182 | pyroutes.register('repo_commit_data', '/%(repo_name)s/changeset-data/%(commit_id)s', ['repo_name', 'commit_id']); | |
181 | pyroutes.register('repo_commit_comment_create', '/%(repo_name)s/changeset/%(commit_id)s/comment/create', ['repo_name', 'commit_id']); |
|
183 | pyroutes.register('repo_commit_comment_create', '/%(repo_name)s/changeset/%(commit_id)s/comment/create', ['repo_name', 'commit_id']); | |
182 | pyroutes.register('repo_commit_comment_preview', '/%(repo_name)s/changeset/%(commit_id)s/comment/preview', ['repo_name', 'commit_id']); |
|
184 | pyroutes.register('repo_commit_comment_preview', '/%(repo_name)s/changeset/%(commit_id)s/comment/preview', ['repo_name', 'commit_id']); | |
183 | pyroutes.register('repo_commit_comment_attachment_upload', '/%(repo_name)s/changeset/%(commit_id)s/comment/attachment_upload', ['repo_name', 'commit_id']); |
|
185 | pyroutes.register('repo_commit_comment_attachment_upload', '/%(repo_name)s/changeset/%(commit_id)s/comment/attachment_upload', ['repo_name', 'commit_id']); | |
184 | pyroutes.register('repo_commit_comment_delete', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_id)s/delete', ['repo_name', 'commit_id', 'comment_id']); |
|
186 | pyroutes.register('repo_commit_comment_delete', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_id)s/delete', ['repo_name', 'commit_id', 'comment_id']); | |
185 | pyroutes.register('repo_commit_raw_deprecated', '/%(repo_name)s/raw-changeset/%(commit_id)s', ['repo_name', 'commit_id']); |
|
187 | pyroutes.register('repo_commit_raw_deprecated', '/%(repo_name)s/raw-changeset/%(commit_id)s', ['repo_name', 'commit_id']); | |
186 | pyroutes.register('repo_archivefile', '/%(repo_name)s/archive/%(fname)s', ['repo_name', 'fname']); |
|
188 | pyroutes.register('repo_archivefile', '/%(repo_name)s/archive/%(fname)s', ['repo_name', 'fname']); | |
187 | pyroutes.register('repo_files_diff', '/%(repo_name)s/diff/%(f_path)s', ['repo_name', 'f_path']); |
|
189 | pyroutes.register('repo_files_diff', '/%(repo_name)s/diff/%(f_path)s', ['repo_name', 'f_path']); | |
188 | pyroutes.register('repo_files_diff_2way_redirect', '/%(repo_name)s/diff-2way/%(f_path)s', ['repo_name', 'f_path']); |
|
190 | pyroutes.register('repo_files_diff_2way_redirect', '/%(repo_name)s/diff-2way/%(f_path)s', ['repo_name', 'f_path']); | |
189 | pyroutes.register('repo_files', '/%(repo_name)s/files/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
191 | pyroutes.register('repo_files', '/%(repo_name)s/files/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
190 | pyroutes.register('repo_files:default_path', '/%(repo_name)s/files/%(commit_id)s/', ['repo_name', 'commit_id']); |
|
192 | pyroutes.register('repo_files:default_path', '/%(repo_name)s/files/%(commit_id)s/', ['repo_name', 'commit_id']); | |
191 | pyroutes.register('repo_files:default_commit', '/%(repo_name)s/files', ['repo_name']); |
|
193 | pyroutes.register('repo_files:default_commit', '/%(repo_name)s/files', ['repo_name']); | |
192 | pyroutes.register('repo_files:rendered', '/%(repo_name)s/render/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
194 | pyroutes.register('repo_files:rendered', '/%(repo_name)s/render/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
193 | pyroutes.register('repo_files:annotated', '/%(repo_name)s/annotate/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
195 | pyroutes.register('repo_files:annotated', '/%(repo_name)s/annotate/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
194 | pyroutes.register('repo_files:annotated_previous', '/%(repo_name)s/annotate-previous/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
196 | pyroutes.register('repo_files:annotated_previous', '/%(repo_name)s/annotate-previous/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
195 | pyroutes.register('repo_nodetree_full', '/%(repo_name)s/nodetree_full/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
197 | pyroutes.register('repo_nodetree_full', '/%(repo_name)s/nodetree_full/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
196 | pyroutes.register('repo_nodetree_full:default_path', '/%(repo_name)s/nodetree_full/%(commit_id)s/', ['repo_name', 'commit_id']); |
|
198 | pyroutes.register('repo_nodetree_full:default_path', '/%(repo_name)s/nodetree_full/%(commit_id)s/', ['repo_name', 'commit_id']); | |
197 | pyroutes.register('repo_files_nodelist', '/%(repo_name)s/nodelist/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
199 | pyroutes.register('repo_files_nodelist', '/%(repo_name)s/nodelist/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
198 | pyroutes.register('repo_file_raw', '/%(repo_name)s/raw/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
200 | pyroutes.register('repo_file_raw', '/%(repo_name)s/raw/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
199 | pyroutes.register('repo_file_download', '/%(repo_name)s/download/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
201 | pyroutes.register('repo_file_download', '/%(repo_name)s/download/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
200 | pyroutes.register('repo_file_download:legacy', '/%(repo_name)s/rawfile/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
202 | pyroutes.register('repo_file_download:legacy', '/%(repo_name)s/rawfile/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
201 | pyroutes.register('repo_file_history', '/%(repo_name)s/history/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
203 | pyroutes.register('repo_file_history', '/%(repo_name)s/history/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
202 | pyroutes.register('repo_file_authors', '/%(repo_name)s/authors/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
204 | pyroutes.register('repo_file_authors', '/%(repo_name)s/authors/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
203 | pyroutes.register('repo_files_remove_file', '/%(repo_name)s/remove_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
205 | pyroutes.register('repo_files_remove_file', '/%(repo_name)s/remove_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
204 | pyroutes.register('repo_files_delete_file', '/%(repo_name)s/delete_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
206 | pyroutes.register('repo_files_delete_file', '/%(repo_name)s/delete_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
205 | pyroutes.register('repo_files_edit_file', '/%(repo_name)s/edit_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
207 | pyroutes.register('repo_files_edit_file', '/%(repo_name)s/edit_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
206 | pyroutes.register('repo_files_update_file', '/%(repo_name)s/update_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
208 | pyroutes.register('repo_files_update_file', '/%(repo_name)s/update_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
207 | pyroutes.register('repo_files_add_file', '/%(repo_name)s/add_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
209 | pyroutes.register('repo_files_add_file', '/%(repo_name)s/add_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
208 | pyroutes.register('repo_files_upload_file', '/%(repo_name)s/upload_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
210 | pyroutes.register('repo_files_upload_file', '/%(repo_name)s/upload_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
209 | pyroutes.register('repo_files_create_file', '/%(repo_name)s/create_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
211 | pyroutes.register('repo_files_create_file', '/%(repo_name)s/create_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
210 | pyroutes.register('repo_refs_data', '/%(repo_name)s/refs-data', ['repo_name']); |
|
212 | pyroutes.register('repo_refs_data', '/%(repo_name)s/refs-data', ['repo_name']); | |
211 | pyroutes.register('repo_refs_changelog_data', '/%(repo_name)s/refs-data-changelog', ['repo_name']); |
|
213 | pyroutes.register('repo_refs_changelog_data', '/%(repo_name)s/refs-data-changelog', ['repo_name']); | |
212 | pyroutes.register('repo_stats', '/%(repo_name)s/repo_stats/%(commit_id)s', ['repo_name', 'commit_id']); |
|
214 | pyroutes.register('repo_stats', '/%(repo_name)s/repo_stats/%(commit_id)s', ['repo_name', 'commit_id']); | |
213 | pyroutes.register('repo_commits', '/%(repo_name)s/commits', ['repo_name']); |
|
215 | pyroutes.register('repo_commits', '/%(repo_name)s/commits', ['repo_name']); | |
214 | pyroutes.register('repo_commits_file', '/%(repo_name)s/commits/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
216 | pyroutes.register('repo_commits_file', '/%(repo_name)s/commits/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
215 | pyroutes.register('repo_commits_elements', '/%(repo_name)s/commits_elements', ['repo_name']); |
|
217 | pyroutes.register('repo_commits_elements', '/%(repo_name)s/commits_elements', ['repo_name']); | |
216 | pyroutes.register('repo_commits_elements_file', '/%(repo_name)s/commits_elements/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
218 | pyroutes.register('repo_commits_elements_file', '/%(repo_name)s/commits_elements/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
217 | pyroutes.register('repo_changelog', '/%(repo_name)s/changelog', ['repo_name']); |
|
219 | pyroutes.register('repo_changelog', '/%(repo_name)s/changelog', ['repo_name']); | |
218 | pyroutes.register('repo_changelog_file', '/%(repo_name)s/changelog/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
220 | pyroutes.register('repo_changelog_file', '/%(repo_name)s/changelog/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
219 | pyroutes.register('repo_compare_select', '/%(repo_name)s/compare', ['repo_name']); |
|
221 | pyroutes.register('repo_compare_select', '/%(repo_name)s/compare', ['repo_name']); | |
220 | pyroutes.register('repo_compare', '/%(repo_name)s/compare/%(source_ref_type)s@%(source_ref)s...%(target_ref_type)s@%(target_ref)s', ['repo_name', 'source_ref_type', 'source_ref', 'target_ref_type', 'target_ref']); |
|
222 | pyroutes.register('repo_compare', '/%(repo_name)s/compare/%(source_ref_type)s@%(source_ref)s...%(target_ref_type)s@%(target_ref)s', ['repo_name', 'source_ref_type', 'source_ref', 'target_ref_type', 'target_ref']); | |
221 | pyroutes.register('tags_home', '/%(repo_name)s/tags', ['repo_name']); |
|
223 | pyroutes.register('tags_home', '/%(repo_name)s/tags', ['repo_name']); | |
222 | pyroutes.register('branches_home', '/%(repo_name)s/branches', ['repo_name']); |
|
224 | pyroutes.register('branches_home', '/%(repo_name)s/branches', ['repo_name']); | |
223 | pyroutes.register('bookmarks_home', '/%(repo_name)s/bookmarks', ['repo_name']); |
|
225 | pyroutes.register('bookmarks_home', '/%(repo_name)s/bookmarks', ['repo_name']); | |
224 | pyroutes.register('repo_fork_new', '/%(repo_name)s/fork', ['repo_name']); |
|
226 | pyroutes.register('repo_fork_new', '/%(repo_name)s/fork', ['repo_name']); | |
225 | pyroutes.register('repo_fork_create', '/%(repo_name)s/fork/create', ['repo_name']); |
|
227 | pyroutes.register('repo_fork_create', '/%(repo_name)s/fork/create', ['repo_name']); | |
226 | pyroutes.register('repo_forks_show_all', '/%(repo_name)s/forks', ['repo_name']); |
|
228 | pyroutes.register('repo_forks_show_all', '/%(repo_name)s/forks', ['repo_name']); | |
227 | pyroutes.register('repo_forks_data', '/%(repo_name)s/forks/data', ['repo_name']); |
|
229 | pyroutes.register('repo_forks_data', '/%(repo_name)s/forks/data', ['repo_name']); | |
228 | pyroutes.register('pullrequest_show', '/%(repo_name)s/pull-request/%(pull_request_id)s', ['repo_name', 'pull_request_id']); |
|
230 | pyroutes.register('pullrequest_show', '/%(repo_name)s/pull-request/%(pull_request_id)s', ['repo_name', 'pull_request_id']); | |
229 | pyroutes.register('pullrequest_show_all', '/%(repo_name)s/pull-request', ['repo_name']); |
|
231 | pyroutes.register('pullrequest_show_all', '/%(repo_name)s/pull-request', ['repo_name']); | |
230 | pyroutes.register('pullrequest_show_all_data', '/%(repo_name)s/pull-request-data', ['repo_name']); |
|
232 | pyroutes.register('pullrequest_show_all_data', '/%(repo_name)s/pull-request-data', ['repo_name']); | |
231 | pyroutes.register('pullrequest_repo_refs', '/%(repo_name)s/pull-request/refs/%(target_repo_name)s', ['repo_name', 'target_repo_name']); |
|
233 | pyroutes.register('pullrequest_repo_refs', '/%(repo_name)s/pull-request/refs/%(target_repo_name)s', ['repo_name', 'target_repo_name']); | |
232 | pyroutes.register('pullrequest_repo_targets', '/%(repo_name)s/pull-request/repo-targets', ['repo_name']); |
|
234 | pyroutes.register('pullrequest_repo_targets', '/%(repo_name)s/pull-request/repo-targets', ['repo_name']); | |
233 | pyroutes.register('pullrequest_new', '/%(repo_name)s/pull-request/new', ['repo_name']); |
|
235 | pyroutes.register('pullrequest_new', '/%(repo_name)s/pull-request/new', ['repo_name']); | |
234 | pyroutes.register('pullrequest_create', '/%(repo_name)s/pull-request/create', ['repo_name']); |
|
236 | pyroutes.register('pullrequest_create', '/%(repo_name)s/pull-request/create', ['repo_name']); | |
235 | pyroutes.register('pullrequest_update', '/%(repo_name)s/pull-request/%(pull_request_id)s/update', ['repo_name', 'pull_request_id']); |
|
237 | pyroutes.register('pullrequest_update', '/%(repo_name)s/pull-request/%(pull_request_id)s/update', ['repo_name', 'pull_request_id']); | |
236 | pyroutes.register('pullrequest_merge', '/%(repo_name)s/pull-request/%(pull_request_id)s/merge', ['repo_name', 'pull_request_id']); |
|
238 | pyroutes.register('pullrequest_merge', '/%(repo_name)s/pull-request/%(pull_request_id)s/merge', ['repo_name', 'pull_request_id']); | |
237 | pyroutes.register('pullrequest_delete', '/%(repo_name)s/pull-request/%(pull_request_id)s/delete', ['repo_name', 'pull_request_id']); |
|
239 | pyroutes.register('pullrequest_delete', '/%(repo_name)s/pull-request/%(pull_request_id)s/delete', ['repo_name', 'pull_request_id']); | |
238 | pyroutes.register('pullrequest_comment_create', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment', ['repo_name', 'pull_request_id']); |
|
240 | pyroutes.register('pullrequest_comment_create', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment', ['repo_name', 'pull_request_id']); | |
239 | pyroutes.register('pullrequest_comment_delete', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment/%(comment_id)s/delete', ['repo_name', 'pull_request_id', 'comment_id']); |
|
241 | pyroutes.register('pullrequest_comment_delete', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment/%(comment_id)s/delete', ['repo_name', 'pull_request_id', 'comment_id']); | |
240 | pyroutes.register('edit_repo', '/%(repo_name)s/settings', ['repo_name']); |
|
242 | pyroutes.register('edit_repo', '/%(repo_name)s/settings', ['repo_name']); | |
241 | pyroutes.register('edit_repo_advanced', '/%(repo_name)s/settings/advanced', ['repo_name']); |
|
243 | pyroutes.register('edit_repo_advanced', '/%(repo_name)s/settings/advanced', ['repo_name']); | |
242 | pyroutes.register('edit_repo_advanced_archive', '/%(repo_name)s/settings/advanced/archive', ['repo_name']); |
|
244 | pyroutes.register('edit_repo_advanced_archive', '/%(repo_name)s/settings/advanced/archive', ['repo_name']); | |
243 | pyroutes.register('edit_repo_advanced_delete', '/%(repo_name)s/settings/advanced/delete', ['repo_name']); |
|
245 | pyroutes.register('edit_repo_advanced_delete', '/%(repo_name)s/settings/advanced/delete', ['repo_name']); | |
244 | pyroutes.register('edit_repo_advanced_locking', '/%(repo_name)s/settings/advanced/locking', ['repo_name']); |
|
246 | pyroutes.register('edit_repo_advanced_locking', '/%(repo_name)s/settings/advanced/locking', ['repo_name']); | |
245 | pyroutes.register('edit_repo_advanced_journal', '/%(repo_name)s/settings/advanced/journal', ['repo_name']); |
|
247 | pyroutes.register('edit_repo_advanced_journal', '/%(repo_name)s/settings/advanced/journal', ['repo_name']); | |
246 | pyroutes.register('edit_repo_advanced_fork', '/%(repo_name)s/settings/advanced/fork', ['repo_name']); |
|
248 | pyroutes.register('edit_repo_advanced_fork', '/%(repo_name)s/settings/advanced/fork', ['repo_name']); | |
247 | pyroutes.register('edit_repo_advanced_hooks', '/%(repo_name)s/settings/advanced/hooks', ['repo_name']); |
|
249 | pyroutes.register('edit_repo_advanced_hooks', '/%(repo_name)s/settings/advanced/hooks', ['repo_name']); | |
248 | pyroutes.register('edit_repo_caches', '/%(repo_name)s/settings/caches', ['repo_name']); |
|
250 | pyroutes.register('edit_repo_caches', '/%(repo_name)s/settings/caches', ['repo_name']); | |
249 | pyroutes.register('edit_repo_perms', '/%(repo_name)s/settings/permissions', ['repo_name']); |
|
251 | pyroutes.register('edit_repo_perms', '/%(repo_name)s/settings/permissions', ['repo_name']); | |
250 | pyroutes.register('edit_repo_perms_set_private', '/%(repo_name)s/settings/permissions/set_private', ['repo_name']); |
|
252 | pyroutes.register('edit_repo_perms_set_private', '/%(repo_name)s/settings/permissions/set_private', ['repo_name']); | |
251 | pyroutes.register('edit_repo_maintenance', '/%(repo_name)s/settings/maintenance', ['repo_name']); |
|
253 | pyroutes.register('edit_repo_maintenance', '/%(repo_name)s/settings/maintenance', ['repo_name']); | |
252 | pyroutes.register('edit_repo_maintenance_execute', '/%(repo_name)s/settings/maintenance/execute', ['repo_name']); |
|
254 | pyroutes.register('edit_repo_maintenance_execute', '/%(repo_name)s/settings/maintenance/execute', ['repo_name']); | |
253 | pyroutes.register('edit_repo_fields', '/%(repo_name)s/settings/fields', ['repo_name']); |
|
255 | pyroutes.register('edit_repo_fields', '/%(repo_name)s/settings/fields', ['repo_name']); | |
254 | pyroutes.register('edit_repo_fields_create', '/%(repo_name)s/settings/fields/create', ['repo_name']); |
|
256 | pyroutes.register('edit_repo_fields_create', '/%(repo_name)s/settings/fields/create', ['repo_name']); | |
255 | pyroutes.register('edit_repo_fields_delete', '/%(repo_name)s/settings/fields/%(field_id)s/delete', ['repo_name', 'field_id']); |
|
257 | pyroutes.register('edit_repo_fields_delete', '/%(repo_name)s/settings/fields/%(field_id)s/delete', ['repo_name', 'field_id']); | |
256 | pyroutes.register('repo_edit_toggle_locking', '/%(repo_name)s/settings/toggle_locking', ['repo_name']); |
|
258 | pyroutes.register('repo_edit_toggle_locking', '/%(repo_name)s/settings/toggle_locking', ['repo_name']); | |
257 | pyroutes.register('edit_repo_remote', '/%(repo_name)s/settings/remote', ['repo_name']); |
|
259 | pyroutes.register('edit_repo_remote', '/%(repo_name)s/settings/remote', ['repo_name']); | |
258 | pyroutes.register('edit_repo_remote_pull', '/%(repo_name)s/settings/remote/pull', ['repo_name']); |
|
260 | pyroutes.register('edit_repo_remote_pull', '/%(repo_name)s/settings/remote/pull', ['repo_name']); | |
259 | pyroutes.register('edit_repo_statistics', '/%(repo_name)s/settings/statistics', ['repo_name']); |
|
261 | pyroutes.register('edit_repo_statistics', '/%(repo_name)s/settings/statistics', ['repo_name']); | |
260 | pyroutes.register('edit_repo_statistics_reset', '/%(repo_name)s/settings/statistics/update', ['repo_name']); |
|
262 | pyroutes.register('edit_repo_statistics_reset', '/%(repo_name)s/settings/statistics/update', ['repo_name']); | |
261 | pyroutes.register('edit_repo_issuetracker', '/%(repo_name)s/settings/issue_trackers', ['repo_name']); |
|
263 | pyroutes.register('edit_repo_issuetracker', '/%(repo_name)s/settings/issue_trackers', ['repo_name']); | |
262 | pyroutes.register('edit_repo_issuetracker_test', '/%(repo_name)s/settings/issue_trackers/test', ['repo_name']); |
|
264 | pyroutes.register('edit_repo_issuetracker_test', '/%(repo_name)s/settings/issue_trackers/test', ['repo_name']); | |
263 | pyroutes.register('edit_repo_issuetracker_delete', '/%(repo_name)s/settings/issue_trackers/delete', ['repo_name']); |
|
265 | pyroutes.register('edit_repo_issuetracker_delete', '/%(repo_name)s/settings/issue_trackers/delete', ['repo_name']); | |
264 | pyroutes.register('edit_repo_issuetracker_update', '/%(repo_name)s/settings/issue_trackers/update', ['repo_name']); |
|
266 | pyroutes.register('edit_repo_issuetracker_update', '/%(repo_name)s/settings/issue_trackers/update', ['repo_name']); | |
265 | pyroutes.register('edit_repo_vcs', '/%(repo_name)s/settings/vcs', ['repo_name']); |
|
267 | pyroutes.register('edit_repo_vcs', '/%(repo_name)s/settings/vcs', ['repo_name']); | |
266 | pyroutes.register('edit_repo_vcs_update', '/%(repo_name)s/settings/vcs/update', ['repo_name']); |
|
268 | pyroutes.register('edit_repo_vcs_update', '/%(repo_name)s/settings/vcs/update', ['repo_name']); | |
267 | pyroutes.register('edit_repo_vcs_svn_pattern_delete', '/%(repo_name)s/settings/vcs/svn_pattern/delete', ['repo_name']); |
|
269 | pyroutes.register('edit_repo_vcs_svn_pattern_delete', '/%(repo_name)s/settings/vcs/svn_pattern/delete', ['repo_name']); | |
268 | pyroutes.register('repo_reviewers', '/%(repo_name)s/settings/review/rules', ['repo_name']); |
|
270 | pyroutes.register('repo_reviewers', '/%(repo_name)s/settings/review/rules', ['repo_name']); | |
269 | pyroutes.register('repo_default_reviewers_data', '/%(repo_name)s/settings/review/default-reviewers', ['repo_name']); |
|
271 | pyroutes.register('repo_default_reviewers_data', '/%(repo_name)s/settings/review/default-reviewers', ['repo_name']); | |
270 | pyroutes.register('edit_repo_strip', '/%(repo_name)s/settings/strip', ['repo_name']); |
|
272 | pyroutes.register('edit_repo_strip', '/%(repo_name)s/settings/strip', ['repo_name']); | |
271 | pyroutes.register('strip_check', '/%(repo_name)s/settings/strip_check', ['repo_name']); |
|
273 | pyroutes.register('strip_check', '/%(repo_name)s/settings/strip_check', ['repo_name']); | |
272 | pyroutes.register('strip_execute', '/%(repo_name)s/settings/strip_execute', ['repo_name']); |
|
274 | pyroutes.register('strip_execute', '/%(repo_name)s/settings/strip_execute', ['repo_name']); | |
273 | pyroutes.register('edit_repo_audit_logs', '/%(repo_name)s/settings/audit_logs', ['repo_name']); |
|
275 | pyroutes.register('edit_repo_audit_logs', '/%(repo_name)s/settings/audit_logs', ['repo_name']); | |
274 | pyroutes.register('rss_feed_home', '/%(repo_name)s/feed-rss', ['repo_name']); |
|
276 | pyroutes.register('rss_feed_home', '/%(repo_name)s/feed-rss', ['repo_name']); | |
275 | pyroutes.register('atom_feed_home', '/%(repo_name)s/feed-atom', ['repo_name']); |
|
277 | pyroutes.register('atom_feed_home', '/%(repo_name)s/feed-atom', ['repo_name']); | |
276 | pyroutes.register('rss_feed_home_old', '/%(repo_name)s/feed/rss', ['repo_name']); |
|
278 | pyroutes.register('rss_feed_home_old', '/%(repo_name)s/feed/rss', ['repo_name']); | |
277 | pyroutes.register('atom_feed_home_old', '/%(repo_name)s/feed/atom', ['repo_name']); |
|
279 | pyroutes.register('atom_feed_home_old', '/%(repo_name)s/feed/atom', ['repo_name']); | |
278 | pyroutes.register('repo_summary', '/%(repo_name)s', ['repo_name']); |
|
280 | pyroutes.register('repo_summary', '/%(repo_name)s', ['repo_name']); | |
279 | pyroutes.register('repo_summary_slash', '/%(repo_name)s/', ['repo_name']); |
|
281 | pyroutes.register('repo_summary_slash', '/%(repo_name)s/', ['repo_name']); | |
280 | pyroutes.register('edit_repo_group', '/%(repo_group_name)s/_edit', ['repo_group_name']); |
|
282 | pyroutes.register('edit_repo_group', '/%(repo_group_name)s/_edit', ['repo_group_name']); | |
281 | pyroutes.register('edit_repo_group_advanced', '/%(repo_group_name)s/_settings/advanced', ['repo_group_name']); |
|
283 | pyroutes.register('edit_repo_group_advanced', '/%(repo_group_name)s/_settings/advanced', ['repo_group_name']); | |
282 | pyroutes.register('edit_repo_group_advanced_delete', '/%(repo_group_name)s/_settings/advanced/delete', ['repo_group_name']); |
|
284 | pyroutes.register('edit_repo_group_advanced_delete', '/%(repo_group_name)s/_settings/advanced/delete', ['repo_group_name']); | |
283 | pyroutes.register('edit_repo_group_perms', '/%(repo_group_name)s/_settings/permissions', ['repo_group_name']); |
|
285 | pyroutes.register('edit_repo_group_perms', '/%(repo_group_name)s/_settings/permissions', ['repo_group_name']); | |
284 | pyroutes.register('edit_repo_group_perms_update', '/%(repo_group_name)s/_settings/permissions/update', ['repo_group_name']); |
|
286 | pyroutes.register('edit_repo_group_perms_update', '/%(repo_group_name)s/_settings/permissions/update', ['repo_group_name']); | |
285 | pyroutes.register('repo_group_home', '/%(repo_group_name)s', ['repo_group_name']); |
|
287 | pyroutes.register('repo_group_home', '/%(repo_group_name)s', ['repo_group_name']); | |
286 | pyroutes.register('repo_group_home_slash', '/%(repo_group_name)s/', ['repo_group_name']); |
|
288 | pyroutes.register('repo_group_home_slash', '/%(repo_group_name)s/', ['repo_group_name']); | |
287 | pyroutes.register('user_group_members_data', '/_admin/user_groups/%(user_group_id)s/members', ['user_group_id']); |
|
289 | pyroutes.register('user_group_members_data', '/_admin/user_groups/%(user_group_id)s/members', ['user_group_id']); | |
288 | pyroutes.register('edit_user_group_perms_summary', '/_admin/user_groups/%(user_group_id)s/edit/permissions_summary', ['user_group_id']); |
|
290 | pyroutes.register('edit_user_group_perms_summary', '/_admin/user_groups/%(user_group_id)s/edit/permissions_summary', ['user_group_id']); | |
289 | pyroutes.register('edit_user_group_perms_summary_json', '/_admin/user_groups/%(user_group_id)s/edit/permissions_summary/json', ['user_group_id']); |
|
291 | pyroutes.register('edit_user_group_perms_summary_json', '/_admin/user_groups/%(user_group_id)s/edit/permissions_summary/json', ['user_group_id']); | |
290 | pyroutes.register('edit_user_group', '/_admin/user_groups/%(user_group_id)s/edit', ['user_group_id']); |
|
292 | pyroutes.register('edit_user_group', '/_admin/user_groups/%(user_group_id)s/edit', ['user_group_id']); | |
291 | pyroutes.register('user_groups_update', '/_admin/user_groups/%(user_group_id)s/update', ['user_group_id']); |
|
293 | pyroutes.register('user_groups_update', '/_admin/user_groups/%(user_group_id)s/update', ['user_group_id']); | |
292 | pyroutes.register('edit_user_group_global_perms', '/_admin/user_groups/%(user_group_id)s/edit/global_permissions', ['user_group_id']); |
|
294 | pyroutes.register('edit_user_group_global_perms', '/_admin/user_groups/%(user_group_id)s/edit/global_permissions', ['user_group_id']); | |
293 | pyroutes.register('edit_user_group_global_perms_update', '/_admin/user_groups/%(user_group_id)s/edit/global_permissions/update', ['user_group_id']); |
|
295 | pyroutes.register('edit_user_group_global_perms_update', '/_admin/user_groups/%(user_group_id)s/edit/global_permissions/update', ['user_group_id']); | |
294 | pyroutes.register('edit_user_group_perms', '/_admin/user_groups/%(user_group_id)s/edit/permissions', ['user_group_id']); |
|
296 | pyroutes.register('edit_user_group_perms', '/_admin/user_groups/%(user_group_id)s/edit/permissions', ['user_group_id']); | |
295 | pyroutes.register('edit_user_group_perms_update', '/_admin/user_groups/%(user_group_id)s/edit/permissions/update', ['user_group_id']); |
|
297 | pyroutes.register('edit_user_group_perms_update', '/_admin/user_groups/%(user_group_id)s/edit/permissions/update', ['user_group_id']); | |
296 | pyroutes.register('edit_user_group_advanced', '/_admin/user_groups/%(user_group_id)s/edit/advanced', ['user_group_id']); |
|
298 | pyroutes.register('edit_user_group_advanced', '/_admin/user_groups/%(user_group_id)s/edit/advanced', ['user_group_id']); | |
297 | pyroutes.register('edit_user_group_advanced_sync', '/_admin/user_groups/%(user_group_id)s/edit/advanced/sync', ['user_group_id']); |
|
299 | pyroutes.register('edit_user_group_advanced_sync', '/_admin/user_groups/%(user_group_id)s/edit/advanced/sync', ['user_group_id']); | |
298 | pyroutes.register('user_groups_delete', '/_admin/user_groups/%(user_group_id)s/delete', ['user_group_id']); |
|
300 | pyroutes.register('user_groups_delete', '/_admin/user_groups/%(user_group_id)s/delete', ['user_group_id']); | |
299 | pyroutes.register('search', '/_admin/search', []); |
|
301 | pyroutes.register('search', '/_admin/search', []); | |
300 | pyroutes.register('search_repo', '/%(repo_name)s/_search', ['repo_name']); |
|
302 | pyroutes.register('search_repo', '/%(repo_name)s/_search', ['repo_name']); | |
301 | pyroutes.register('search_repo_alt', '/%(repo_name)s/search', ['repo_name']); |
|
303 | pyroutes.register('search_repo_alt', '/%(repo_name)s/search', ['repo_name']); | |
302 | pyroutes.register('search_repo_group', '/%(repo_group_name)s/_search', ['repo_group_name']); |
|
304 | pyroutes.register('search_repo_group', '/%(repo_group_name)s/_search', ['repo_group_name']); | |
303 | pyroutes.register('user_profile', '/_profiles/%(username)s', ['username']); |
|
305 | pyroutes.register('user_profile', '/_profiles/%(username)s', ['username']); | |
304 | pyroutes.register('user_group_profile', '/_profile_user_group/%(user_group_name)s', ['user_group_name']); |
|
306 | pyroutes.register('user_group_profile', '/_profile_user_group/%(user_group_name)s', ['user_group_name']); | |
305 | pyroutes.register('my_account_profile', '/_admin/my_account/profile', []); |
|
307 | pyroutes.register('my_account_profile', '/_admin/my_account/profile', []); | |
306 | pyroutes.register('my_account_edit', '/_admin/my_account/edit', []); |
|
308 | pyroutes.register('my_account_edit', '/_admin/my_account/edit', []); | |
307 | pyroutes.register('my_account_update', '/_admin/my_account/update', []); |
|
309 | pyroutes.register('my_account_update', '/_admin/my_account/update', []); | |
308 | pyroutes.register('my_account_password', '/_admin/my_account/password', []); |
|
310 | pyroutes.register('my_account_password', '/_admin/my_account/password', []); | |
309 | pyroutes.register('my_account_password_update', '/_admin/my_account/password/update', []); |
|
311 | pyroutes.register('my_account_password_update', '/_admin/my_account/password/update', []); | |
310 | pyroutes.register('my_account_auth_tokens_delete', '/_admin/my_account/auth_tokens/delete', []); |
|
312 | pyroutes.register('my_account_auth_tokens_delete', '/_admin/my_account/auth_tokens/delete', []); | |
311 | pyroutes.register('my_account_ssh_keys', '/_admin/my_account/ssh_keys', []); |
|
313 | pyroutes.register('my_account_ssh_keys', '/_admin/my_account/ssh_keys', []); | |
312 | pyroutes.register('my_account_ssh_keys_generate', '/_admin/my_account/ssh_keys/generate', []); |
|
314 | pyroutes.register('my_account_ssh_keys_generate', '/_admin/my_account/ssh_keys/generate', []); | |
313 | pyroutes.register('my_account_ssh_keys_add', '/_admin/my_account/ssh_keys/new', []); |
|
315 | pyroutes.register('my_account_ssh_keys_add', '/_admin/my_account/ssh_keys/new', []); | |
314 | pyroutes.register('my_account_ssh_keys_delete', '/_admin/my_account/ssh_keys/delete', []); |
|
316 | pyroutes.register('my_account_ssh_keys_delete', '/_admin/my_account/ssh_keys/delete', []); | |
315 | pyroutes.register('my_account_user_group_membership', '/_admin/my_account/user_group_membership', []); |
|
317 | pyroutes.register('my_account_user_group_membership', '/_admin/my_account/user_group_membership', []); | |
316 | pyroutes.register('my_account_emails', '/_admin/my_account/emails', []); |
|
318 | pyroutes.register('my_account_emails', '/_admin/my_account/emails', []); | |
317 | pyroutes.register('my_account_emails_add', '/_admin/my_account/emails/new', []); |
|
319 | pyroutes.register('my_account_emails_add', '/_admin/my_account/emails/new', []); | |
318 | pyroutes.register('my_account_emails_delete', '/_admin/my_account/emails/delete', []); |
|
320 | pyroutes.register('my_account_emails_delete', '/_admin/my_account/emails/delete', []); | |
319 | pyroutes.register('my_account_repos', '/_admin/my_account/repos', []); |
|
321 | pyroutes.register('my_account_repos', '/_admin/my_account/repos', []); | |
320 | pyroutes.register('my_account_watched', '/_admin/my_account/watched', []); |
|
322 | pyroutes.register('my_account_watched', '/_admin/my_account/watched', []); | |
321 | pyroutes.register('my_account_bookmarks', '/_admin/my_account/bookmarks', []); |
|
323 | pyroutes.register('my_account_bookmarks', '/_admin/my_account/bookmarks', []); | |
322 | pyroutes.register('my_account_bookmarks_update', '/_admin/my_account/bookmarks/update', []); |
|
324 | pyroutes.register('my_account_bookmarks_update', '/_admin/my_account/bookmarks/update', []); | |
323 | pyroutes.register('my_account_goto_bookmark', '/_admin/my_account/bookmark/%(bookmark_id)s', ['bookmark_id']); |
|
325 | pyroutes.register('my_account_goto_bookmark', '/_admin/my_account/bookmark/%(bookmark_id)s', ['bookmark_id']); | |
324 | pyroutes.register('my_account_perms', '/_admin/my_account/perms', []); |
|
326 | pyroutes.register('my_account_perms', '/_admin/my_account/perms', []); | |
325 | pyroutes.register('my_account_notifications', '/_admin/my_account/notifications', []); |
|
327 | pyroutes.register('my_account_notifications', '/_admin/my_account/notifications', []); | |
326 | pyroutes.register('my_account_notifications_toggle_visibility', '/_admin/my_account/toggle_visibility', []); |
|
328 | pyroutes.register('my_account_notifications_toggle_visibility', '/_admin/my_account/toggle_visibility', []); | |
327 | pyroutes.register('my_account_pullrequests', '/_admin/my_account/pull_requests', []); |
|
329 | pyroutes.register('my_account_pullrequests', '/_admin/my_account/pull_requests', []); | |
328 | pyroutes.register('my_account_pullrequests_data', '/_admin/my_account/pull_requests/data', []); |
|
330 | pyroutes.register('my_account_pullrequests_data', '/_admin/my_account/pull_requests/data', []); | |
329 | pyroutes.register('notifications_show_all', '/_admin/notifications', []); |
|
331 | pyroutes.register('notifications_show_all', '/_admin/notifications', []); | |
330 | pyroutes.register('notifications_mark_all_read', '/_admin/notifications/mark_all_read', []); |
|
332 | pyroutes.register('notifications_mark_all_read', '/_admin/notifications/mark_all_read', []); | |
331 | pyroutes.register('notifications_show', '/_admin/notifications/%(notification_id)s', ['notification_id']); |
|
333 | pyroutes.register('notifications_show', '/_admin/notifications/%(notification_id)s', ['notification_id']); | |
332 | pyroutes.register('notifications_update', '/_admin/notifications/%(notification_id)s/update', ['notification_id']); |
|
334 | pyroutes.register('notifications_update', '/_admin/notifications/%(notification_id)s/update', ['notification_id']); | |
333 | pyroutes.register('notifications_delete', '/_admin/notifications/%(notification_id)s/delete', ['notification_id']); |
|
335 | pyroutes.register('notifications_delete', '/_admin/notifications/%(notification_id)s/delete', ['notification_id']); | |
334 | pyroutes.register('my_account_notifications_test_channelstream', '/_admin/my_account/test_channelstream', []); |
|
336 | pyroutes.register('my_account_notifications_test_channelstream', '/_admin/my_account/test_channelstream', []); | |
335 | pyroutes.register('gists_show', '/_admin/gists', []); |
|
337 | pyroutes.register('gists_show', '/_admin/gists', []); | |
336 | pyroutes.register('gists_new', '/_admin/gists/new', []); |
|
338 | pyroutes.register('gists_new', '/_admin/gists/new', []); | |
337 | pyroutes.register('gists_create', '/_admin/gists/create', []); |
|
339 | pyroutes.register('gists_create', '/_admin/gists/create', []); | |
338 | pyroutes.register('gist_show', '/_admin/gists/%(gist_id)s', ['gist_id']); |
|
340 | pyroutes.register('gist_show', '/_admin/gists/%(gist_id)s', ['gist_id']); | |
339 | pyroutes.register('gist_delete', '/_admin/gists/%(gist_id)s/delete', ['gist_id']); |
|
341 | pyroutes.register('gist_delete', '/_admin/gists/%(gist_id)s/delete', ['gist_id']); | |
340 | pyroutes.register('gist_edit', '/_admin/gists/%(gist_id)s/edit', ['gist_id']); |
|
342 | pyroutes.register('gist_edit', '/_admin/gists/%(gist_id)s/edit', ['gist_id']); | |
341 | pyroutes.register('gist_edit_check_revision', '/_admin/gists/%(gist_id)s/edit/check_revision', ['gist_id']); |
|
343 | pyroutes.register('gist_edit_check_revision', '/_admin/gists/%(gist_id)s/edit/check_revision', ['gist_id']); | |
342 | pyroutes.register('gist_update', '/_admin/gists/%(gist_id)s/update', ['gist_id']); |
|
344 | pyroutes.register('gist_update', '/_admin/gists/%(gist_id)s/update', ['gist_id']); | |
343 | pyroutes.register('gist_show_rev', '/_admin/gists/%(gist_id)s/%(revision)s', ['gist_id', 'revision']); |
|
345 | pyroutes.register('gist_show_rev', '/_admin/gists/%(gist_id)s/%(revision)s', ['gist_id', 'revision']); | |
344 | pyroutes.register('gist_show_formatted', '/_admin/gists/%(gist_id)s/%(revision)s/%(format)s', ['gist_id', 'revision', 'format']); |
|
346 | pyroutes.register('gist_show_formatted', '/_admin/gists/%(gist_id)s/%(revision)s/%(format)s', ['gist_id', 'revision', 'format']); | |
345 | pyroutes.register('gist_show_formatted_path', '/_admin/gists/%(gist_id)s/%(revision)s/%(format)s/%(f_path)s', ['gist_id', 'revision', 'format', 'f_path']); |
|
347 | pyroutes.register('gist_show_formatted_path', '/_admin/gists/%(gist_id)s/%(revision)s/%(format)s/%(f_path)s', ['gist_id', 'revision', 'format', 'f_path']); | |
346 | pyroutes.register('debug_style_home', '/_admin/debug_style', []); |
|
348 | pyroutes.register('debug_style_home', '/_admin/debug_style', []); | |
347 | pyroutes.register('debug_style_email', '/_admin/debug_style/email/%(email_id)s', ['email_id']); |
|
349 | pyroutes.register('debug_style_email', '/_admin/debug_style/email/%(email_id)s', ['email_id']); | |
348 | pyroutes.register('debug_style_email_plain_rendered', '/_admin/debug_style/email-rendered/%(email_id)s', ['email_id']); |
|
350 | pyroutes.register('debug_style_email_plain_rendered', '/_admin/debug_style/email-rendered/%(email_id)s', ['email_id']); | |
349 | pyroutes.register('debug_style_template', '/_admin/debug_style/t/%(t_path)s', ['t_path']); |
|
351 | pyroutes.register('debug_style_template', '/_admin/debug_style/t/%(t_path)s', ['t_path']); | |
350 | pyroutes.register('apiv2', '/_admin/api', []); |
|
352 | pyroutes.register('apiv2', '/_admin/api', []); | |
351 | pyroutes.register('admin_settings_license', '/_admin/settings/license', []); |
|
353 | pyroutes.register('admin_settings_license', '/_admin/settings/license', []); | |
352 | pyroutes.register('admin_settings_license_unlock', '/_admin/settings/license_unlock', []); |
|
354 | pyroutes.register('admin_settings_license_unlock', '/_admin/settings/license_unlock', []); | |
353 | pyroutes.register('login', '/_admin/login', []); |
|
355 | pyroutes.register('login', '/_admin/login', []); | |
354 | pyroutes.register('register', '/_admin/register', []); |
|
356 | pyroutes.register('register', '/_admin/register', []); | |
355 | pyroutes.register('repo_reviewers_review_rule_new', '/%(repo_name)s/settings/review/rules/new', ['repo_name']); |
|
357 | pyroutes.register('repo_reviewers_review_rule_new', '/%(repo_name)s/settings/review/rules/new', ['repo_name']); | |
356 | pyroutes.register('repo_reviewers_review_rule_edit', '/%(repo_name)s/settings/review/rules/%(rule_id)s', ['repo_name', 'rule_id']); |
|
358 | pyroutes.register('repo_reviewers_review_rule_edit', '/%(repo_name)s/settings/review/rules/%(rule_id)s', ['repo_name', 'rule_id']); | |
357 | pyroutes.register('repo_reviewers_review_rule_delete', '/%(repo_name)s/settings/review/rules/%(rule_id)s/delete', ['repo_name', 'rule_id']); |
|
359 | pyroutes.register('repo_reviewers_review_rule_delete', '/%(repo_name)s/settings/review/rules/%(rule_id)s/delete', ['repo_name', 'rule_id']); | |
358 | pyroutes.register('plugin_admin_chat', '/_admin/plugin_admin_chat/%(action)s', ['action']); |
|
360 | pyroutes.register('plugin_admin_chat', '/_admin/plugin_admin_chat/%(action)s', ['action']); | |
359 | pyroutes.register('edit_user_auth_tokens', '/_admin/users/%(user_id)s/edit/auth_tokens', ['user_id']); |
|
361 | pyroutes.register('edit_user_auth_tokens', '/_admin/users/%(user_id)s/edit/auth_tokens', ['user_id']); | |
360 | pyroutes.register('edit_user_auth_tokens_add', '/_admin/users/%(user_id)s/edit/auth_tokens/new', ['user_id']); |
|
362 | pyroutes.register('edit_user_auth_tokens_add', '/_admin/users/%(user_id)s/edit/auth_tokens/new', ['user_id']); | |
361 | pyroutes.register('admin_settings_scheduler_show_tasks', '/_admin/settings/scheduler/_tasks', []); |
|
363 | pyroutes.register('admin_settings_scheduler_show_tasks', '/_admin/settings/scheduler/_tasks', []); | |
362 | pyroutes.register('admin_settings_scheduler_show_all', '/_admin/settings/scheduler', []); |
|
364 | pyroutes.register('admin_settings_scheduler_show_all', '/_admin/settings/scheduler', []); | |
363 | pyroutes.register('admin_settings_scheduler_new', '/_admin/settings/scheduler/new', []); |
|
365 | pyroutes.register('admin_settings_scheduler_new', '/_admin/settings/scheduler/new', []); | |
364 | pyroutes.register('admin_settings_scheduler_create', '/_admin/settings/scheduler/create', []); |
|
366 | pyroutes.register('admin_settings_scheduler_create', '/_admin/settings/scheduler/create', []); | |
365 | pyroutes.register('admin_settings_scheduler_edit', '/_admin/settings/scheduler/%(schedule_id)s', ['schedule_id']); |
|
367 | pyroutes.register('admin_settings_scheduler_edit', '/_admin/settings/scheduler/%(schedule_id)s', ['schedule_id']); | |
366 | pyroutes.register('admin_settings_scheduler_update', '/_admin/settings/scheduler/%(schedule_id)s/update', ['schedule_id']); |
|
368 | pyroutes.register('admin_settings_scheduler_update', '/_admin/settings/scheduler/%(schedule_id)s/update', ['schedule_id']); | |
367 | pyroutes.register('admin_settings_scheduler_delete', '/_admin/settings/scheduler/%(schedule_id)s/delete', ['schedule_id']); |
|
369 | pyroutes.register('admin_settings_scheduler_delete', '/_admin/settings/scheduler/%(schedule_id)s/delete', ['schedule_id']); | |
368 | pyroutes.register('admin_settings_scheduler_execute', '/_admin/settings/scheduler/%(schedule_id)s/execute', ['schedule_id']); |
|
370 | pyroutes.register('admin_settings_scheduler_execute', '/_admin/settings/scheduler/%(schedule_id)s/execute', ['schedule_id']); | |
369 | pyroutes.register('admin_settings_automation', '/_admin/settings/automation', []); |
|
371 | pyroutes.register('admin_settings_automation', '/_admin/settings/automation', []); | |
370 | pyroutes.register('admin_settings_automation_update', '/_admin/settings/automation/%(entry_id)s/update', ['entry_id']); |
|
372 | pyroutes.register('admin_settings_automation_update', '/_admin/settings/automation/%(entry_id)s/update', ['entry_id']); | |
371 | pyroutes.register('admin_permissions_branch', '/_admin/permissions/branch', []); |
|
373 | pyroutes.register('admin_permissions_branch', '/_admin/permissions/branch', []); | |
372 | pyroutes.register('admin_permissions_branch_update', '/_admin/permissions/branch/update', []); |
|
374 | pyroutes.register('admin_permissions_branch_update', '/_admin/permissions/branch/update', []); | |
373 | pyroutes.register('my_account_auth_tokens', '/_admin/my_account/auth_tokens', []); |
|
375 | pyroutes.register('my_account_auth_tokens', '/_admin/my_account/auth_tokens', []); | |
374 | pyroutes.register('my_account_auth_tokens_add', '/_admin/my_account/auth_tokens/new', []); |
|
376 | pyroutes.register('my_account_auth_tokens_add', '/_admin/my_account/auth_tokens/new', []); | |
375 | pyroutes.register('my_account_external_identity', '/_admin/my_account/external-identity', []); |
|
377 | pyroutes.register('my_account_external_identity', '/_admin/my_account/external-identity', []); | |
376 | pyroutes.register('my_account_external_identity_delete', '/_admin/my_account/external-identity/delete', []); |
|
378 | pyroutes.register('my_account_external_identity_delete', '/_admin/my_account/external-identity/delete', []); | |
377 | pyroutes.register('repo_artifacts_list', '/%(repo_name)s/artifacts', ['repo_name']); |
|
379 | pyroutes.register('repo_artifacts_list', '/%(repo_name)s/artifacts', ['repo_name']); | |
378 | pyroutes.register('repo_artifacts_data', '/%(repo_name)s/artifacts_data', ['repo_name']); |
|
380 | pyroutes.register('repo_artifacts_data', '/%(repo_name)s/artifacts_data', ['repo_name']); | |
379 | pyroutes.register('repo_artifacts_new', '/%(repo_name)s/artifacts/new', ['repo_name']); |
|
381 | pyroutes.register('repo_artifacts_new', '/%(repo_name)s/artifacts/new', ['repo_name']); | |
380 | pyroutes.register('repo_artifacts_get', '/%(repo_name)s/artifacts/download/%(uid)s', ['repo_name', 'uid']); |
|
382 | pyroutes.register('repo_artifacts_get', '/%(repo_name)s/artifacts/download/%(uid)s', ['repo_name', 'uid']); | |
381 | pyroutes.register('repo_artifacts_store', '/%(repo_name)s/artifacts/store', ['repo_name']); |
|
383 | pyroutes.register('repo_artifacts_store', '/%(repo_name)s/artifacts/store', ['repo_name']); | |
382 | pyroutes.register('repo_artifacts_info', '/%(repo_name)s/artifacts/info/%(uid)s', ['repo_name', 'uid']); |
|
384 | pyroutes.register('repo_artifacts_info', '/%(repo_name)s/artifacts/info/%(uid)s', ['repo_name', 'uid']); | |
383 | pyroutes.register('repo_artifacts_delete', '/%(repo_name)s/artifacts/delete/%(uid)s', ['repo_name', 'uid']); |
|
385 | pyroutes.register('repo_artifacts_delete', '/%(repo_name)s/artifacts/delete/%(uid)s', ['repo_name', 'uid']); | |
384 | pyroutes.register('repo_artifacts_update', '/%(repo_name)s/artifacts/update/%(uid)s', ['repo_name', 'uid']); |
|
386 | pyroutes.register('repo_artifacts_update', '/%(repo_name)s/artifacts/update/%(uid)s', ['repo_name', 'uid']); | |
385 | pyroutes.register('repo_automation', '/%(repo_name)s/settings/automation', ['repo_name']); |
|
387 | pyroutes.register('repo_automation', '/%(repo_name)s/settings/automation', ['repo_name']); | |
386 | pyroutes.register('repo_automation_update', '/%(repo_name)s/settings/automation/%(entry_id)s/update', ['repo_name', 'entry_id']); |
|
388 | pyroutes.register('repo_automation_update', '/%(repo_name)s/settings/automation/%(entry_id)s/update', ['repo_name', 'entry_id']); | |
387 | pyroutes.register('edit_repo_remote_push', '/%(repo_name)s/settings/remote/push', ['repo_name']); |
|
389 | pyroutes.register('edit_repo_remote_push', '/%(repo_name)s/settings/remote/push', ['repo_name']); | |
388 | pyroutes.register('edit_repo_perms_branch', '/%(repo_name)s/settings/branch_permissions', ['repo_name']); |
|
390 | pyroutes.register('edit_repo_perms_branch', '/%(repo_name)s/settings/branch_permissions', ['repo_name']); | |
389 | pyroutes.register('edit_repo_perms_branch_delete', '/%(repo_name)s/settings/branch_permissions/%(rule_id)s/delete', ['repo_name', 'rule_id']); |
|
391 | pyroutes.register('edit_repo_perms_branch_delete', '/%(repo_name)s/settings/branch_permissions/%(rule_id)s/delete', ['repo_name', 'rule_id']); | |
390 | } |
|
392 | } |
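For reference, routes registered this way are consumed on the client through pyroutes' companion url() helper rather than hand-built paths. A minimal sketch, assuming that helper is loaded next to the register() calls above; the route name is taken from the list above, while the repository name is a hypothetical example value:

    // Turn a registered route name plus parameters into a concrete URL.
    var artifactsUrl = pyroutes.url('repo_artifacts_list', {'repo_name': 'my-repo'});
    // artifactsUrl would resolve to '/my-repo/artifacts'

    // Fetch it like any other endpoint, e.g. with jQuery:
    $.getJSON(artifactsUrl, function (data) {
        console.log('artifacts payload', data);
    });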
@@ -1,121 +1,221 b'' | |||||
1 | <%inherit file="/base/base.mako"/> |
|
1 | <%inherit file="/base/base.mako"/> | |
2 |
|
2 | |||
3 |
|
3 | |||
4 | <%def name="menu_bar_subnav()"> |
|
4 | <%def name="menu_bar_subnav()"> | |
5 | % if c.repo_group: |
|
5 | % if c.repo_group: | |
6 | ${self.repo_group_menu(active='home')} |
|
6 | ${self.repo_group_menu(active='home')} | |
7 | % endif |
|
7 | % endif | |
8 | </%def> |
|
8 | </%def> | |
9 |
|
9 | |||
10 |
|
10 | |||
11 | <%def name="main()"> |
|
11 | <%def name="main()"> | |
12 | <div class="box"> |
|
12 | <div class="box"> | |
13 | <!-- box / title --> |
|
13 | <!-- box / title --> | |
14 | <div class="title"> |
|
14 | <div class="title"> | |
15 |
|
15 | |||
16 | </div> |
|
16 | </div> | |
17 | <!-- end box / title --> |
|
17 | <!-- end box / title --> | |
|
18 | <div id="no_grid_data" class="table" style="display: none"> | |||
|
19 | <h2 class="no-object-border"> | |||
|
20 | ${_('No repositories or repositories groups exists here.')} | |||
|
21 | </h2> | |||
|
22 | </div> | |||
|
23 | ||||
18 | <div class="table"> |
|
24 | <div class="table"> | |
19 | <div id="groups_list_wrap"> |
|
25 | <div id="groups_list_wrap" style="min-height: 200px;"> | |
20 | <table id="group_list_table" class="display" style="width: 100%"></table> |
|
26 | <table id="group_list_table" class="display" style="width: 100%;"></table> | |
21 | </div> |
|
27 | </div> | |
22 | </div> |
|
28 | </div> | |
23 |
|
29 | |||
24 | <div class="table"> |
|
30 | <div class="table"> | |
25 | <div id="repos_list_wrap"> |
|
31 | <div id="repos_list_wrap" style="min-height: 200px;"> | |
26 | <table id="repo_list_table" class="display" style="width: 100%"></table> |
|
32 | <table id="repo_list_table" class="display" style="width: 100%;"></table> | |
27 | </div> |
|
33 | </div> | |
28 | </div> |
|
34 | </div> | |
29 |
|
35 | |||
30 | ## no repository groups and repos present, show something to the users |
|
|||
31 | % if c.repo_groups_data == '[]' and c.repos_data == '[]': |
|
|||
32 | <div class="table"> |
|
|||
33 | <h2 class="no-object-border"> |
|
|||
34 | ${_('No repositories or repositories groups exists here.')} |
|
|||
35 | </h2> |
|
|||
36 | </div> |
|
|||
37 | % endif |
|
|||
38 |
|
||||
39 | </div> |
|
36 | </div> | |
40 | <script> |
|
37 | <script> | |
41 |
|
|
38 | $(document).ready(function () { | |
42 |
|
39 | |||
43 | // repo group list |
|
40 | // repo group list | |
44 | % if c.repo_groups_data != '[]': |
|
41 | var $groupListTable = $('#group_list_table'); | |
45 | $('#group_list_table').DataTable({ |
|
42 | ||
46 | data: ${c.repo_groups_data|n}, |
|
43 | $groupListTable.DataTable({ | |
|
44 | processing: true, | |||
|
45 | serverSide: true, | |||
|
46 | ajax: { | |||
|
47 | "url": "${h.route_path('main_page_repo_groups_data')}", | |||
|
48 | "data": function (d) { | |||
|
49 | % if c.repo_group: | |||
|
50 | d.repo_group_id = ${c.repo_group.group_id} | |||
|
51 | % endif | |||
|
52 | } | |||
|
53 | }, | |||
47 | dom: 'rtp', |
|
54 | dom: 'rtp', | |
48 | pageLength: ${c.visual.dashboard_items}, |
|
55 | pageLength: ${c.visual.dashboard_items}, | |
49 | order: [[ 0, "asc" ]], |
|
56 | order: [[0, "asc"]], | |
50 | columns: [ |
|
57 | columns: [ | |
51 | { data: {"_": "name", |
|
58 | { | |
52 | "sort": "name_raw"}, title: "${_('Name')}", className: "truncate-wrap td-grid-name" }, |
|
59 | data: { | |
|
60 | "_": "name", | |||
|
61 | "sort": "name_raw" | |||
|
62 | }, title: "${_('Name')}", className: "truncate-wrap td-grid-name" | |||
|
63 | }, | |||
53 | { data: 'menu', "bSortable": false, className: "quick_repo_menu" }, |
|
64 | {data: 'menu', "bSortable": false, className: "quick_repo_menu"}, | |
54 | { data: {"_": "desc", |
|
65 | { | |
55 | "sort": "desc"}, title: "${_('Description')}", className: "td-description" }, |
|
66 | data: { | |
56 | { data: {"_": "last_change", |
|
|
67 | "_": "desc", | |
|
68 | "sort": "desc" | |||
|
69 | }, title: "${_('Description')}", className: "td-description" | |||
|
70 | }, | |||
|
71 | { | |||
|
72 | data: { | |||
|
73 | "_": "last_change", | |||
57 | "sort": "last_change_raw", |
|
74 | "sort": "last_change_raw", | |
58 | "type": Number}, title: "${_('Last Change')}", className: "td-time" }, |
|
75 | "type": Number | |
59 | { data: {"_": "last_changeset", |
|
76 | }, title: "${_('Last Change')}", className: "td-time" | |
|
77 | }, | |||
|
78 | { | |||
|
79 | data: { | |||
|
80 | "_": "last_changeset", | |||
60 | "sort": "last_changeset_raw", |
|
81 | "sort": "last_changeset_raw", | |
61 | "type": Number |
|
82 | "type": Number | |
62 | { data: {"_": "owner", |
|
83 | }, title: "", className: "td-hash" | |
63 | "sort": "owner"}, title: "${_('Owner')}", className: "td-user" } |
|
84 | }, | |
|
85 | { | |||
|
86 | data: { | |||
|
87 | "_": "owner", | |||
|
88 | "sort": "owner" | |||
|
89 | }, title: "${_('Owner')}", className: "td-user" | |||
|
90 | } | |||
64 | ], |
|
91 | ], | |
65 | language: { |
|
92 | language: { | |
66 | paginate: DEFAULT_GRID_PAGINATION, |
|
93 | paginate: DEFAULT_GRID_PAGINATION, | |
67 | emptyTable: _gettext("No repository groups available yet.") |
|
94 | sProcessing: _gettext('loading...'), | |
|
95 | emptyTable: _gettext("No repository groups present.") | |||
68 | }, |
|
96 | }, | |
69 | "drawCallback": function( |
|
97 | "drawCallback": function (settings, json) { | |
|
98 | // hide grid if it's empty | |||
|
99 | if (settings.fnRecordsDisplay() === 0) { | |||
|
100 | $('#groups_list_wrap').hide(); | |||
|
101 | // both hidden, show no-data | |||
|
102 | if ($('#repos_list_wrap').is(':hidden')) { | |||
|
103 | $('#no_grid_data').show(); | |||
|
104 | } | |||
|
105 | } else { | |||
|
106 | $('#groups_list_wrap').show(); | |||
|
107 | } | |||
|
108 | ||||
70 | timeagoActivate(); |
|
109 | timeagoActivate(); | |
71 | tooltipActivate(); |
|
110 | tooltipActivate(); | |
72 | quick_repo_menu(); |
|
111 | quick_repo_menu(); | |
73 | // hide pagination for single page |
|
112 | // hide pagination for single page | |
74 | if (settings._iDisplayLength >= settings.fnRecordsDisplay()) { |
|
113 | if (settings._iDisplayLength >= settings.fnRecordsDisplay()) { | |
75 | $(settings.nTableWrapper).find('.dataTables_paginate').hide(); |
|
114 | $(settings.nTableWrapper).find('.dataTables_paginate').hide(); | |
76 | } |
|
115 | } | |
77 | } |
|
116 | ||
|
117 | }, | |||
78 | }); |
|
118 | }); | |
79 | % endif |
|
119 | ||
|
120 | $groupListTable.on('xhr.dt', function (e, settings, json, xhr) { | |||
|
121 | $groupListTable.css('opacity', 1); | |||
|
122 | }); | |||
|
123 | ||||
|
124 | $groupListTable.on('preXhr.dt', function (e, settings, data) { | |||
|
125 | $groupListTable.css('opacity', 0.3); | |||
|
126 | }); | |||
|
127 | ||||
80 |
|
128 | |||
81 | // repo list |
|
129 | ## // repo list | |
82 | % if c.repos_data != '[]': |
|
130 | var $repoListTable = $('#repo_list_table'); | |
83 | $('#repo_list_table').DataTable({ |
|
131 | ||
84 | data: ${c.repos_data|n}, |
|
132 | $repoListTable.DataTable({ | |
|
133 | processing: true, | |||
|
134 | serverSide: true, | |||
|
135 | ajax: { | |||
|
136 | "url": "${h.route_path('main_page_repos_data')}", | |||
|
137 | "data": function (d) { | |||
|
138 | % if c.repo_group: | |||
|
139 | d.repo_group_id = ${c.repo_group.group_id} | |||
|
140 | % endif | |||
|
141 | } | |||
|
142 | }, | |||
|
143 | order: [[0, "asc"]], | |||
85 | dom: 'rtp', |
|
144 | dom: 'rtp', | |
86 | order: [[ 0, "asc" ]], |
|
|||
87 | pageLength: ${c.visual.dashboard_items}, |
|
145 | pageLength: ${c.visual.dashboard_items}, | |
88 | columns: [ |
|
146 | columns: [ | |
89 | { data: {"_": "name", |
|
147 | { | |
90 | "sort": "name_raw"}, title: "${_('Name')}", className: "truncate-wrap td-grid-name" }, |
|
148 | data: { | |
91 | { data: 'menu', "bSortable": false, className: "quick_repo_menu" }, |
|
149 | "_": "name", | |
92 | { data: {"_": "desc", |
|
150 | "sort": "name_raw" | |
93 | "sort": "desc"}, title: "${_('Description')}", className: "td-description" }, |
|
|
151 | }, title: "${_('Name')}", className: "truncate-wrap td-grid-name" | |
94 | { data: {"_": "last_change", |
|
152 | }, | |
|
153 | { | |||
|
154 | data: 'menu', "bSortable": false, className: "quick_repo_menu" | |||
|
155 | }, | |||
|
156 | { | |||
|
157 | data: { | |||
|
158 | "_": "desc", | |||
|
159 | "sort": "desc" | |||
|
160 | }, title: "${_('Description')}", className: "td-description" | |||
|
161 | }, | |||
|
162 | { | |||
|
163 | data: { | |||
|
164 | "_": "last_change", | |||
95 | "sort": "last_change_raw", |
|
165 | "sort": "last_change_raw", | |
96 | "type": Number}, title: "${_('Last Change')}", className: "td-time" }, |
|
166 | "type": Number | |
97 | { data: {"_": "last_changeset", |
|
167 | }, title: "${_('Last Change')}", className: "td-time", orderable: false | |
|
168 | }, | |||
|
169 | { | |||
|
170 | data: { | |||
|
171 | "_": "last_changeset", | |||
98 | "sort": "last_changeset_raw", |
|
172 | "sort": "last_changeset_raw", | |
99 | "type": Number}, title: "${_('Commit')}", className: "td-hash" }, |
|
173 | "type": Number | |
100 | { data: {"_": "owner", |
|
174 | }, title: "${_('Commit')}", className: "td-hash" | |
101 | "sort": "owner"}, title: "${_('Owner')}", className: "td-user" } |
|
175 | }, | |
|
176 | { | |||
|
177 | data: { | |||
|
178 | "_": "owner", | |||
|
179 | "sort": "owner" | |||
|
180 | }, title: "${_('Owner')}", className: "td-user" | |||
|
181 | } | |||
102 | ], |
|
182 | ], | |
103 | language: { |
|
183 | language: { | |
104 | paginate: DEFAULT_GRID_PAGINATION, |
|
184 | paginate: DEFAULT_GRID_PAGINATION, | |
105 | emptyTable: _gettext("No repositories available yet.") |
|
185 | sProcessing: _gettext('loading...'), | |
|
186 | emptyTable: _gettext("No repositories present.") | |||
106 | }, |
|
187 | }, | |
107 | "drawCallback": function( |
|
188 | "drawCallback": function (settings, json) { | |
|
189 | // hide grid if it's empty | |||
|
190 | if (settings.fnRecordsDisplay() == 0) { | |||
|
191 | $('#repos_list_wrap').hide() | |||
|
192 | // both hidden, show no-data | |||
|
193 | if ($('#groups_list_wrap').is(':hidden')) { | |||
|
194 | $('#no_grid_data').show() | |||
|
195 | } | |||
|
196 | } else { | |||
|
197 | $('#repos_list_wrap').show() | |||
|
198 | } | |||
|
199 | ||||
108 | timeagoActivate(); |
|
200 | timeagoActivate(); | |
109 | tooltipActivate(); |
|
201 | tooltipActivate(); | |
110 | quick_repo_menu(); |
|
202 | quick_repo_menu(); | |
111 | // hide pagination for single page |
|
203 | // hide pagination for single page | |
112 | if (settings._iDisplayLength >= settings.fnRecordsDisplay()) { |
|
204 | if (settings._iDisplayLength >= settings.fnRecordsDisplay()) { | |
113 | $(settings.nTableWrapper).find('.dataTables_paginate').hide(); |
|
205 | $(settings.nTableWrapper).find('.dataTables_paginate').hide(); | |
114 | } |
|
206 | } | |
115 | } |
|
207 | ||
|
208 | }, | |||
116 | }); |
|
209 | }); | |
117 | % endif |
|
210 | ||
|
211 | $repoListTable.on('xhr.dt', function (e, settings, json, xhr) { | |||
|
212 | $repoListTable.css('opacity', 1); | |||
|
213 | }); | |||
|
214 | ||||
|
215 | $repoListTable.on('preXhr.dt', function (e, settings, data) { | |||
|
216 | $repoListTable.css('opacity', 0.3); | |||
|
217 | }); | |||
118 |
|
218 | |||
119 |
|
|
219 | }); | |
120 | </script> |
|
220 | </script> | |
121 | </%def> |
|
221 | </%def> |
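The old "% if c.repos_data != '[]'" guards and the inline data: payloads disappear above because both grids now fetch their rows from the server on every draw. A minimal sketch of that server-side DataTables pattern, with plain values standing in for the Mako expressions (the endpoint URL and group id are hypothetical placeholders for h.route_path('main_page_repos_data') and c.repo_group.group_id):

    // Server-side DataTables wiring, as used by both grids above.
    $(document).ready(function () {
        var $table = $('#repo_list_table');            // grid element from the template
        $table.DataTable({
            processing: true,                          // show the 'loading...' indicator
            serverSide: true,                          // paging/sorting handled by the backend
            ajax: {
                url: '/_repos_data',                   // stand-in for the repos data route
                data: function (d) {
                    d.repo_group_id = 42;              // extra filter sent with every draw (hypothetical id)
                }
            },
            dom: 'rtp',
            order: [[0, 'asc']],
            drawCallback: function (settings, json) {
                // hide pagination when a single page holds everything
                if (settings._iDisplayLength >= settings.fnRecordsDisplay()) {
                    $(settings.nTableWrapper).find('.dataTables_paginate').hide();
                }
            }
        });
        // dim the grid while a request is in flight, restore it when data arrives
        $table.on('preXhr.dt', function (e, settings, data) { $table.css('opacity', 0.3); });
        $table.on('xhr.dt', function (e, settings, json, xhr) { $table.css('opacity', 1); });
    });

Dimming the table on preXhr.dt and restoring it on xhr.dt gives the user feedback while a request is in flight without blocking the page.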