Show More
The requested changes are too big and content was truncated. Show full diff
@@ -0,0 +1,180 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | ||
|
3 | # Copyright (C) 2016-2017 RhodeCode GmbH | |
|
4 | # | |
|
5 | # This program is free software: you can redistribute it and/or modify | |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
|
7 | # (only), as published by the Free Software Foundation. | |
|
8 | # | |
|
9 | # This program is distributed in the hope that it will be useful, | |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
|
12 | # GNU General Public License for more details. | |
|
13 | # | |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
|
16 | # | |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
|
20 | ||
|
21 | import logging | |
|
22 | import formencode | |
|
23 | ||
|
24 | from pyramid.httpexceptions import HTTPFound, HTTPForbidden | |
|
25 | from pyramid.view import view_config | |
|
26 | from pyramid.renderers import render | |
|
27 | from pyramid.response import Response | |
|
28 | ||
|
29 | from rhodecode.apps._base import BaseAppView, DataGridAppView | |
|
30 | ||
|
31 | from rhodecode.lib.ext_json import json | |
|
32 | from rhodecode.lib.auth import ( | |
|
33 | LoginRequired, CSRFRequired, NotAnonymous, | |
|
34 | HasPermissionAny, HasRepoGroupPermissionAny) | |
|
35 | from rhodecode.lib import helpers as h | |
|
36 | from rhodecode.lib.utils import repo_name_slug | |
|
37 | from rhodecode.lib.utils2 import safe_int, safe_unicode | |
|
38 | from rhodecode.model.forms import RepoForm | |
|
39 | from rhodecode.model.repo import RepoModel | |
|
40 | from rhodecode.model.scm import RepoList, RepoGroupList, ScmModel | |
|
41 | from rhodecode.model.settings import SettingsModel | |
|
42 | from rhodecode.model.db import Repository, RepoGroup | |
|
43 | ||
|
44 | log = logging.getLogger(__name__) | |
|
45 | ||
|
46 | ||
|
class AdminReposView(BaseAppView, DataGridAppView):
    """Admin-level repository views: list all repos, show the creation
    form, and handle the creation POST."""

    def load_default_context(self):
        """Return the per-request template context with globals registered."""
        c = self._get_local_tmpl_context()
        self._register_global_c(c)
        return c

    def _load_form_data(self, c):
        """Populate *c* with the choices the repo add/create form needs.

        Only repo groups the current user can write to (or administer)
        are offered as parent-group choices.
        """
        acl_groups = RepoGroupList(RepoGroup.query().all(),
                                   perm_set=['group.write', 'group.admin'])
        c.repo_groups = RepoGroup.groups_choices(groups=acl_groups)
        # list comprehension instead of map(lambda ...): same list on
        # Python 2, idiomatic, and safe if this ever moves to Python 3
        # (map() there returns a lazy iterator).
        c.repo_groups_choices = [safe_unicode(k[0]) for k in c.repo_groups]
        c.landing_revs_choices, c.landing_revs = \
            ScmModel().get_repo_landing_revs()
        c.personal_repo_group = self._rhodecode_user.personal_repo_group

    @LoginRequired()
    @NotAnonymous()
    @view_config(
        route_name='repos', request_method='GET',
        renderer='rhodecode:templates/admin/repos/repos.mako')
    def repository_list(self):
        """Render the admin grid of every repository (admin perm set)."""
        c = self.load_default_context()

        repo_list = Repository.get_all_repos()
        c.repo_list = RepoList(repo_list, perm_set=['repository.admin'])
        repos_data = RepoModel().get_repos_as_dict(
            repo_list=c.repo_list, admin=True, super_user_actions=True)
        # json used to render the grid
        c.data = json.dumps(repos_data)

        return self._get_template_context(c)

    @LoginRequired()
    @NotAnonymous()
    # perms check inside
    @view_config(
        route_name='repo_new', request_method='GET',
        renderer='rhodecode:templates/admin/repos/repo_add.mako')
    def repository_new(self):
        """Render the "new repository" form.

        Raises HTTPForbidden unless the user is a super admin, has the
        global create permission, or has sufficient permission on the
        requested parent group.
        """
        c = self.load_default_context()

        new_repo = self.request.GET.get('repo', '')
        parent_group = safe_int(self.request.GET.get('parent_group'))
        _gr = RepoGroup.get(parent_group)

        if not HasPermissionAny('hg.admin', 'hg.create.repository')():
            # you're not super admin nor have global create permissions,
            # but maybe you have at least write permission to a parent group ?

            gr_name = _gr.group_name if _gr else None
            # create repositories with write permission on group is set to true
            create_on_write = HasPermissionAny('hg.create.write_on_repogroup.true')()
            group_admin = HasRepoGroupPermissionAny('group.admin')(group_name=gr_name)
            group_write = HasRepoGroupPermissionAny('group.write')(group_name=gr_name)
            if not (group_admin or (group_write and create_on_write)):
                raise HTTPForbidden()

        self._load_form_data(c)
        c.new_repo = repo_name_slug(new_repo)

        # apply the defaults from defaults page
        defaults = SettingsModel().get_default_repo_settings(strip_prefix=True)
        # set checkbox to autochecked
        defaults['repo_copy_permissions'] = True

        # non-admins default to their personal repo group when they have one
        parent_group_choice = '-1'
        if not self._rhodecode_user.is_admin and self._rhodecode_user.personal_repo_group:
            parent_group_choice = self._rhodecode_user.personal_repo_group

        # an explicitly requested (and permitted) parent group wins
        if parent_group and _gr:
            if parent_group in [x[0] for x in c.repo_groups]:
                parent_group_choice = safe_unicode(parent_group)

        defaults.update({'repo_group': parent_group_choice})

        data = render('rhodecode:templates/admin/repos/repo_add.mako',
                      self._get_template_context(c), self.request)
        html = formencode.htmlfill.render(
            data,
            defaults=defaults,
            encoding="UTF-8",
            force_defaults=False
        )
        return Response(html)

    @LoginRequired()
    @NotAnonymous()
    @CSRFRequired()
    # perms check inside
    @view_config(
        route_name='repo_create', request_method='POST',
        renderer='rhodecode:templates/admin/repos/repos.mako')
    def repository_create(self):
        """Validate the POSTed form and create the repository.

        On validation errors the form is re-rendered with the errors
        filled in; on unexpected errors the user is flashed a message
        and redirected home; on success we redirect to the
        'repo_creating' progress page (creation may run async on celery).
        """
        c = self.load_default_context()

        form_result = {}
        task_id = None
        self._load_form_data(c)

        try:
            # CanWriteToGroup validators checks permissions of this POST
            form_result = RepoForm(repo_groups=c.repo_groups_choices,
                                   landing_revs=c.landing_revs_choices)()\
                .to_python(dict(self.request.POST))

            # create is done sometimes async on celery, db transaction
            # management is handled there.
            task = RepoModel().create(form_result, self._rhodecode_user.user_id)
            from celery.result import BaseAsyncResult
            if isinstance(task, BaseAsyncResult):
                task_id = task.task_id
        except formencode.Invalid as errors:
            data = render('rhodecode:templates/admin/repos/repo_add.mako',
                          self._get_template_context(c), self.request)
            html = formencode.htmlfill.render(
                data,
                defaults=errors.value,
                errors=errors.error_dict or {},
                prefix_error=False,
                encoding="UTF-8",
                force_defaults=False
            )
            return Response(html)

        except Exception as e:
            msg = self._log_creation_exception(e, form_result.get('repo_name'))
            h.flash(msg, category='error')
            raise HTTPFound(h.route_path('home'))

        raise HTTPFound(
            h.route_path('repo_creating',
                         repo_name=form_result['repo_name_full'],
                         _query=dict(task_id=task_id)))
|
1 | NO CONTENT: new file 100644 |
|
1 | NO CONTENT: new file 100644 |
This diff has been collapsed as it changes many lines, (685 lines changed) Show them Hide them | |||
@@ -0,0 +1,685 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | ||
|
3 | # Copyright (C) 2010-2017 RhodeCode GmbH | |
|
4 | # | |
|
5 | # This program is free software: you can redistribute it and/or modify | |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
|
7 | # (only), as published by the Free Software Foundation. | |
|
8 | # | |
|
9 | # This program is distributed in the hope that it will be useful, | |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
|
12 | # GNU General Public License for more details. | |
|
13 | # | |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
|
16 | # | |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
|
20 | ||
|
21 | import mock | |
|
22 | import pytest | |
|
23 | ||
|
24 | from rhodecode.lib import auth | |
|
25 | from rhodecode.lib.utils2 import str2bool | |
|
26 | from rhodecode.model.db import ( | |
|
27 | Repository, UserRepoToPerm, User) | |
|
28 | from rhodecode.model.meta import Session | |
|
29 | from rhodecode.model.settings import SettingsModel, VcsSettingsModel | |
|
30 | from rhodecode.model.user import UserModel | |
|
31 | from rhodecode.tests import ( | |
|
32 | login_user_session, logout_user_session, | |
|
33 | TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS) | |
|
34 | from rhodecode.tests.fixture import Fixture | |
|
35 | from rhodecode.tests.utils import AssertResponse | |
|
36 | ||
|
37 | fixture = Fixture() | |
|
38 | ||
|
39 | ||
|
def route_path(name, params=None, **kwargs):
    """Build the URL for route *name*.

    *kwargs* are substituted into the route's path template; a truthy
    *params* mapping is appended as a url-encoded query string.
    """
    import urllib

    templates = {
        'repo_summary': '/{repo_name}',
        'repo_creating_check': '/{repo_name}/repo_creating_check',
        'edit_repo': '/{repo_name}/settings',
        'edit_repo_vcs': '/{repo_name}/settings/vcs',
        'edit_repo_vcs_update': '/{repo_name}/settings/vcs/update',
        'edit_repo_vcs_svn_pattern_delete': '/{repo_name}/settings/vcs/svn_pattern/delete'
    }
    url = templates[name].format(**kwargs)
    if not params:
        return url
    return '{}?{}'.format(url, urllib.urlencode(params))
|
55 | ||
|
56 | ||
|
57 | @pytest.mark.usefixtures("app") | |
|
58 | class TestVcsSettings(object): | |
|
59 | FORM_DATA = { | |
|
60 | 'inherit_global_settings': False, | |
|
61 | 'hooks_changegroup_repo_size': False, | |
|
62 | 'hooks_changegroup_push_logger': False, | |
|
63 | 'hooks_outgoing_pull_logger': False, | |
|
64 | 'extensions_largefiles': False, | |
|
65 | 'extensions_evolve': False, | |
|
66 | 'phases_publish': 'False', | |
|
67 | 'rhodecode_pr_merge_enabled': False, | |
|
68 | 'rhodecode_use_outdated_comments': False, | |
|
69 | 'new_svn_branch': '', | |
|
70 | 'new_svn_tag': '' | |
|
71 | } | |
|
72 | ||
|
73 | @pytest.mark.skip_backends('svn') | |
|
74 | def test_global_settings_initial_values(self, autologin_user, backend): | |
|
75 | repo_name = backend.repo_name | |
|
76 | response = self.app.get(route_path('edit_repo_vcs', repo_name=repo_name)) | |
|
77 | ||
|
78 | expected_settings = ( | |
|
79 | 'rhodecode_use_outdated_comments', 'rhodecode_pr_merge_enabled', | |
|
80 | 'hooks_changegroup_repo_size', 'hooks_changegroup_push_logger', | |
|
81 | 'hooks_outgoing_pull_logger' | |
|
82 | ) | |
|
83 | for setting in expected_settings: | |
|
84 | self.assert_repo_value_equals_global_value(response, setting) | |
|
85 | ||
|
86 | def test_show_settings_requires_repo_admin_permission( | |
|
87 | self, backend, user_util, settings_util): | |
|
88 | repo = backend.create_repo() | |
|
89 | repo_name = repo.repo_name | |
|
90 | user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN) | |
|
91 | user_util.grant_user_permission_to_repo(repo, user, 'repository.admin') | |
|
92 | login_user_session( | |
|
93 | self.app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS) | |
|
94 | self.app.get(route_path('edit_repo_vcs', repo_name=repo_name), status=200) | |
|
95 | ||
|
96 | def test_inherit_global_settings_flag_is_true_by_default( | |
|
97 | self, autologin_user, backend): | |
|
98 | repo_name = backend.repo_name | |
|
99 | response = self.app.get(route_path('edit_repo_vcs', repo_name=repo_name)) | |
|
100 | ||
|
101 | assert_response = AssertResponse(response) | |
|
102 | element = assert_response.get_element('#inherit_global_settings') | |
|
103 | assert element.checked | |
|
104 | ||
|
105 | @pytest.mark.parametrize('checked_value', [True, False]) | |
|
106 | def test_inherit_global_settings_value( | |
|
107 | self, autologin_user, backend, checked_value, settings_util): | |
|
108 | repo = backend.create_repo() | |
|
109 | repo_name = repo.repo_name | |
|
110 | settings_util.create_repo_rhodecode_setting( | |
|
111 | repo, 'inherit_vcs_settings', checked_value, 'bool') | |
|
112 | response = self.app.get(route_path('edit_repo_vcs', repo_name=repo_name)) | |
|
113 | ||
|
114 | assert_response = AssertResponse(response) | |
|
115 | element = assert_response.get_element('#inherit_global_settings') | |
|
116 | assert element.checked == checked_value | |
|
117 | ||
|
118 | @pytest.mark.skip_backends('svn') | |
|
119 | def test_hooks_settings_are_created( | |
|
120 | self, autologin_user, backend, csrf_token): | |
|
121 | repo_name = backend.repo_name | |
|
122 | data = self.FORM_DATA.copy() | |
|
123 | data['csrf_token'] = csrf_token | |
|
124 | self.app.post( | |
|
125 | route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302) | |
|
126 | settings = SettingsModel(repo=repo_name) | |
|
127 | try: | |
|
128 | for section, key in VcsSettingsModel.HOOKS_SETTINGS: | |
|
129 | ui = settings.get_ui_by_section_and_key(section, key) | |
|
130 | assert ui.ui_active is False | |
|
131 | finally: | |
|
132 | self._cleanup_repo_settings(settings) | |
|
133 | ||
|
134 | def test_hooks_settings_are_not_created_for_svn( | |
|
135 | self, autologin_user, backend_svn, csrf_token): | |
|
136 | repo_name = backend_svn.repo_name | |
|
137 | data = self.FORM_DATA.copy() | |
|
138 | data['csrf_token'] = csrf_token | |
|
139 | self.app.post( | |
|
140 | route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302) | |
|
141 | settings = SettingsModel(repo=repo_name) | |
|
142 | try: | |
|
143 | for section, key in VcsSettingsModel.HOOKS_SETTINGS: | |
|
144 | ui = settings.get_ui_by_section_and_key(section, key) | |
|
145 | assert ui is None | |
|
146 | finally: | |
|
147 | self._cleanup_repo_settings(settings) | |
|
148 | ||
|
149 | @pytest.mark.skip_backends('svn') | |
|
150 | def test_hooks_settings_are_updated( | |
|
151 | self, autologin_user, backend, csrf_token): | |
|
152 | repo_name = backend.repo_name | |
|
153 | settings = SettingsModel(repo=repo_name) | |
|
154 | for section, key in VcsSettingsModel.HOOKS_SETTINGS: | |
|
155 | settings.create_ui_section_value(section, '', key=key, active=True) | |
|
156 | ||
|
157 | data = self.FORM_DATA.copy() | |
|
158 | data['csrf_token'] = csrf_token | |
|
159 | self.app.post( | |
|
160 | route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302) | |
|
161 | try: | |
|
162 | for section, key in VcsSettingsModel.HOOKS_SETTINGS: | |
|
163 | ui = settings.get_ui_by_section_and_key(section, key) | |
|
164 | assert ui.ui_active is False | |
|
165 | finally: | |
|
166 | self._cleanup_repo_settings(settings) | |
|
167 | ||
|
168 | def test_hooks_settings_are_not_updated_for_svn( | |
|
169 | self, autologin_user, backend_svn, csrf_token): | |
|
170 | repo_name = backend_svn.repo_name | |
|
171 | settings = SettingsModel(repo=repo_name) | |
|
172 | for section, key in VcsSettingsModel.HOOKS_SETTINGS: | |
|
173 | settings.create_ui_section_value(section, '', key=key, active=True) | |
|
174 | ||
|
175 | data = self.FORM_DATA.copy() | |
|
176 | data['csrf_token'] = csrf_token | |
|
177 | self.app.post( | |
|
178 | route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302) | |
|
179 | try: | |
|
180 | for section, key in VcsSettingsModel.HOOKS_SETTINGS: | |
|
181 | ui = settings.get_ui_by_section_and_key(section, key) | |
|
182 | assert ui.ui_active is True | |
|
183 | finally: | |
|
184 | self._cleanup_repo_settings(settings) | |
|
185 | ||
|
186 | @pytest.mark.skip_backends('svn') | |
|
187 | def test_pr_settings_are_created( | |
|
188 | self, autologin_user, backend, csrf_token): | |
|
189 | repo_name = backend.repo_name | |
|
190 | data = self.FORM_DATA.copy() | |
|
191 | data['csrf_token'] = csrf_token | |
|
192 | self.app.post( | |
|
193 | route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302) | |
|
194 | settings = SettingsModel(repo=repo_name) | |
|
195 | try: | |
|
196 | for name in VcsSettingsModel.GENERAL_SETTINGS: | |
|
197 | setting = settings.get_setting_by_name(name) | |
|
198 | assert setting.app_settings_value is False | |
|
199 | finally: | |
|
200 | self._cleanup_repo_settings(settings) | |
|
201 | ||
|
202 | def test_pr_settings_are_not_created_for_svn( | |
|
203 | self, autologin_user, backend_svn, csrf_token): | |
|
204 | repo_name = backend_svn.repo_name | |
|
205 | data = self.FORM_DATA.copy() | |
|
206 | data['csrf_token'] = csrf_token | |
|
207 | self.app.post( | |
|
208 | route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302) | |
|
209 | settings = SettingsModel(repo=repo_name) | |
|
210 | try: | |
|
211 | for name in VcsSettingsModel.GENERAL_SETTINGS: | |
|
212 | setting = settings.get_setting_by_name(name) | |
|
213 | assert setting is None | |
|
214 | finally: | |
|
215 | self._cleanup_repo_settings(settings) | |
|
216 | ||
|
217 | def test_pr_settings_creation_requires_repo_admin_permission( | |
|
218 | self, backend, user_util, settings_util, csrf_token): | |
|
219 | repo = backend.create_repo() | |
|
220 | repo_name = repo.repo_name | |
|
221 | ||
|
222 | logout_user_session(self.app, csrf_token) | |
|
223 | session = login_user_session( | |
|
224 | self.app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS) | |
|
225 | new_csrf_token = auth.get_csrf_token(session) | |
|
226 | ||
|
227 | user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN) | |
|
228 | repo = Repository.get_by_repo_name(repo_name) | |
|
229 | user_util.grant_user_permission_to_repo(repo, user, 'repository.admin') | |
|
230 | data = self.FORM_DATA.copy() | |
|
231 | data['csrf_token'] = new_csrf_token | |
|
232 | settings = SettingsModel(repo=repo_name) | |
|
233 | ||
|
234 | try: | |
|
235 | self.app.post( | |
|
236 | route_path('edit_repo_vcs_update', repo_name=repo_name), data, | |
|
237 | status=302) | |
|
238 | finally: | |
|
239 | self._cleanup_repo_settings(settings) | |
|
240 | ||
|
241 | @pytest.mark.skip_backends('svn') | |
|
242 | def test_pr_settings_are_updated( | |
|
243 | self, autologin_user, backend, csrf_token): | |
|
244 | repo_name = backend.repo_name | |
|
245 | settings = SettingsModel(repo=repo_name) | |
|
246 | for name in VcsSettingsModel.GENERAL_SETTINGS: | |
|
247 | settings.create_or_update_setting(name, True, 'bool') | |
|
248 | ||
|
249 | data = self.FORM_DATA.copy() | |
|
250 | data['csrf_token'] = csrf_token | |
|
251 | self.app.post( | |
|
252 | route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302) | |
|
253 | try: | |
|
254 | for name in VcsSettingsModel.GENERAL_SETTINGS: | |
|
255 | setting = settings.get_setting_by_name(name) | |
|
256 | assert setting.app_settings_value is False | |
|
257 | finally: | |
|
258 | self._cleanup_repo_settings(settings) | |
|
259 | ||
|
260 | def test_pr_settings_are_not_updated_for_svn( | |
|
261 | self, autologin_user, backend_svn, csrf_token): | |
|
262 | repo_name = backend_svn.repo_name | |
|
263 | settings = SettingsModel(repo=repo_name) | |
|
264 | for name in VcsSettingsModel.GENERAL_SETTINGS: | |
|
265 | settings.create_or_update_setting(name, True, 'bool') | |
|
266 | ||
|
267 | data = self.FORM_DATA.copy() | |
|
268 | data['csrf_token'] = csrf_token | |
|
269 | self.app.post( | |
|
270 | route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302) | |
|
271 | try: | |
|
272 | for name in VcsSettingsModel.GENERAL_SETTINGS: | |
|
273 | setting = settings.get_setting_by_name(name) | |
|
274 | assert setting.app_settings_value is True | |
|
275 | finally: | |
|
276 | self._cleanup_repo_settings(settings) | |
|
277 | ||
|
278 | def test_svn_settings_are_created( | |
|
279 | self, autologin_user, backend_svn, csrf_token, settings_util): | |
|
280 | repo_name = backend_svn.repo_name | |
|
281 | data = self.FORM_DATA.copy() | |
|
282 | data['new_svn_tag'] = 'svn-tag' | |
|
283 | data['new_svn_branch'] = 'svn-branch' | |
|
284 | data['csrf_token'] = csrf_token | |
|
285 | ||
|
286 | # Create few global settings to make sure that uniqueness validators | |
|
287 | # are not triggered | |
|
288 | settings_util.create_rhodecode_ui( | |
|
289 | VcsSettingsModel.SVN_BRANCH_SECTION, 'svn-branch') | |
|
290 | settings_util.create_rhodecode_ui( | |
|
291 | VcsSettingsModel.SVN_TAG_SECTION, 'svn-tag') | |
|
292 | ||
|
293 | self.app.post( | |
|
294 | route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302) | |
|
295 | settings = SettingsModel(repo=repo_name) | |
|
296 | try: | |
|
297 | svn_branches = settings.get_ui_by_section( | |
|
298 | VcsSettingsModel.SVN_BRANCH_SECTION) | |
|
299 | svn_branch_names = [b.ui_value for b in svn_branches] | |
|
300 | svn_tags = settings.get_ui_by_section( | |
|
301 | VcsSettingsModel.SVN_TAG_SECTION) | |
|
302 | svn_tag_names = [b.ui_value for b in svn_tags] | |
|
303 | assert 'svn-branch' in svn_branch_names | |
|
304 | assert 'svn-tag' in svn_tag_names | |
|
305 | finally: | |
|
306 | self._cleanup_repo_settings(settings) | |
|
307 | ||
|
308 | def test_svn_settings_are_unique( | |
|
309 | self, autologin_user, backend_svn, csrf_token, settings_util): | |
|
310 | repo = backend_svn.repo | |
|
311 | repo_name = repo.repo_name | |
|
312 | data = self.FORM_DATA.copy() | |
|
313 | data['new_svn_tag'] = 'test_tag' | |
|
314 | data['new_svn_branch'] = 'test_branch' | |
|
315 | data['csrf_token'] = csrf_token | |
|
316 | settings_util.create_repo_rhodecode_ui( | |
|
317 | repo, VcsSettingsModel.SVN_BRANCH_SECTION, 'test_branch') | |
|
318 | settings_util.create_repo_rhodecode_ui( | |
|
319 | repo, VcsSettingsModel.SVN_TAG_SECTION, 'test_tag') | |
|
320 | ||
|
321 | response = self.app.post( | |
|
322 | route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=200) | |
|
323 | response.mustcontain('Pattern already exists') | |
|
324 | ||
|
325 | def test_svn_settings_with_empty_values_are_not_created( | |
|
326 | self, autologin_user, backend_svn, csrf_token): | |
|
327 | repo_name = backend_svn.repo_name | |
|
328 | data = self.FORM_DATA.copy() | |
|
329 | data['csrf_token'] = csrf_token | |
|
330 | self.app.post( | |
|
331 | route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302) | |
|
332 | settings = SettingsModel(repo=repo_name) | |
|
333 | try: | |
|
334 | svn_branches = settings.get_ui_by_section( | |
|
335 | VcsSettingsModel.SVN_BRANCH_SECTION) | |
|
336 | svn_tags = settings.get_ui_by_section( | |
|
337 | VcsSettingsModel.SVN_TAG_SECTION) | |
|
338 | assert len(svn_branches) == 0 | |
|
339 | assert len(svn_tags) == 0 | |
|
340 | finally: | |
|
341 | self._cleanup_repo_settings(settings) | |
|
342 | ||
|
343 | def test_svn_settings_are_shown_for_svn_repository( | |
|
344 | self, autologin_user, backend_svn, csrf_token): | |
|
345 | repo_name = backend_svn.repo_name | |
|
346 | response = self.app.get( | |
|
347 | route_path('edit_repo_vcs', repo_name=repo_name), status=200) | |
|
348 | response.mustcontain('Subversion Settings') | |
|
349 | ||
|
350 | @pytest.mark.skip_backends('svn') | |
|
351 | def test_svn_settings_are_not_created_for_not_svn_repository( | |
|
352 | self, autologin_user, backend, csrf_token): | |
|
353 | repo_name = backend.repo_name | |
|
354 | data = self.FORM_DATA.copy() | |
|
355 | data['csrf_token'] = csrf_token | |
|
356 | self.app.post( | |
|
357 | route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302) | |
|
358 | settings = SettingsModel(repo=repo_name) | |
|
359 | try: | |
|
360 | svn_branches = settings.get_ui_by_section( | |
|
361 | VcsSettingsModel.SVN_BRANCH_SECTION) | |
|
362 | svn_tags = settings.get_ui_by_section( | |
|
363 | VcsSettingsModel.SVN_TAG_SECTION) | |
|
364 | assert len(svn_branches) == 0 | |
|
365 | assert len(svn_tags) == 0 | |
|
366 | finally: | |
|
367 | self._cleanup_repo_settings(settings) | |
|
368 | ||
|
369 | @pytest.mark.skip_backends('svn') | |
|
370 | def test_svn_settings_are_shown_only_for_svn_repository( | |
|
371 | self, autologin_user, backend, csrf_token): | |
|
372 | repo_name = backend.repo_name | |
|
373 | response = self.app.get( | |
|
374 | route_path('edit_repo_vcs', repo_name=repo_name), status=200) | |
|
375 | response.mustcontain(no='Subversion Settings') | |
|
376 | ||
|
377 | def test_hg_settings_are_created( | |
|
378 | self, autologin_user, backend_hg, csrf_token): | |
|
379 | repo_name = backend_hg.repo_name | |
|
380 | data = self.FORM_DATA.copy() | |
|
381 | data['new_svn_tag'] = 'svn-tag' | |
|
382 | data['new_svn_branch'] = 'svn-branch' | |
|
383 | data['csrf_token'] = csrf_token | |
|
384 | self.app.post( | |
|
385 | route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302) | |
|
386 | settings = SettingsModel(repo=repo_name) | |
|
387 | try: | |
|
388 | largefiles_ui = settings.get_ui_by_section_and_key( | |
|
389 | 'extensions', 'largefiles') | |
|
390 | assert largefiles_ui.ui_active is False | |
|
391 | phases_ui = settings.get_ui_by_section_and_key( | |
|
392 | 'phases', 'publish') | |
|
393 | assert str2bool(phases_ui.ui_value) is False | |
|
394 | finally: | |
|
395 | self._cleanup_repo_settings(settings) | |
|
396 | ||
|
397 | def test_hg_settings_are_updated( | |
|
398 | self, autologin_user, backend_hg, csrf_token): | |
|
399 | repo_name = backend_hg.repo_name | |
|
400 | settings = SettingsModel(repo=repo_name) | |
|
401 | settings.create_ui_section_value( | |
|
402 | 'extensions', '', key='largefiles', active=True) | |
|
403 | settings.create_ui_section_value( | |
|
404 | 'phases', '1', key='publish', active=True) | |
|
405 | ||
|
406 | data = self.FORM_DATA.copy() | |
|
407 | data['csrf_token'] = csrf_token | |
|
408 | self.app.post( | |
|
409 | route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302) | |
|
410 | try: | |
|
411 | largefiles_ui = settings.get_ui_by_section_and_key( | |
|
412 | 'extensions', 'largefiles') | |
|
413 | assert largefiles_ui.ui_active is False | |
|
414 | phases_ui = settings.get_ui_by_section_and_key( | |
|
415 | 'phases', 'publish') | |
|
416 | assert str2bool(phases_ui.ui_value) is False | |
|
417 | finally: | |
|
418 | self._cleanup_repo_settings(settings) | |
|
419 | ||
|
420 | def test_hg_settings_are_shown_for_hg_repository( | |
|
421 | self, autologin_user, backend_hg, csrf_token): | |
|
422 | repo_name = backend_hg.repo_name | |
|
423 | response = self.app.get( | |
|
424 | route_path('edit_repo_vcs', repo_name=repo_name), status=200) | |
|
425 | response.mustcontain('Mercurial Settings') | |
|
426 | ||
|
427 | @pytest.mark.skip_backends('hg') | |
|
428 | def test_hg_settings_are_created_only_for_hg_repository( | |
|
429 | self, autologin_user, backend, csrf_token): | |
|
430 | repo_name = backend.repo_name | |
|
431 | data = self.FORM_DATA.copy() | |
|
432 | data['csrf_token'] = csrf_token | |
|
433 | self.app.post( | |
|
434 | route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302) | |
|
435 | settings = SettingsModel(repo=repo_name) | |
|
436 | try: | |
|
437 | largefiles_ui = settings.get_ui_by_section_and_key( | |
|
438 | 'extensions', 'largefiles') | |
|
439 | assert largefiles_ui is None | |
|
440 | phases_ui = settings.get_ui_by_section_and_key( | |
|
441 | 'phases', 'publish') | |
|
442 | assert phases_ui is None | |
|
443 | finally: | |
|
444 | self._cleanup_repo_settings(settings) | |
|
445 | ||
|
446 | @pytest.mark.skip_backends('hg') | |
|
447 | def test_hg_settings_are_shown_only_for_hg_repository( | |
|
448 | self, autologin_user, backend, csrf_token): | |
|
449 | repo_name = backend.repo_name | |
|
450 | response = self.app.get( | |
|
451 | route_path('edit_repo_vcs', repo_name=repo_name), status=200) | |
|
452 | response.mustcontain(no='Mercurial Settings') | |
|
453 | ||
|
454 | @pytest.mark.skip_backends('hg') | |
|
455 | def test_hg_settings_are_updated_only_for_hg_repository( | |
|
456 | self, autologin_user, backend, csrf_token): | |
|
457 | repo_name = backend.repo_name | |
|
458 | settings = SettingsModel(repo=repo_name) | |
|
459 | settings.create_ui_section_value( | |
|
460 | 'extensions', '', key='largefiles', active=True) | |
|
461 | settings.create_ui_section_value( | |
|
462 | 'phases', '1', key='publish', active=True) | |
|
463 | ||
|
464 | data = self.FORM_DATA.copy() | |
|
465 | data['csrf_token'] = csrf_token | |
|
466 | self.app.post( | |
|
467 | route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302) | |
|
468 | try: | |
|
469 | largefiles_ui = settings.get_ui_by_section_and_key( | |
|
470 | 'extensions', 'largefiles') | |
|
471 | assert largefiles_ui.ui_active is True | |
|
472 | phases_ui = settings.get_ui_by_section_and_key( | |
|
473 | 'phases', 'publish') | |
|
474 | assert phases_ui.ui_value == '1' | |
|
475 | finally: | |
|
476 | self._cleanup_repo_settings(settings) | |
|
477 | ||
|
478 | def test_per_repo_svn_settings_are_displayed( | |
|
479 | self, autologin_user, backend_svn, settings_util): | |
|
480 | repo = backend_svn.create_repo() | |
|
481 | repo_name = repo.repo_name | |
|
482 | branches = [ | |
|
483 | settings_util.create_repo_rhodecode_ui( | |
|
484 | repo, VcsSettingsModel.SVN_BRANCH_SECTION, | |
|
485 | 'branch_{}'.format(i)) | |
|
486 | for i in range(10)] | |
|
487 | tags = [ | |
|
488 | settings_util.create_repo_rhodecode_ui( | |
|
489 | repo, VcsSettingsModel.SVN_TAG_SECTION, 'tag_{}'.format(i)) | |
|
490 | for i in range(10)] | |
|
491 | ||
|
492 | response = self.app.get( | |
|
493 | route_path('edit_repo_vcs', repo_name=repo_name), status=200) | |
|
494 | assert_response = AssertResponse(response) | |
|
495 | for branch in branches: | |
|
496 | css_selector = '[name=branch_value_{}]'.format(branch.ui_id) | |
|
497 | element = assert_response.get_element(css_selector) | |
|
498 | assert element.value == branch.ui_value | |
|
499 | for tag in tags: | |
|
500 | css_selector = '[name=tag_ui_value_new_{}]'.format(tag.ui_id) | |
|
501 | element = assert_response.get_element(css_selector) | |
|
502 | assert element.value == tag.ui_value | |
|
503 | ||
|
504 | def test_per_repo_hg_and_pr_settings_are_not_displayed_for_svn( | |
|
505 | self, autologin_user, backend_svn, settings_util): | |
|
506 | repo = backend_svn.create_repo() | |
|
507 | repo_name = repo.repo_name | |
|
508 | response = self.app.get( | |
|
509 | route_path('edit_repo_vcs', repo_name=repo_name), status=200) | |
|
510 | response.mustcontain(no='<label>Hooks:</label>') | |
|
511 | response.mustcontain(no='<label>Pull Request Settings:</label>') | |
|
512 | ||
|
513 | def test_inherit_global_settings_value_is_saved( | |
|
514 | self, autologin_user, backend, csrf_token): | |
|
515 | repo_name = backend.repo_name | |
|
516 | data = self.FORM_DATA.copy() | |
|
517 | data['csrf_token'] = csrf_token | |
|
518 | data['inherit_global_settings'] = True | |
|
519 | self.app.post( | |
|
520 | route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302) | |
|
521 | ||
|
522 | settings = SettingsModel(repo=repo_name) | |
|
523 | vcs_settings = VcsSettingsModel(repo=repo_name) | |
|
524 | try: | |
|
525 | assert vcs_settings.inherit_global_settings is True | |
|
526 | finally: | |
|
527 | self._cleanup_repo_settings(settings) | |
|
528 | ||
|
529 | def test_repo_cache_is_invalidated_when_settings_are_updated( | |
|
530 | self, autologin_user, backend, csrf_token): | |
|
531 | repo_name = backend.repo_name | |
|
532 | data = self.FORM_DATA.copy() | |
|
533 | data['csrf_token'] = csrf_token | |
|
534 | data['inherit_global_settings'] = True | |
|
535 | settings = SettingsModel(repo=repo_name) | |
|
536 | ||
|
537 | invalidation_patcher = mock.patch( | |
|
538 | 'rhodecode.model.scm.ScmModel.mark_for_invalidation') | |
|
539 | with invalidation_patcher as invalidation_mock: | |
|
540 | self.app.post( | |
|
541 | route_path('edit_repo_vcs_update', repo_name=repo_name), data, | |
|
542 | status=302) | |
|
543 | try: | |
|
544 | invalidation_mock.assert_called_once_with(repo_name, delete=True) | |
|
545 | finally: | |
|
546 | self._cleanup_repo_settings(settings) | |
|
547 | ||
|
548 | def test_other_settings_not_saved_inherit_global_settings_is_true( | |
|
549 | self, autologin_user, backend, csrf_token): | |
|
550 | repo_name = backend.repo_name | |
|
551 | data = self.FORM_DATA.copy() | |
|
552 | data['csrf_token'] = csrf_token | |
|
553 | data['inherit_global_settings'] = True | |
|
554 | self.app.post( | |
|
555 | route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302) | |
|
556 | ||
|
557 | settings = SettingsModel(repo=repo_name) | |
|
558 | ui_settings = ( | |
|
559 | VcsSettingsModel.HOOKS_SETTINGS + VcsSettingsModel.HG_SETTINGS) | |
|
560 | ||
|
561 | vcs_settings = [] | |
|
562 | try: | |
|
563 | for section, key in ui_settings: | |
|
564 | ui = settings.get_ui_by_section_and_key(section, key) | |
|
565 | if ui: | |
|
566 | vcs_settings.append(ui) | |
|
567 | vcs_settings.extend(settings.get_ui_by_section( | |
|
568 | VcsSettingsModel.SVN_BRANCH_SECTION)) | |
|
569 | vcs_settings.extend(settings.get_ui_by_section( | |
|
570 | VcsSettingsModel.SVN_TAG_SECTION)) | |
|
571 | for name in VcsSettingsModel.GENERAL_SETTINGS: | |
|
572 | setting = settings.get_setting_by_name(name) | |
|
573 | if setting: | |
|
574 | vcs_settings.append(setting) | |
|
575 | assert vcs_settings == [] | |
|
576 | finally: | |
|
577 | self._cleanup_repo_settings(settings) | |
|
578 | ||
|
579 | def test_delete_svn_branch_and_tag_patterns( | |
|
580 | self, autologin_user, backend_svn, settings_util, csrf_token, xhr_header): | |
|
581 | repo = backend_svn.create_repo() | |
|
582 | repo_name = repo.repo_name | |
|
583 | branch = settings_util.create_repo_rhodecode_ui( | |
|
584 | repo, VcsSettingsModel.SVN_BRANCH_SECTION, 'test_branch', | |
|
585 | cleanup=False) | |
|
586 | tag = settings_util.create_repo_rhodecode_ui( | |
|
587 | repo, VcsSettingsModel.SVN_TAG_SECTION, 'test_tag', cleanup=False) | |
|
588 | data = { | |
|
589 | 'csrf_token': csrf_token | |
|
590 | } | |
|
591 | for id_ in (branch.ui_id, tag.ui_id): | |
|
592 | data['delete_svn_pattern'] = id_, | |
|
593 | self.app.post( | |
|
594 | route_path('edit_repo_vcs_svn_pattern_delete', repo_name=repo_name), | |
|
595 | data, extra_environ=xhr_header, status=200) | |
|
596 | settings = VcsSettingsModel(repo=repo_name) | |
|
597 | assert settings.get_repo_svn_branch_patterns() == [] | |
|
598 | ||
|
599 | def test_delete_svn_branch_requires_repo_admin_permission( | |
|
600 | self, backend_svn, user_util, settings_util, csrf_token, xhr_header): | |
|
601 | repo = backend_svn.create_repo() | |
|
602 | repo_name = repo.repo_name | |
|
603 | ||
|
604 | logout_user_session(self.app, csrf_token) | |
|
605 | session = login_user_session( | |
|
606 | self.app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS) | |
|
607 | csrf_token = auth.get_csrf_token(session) | |
|
608 | ||
|
609 | repo = Repository.get_by_repo_name(repo_name) | |
|
610 | user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN) | |
|
611 | user_util.grant_user_permission_to_repo(repo, user, 'repository.admin') | |
|
612 | branch = settings_util.create_repo_rhodecode_ui( | |
|
613 | repo, VcsSettingsModel.SVN_BRANCH_SECTION, 'test_branch', | |
|
614 | cleanup=False) | |
|
615 | data = { | |
|
616 | 'csrf_token': csrf_token, | |
|
617 | 'delete_svn_pattern': branch.ui_id | |
|
618 | } | |
|
619 | self.app.post( | |
|
620 | route_path('edit_repo_vcs_svn_pattern_delete', repo_name=repo_name), | |
|
621 | data, extra_environ=xhr_header, status=200) | |
|
622 | ||
|
623 | def test_delete_svn_branch_raises_400_when_not_found( | |
|
624 | self, autologin_user, backend_svn, settings_util, csrf_token, xhr_header): | |
|
625 | repo_name = backend_svn.repo_name | |
|
626 | data = { | |
|
627 | 'delete_svn_pattern': 123, | |
|
628 | 'csrf_token': csrf_token | |
|
629 | } | |
|
630 | self.app.post( | |
|
631 | route_path('edit_repo_vcs_svn_pattern_delete', repo_name=repo_name), | |
|
632 | data, extra_environ=xhr_header, status=400) | |
|
633 | ||
|
634 | def test_delete_svn_branch_raises_400_when_no_id_specified( | |
|
635 | self, autologin_user, backend_svn, settings_util, csrf_token, xhr_header): | |
|
636 | repo_name = backend_svn.repo_name | |
|
637 | data = { | |
|
638 | 'csrf_token': csrf_token | |
|
639 | } | |
|
640 | self.app.post( | |
|
641 | route_path('edit_repo_vcs_svn_pattern_delete', repo_name=repo_name), | |
|
642 | data, extra_environ=xhr_header, status=400) | |
|
643 | ||
|
644 | def _cleanup_repo_settings(self, settings_model): | |
|
645 | cleanup = [] | |
|
646 | ui_settings = ( | |
|
647 | VcsSettingsModel.HOOKS_SETTINGS + VcsSettingsModel.HG_SETTINGS) | |
|
648 | ||
|
649 | for section, key in ui_settings: | |
|
650 | ui = settings_model.get_ui_by_section_and_key(section, key) | |
|
651 | if ui: | |
|
652 | cleanup.append(ui) | |
|
653 | ||
|
654 | cleanup.extend(settings_model.get_ui_by_section( | |
|
655 | VcsSettingsModel.INHERIT_SETTINGS)) | |
|
656 | cleanup.extend(settings_model.get_ui_by_section( | |
|
657 | VcsSettingsModel.SVN_BRANCH_SECTION)) | |
|
658 | cleanup.extend(settings_model.get_ui_by_section( | |
|
659 | VcsSettingsModel.SVN_TAG_SECTION)) | |
|
660 | ||
|
661 | for name in VcsSettingsModel.GENERAL_SETTINGS: | |
|
662 | setting = settings_model.get_setting_by_name(name) | |
|
663 | if setting: | |
|
664 | cleanup.append(setting) | |
|
665 | ||
|
666 | for object_ in cleanup: | |
|
667 | Session().delete(object_) | |
|
668 | Session().commit() | |
|
669 | ||
|
670 | def assert_repo_value_equals_global_value(self, response, setting): | |
|
671 | assert_response = AssertResponse(response) | |
|
672 | global_css_selector = '[name={}_inherited]'.format(setting) | |
|
673 | repo_css_selector = '[name={}]'.format(setting) | |
|
674 | repo_element = assert_response.get_element(repo_css_selector) | |
|
675 | global_element = assert_response.get_element(global_css_selector) | |
|
676 | assert repo_element.value == global_element.value | |
|
677 | ||
|
678 | ||
|
679 | def _get_permission_for_user(user, repo): | |
|
680 | perm = UserRepoToPerm.query()\ | |
|
681 | .filter(UserRepoToPerm.repository == | |
|
682 | Repository.get_by_repo_name(repo))\ | |
|
683 | .filter(UserRepoToPerm.user == User.get_by_username(user))\ | |
|
684 | .all() | |
|
685 | return perm |
@@ -0,0 +1,113 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | ||
|
3 | # Copyright (C) 2017-2017 RhodeCode GmbH | |
|
4 | # | |
|
5 | # This program is free software: you can redistribute it and/or modify | |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
|
7 | # (only), as published by the Free Software Foundation. | |
|
8 | # | |
|
9 | # This program is distributed in the hope that it will be useful, | |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
|
12 | # GNU General Public License for more details. | |
|
13 | # | |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
|
16 | # | |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
|
20 | ||
|
21 | import logging | |
|
22 | ||
|
23 | import formencode | |
|
24 | ||
|
25 | from pyramid.httpexceptions import HTTPFound | |
|
26 | from pyramid.view import view_config | |
|
27 | ||
|
28 | from rhodecode.apps._base import RepoAppView | |
|
29 | from rhodecode.lib import audit_logger | |
|
30 | from rhodecode.lib import helpers as h | |
|
31 | from rhodecode.lib.auth import ( | |
|
32 | LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired) | |
|
33 | from rhodecode.model.db import RepositoryField | |
|
34 | from rhodecode.model.forms import RepoFieldForm | |
|
35 | from rhodecode.model.meta import Session | |
|
36 | from rhodecode.model.repo import RepoModel | |
|
37 | ||
|
38 | log = logging.getLogger(__name__) | |
|
39 | ||
|
40 | ||
|
41 | class RepoSettingsFieldsView(RepoAppView): | |
|
42 | def load_default_context(self): | |
|
43 | c = self._get_local_tmpl_context() | |
|
44 | ||
|
45 | # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead | |
|
46 | c.repo_info = self.db_repo | |
|
47 | ||
|
48 | self._register_global_c(c) | |
|
49 | return c | |
|
50 | ||
|
51 | @LoginRequired() | |
|
52 | @HasRepoPermissionAnyDecorator('repository.admin') | |
|
53 | @view_config( | |
|
54 | route_name='edit_repo_fields', request_method='GET', | |
|
55 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') | |
|
56 | def repo_field_edit(self): | |
|
57 | c = self.load_default_context() | |
|
58 | ||
|
59 | c.active = 'fields' | |
|
60 | c.repo_fields = RepositoryField.query() \ | |
|
61 | .filter(RepositoryField.repository == self.db_repo).all() | |
|
62 | ||
|
63 | return self._get_template_context(c) | |
|
64 | ||
|
65 | @LoginRequired() | |
|
66 | @HasRepoPermissionAnyDecorator('repository.admin') | |
|
67 | @CSRFRequired() | |
|
68 | @view_config( | |
|
69 | route_name='edit_repo_fields_create', request_method='POST', | |
|
70 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') | |
|
71 | def repo_field_create(self): | |
|
72 | _ = self.request.translate | |
|
73 | ||
|
74 | try: | |
|
75 | form_result = RepoFieldForm()().to_python(dict(self.request.POST)) | |
|
76 | RepoModel().add_repo_field( | |
|
77 | self.db_repo_name, | |
|
78 | form_result['new_field_key'], | |
|
79 | field_type=form_result['new_field_type'], | |
|
80 | field_value=form_result['new_field_value'], | |
|
81 | field_label=form_result['new_field_label'], | |
|
82 | field_desc=form_result['new_field_desc']) | |
|
83 | ||
|
84 | Session().commit() | |
|
85 | except Exception as e: | |
|
86 | log.exception("Exception creating field") | |
|
87 | msg = _('An error occurred during creation of field') | |
|
88 | if isinstance(e, formencode.Invalid): | |
|
89 | msg += ". " + e.msg | |
|
90 | h.flash(msg, category='error') | |
|
91 | ||
|
92 | raise HTTPFound( | |
|
93 | h.route_path('edit_repo_fields', repo_name=self.db_repo_name)) | |
|
94 | ||
|
95 | @LoginRequired() | |
|
96 | @HasRepoPermissionAnyDecorator('repository.admin') | |
|
97 | @CSRFRequired() | |
|
98 | @view_config( | |
|
99 | route_name='edit_repo_fields_delete', request_method='POST', | |
|
100 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') | |
|
101 | def repo_field_delete(self): | |
|
102 | _ = self.request.translate | |
|
103 | field = RepositoryField.get_or_404(self.request.matchdict['field_id']) | |
|
104 | try: | |
|
105 | RepoModel().delete_repo_field(self.db_repo_name, field.field_key) | |
|
106 | Session().commit() | |
|
107 | except Exception: | |
|
108 | log.exception('Exception during removal of field') | |
|
109 | msg = _('An error occurred during removal of field') | |
|
110 | h.flash(msg, category='error') | |
|
111 | ||
|
112 | raise HTTPFound( | |
|
113 | h.route_path('edit_repo_fields', repo_name=self.db_repo_name)) |
@@ -0,0 +1,129 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | ||
|
3 | # Copyright (C) 2017-2017 RhodeCode GmbH | |
|
4 | # | |
|
5 | # This program is free software: you can redistribute it and/or modify | |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
|
7 | # (only), as published by the Free Software Foundation. | |
|
8 | # | |
|
9 | # This program is distributed in the hope that it will be useful, | |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
|
12 | # GNU General Public License for more details. | |
|
13 | # | |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
|
16 | # | |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
|
20 | ||
|
21 | import logging | |
|
22 | ||
|
23 | from pyramid.httpexceptions import HTTPFound | |
|
24 | from pyramid.view import view_config | |
|
25 | ||
|
26 | from rhodecode.apps._base import RepoAppView | |
|
27 | from rhodecode.lib import audit_logger | |
|
28 | from rhodecode.lib import helpers as h | |
|
29 | from rhodecode.lib.auth import ( | |
|
30 | LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired) | |
|
31 | from rhodecode.model.forms import IssueTrackerPatternsForm | |
|
32 | from rhodecode.model.meta import Session | |
|
33 | from rhodecode.model.settings import IssueTrackerSettingsModel | |
|
34 | ||
|
35 | log = logging.getLogger(__name__) | |
|
36 | ||
|
37 | ||
|
38 | class RepoSettingsIssueTrackersView(RepoAppView): | |
|
39 | def load_default_context(self): | |
|
40 | c = self._get_local_tmpl_context() | |
|
41 | ||
|
42 | # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead | |
|
43 | c.repo_info = self.db_repo | |
|
44 | ||
|
45 | self._register_global_c(c) | |
|
46 | return c | |
|
47 | ||
|
48 | @LoginRequired() | |
|
49 | @HasRepoPermissionAnyDecorator('repository.admin') | |
|
50 | @view_config( | |
|
51 | route_name='edit_repo_issuetracker', request_method='GET', | |
|
52 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') | |
|
53 | def repo_issuetracker(self): | |
|
54 | c = self.load_default_context() | |
|
55 | c.active = 'issuetracker' | |
|
56 | c.data = 'data' | |
|
57 | ||
|
58 | c.settings_model = IssueTrackerSettingsModel(repo=self.db_repo) | |
|
59 | c.global_patterns = c.settings_model.get_global_settings() | |
|
60 | c.repo_patterns = c.settings_model.get_repo_settings() | |
|
61 | ||
|
62 | return self._get_template_context(c) | |
|
63 | ||
|
64 | @LoginRequired() | |
|
65 | @HasRepoPermissionAnyDecorator('repository.admin') | |
|
66 | @CSRFRequired() | |
|
67 | @view_config( | |
|
68 | route_name='edit_repo_issuetracker_test', request_method='POST', | |
|
69 | xhr=True, renderer='string') | |
|
70 | def repo_issuetracker_test(self): | |
|
71 | return h.urlify_commit_message( | |
|
72 | self.request.POST.get('test_text', ''), | |
|
73 | self.db_repo_name) | |
|
74 | ||
|
75 | @LoginRequired() | |
|
76 | @HasRepoPermissionAnyDecorator('repository.admin') | |
|
77 | @CSRFRequired() | |
|
78 | @view_config( | |
|
79 | route_name='edit_repo_issuetracker_delete', request_method='POST', | |
|
80 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') | |
|
81 | def repo_issuetracker_delete(self): | |
|
82 | _ = self.request.translate | |
|
83 | uid = self.request.POST.get('uid') | |
|
84 | repo_settings = IssueTrackerSettingsModel(repo=self.db_repo_name) | |
|
85 | try: | |
|
86 | repo_settings.delete_entries(uid) | |
|
87 | except Exception: | |
|
88 | h.flash(_('Error occurred during deleting issue tracker entry'), | |
|
89 | category='error') | |
|
90 | else: | |
|
91 | h.flash(_('Removed issue tracker entry'), category='success') | |
|
92 | raise HTTPFound( | |
|
93 | h.route_path('edit_repo_issuetracker', repo_name=self.db_repo_name)) | |
|
94 | ||
|
95 | def _update_patterns(self, form, repo_settings): | |
|
96 | for uid in form['delete_patterns']: | |
|
97 | repo_settings.delete_entries(uid) | |
|
98 | ||
|
99 | for pattern_data in form['patterns']: | |
|
100 | for setting_key, pattern, type_ in pattern_data: | |
|
101 | sett = repo_settings.create_or_update_setting( | |
|
102 | setting_key, pattern.strip(), type_) | |
|
103 | Session().add(sett) | |
|
104 | ||
|
105 | Session().commit() | |
|
106 | ||
|
107 | @LoginRequired() | |
|
108 | @HasRepoPermissionAnyDecorator('repository.admin') | |
|
109 | @CSRFRequired() | |
|
110 | @view_config( | |
|
111 | route_name='edit_repo_issuetracker_update', request_method='POST', | |
|
112 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') | |
|
113 | def repo_issuetracker_update(self): | |
|
114 | _ = self.request.translate | |
|
115 | # Save inheritance | |
|
116 | repo_settings = IssueTrackerSettingsModel(repo=self.db_repo_name) | |
|
117 | inherited = ( | |
|
118 | self.request.POST.get('inherit_global_issuetracker') == "inherited") | |
|
119 | repo_settings.inherit_global_settings = inherited | |
|
120 | Session().commit() | |
|
121 | ||
|
122 | form = IssueTrackerPatternsForm()().to_python(self.request.POST) | |
|
123 | if form: | |
|
124 | self._update_patterns(form, repo_settings) | |
|
125 | ||
|
126 | h.flash(_('Updated issue tracker entries'), category='success') | |
|
127 | raise HTTPFound( | |
|
128 | h.route_path('edit_repo_issuetracker', repo_name=self.db_repo_name)) | |
|
129 |
@@ -0,0 +1,75 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | ||
|
3 | # Copyright (C) 2017-2017 RhodeCode GmbH | |
|
4 | # | |
|
5 | # This program is free software: you can redistribute it and/or modify | |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
|
7 | # (only), as published by the Free Software Foundation. | |
|
8 | # | |
|
9 | # This program is distributed in the hope that it will be useful, | |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
|
12 | # GNU General Public License for more details. | |
|
13 | # | |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
|
16 | # | |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
|
20 | ||
|
21 | import logging | |
|
22 | ||
|
23 | from pyramid.httpexceptions import HTTPFound | |
|
24 | from pyramid.view import view_config | |
|
25 | ||
|
26 | from rhodecode.apps._base import RepoAppView | |
|
27 | from rhodecode.lib import helpers as h | |
|
28 | from rhodecode.lib.auth import ( | |
|
29 | LoginRequired, CSRFRequired, HasRepoPermissionAnyDecorator) | |
|
30 | from rhodecode.model.scm import ScmModel | |
|
31 | ||
|
32 | log = logging.getLogger(__name__) | |
|
33 | ||
|
34 | ||
|
35 | class RepoSettingsRemoteView(RepoAppView): | |
|
36 | def load_default_context(self): | |
|
37 | c = self._get_local_tmpl_context() | |
|
38 | ||
|
39 | # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead | |
|
40 | c.repo_info = self.db_repo | |
|
41 | ||
|
42 | self._register_global_c(c) | |
|
43 | return c | |
|
44 | ||
|
45 | @LoginRequired() | |
|
46 | @HasRepoPermissionAnyDecorator('repository.admin') | |
|
47 | @view_config( | |
|
48 | route_name='edit_repo_remote', request_method='GET', | |
|
49 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') | |
|
50 | def repo_remote_edit_form(self): | |
|
51 | c = self.load_default_context() | |
|
52 | c.active = 'remote' | |
|
53 | ||
|
54 | return self._get_template_context(c) | |
|
55 | ||
|
56 | @LoginRequired() | |
|
57 | @HasRepoPermissionAnyDecorator('repository.admin') | |
|
58 | @CSRFRequired() | |
|
59 | @view_config( | |
|
60 | route_name='edit_repo_remote_pull', request_method='POST', | |
|
61 | renderer=None) | |
|
62 | def repo_remote_pull_changes(self): | |
|
63 | _ = self.request.translate | |
|
64 | self.load_default_context() | |
|
65 | ||
|
66 | try: | |
|
67 | ScmModel().pull_changes( | |
|
68 | self.db_repo_name, self._rhodecode_user.username) | |
|
69 | h.flash(_('Pulled from remote location'), category='success') | |
|
70 | except Exception: | |
|
71 | log.exception("Exception during pull from remote") | |
|
72 | h.flash(_('An error occurred during pull from remote location'), | |
|
73 | category='error') | |
|
74 | raise HTTPFound( | |
|
75 | h.route_path('edit_repo_remote', repo_name=self.db_repo_name)) |
@@ -0,0 +1,172 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | ||
|
3 | # Copyright (C) 2017-2017 RhodeCode GmbH | |
|
4 | # | |
|
5 | # This program is free software: you can redistribute it and/or modify | |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
|
7 | # (only), as published by the Free Software Foundation. | |
|
8 | # | |
|
9 | # This program is distributed in the hope that it will be useful, | |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
|
12 | # GNU General Public License for more details. | |
|
13 | # | |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
|
16 | # | |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
|
20 | ||
|
21 | import logging | |
|
22 | ||
|
23 | import formencode | |
|
24 | from pyramid.httpexceptions import HTTPFound, HTTPBadRequest | |
|
25 | from pyramid.response import Response | |
|
26 | from pyramid.renderers import render | |
|
27 | from pyramid.view import view_config | |
|
28 | ||
|
29 | from rhodecode.apps._base import RepoAppView | |
|
30 | from rhodecode.lib import audit_logger | |
|
31 | from rhodecode.lib import helpers as h | |
|
32 | from rhodecode.lib.auth import ( | |
|
33 | LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired) | |
|
34 | from rhodecode.model.forms import RepoVcsSettingsForm | |
|
35 | from rhodecode.model.meta import Session | |
|
36 | from rhodecode.model.settings import VcsSettingsModel, SettingNotFound | |
|
37 | ||
|
38 | log = logging.getLogger(__name__) | |
|
39 | ||
|
40 | ||
|
41 | class RepoSettingsVcsView(RepoAppView): | |
|
42 | def load_default_context(self): | |
|
43 | c = self._get_local_tmpl_context() | |
|
44 | ||
|
45 | # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead | |
|
46 | c.repo_info = self.db_repo | |
|
47 | ||
|
48 | self._register_global_c(c) | |
|
49 | return c | |
|
50 | ||
|
51 | def _vcs_form_defaults(self, repo_name): | |
|
52 | model = VcsSettingsModel(repo=repo_name) | |
|
53 | global_defaults = model.get_global_settings() | |
|
54 | ||
|
55 | repo_defaults = {} | |
|
56 | repo_defaults.update(global_defaults) | |
|
57 | repo_defaults.update(model.get_repo_settings()) | |
|
58 | ||
|
59 | global_defaults = { | |
|
60 | '{}_inherited'.format(k): global_defaults[k] | |
|
61 | for k in global_defaults} | |
|
62 | ||
|
63 | defaults = { | |
|
64 | 'inherit_global_settings': model.inherit_global_settings | |
|
65 | } | |
|
66 | defaults.update(global_defaults) | |
|
67 | defaults.update(repo_defaults) | |
|
68 | defaults.update({ | |
|
69 | 'new_svn_branch': '', | |
|
70 | 'new_svn_tag': '', | |
|
71 | }) | |
|
72 | return defaults | |
|
73 | ||
|
74 | @LoginRequired() | |
|
75 | @HasRepoPermissionAnyDecorator('repository.admin') | |
|
76 | @view_config( | |
|
77 | route_name='edit_repo_vcs', request_method='GET', | |
|
78 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') | |
|
79 | def repo_vcs_settings(self): | |
|
80 | c = self.load_default_context() | |
|
81 | model = VcsSettingsModel(repo=self.db_repo_name) | |
|
82 | ||
|
83 | c.active = 'vcs' | |
|
84 | c.global_svn_branch_patterns = model.get_global_svn_branch_patterns() | |
|
85 | c.global_svn_tag_patterns = model.get_global_svn_tag_patterns() | |
|
86 | c.svn_branch_patterns = model.get_repo_svn_branch_patterns() | |
|
87 | c.svn_tag_patterns = model.get_repo_svn_tag_patterns() | |
|
88 | ||
|
89 | defaults = self._vcs_form_defaults(self.db_repo_name) | |
|
90 | c.inherit_global_settings = defaults['inherit_global_settings'] | |
|
91 | ||
|
92 | data = render('rhodecode:templates/admin/repos/repo_edit.mako', | |
|
93 | self._get_template_context(c), self.request) | |
|
94 | html = formencode.htmlfill.render( | |
|
95 | data, | |
|
96 | defaults=defaults, | |
|
97 | encoding="UTF-8", | |
|
98 | force_defaults=False | |
|
99 | ) | |
|
100 | return Response(html) | |
|
101 | ||
|
102 | @LoginRequired() | |
|
103 | @HasRepoPermissionAnyDecorator('repository.admin') | |
|
104 | @CSRFRequired() | |
|
105 | @view_config( | |
|
106 | route_name='edit_repo_vcs_update', request_method='POST', | |
|
107 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') | |
|
108 | def repo_settings_vcs_update(self): | |
|
109 | _ = self.request.translate | |
|
110 | c = self.load_default_context() | |
|
111 | c.active = 'vcs' | |
|
112 | ||
|
113 | model = VcsSettingsModel(repo=self.db_repo_name) | |
|
114 | c.global_svn_branch_patterns = model.get_global_svn_branch_patterns() | |
|
115 | c.global_svn_tag_patterns = model.get_global_svn_tag_patterns() | |
|
116 | c.svn_branch_patterns = model.get_repo_svn_branch_patterns() | |
|
117 | c.svn_tag_patterns = model.get_repo_svn_tag_patterns() | |
|
118 | ||
|
119 | defaults = self._vcs_form_defaults(self.db_repo_name) | |
|
120 | c.inherit_global_settings = defaults['inherit_global_settings'] | |
|
121 | ||
|
122 | application_form = RepoVcsSettingsForm(self.db_repo_name)() | |
|
123 | try: | |
|
124 | form_result = application_form.to_python(dict(self.request.POST)) | |
|
125 | except formencode.Invalid as errors: | |
|
126 | h.flash(_("Some form inputs contain invalid data."), | |
|
127 | category='error') | |
|
128 | ||
|
129 | data = render('rhodecode:templates/admin/repos/repo_edit.mako', | |
|
130 | self._get_template_context(c), self.request) | |
|
131 | html = formencode.htmlfill.render( | |
|
132 | data, | |
|
133 | defaults=errors.value, | |
|
134 | errors=errors.error_dict or {}, | |
|
135 | encoding="UTF-8", | |
|
136 | force_defaults=False | |
|
137 | ) | |
|
138 | return Response(html) | |
|
139 | ||
|
140 | try: | |
|
141 | inherit_global_settings = form_result['inherit_global_settings'] | |
|
142 | model.create_or_update_repo_settings( | |
|
143 | form_result, inherit_global_settings=inherit_global_settings) | |
|
144 | Session().commit() | |
|
145 | h.flash(_('Updated VCS settings'), category='success') | |
|
146 | except Exception: | |
|
147 | log.exception("Exception while updating settings") | |
|
148 | h.flash( | |
|
149 | _('Error occurred during updating repository VCS settings'), | |
|
150 | category='error') | |
|
151 | ||
|
152 | raise HTTPFound( | |
|
153 | h.route_path('edit_repo_vcs', repo_name=self.db_repo_name)) | |
|
154 | ||
|
155 | @LoginRequired() | |
|
156 | @HasRepoPermissionAnyDecorator('repository.admin') | |
|
157 | @CSRFRequired() | |
|
158 | @view_config( | |
|
159 | route_name='edit_repo_vcs_svn_pattern_delete', request_method='POST', | |
|
160 | renderer='json_ext', xhr=True) | |
|
161 | def repo_settings_delete_svn_pattern(self): | |
|
162 | self.load_default_context() | |
|
163 | delete_pattern_id = self.request.POST.get('delete_svn_pattern') | |
|
164 | model = VcsSettingsModel(repo=self.db_repo_name) | |
|
165 | try: | |
|
166 | model.delete_repo_svn_pattern(delete_pattern_id) | |
|
167 | except SettingNotFound: | |
|
168 | log.exception('Failed to delete SVN pattern') | |
|
169 | raise HTTPBadRequest() | |
|
170 | ||
|
171 | Session().commit() | |
|
172 | return True |
|
1 | NO CONTENT: new file 100644 |
@@ -1,222 +1,235 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | |
|
22 | 22 | from rhodecode.apps.admin.navigation import NavigationRegistry |
|
23 | 23 | from rhodecode.config.routing import ADMIN_PREFIX |
|
24 | 24 | from rhodecode.lib.utils2 import str2bool |
|
25 | 25 | |
|
26 | 26 | |
|
27 | 27 | def admin_routes(config): |
|
28 | 28 | """ |
|
29 | 29 | Admin prefixed routes |
|
30 | 30 | """ |
|
31 | 31 | |
|
32 | 32 | config.add_route( |
|
33 | 33 | name='admin_audit_logs', |
|
34 | 34 | pattern='/audit_logs') |
|
35 | 35 | |
|
36 | 36 | config.add_route( |
|
37 | 37 | name='pull_requests_global_0', # backward compat |
|
38 | 38 | pattern='/pull_requests/{pull_request_id:\d+}') |
|
39 | 39 | config.add_route( |
|
40 | 40 | name='pull_requests_global_1', # backward compat |
|
41 | 41 | pattern='/pull-requests/{pull_request_id:\d+}') |
|
42 | 42 | config.add_route( |
|
43 | 43 | name='pull_requests_global', |
|
44 | 44 | pattern='/pull-request/{pull_request_id:\d+}') |
|
45 | 45 | |
|
46 | 46 | config.add_route( |
|
47 | 47 | name='admin_settings_open_source', |
|
48 | 48 | pattern='/settings/open_source') |
|
49 | 49 | config.add_route( |
|
50 | 50 | name='admin_settings_vcs_svn_generate_cfg', |
|
51 | 51 | pattern='/settings/vcs/svn_generate_cfg') |
|
52 | 52 | |
|
53 | 53 | config.add_route( |
|
54 | 54 | name='admin_settings_system', |
|
55 | 55 | pattern='/settings/system') |
|
56 | 56 | config.add_route( |
|
57 | 57 | name='admin_settings_system_update', |
|
58 | 58 | pattern='/settings/system/updates') |
|
59 | 59 | |
|
60 | 60 | config.add_route( |
|
61 | 61 | name='admin_settings_sessions', |
|
62 | 62 | pattern='/settings/sessions') |
|
63 | 63 | config.add_route( |
|
64 | 64 | name='admin_settings_sessions_cleanup', |
|
65 | 65 | pattern='/settings/sessions/cleanup') |
|
66 | 66 | |
|
67 | 67 | config.add_route( |
|
68 | 68 | name='admin_settings_process_management', |
|
69 | 69 | pattern='/settings/process_management') |
|
70 | 70 | config.add_route( |
|
71 | 71 | name='admin_settings_process_management_signal', |
|
72 | 72 | pattern='/settings/process_management/signal') |
|
73 | 73 | |
|
74 | 74 | # global permissions |
|
75 | 75 | |
|
76 | 76 | config.add_route( |
|
77 | 77 | name='admin_permissions_application', |
|
78 | 78 | pattern='/permissions/application') |
|
79 | 79 | config.add_route( |
|
80 | 80 | name='admin_permissions_application_update', |
|
81 | 81 | pattern='/permissions/application/update') |
|
82 | 82 | |
|
83 | 83 | config.add_route( |
|
84 | 84 | name='admin_permissions_global', |
|
85 | 85 | pattern='/permissions/global') |
|
86 | 86 | config.add_route( |
|
87 | 87 | name='admin_permissions_global_update', |
|
88 | 88 | pattern='/permissions/global/update') |
|
89 | 89 | |
|
90 | 90 | config.add_route( |
|
91 | 91 | name='admin_permissions_object', |
|
92 | 92 | pattern='/permissions/object') |
|
93 | 93 | config.add_route( |
|
94 | 94 | name='admin_permissions_object_update', |
|
95 | 95 | pattern='/permissions/object/update') |
|
96 | 96 | |
|
97 | 97 | config.add_route( |
|
98 | 98 | name='admin_permissions_ips', |
|
99 | 99 | pattern='/permissions/ips') |
|
100 | 100 | |
|
101 | 101 | config.add_route( |
|
102 | 102 | name='admin_permissions_overview', |
|
103 | 103 | pattern='/permissions/overview') |
|
104 | 104 | |
|
105 | 105 | config.add_route( |
|
106 | 106 | name='admin_permissions_auth_token_access', |
|
107 | 107 | pattern='/permissions/auth_token_access') |
|
108 | 108 | |
|
109 | 109 | # users admin |
|
110 | 110 | config.add_route( |
|
111 | 111 | name='users', |
|
112 | 112 | pattern='/users') |
|
113 | 113 | |
|
114 | 114 | config.add_route( |
|
115 | 115 | name='users_data', |
|
116 | 116 | pattern='/users_data') |
|
117 | 117 | |
|
118 | 118 | # user auth tokens |
|
119 | 119 | config.add_route( |
|
120 | 120 | name='edit_user_auth_tokens', |
|
121 | 121 | pattern='/users/{user_id:\d+}/edit/auth_tokens') |
|
122 | 122 | config.add_route( |
|
123 | 123 | name='edit_user_auth_tokens_add', |
|
124 | 124 | pattern='/users/{user_id:\d+}/edit/auth_tokens/new') |
|
125 | 125 | config.add_route( |
|
126 | 126 | name='edit_user_auth_tokens_delete', |
|
127 | 127 | pattern='/users/{user_id:\d+}/edit/auth_tokens/delete') |
|
128 | 128 | |
|
129 | 129 | # user ssh keys |
|
130 | 130 | config.add_route( |
|
131 | 131 | name='edit_user_ssh_keys', |
|
132 | 132 | pattern='/users/{user_id:\d+}/edit/ssh_keys') |
|
133 | 133 | config.add_route( |
|
134 | 134 | name='edit_user_ssh_keys_generate_keypair', |
|
135 | 135 | pattern='/users/{user_id:\d+}/edit/ssh_keys/generate') |
|
136 | 136 | config.add_route( |
|
137 | 137 | name='edit_user_ssh_keys_add', |
|
138 | 138 | pattern='/users/{user_id:\d+}/edit/ssh_keys/new') |
|
139 | 139 | config.add_route( |
|
140 | 140 | name='edit_user_ssh_keys_delete', |
|
141 | 141 | pattern='/users/{user_id:\d+}/edit/ssh_keys/delete') |
|
142 | 142 | |
|
143 | 143 | # user emails |
|
144 | 144 | config.add_route( |
|
145 | 145 | name='edit_user_emails', |
|
146 | 146 | pattern='/users/{user_id:\d+}/edit/emails') |
|
147 | 147 | config.add_route( |
|
148 | 148 | name='edit_user_emails_add', |
|
149 | 149 | pattern='/users/{user_id:\d+}/edit/emails/new') |
|
150 | 150 | config.add_route( |
|
151 | 151 | name='edit_user_emails_delete', |
|
152 | 152 | pattern='/users/{user_id:\d+}/edit/emails/delete') |
|
153 | 153 | |
|
154 | 154 | # user IPs |
|
155 | 155 | config.add_route( |
|
156 | 156 | name='edit_user_ips', |
|
157 | 157 | pattern='/users/{user_id:\d+}/edit/ips') |
|
158 | 158 | config.add_route( |
|
159 | 159 | name='edit_user_ips_add', |
|
160 | 160 | pattern='/users/{user_id:\d+}/edit/ips/new') |
|
161 | 161 | config.add_route( |
|
162 | 162 | name='edit_user_ips_delete', |
|
163 | 163 | pattern='/users/{user_id:\d+}/edit/ips/delete') |
|
164 | 164 | |
|
165 | 165 | # user perms |
|
166 | 166 | config.add_route( |
|
167 | 167 | name='edit_user_perms_summary', |
|
168 | 168 | pattern='/users/{user_id:\d+}/edit/permissions_summary') |
|
169 | 169 | config.add_route( |
|
170 | 170 | name='edit_user_perms_summary_json', |
|
171 | 171 | pattern='/users/{user_id:\d+}/edit/permissions_summary/json') |
|
172 | 172 | |
|
173 | 173 | # user groups management |
|
174 | 174 | config.add_route( |
|
175 | 175 | name='edit_user_groups_management', |
|
176 | 176 | pattern='/users/{user_id:\d+}/edit/groups_management') |
|
177 | 177 | |
|
178 | 178 | config.add_route( |
|
179 | 179 | name='edit_user_groups_management_updates', |
|
180 | 180 | pattern='/users/{user_id:\d+}/edit/edit_user_groups_management/updates') |
|
181 | 181 | |
|
182 | 182 | # user audit logs |
|
183 | 183 | config.add_route( |
|
184 | 184 | name='edit_user_audit_logs', |
|
185 | 185 | pattern='/users/{user_id:\d+}/edit/audit') |
|
186 | 186 | |
|
187 | 187 | # user groups admin |
|
188 | 188 | config.add_route( |
|
189 | 189 | name='user_groups', |
|
190 | 190 | pattern='/user_groups') |
|
191 | 191 | |
|
192 | 192 | config.add_route( |
|
193 | 193 | name='user_groups_data', |
|
194 | 194 | pattern='/user_groups_data') |
|
195 | 195 | |
|
196 | 196 | config.add_route( |
|
197 | 197 | name='user_group_members_data', |
|
198 | 198 | pattern='/user_groups/{user_group_id:\d+}/members') |
|
199 | 199 | |
|
200 | 200 | # user groups perms |
|
201 | 201 | config.add_route( |
|
202 | 202 | name='edit_user_group_perms_summary', |
|
203 | 203 | pattern='/user_groups/{user_group_id:\d+}/edit/permissions_summary') |
|
204 | 204 | config.add_route( |
|
205 | 205 | name='edit_user_group_perms_summary_json', |
|
206 | 206 | pattern='/user_groups/{user_group_id:\d+}/edit/permissions_summary/json') |
|
207 | 207 | |
|
208 | # repos admin | |
|
209 | config.add_route( | |
|
210 | name='repos', | |
|
211 | pattern='/repos') | |
|
212 | ||
|
213 | config.add_route( | |
|
214 | name='repo_new', | |
|
215 | pattern='/repos/new') | |
|
216 | ||
|
217 | config.add_route( | |
|
218 | name='repo_create', | |
|
219 | pattern='/repos/create') | |
|
220 | ||
|
208 | 221 | |
|
209 | 222 | def includeme(config): |
|
210 | 223 | settings = config.get_settings() |
|
211 | 224 | |
|
212 | 225 | # Create admin navigation registry and add it to the pyramid registry. |
|
213 | 226 | labs_active = str2bool(settings.get('labs_settings_active', False)) |
|
214 | 227 | navigation_registry = NavigationRegistry(labs_active=labs_active) |
|
215 | 228 | config.registry.registerUtility(navigation_registry) |
|
216 | 229 | |
|
217 | 230 | # main admin routes |
|
218 | 231 | config.add_route(name='admin_home', pattern=ADMIN_PREFIX) |
|
219 | 232 | config.include(admin_routes, route_prefix=ADMIN_PREFIX) |
|
220 | 233 | |
|
221 | 234 | # Scan module for configuration decorators. |
|
222 | 235 | config.scan('.views', ignore='.tests') |
This diff has been collapsed as it changes many lines, (712 lines changed) Show them Hide them | |||
@@ -1,1131 +1,509 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import urllib |
|
22 | 22 | |
|
23 | 23 | import mock |
|
24 | 24 | import pytest |
|
25 | 25 | |
|
26 | from rhodecode.apps._base import ADMIN_PREFIX | |
|
26 | 27 | from rhodecode.lib import auth |
|
27 |
from rhodecode.lib.utils2 import safe_str |
|
|
28 | from rhodecode.lib.utils2 import safe_str | |
|
28 | 29 | from rhodecode.lib import helpers as h |
|
29 | 30 | from rhodecode.model.db import ( |
|
30 | 31 | Repository, RepoGroup, UserRepoToPerm, User, Permission) |
|
31 | 32 | from rhodecode.model.meta import Session |
|
32 | 33 | from rhodecode.model.repo import RepoModel |
|
33 | 34 | from rhodecode.model.repo_group import RepoGroupModel |
|
34 | from rhodecode.model.settings import SettingsModel, VcsSettingsModel | |
|
35 | 35 | from rhodecode.model.user import UserModel |
|
36 | 36 | from rhodecode.tests import ( |
|
37 |
login_user_session |
|
|
38 |
TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS |
|
|
37 | login_user_session, assert_session_flash, TEST_USER_ADMIN_LOGIN, | |
|
38 | TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS) | |
|
39 | 39 | from rhodecode.tests.fixture import Fixture, error_function |
|
40 | 40 | from rhodecode.tests.utils import AssertResponse, repo_on_filesystem |
|
41 | 41 | |
|
42 | 42 | fixture = Fixture() |
|
43 | 43 | |
|
44 | 44 | |
|
45 | 45 | def route_path(name, params=None, **kwargs): |
|
46 | 46 | import urllib |
|
47 | 47 | |
|
48 | 48 | base_url = { |
|
49 |
'repo |
|
|
49 | 'repos': ADMIN_PREFIX + '/repos', | |
|
50 | 'repo_new': ADMIN_PREFIX + '/repos/new', | |
|
51 | 'repo_create': ADMIN_PREFIX + '/repos/create', | |
|
52 | ||
|
50 | 53 | 'repo_creating_check': '/{repo_name}/repo_creating_check', |
|
51 | 54 | }[name].format(**kwargs) |
|
52 | 55 | |
|
53 | 56 | if params: |
|
54 | 57 | base_url = '{}?{}'.format(base_url, urllib.urlencode(params)) |
|
55 | 58 | return base_url |
|
56 | 59 | |
|
57 | 60 | |
|
61 | def _get_permission_for_user(user, repo): | |
|
62 | perm = UserRepoToPerm.query()\ | |
|
63 | .filter(UserRepoToPerm.repository == | |
|
64 | Repository.get_by_repo_name(repo))\ | |
|
65 | .filter(UserRepoToPerm.user == User.get_by_username(user))\ | |
|
66 | .all() | |
|
67 | return perm | |
|
68 | ||
|
69 | ||
|
58 | 70 | @pytest.mark.usefixtures("app") |
|
59 | 71 | class TestAdminRepos(object): |
|
60 | 72 | |
|
61 | def test_index(self): | |
|
62 | self.app.get(url('repos')) | |
|
73 | def test_repo_list(self, autologin_user, user_util): | |
|
74 | repo = user_util.create_repo() | |
|
75 | response = self.app.get( | |
|
76 | route_path('repos'), status=200) | |
|
63 | 77 | |
|
64 | def test_create_page_restricted(self, autologin_user, backend): | |
|
78 | response.mustcontain(repo.repo_name) | |
|
79 | ||
|
80 | def test_create_page_restricted_to_single_backend(self, autologin_user, backend): | |
|
65 | 81 | with mock.patch('rhodecode.BACKENDS', {'git': 'git'}): |
|
66 |
response = self.app.get( |
|
|
82 | response = self.app.get(route_path('repo_new'), status=200) | |
|
67 | 83 | assert_response = AssertResponse(response) |
|
68 | 84 | element = assert_response.get_element('#repo_type') |
|
69 | 85 | assert element.text_content() == '\ngit\n' |
|
70 | 86 | |
|
71 | def test_create_page_non_restricted(self, autologin_user, backend): | |
|
72 |
response = self.app.get( |
|
|
87 | def test_create_page_non_restricted_backends(self, autologin_user, backend): | |
|
88 | response = self.app.get(route_path('repo_new'), status=200) | |
|
73 | 89 | assert_response = AssertResponse(response) |
|
74 | 90 | assert_response.element_contains('#repo_type', 'git') |
|
75 | 91 | assert_response.element_contains('#repo_type', 'svn') |
|
76 | 92 | assert_response.element_contains('#repo_type', 'hg') |
|
77 | 93 | |
|
78 |
@pytest.mark.parametrize( |
|
|
79 |
|
|
|
94 | @pytest.mark.parametrize( | |
|
95 | "suffix", [u'', u'xxa'], ids=['', 'non-ascii']) | |
|
80 | 96 | def test_create(self, autologin_user, backend, suffix, csrf_token): |
|
81 | 97 | repo_name_unicode = backend.new_repo_name(suffix=suffix) |
|
82 | 98 | repo_name = repo_name_unicode.encode('utf8') |
|
83 | 99 | description_unicode = u'description for newly created repo' + suffix |
|
84 | 100 | description = description_unicode.encode('utf8') |
|
85 | 101 | response = self.app.post( |
|
86 |
|
|
|
102 | route_path('repo_create'), | |
|
87 | 103 | fixture._get_repo_create_params( |
|
88 | 104 | repo_private=False, |
|
89 | 105 | repo_name=repo_name, |
|
90 | 106 | repo_type=backend.alias, |
|
91 | 107 | repo_description=description, |
|
92 | 108 | csrf_token=csrf_token), |
|
93 | 109 | status=302) |
|
94 | 110 | |
|
95 | 111 | self.assert_repository_is_created_correctly( |
|
96 | 112 | repo_name, description, backend) |
|
97 | 113 | |
|
98 | def test_create_numeric(self, autologin_user, backend, csrf_token): | |
|
114 | def test_create_numeric_name(self, autologin_user, backend, csrf_token): | |
|
99 | 115 | numeric_repo = '1234' |
|
100 | 116 | repo_name = numeric_repo |
|
101 | 117 | description = 'description for newly created repo' + numeric_repo |
|
102 | 118 | self.app.post( |
|
103 |
|
|
|
119 | route_path('repo_create'), | |
|
104 | 120 | fixture._get_repo_create_params( |
|
105 | 121 | repo_private=False, |
|
106 | 122 | repo_name=repo_name, |
|
107 | 123 | repo_type=backend.alias, |
|
108 | 124 | repo_description=description, |
|
109 | 125 | csrf_token=csrf_token)) |
|
110 | 126 | |
|
111 | 127 | self.assert_repository_is_created_correctly( |
|
112 | 128 | repo_name, description, backend) |
|
113 | 129 | |
|
114 | 130 | @pytest.mark.parametrize("suffix", [u'', u'ąćę'], ids=['', 'non-ascii']) |
|
115 | 131 | def test_create_in_group( |
|
116 | 132 | self, autologin_user, backend, suffix, csrf_token): |
|
117 | 133 | # create GROUP |
|
118 | 134 | group_name = 'sometest_%s' % backend.alias |
|
119 | 135 | gr = RepoGroupModel().create(group_name=group_name, |
|
120 | 136 | group_description='test', |
|
121 | 137 | owner=TEST_USER_ADMIN_LOGIN) |
|
122 | 138 | Session().commit() |
|
123 | 139 | |
|
124 | 140 | repo_name = u'ingroup' + suffix |
|
125 | 141 | repo_name_full = RepoGroup.url_sep().join( |
|
126 | 142 | [group_name, repo_name]) |
|
127 | 143 | description = u'description for newly created repo' |
|
128 | 144 | self.app.post( |
|
129 |
|
|
|
145 | route_path('repo_create'), | |
|
130 | 146 | fixture._get_repo_create_params( |
|
131 | 147 | repo_private=False, |
|
132 | 148 | repo_name=safe_str(repo_name), |
|
133 | 149 | repo_type=backend.alias, |
|
134 | 150 | repo_description=description, |
|
135 | 151 | repo_group=gr.group_id, |
|
136 | 152 | csrf_token=csrf_token)) |
|
137 | 153 | |
|
138 | 154 | # TODO: johbo: Cleanup work to fixture |
|
139 | 155 | try: |
|
140 | 156 | self.assert_repository_is_created_correctly( |
|
141 | 157 | repo_name_full, description, backend) |
|
142 | 158 | |
|
143 | 159 | new_repo = RepoModel().get_by_repo_name(repo_name_full) |
|
144 | 160 | inherited_perms = UserRepoToPerm.query().filter( |
|
145 | 161 | UserRepoToPerm.repository_id == new_repo.repo_id).all() |
|
146 | 162 | assert len(inherited_perms) == 1 |
|
147 | 163 | finally: |
|
148 | 164 | RepoModel().delete(repo_name_full) |
|
149 | 165 | RepoGroupModel().delete(group_name) |
|
150 | 166 | Session().commit() |
|
151 | 167 | |
|
152 | def test_create_in_group_numeric( | |
|
168 | def test_create_in_group_numeric_name( | |
|
153 | 169 | self, autologin_user, backend, csrf_token): |
|
154 | 170 | # create GROUP |
|
155 | 171 | group_name = 'sometest_%s' % backend.alias |
|
156 | 172 | gr = RepoGroupModel().create(group_name=group_name, |
|
157 | 173 | group_description='test', |
|
158 | 174 | owner=TEST_USER_ADMIN_LOGIN) |
|
159 | 175 | Session().commit() |
|
160 | 176 | |
|
161 | 177 | repo_name = '12345' |
|
162 | 178 | repo_name_full = RepoGroup.url_sep().join([group_name, repo_name]) |
|
163 | 179 | description = 'description for newly created repo' |
|
164 | 180 | self.app.post( |
|
165 |
|
|
|
181 | route_path('repo_create'), | |
|
166 | 182 | fixture._get_repo_create_params( |
|
167 | 183 | repo_private=False, |
|
168 | 184 | repo_name=repo_name, |
|
169 | 185 | repo_type=backend.alias, |
|
170 | 186 | repo_description=description, |
|
171 | 187 | repo_group=gr.group_id, |
|
172 | 188 | csrf_token=csrf_token)) |
|
173 | 189 | |
|
174 | 190 | # TODO: johbo: Cleanup work to fixture |
|
175 | 191 | try: |
|
176 | 192 | self.assert_repository_is_created_correctly( |
|
177 | 193 | repo_name_full, description, backend) |
|
178 | 194 | |
|
179 | 195 | new_repo = RepoModel().get_by_repo_name(repo_name_full) |
|
180 | 196 | inherited_perms = UserRepoToPerm.query()\ |
|
181 | 197 | .filter(UserRepoToPerm.repository_id == new_repo.repo_id).all() |
|
182 | 198 | assert len(inherited_perms) == 1 |
|
183 | 199 | finally: |
|
184 | 200 | RepoModel().delete(repo_name_full) |
|
185 | 201 | RepoGroupModel().delete(group_name) |
|
186 | 202 | Session().commit() |
|
187 | 203 | |
|
188 | 204 | def test_create_in_group_without_needed_permissions(self, backend): |
|
189 | 205 | session = login_user_session( |
|
190 | 206 | self.app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS) |
|
191 | 207 | csrf_token = auth.get_csrf_token(session) |
|
192 | 208 | # revoke |
|
193 | 209 | user_model = UserModel() |
|
194 | 210 | # disable fork and create on default user |
|
195 | 211 | user_model.revoke_perm(User.DEFAULT_USER, 'hg.create.repository') |
|
196 | 212 | user_model.grant_perm(User.DEFAULT_USER, 'hg.create.none') |
|
197 | 213 | user_model.revoke_perm(User.DEFAULT_USER, 'hg.fork.repository') |
|
198 | 214 | user_model.grant_perm(User.DEFAULT_USER, 'hg.fork.none') |
|
199 | 215 | |
|
200 | 216 | # disable on regular user |
|
201 | 217 | user_model.revoke_perm(TEST_USER_REGULAR_LOGIN, 'hg.create.repository') |
|
202 | 218 | user_model.grant_perm(TEST_USER_REGULAR_LOGIN, 'hg.create.none') |
|
203 | 219 | user_model.revoke_perm(TEST_USER_REGULAR_LOGIN, 'hg.fork.repository') |
|
204 | 220 | user_model.grant_perm(TEST_USER_REGULAR_LOGIN, 'hg.fork.none') |
|
205 | 221 | Session().commit() |
|
206 | 222 | |
|
207 | 223 | # create GROUP |
|
208 | 224 | group_name = 'reg_sometest_%s' % backend.alias |
|
209 | 225 | gr = RepoGroupModel().create(group_name=group_name, |
|
210 | 226 | group_description='test', |
|
211 | 227 | owner=TEST_USER_ADMIN_LOGIN) |
|
212 | 228 | Session().commit() |
|
213 | 229 | |
|
214 | 230 | group_name_allowed = 'reg_sometest_allowed_%s' % backend.alias |
|
215 | 231 | gr_allowed = RepoGroupModel().create( |
|
216 | 232 | group_name=group_name_allowed, |
|
217 | 233 | group_description='test', |
|
218 | 234 | owner=TEST_USER_REGULAR_LOGIN) |
|
219 | 235 | Session().commit() |
|
220 | 236 | |
|
221 | 237 | repo_name = 'ingroup' |
|
222 | 238 | description = 'description for newly created repo' |
|
223 | 239 | response = self.app.post( |
|
224 |
|
|
|
240 | route_path('repo_create'), | |
|
225 | 241 | fixture._get_repo_create_params( |
|
226 | 242 | repo_private=False, |
|
227 | 243 | repo_name=repo_name, |
|
228 | 244 | repo_type=backend.alias, |
|
229 | 245 | repo_description=description, |
|
230 | 246 | repo_group=gr.group_id, |
|
231 | 247 | csrf_token=csrf_token)) |
|
232 | 248 | |
|
233 | 249 | response.mustcontain('Invalid value') |
|
234 | 250 | |
|
235 | 251 | # user is allowed to create in this group |
|
236 | 252 | repo_name = 'ingroup' |
|
237 | 253 | repo_name_full = RepoGroup.url_sep().join( |
|
238 | 254 | [group_name_allowed, repo_name]) |
|
239 | 255 | description = 'description for newly created repo' |
|
240 | 256 | response = self.app.post( |
|
241 |
|
|
|
257 | route_path('repo_create'), | |
|
242 | 258 | fixture._get_repo_create_params( |
|
243 | 259 | repo_private=False, |
|
244 | 260 | repo_name=repo_name, |
|
245 | 261 | repo_type=backend.alias, |
|
246 | 262 | repo_description=description, |
|
247 | 263 | repo_group=gr_allowed.group_id, |
|
248 | 264 | csrf_token=csrf_token)) |
|
249 | 265 | |
|
250 | 266 | # TODO: johbo: Cleanup in pytest fixture |
|
251 | 267 | try: |
|
252 | 268 | self.assert_repository_is_created_correctly( |
|
253 | 269 | repo_name_full, description, backend) |
|
254 | 270 | |
|
255 | 271 | new_repo = RepoModel().get_by_repo_name(repo_name_full) |
|
256 | 272 | inherited_perms = UserRepoToPerm.query().filter( |
|
257 | 273 | UserRepoToPerm.repository_id == new_repo.repo_id).all() |
|
258 | 274 | assert len(inherited_perms) == 1 |
|
259 | 275 | |
|
260 | 276 | assert repo_on_filesystem(repo_name_full) |
|
261 | 277 | finally: |
|
262 | 278 | RepoModel().delete(repo_name_full) |
|
263 | 279 | RepoGroupModel().delete(group_name) |
|
264 | 280 | RepoGroupModel().delete(group_name_allowed) |
|
265 | 281 | Session().commit() |
|
266 | 282 | |
|
267 | 283 | def test_create_in_group_inherit_permissions(self, autologin_user, backend, |
|
268 | 284 | csrf_token): |
|
269 | 285 | # create GROUP |
|
270 | 286 | group_name = 'sometest_%s' % backend.alias |
|
271 | 287 | gr = RepoGroupModel().create(group_name=group_name, |
|
272 | 288 | group_description='test', |
|
273 | 289 | owner=TEST_USER_ADMIN_LOGIN) |
|
274 | 290 | perm = Permission.get_by_key('repository.write') |
|
275 | 291 | RepoGroupModel().grant_user_permission( |
|
276 | 292 | gr, TEST_USER_REGULAR_LOGIN, perm) |
|
277 | 293 | |
|
278 | 294 | # add repo permissions |
|
279 | 295 | Session().commit() |
|
280 | 296 | |
|
281 | 297 | repo_name = 'ingroup_inherited_%s' % backend.alias |
|
282 | 298 | repo_name_full = RepoGroup.url_sep().join([group_name, repo_name]) |
|
283 | 299 | description = 'description for newly created repo' |
|
284 | 300 | self.app.post( |
|
285 |
|
|
|
301 | route_path('repo_create'), | |
|
286 | 302 | fixture._get_repo_create_params( |
|
287 | 303 | repo_private=False, |
|
288 | 304 | repo_name=repo_name, |
|
289 | 305 | repo_type=backend.alias, |
|
290 | 306 | repo_description=description, |
|
291 | 307 | repo_group=gr.group_id, |
|
292 | 308 | repo_copy_permissions=True, |
|
293 | 309 | csrf_token=csrf_token)) |
|
294 | 310 | |
|
295 | 311 | # TODO: johbo: Cleanup to pytest fixture |
|
296 | 312 | try: |
|
297 | 313 | self.assert_repository_is_created_correctly( |
|
298 | 314 | repo_name_full, description, backend) |
|
299 | 315 | except Exception: |
|
300 | 316 | RepoGroupModel().delete(group_name) |
|
301 | 317 | Session().commit() |
|
302 | 318 | raise |
|
303 | 319 | |
|
304 | 320 | # check if inherited permissions are applied |
|
305 | 321 | new_repo = RepoModel().get_by_repo_name(repo_name_full) |
|
306 | 322 | inherited_perms = UserRepoToPerm.query().filter( |
|
307 | 323 | UserRepoToPerm.repository_id == new_repo.repo_id).all() |
|
308 | 324 | assert len(inherited_perms) == 2 |
|
309 | 325 | |
|
310 | 326 | assert TEST_USER_REGULAR_LOGIN in [ |
|
311 | 327 | x.user.username for x in inherited_perms] |
|
312 | 328 | assert 'repository.write' in [ |
|
313 | 329 | x.permission.permission_name for x in inherited_perms] |
|
314 | 330 | |
|
315 | 331 | RepoModel().delete(repo_name_full) |
|
316 | 332 | RepoGroupModel().delete(group_name) |
|
317 | 333 | Session().commit() |
|
318 | 334 | |
|
319 | 335 | @pytest.mark.xfail_backends( |
|
320 | 336 | "git", "hg", reason="Missing reposerver support") |
|
321 | 337 | def test_create_with_clone_uri(self, autologin_user, backend, reposerver, |
|
322 | 338 | csrf_token): |
|
323 | 339 | source_repo = backend.create_repo(number_of_commits=2) |
|
324 | 340 | source_repo_name = source_repo.repo_name |
|
325 | 341 | reposerver.serve(source_repo.scm_instance()) |
|
326 | 342 | |
|
327 | 343 | repo_name = backend.new_repo_name() |
|
328 | 344 | response = self.app.post( |
|
329 |
|
|
|
345 | route_path('repo_create'), | |
|
330 | 346 | fixture._get_repo_create_params( |
|
331 | 347 | repo_private=False, |
|
332 | 348 | repo_name=repo_name, |
|
333 | 349 | repo_type=backend.alias, |
|
334 | 350 | repo_description='', |
|
335 | 351 | clone_uri=reposerver.url, |
|
336 | 352 | csrf_token=csrf_token), |
|
337 | 353 | status=302) |
|
338 | 354 | |
|
339 | 355 | # Should be redirected to the creating page |
|
340 | 356 | response.mustcontain('repo_creating') |
|
341 | 357 | |
|
342 | 358 | # Expecting that both repositories have same history |
|
343 | 359 | source_repo = RepoModel().get_by_repo_name(source_repo_name) |
|
344 | 360 | source_vcs = source_repo.scm_instance() |
|
345 | 361 | repo = RepoModel().get_by_repo_name(repo_name) |
|
346 | 362 | repo_vcs = repo.scm_instance() |
|
347 | 363 | assert source_vcs[0].message == repo_vcs[0].message |
|
348 | 364 | assert source_vcs.count() == repo_vcs.count() |
|
349 | 365 | assert source_vcs.commit_ids == repo_vcs.commit_ids |
|
350 | 366 | |
|
351 | 367 | @pytest.mark.xfail_backends("svn", reason="Depends on import support") |
|
352 | 368 | def test_create_remote_repo_wrong_clone_uri(self, autologin_user, backend, |
|
353 | 369 | csrf_token): |
|
354 | 370 | repo_name = backend.new_repo_name() |
|
355 | 371 | description = 'description for newly created repo' |
|
356 | 372 | response = self.app.post( |
|
357 |
|
|
|
373 | route_path('repo_create'), | |
|
358 | 374 | fixture._get_repo_create_params( |
|
359 | 375 | repo_private=False, |
|
360 | 376 | repo_name=repo_name, |
|
361 | 377 | repo_type=backend.alias, |
|
362 | 378 | repo_description=description, |
|
363 | 379 | clone_uri='http://repo.invalid/repo', |
|
364 | 380 | csrf_token=csrf_token)) |
|
365 | 381 | response.mustcontain('invalid clone url') |
|
366 | 382 | |
|
367 | 383 | @pytest.mark.xfail_backends("svn", reason="Depends on import support") |
|
368 | 384 | def test_create_remote_repo_wrong_clone_uri_hg_svn( |
|
369 | 385 | self, autologin_user, backend, csrf_token): |
|
370 | 386 | repo_name = backend.new_repo_name() |
|
371 | 387 | description = 'description for newly created repo' |
|
372 | 388 | response = self.app.post( |
|
373 |
|
|
|
389 | route_path('repo_create'), | |
|
374 | 390 | fixture._get_repo_create_params( |
|
375 | 391 | repo_private=False, |
|
376 | 392 | repo_name=repo_name, |
|
377 | 393 | repo_type=backend.alias, |
|
378 | 394 | repo_description=description, |
|
379 | 395 | clone_uri='svn+http://svn.invalid/repo', |
|
380 | 396 | csrf_token=csrf_token)) |
|
381 | 397 | response.mustcontain('invalid clone url') |
|
382 | 398 | |
|
383 | 399 | def test_create_with_git_suffix( |
|
384 | 400 | self, autologin_user, backend, csrf_token): |
|
385 | 401 | repo_name = backend.new_repo_name() + ".git" |
|
386 | 402 | description = 'description for newly created repo' |
|
387 | 403 | response = self.app.post( |
|
388 |
|
|
|
404 | route_path('repo_create'), | |
|
389 | 405 | fixture._get_repo_create_params( |
|
390 | 406 | repo_private=False, |
|
391 | 407 | repo_name=repo_name, |
|
392 | 408 | repo_type=backend.alias, |
|
393 | 409 | repo_description=description, |
|
394 | 410 | csrf_token=csrf_token)) |
|
395 | 411 | response.mustcontain('Repository name cannot end with .git') |
|
396 | 412 | |
|
397 | def test_show(self, autologin_user, backend): | |
|
398 | self.app.get(url('repo', repo_name=backend.repo_name)) | |
|
399 | ||
|
400 | 413 | def test_default_user_cannot_access_private_repo_in_a_group( |
|
401 |
self, autologin_user, user_util, backend |
|
|
414 | self, autologin_user, user_util, backend): | |
|
402 | 415 | |
|
403 | 416 | group = user_util.create_repo_group() |
|
404 | 417 | |
|
405 | 418 | repo = backend.create_repo( |
|
406 | 419 | repo_private=True, repo_group=group, repo_copy_permissions=True) |
|
407 | 420 | |
|
408 | 421 | permissions = _get_permission_for_user( |
|
409 | 422 | user='default', repo=repo.repo_name) |
|
410 | 423 | assert len(permissions) == 1 |
|
411 | 424 | assert permissions[0].permission.permission_name == 'repository.none' |
|
412 | 425 | assert permissions[0].repository.private is True |
|
413 | 426 | |
|
414 | 427 | def test_create_on_top_level_without_permissions(self, backend): |
|
415 | 428 | session = login_user_session( |
|
416 | 429 | self.app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS) |
|
417 | 430 | csrf_token = auth.get_csrf_token(session) |
|
418 | 431 | |
|
419 | 432 | # revoke |
|
420 | 433 | user_model = UserModel() |
|
421 | 434 | # disable fork and create on default user |
|
422 | 435 | user_model.revoke_perm(User.DEFAULT_USER, 'hg.create.repository') |
|
423 | 436 | user_model.grant_perm(User.DEFAULT_USER, 'hg.create.none') |
|
424 | 437 | user_model.revoke_perm(User.DEFAULT_USER, 'hg.fork.repository') |
|
425 | 438 | user_model.grant_perm(User.DEFAULT_USER, 'hg.fork.none') |
|
426 | 439 | |
|
427 | 440 | # disable on regular user |
|
428 | 441 | user_model.revoke_perm(TEST_USER_REGULAR_LOGIN, 'hg.create.repository') |
|
429 | 442 | user_model.grant_perm(TEST_USER_REGULAR_LOGIN, 'hg.create.none') |
|
430 | 443 | user_model.revoke_perm(TEST_USER_REGULAR_LOGIN, 'hg.fork.repository') |
|
431 | 444 | user_model.grant_perm(TEST_USER_REGULAR_LOGIN, 'hg.fork.none') |
|
432 | 445 | Session().commit() |
|
433 | 446 | |
|
434 | 447 | repo_name = backend.new_repo_name() |
|
435 | 448 | description = 'description for newly created repo' |
|
436 | 449 | response = self.app.post( |
|
437 |
|
|
|
450 | route_path('repo_create'), | |
|
438 | 451 | fixture._get_repo_create_params( |
|
439 | 452 | repo_private=False, |
|
440 | 453 | repo_name=repo_name, |
|
441 | 454 | repo_type=backend.alias, |
|
442 | 455 | repo_description=description, |
|
443 | 456 | csrf_token=csrf_token)) |
|
444 | 457 | |
|
445 | 458 | response.mustcontain( |
|
446 | 459 | u"You do not have the permission to store repositories in " |
|
447 | 460 | u"the root location.") |
|
448 | 461 | |
|
449 | 462 | @mock.patch.object(RepoModel, '_create_filesystem_repo', error_function) |
|
450 | 463 | def test_create_repo_when_filesystem_op_fails( |
|
451 | 464 | self, autologin_user, backend, csrf_token): |
|
452 | 465 | repo_name = backend.new_repo_name() |
|
453 | 466 | description = 'description for newly created repo' |
|
454 | 467 | |
|
455 | 468 | response = self.app.post( |
|
456 |
|
|
|
469 | route_path('repo_create'), | |
|
457 | 470 | fixture._get_repo_create_params( |
|
458 | 471 | repo_private=False, |
|
459 | 472 | repo_name=repo_name, |
|
460 | 473 | repo_type=backend.alias, |
|
461 | 474 | repo_description=description, |
|
462 | 475 | csrf_token=csrf_token)) |
|
463 | 476 | |
|
464 | 477 | assert_session_flash( |
|
465 | 478 | response, 'Error creating repository %s' % repo_name) |
|
466 | 479 | # repo must not be in db |
|
467 | 480 | assert backend.repo is None |
|
468 | 481 | # repo must not be in filesystem ! |
|
469 | 482 | assert not repo_on_filesystem(repo_name) |
|
470 | 483 | |
|
471 | 484 | def assert_repository_is_created_correctly( |
|
472 | 485 | self, repo_name, description, backend): |
|
473 | 486 | repo_name_utf8 = safe_str(repo_name) |
|
474 | 487 | |
|
475 | 488 | # run the check page that triggers the flash message |
|
476 | 489 | response = self.app.get( |
|
477 | 490 | route_path('repo_creating_check', repo_name=safe_str(repo_name))) |
|
478 | 491 | assert response.json == {u'result': True} |
|
479 | 492 | |
|
480 | 493 | flash_msg = u'Created repository <a href="/{}">{}</a>'.format( |
|
481 | 494 | urllib.quote(repo_name_utf8), repo_name) |
|
482 | 495 | assert_session_flash(response, flash_msg) |
|
483 | 496 | |
|
484 | 497 | # test if the repo was created in the database |
|
485 | 498 | new_repo = RepoModel().get_by_repo_name(repo_name) |
|
486 | 499 | |
|
487 | 500 | assert new_repo.repo_name == repo_name |
|
488 | 501 | assert new_repo.description == description |
|
489 | 502 | |
|
490 | 503 | # test if the repository is visible in the list ? |
|
491 | 504 | response = self.app.get( |
|
492 | 505 | h.route_path('repo_summary', repo_name=safe_str(repo_name))) |
|
493 | 506 | response.mustcontain(repo_name) |
|
494 | 507 | response.mustcontain(backend.alias) |
|
495 | 508 | |
|
496 | 509 | assert repo_on_filesystem(repo_name) |
|
497 | ||
|
498 | ||
|
499 | @pytest.mark.usefixtures("app") | |
|
500 | class TestVcsSettings(object): | |
|
501 | FORM_DATA = { | |
|
502 | 'inherit_global_settings': False, | |
|
503 | 'hooks_changegroup_repo_size': False, | |
|
504 | 'hooks_changegroup_push_logger': False, | |
|
505 | 'hooks_outgoing_pull_logger': False, | |
|
506 | 'extensions_largefiles': False, | |
|
507 | 'extensions_evolve': False, | |
|
508 | 'phases_publish': 'False', | |
|
509 | 'rhodecode_pr_merge_enabled': False, | |
|
510 | 'rhodecode_use_outdated_comments': False, | |
|
511 | 'new_svn_branch': '', | |
|
512 | 'new_svn_tag': '' | |
|
513 | } | |
|
514 | ||
|
515 | @pytest.mark.skip_backends('svn') | |
|
516 | def test_global_settings_initial_values(self, autologin_user, backend): | |
|
517 | repo_name = backend.repo_name | |
|
518 | response = self.app.get(url('repo_vcs_settings', repo_name=repo_name)) | |
|
519 | ||
|
520 | expected_settings = ( | |
|
521 | 'rhodecode_use_outdated_comments', 'rhodecode_pr_merge_enabled', | |
|
522 | 'hooks_changegroup_repo_size', 'hooks_changegroup_push_logger', | |
|
523 | 'hooks_outgoing_pull_logger' | |
|
524 | ) | |
|
525 | for setting in expected_settings: | |
|
526 | self.assert_repo_value_equals_global_value(response, setting) | |
|
527 | ||
|
528 | def test_show_settings_requires_repo_admin_permission( | |
|
529 | self, backend, user_util, settings_util): | |
|
530 | repo = backend.create_repo() | |
|
531 | repo_name = repo.repo_name | |
|
532 | user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN) | |
|
533 | user_util.grant_user_permission_to_repo(repo, user, 'repository.admin') | |
|
534 | login_user_session( | |
|
535 | self.app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS) | |
|
536 | self.app.get(url('repo_vcs_settings', repo_name=repo_name), status=200) | |
|
537 | ||
|
538 | def test_inherit_global_settings_flag_is_true_by_default( | |
|
539 | self, autologin_user, backend): | |
|
540 | repo_name = backend.repo_name | |
|
541 | response = self.app.get(url('repo_vcs_settings', repo_name=repo_name)) | |
|
542 | ||
|
543 | assert_response = AssertResponse(response) | |
|
544 | element = assert_response.get_element('#inherit_global_settings') | |
|
545 | assert element.checked | |
|
546 | ||
|
547 | @pytest.mark.parametrize('checked_value', [True, False]) | |
|
548 | def test_inherit_global_settings_value( | |
|
549 | self, autologin_user, backend, checked_value, settings_util): | |
|
550 | repo = backend.create_repo() | |
|
551 | repo_name = repo.repo_name | |
|
552 | settings_util.create_repo_rhodecode_setting( | |
|
553 | repo, 'inherit_vcs_settings', checked_value, 'bool') | |
|
554 | response = self.app.get(url('repo_vcs_settings', repo_name=repo_name)) | |
|
555 | ||
|
556 | assert_response = AssertResponse(response) | |
|
557 | element = assert_response.get_element('#inherit_global_settings') | |
|
558 | assert element.checked == checked_value | |
|
559 | ||
|
560 | @pytest.mark.skip_backends('svn') | |
|
561 | def test_hooks_settings_are_created( | |
|
562 | self, autologin_user, backend, csrf_token): | |
|
563 | repo_name = backend.repo_name | |
|
564 | data = self.FORM_DATA.copy() | |
|
565 | data['csrf_token'] = csrf_token | |
|
566 | self.app.post( | |
|
567 | url('repo_vcs_settings', repo_name=repo_name), data, status=302) | |
|
568 | settings = SettingsModel(repo=repo_name) | |
|
569 | try: | |
|
570 | for section, key in VcsSettingsModel.HOOKS_SETTINGS: | |
|
571 | ui = settings.get_ui_by_section_and_key(section, key) | |
|
572 | assert ui.ui_active is False | |
|
573 | finally: | |
|
574 | self._cleanup_repo_settings(settings) | |
|
575 | ||
|
576 | def test_hooks_settings_are_not_created_for_svn( | |
|
577 | self, autologin_user, backend_svn, csrf_token): | |
|
578 | repo_name = backend_svn.repo_name | |
|
579 | data = self.FORM_DATA.copy() | |
|
580 | data['csrf_token'] = csrf_token | |
|
581 | self.app.post( | |
|
582 | url('repo_vcs_settings', repo_name=repo_name), data, status=302) | |
|
583 | settings = SettingsModel(repo=repo_name) | |
|
584 | try: | |
|
585 | for section, key in VcsSettingsModel.HOOKS_SETTINGS: | |
|
586 | ui = settings.get_ui_by_section_and_key(section, key) | |
|
587 | assert ui is None | |
|
588 | finally: | |
|
589 | self._cleanup_repo_settings(settings) | |
|
590 | ||
|
591 | @pytest.mark.skip_backends('svn') | |
|
592 | def test_hooks_settings_are_updated( | |
|
593 | self, autologin_user, backend, csrf_token): | |
|
594 | repo_name = backend.repo_name | |
|
595 | settings = SettingsModel(repo=repo_name) | |
|
596 | for section, key in VcsSettingsModel.HOOKS_SETTINGS: | |
|
597 | settings.create_ui_section_value(section, '', key=key, active=True) | |
|
598 | ||
|
599 | data = self.FORM_DATA.copy() | |
|
600 | data['csrf_token'] = csrf_token | |
|
601 | self.app.post( | |
|
602 | url('repo_vcs_settings', repo_name=repo_name), data, status=302) | |
|
603 | try: | |
|
604 | for section, key in VcsSettingsModel.HOOKS_SETTINGS: | |
|
605 | ui = settings.get_ui_by_section_and_key(section, key) | |
|
606 | assert ui.ui_active is False | |
|
607 | finally: | |
|
608 | self._cleanup_repo_settings(settings) | |
|
609 | ||
|
610 | def test_hooks_settings_are_not_updated_for_svn( | |
|
611 | self, autologin_user, backend_svn, csrf_token): | |
|
612 | repo_name = backend_svn.repo_name | |
|
613 | settings = SettingsModel(repo=repo_name) | |
|
614 | for section, key in VcsSettingsModel.HOOKS_SETTINGS: | |
|
615 | settings.create_ui_section_value(section, '', key=key, active=True) | |
|
616 | ||
|
617 | data = self.FORM_DATA.copy() | |
|
618 | data['csrf_token'] = csrf_token | |
|
619 | self.app.post( | |
|
620 | url('repo_vcs_settings', repo_name=repo_name), data, status=302) | |
|
621 | try: | |
|
622 | for section, key in VcsSettingsModel.HOOKS_SETTINGS: | |
|
623 | ui = settings.get_ui_by_section_and_key(section, key) | |
|
624 | assert ui.ui_active is True | |
|
625 | finally: | |
|
626 | self._cleanup_repo_settings(settings) | |
|
627 | ||
|
628 | @pytest.mark.skip_backends('svn') | |
|
629 | def test_pr_settings_are_created( | |
|
630 | self, autologin_user, backend, csrf_token): | |
|
631 | repo_name = backend.repo_name | |
|
632 | data = self.FORM_DATA.copy() | |
|
633 | data['csrf_token'] = csrf_token | |
|
634 | self.app.post( | |
|
635 | url('repo_vcs_settings', repo_name=repo_name), data, status=302) | |
|
636 | settings = SettingsModel(repo=repo_name) | |
|
637 | try: | |
|
638 | for name in VcsSettingsModel.GENERAL_SETTINGS: | |
|
639 | setting = settings.get_setting_by_name(name) | |
|
640 | assert setting.app_settings_value is False | |
|
641 | finally: | |
|
642 | self._cleanup_repo_settings(settings) | |
|
643 | ||
|
644 | def test_pr_settings_are_not_created_for_svn( | |
|
645 | self, autologin_user, backend_svn, csrf_token): | |
|
646 | repo_name = backend_svn.repo_name | |
|
647 | data = self.FORM_DATA.copy() | |
|
648 | data['csrf_token'] = csrf_token | |
|
649 | self.app.post( | |
|
650 | url('repo_vcs_settings', repo_name=repo_name), data, status=302) | |
|
651 | settings = SettingsModel(repo=repo_name) | |
|
652 | try: | |
|
653 | for name in VcsSettingsModel.GENERAL_SETTINGS: | |
|
654 | setting = settings.get_setting_by_name(name) | |
|
655 | assert setting is None | |
|
656 | finally: | |
|
657 | self._cleanup_repo_settings(settings) | |
|
658 | ||
|
659 | def test_pr_settings_creation_requires_repo_admin_permission( | |
|
660 | self, backend, user_util, settings_util, csrf_token): | |
|
661 | repo = backend.create_repo() | |
|
662 | repo_name = repo.repo_name | |
|
663 | ||
|
664 | logout_user_session(self.app, csrf_token) | |
|
665 | session = login_user_session( | |
|
666 | self.app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS) | |
|
667 | new_csrf_token = auth.get_csrf_token(session) | |
|
668 | ||
|
669 | user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN) | |
|
670 | repo = Repository.get_by_repo_name(repo_name) | |
|
671 | user_util.grant_user_permission_to_repo(repo, user, 'repository.admin') | |
|
672 | data = self.FORM_DATA.copy() | |
|
673 | data['csrf_token'] = new_csrf_token | |
|
674 | settings = SettingsModel(repo=repo_name) | |
|
675 | ||
|
676 | try: | |
|
677 | self.app.post( | |
|
678 | url('repo_vcs_settings', repo_name=repo_name), data, | |
|
679 | status=302) | |
|
680 | finally: | |
|
681 | self._cleanup_repo_settings(settings) | |
|
682 | ||
|
683 | @pytest.mark.skip_backends('svn') | |
|
684 | def test_pr_settings_are_updated( | |
|
685 | self, autologin_user, backend, csrf_token): | |
|
686 | repo_name = backend.repo_name | |
|
687 | settings = SettingsModel(repo=repo_name) | |
|
688 | for name in VcsSettingsModel.GENERAL_SETTINGS: | |
|
689 | settings.create_or_update_setting(name, True, 'bool') | |
|
690 | ||
|
691 | data = self.FORM_DATA.copy() | |
|
692 | data['csrf_token'] = csrf_token | |
|
693 | self.app.post( | |
|
694 | url('repo_vcs_settings', repo_name=repo_name), data, status=302) | |
|
695 | try: | |
|
696 | for name in VcsSettingsModel.GENERAL_SETTINGS: | |
|
697 | setting = settings.get_setting_by_name(name) | |
|
698 | assert setting.app_settings_value is False | |
|
699 | finally: | |
|
700 | self._cleanup_repo_settings(settings) | |
|
701 | ||
|
702 | def test_pr_settings_are_not_updated_for_svn( | |
|
703 | self, autologin_user, backend_svn, csrf_token): | |
|
704 | repo_name = backend_svn.repo_name | |
|
705 | settings = SettingsModel(repo=repo_name) | |
|
706 | for name in VcsSettingsModel.GENERAL_SETTINGS: | |
|
707 | settings.create_or_update_setting(name, True, 'bool') | |
|
708 | ||
|
709 | data = self.FORM_DATA.copy() | |
|
710 | data['csrf_token'] = csrf_token | |
|
711 | self.app.post( | |
|
712 | url('repo_vcs_settings', repo_name=repo_name), data, status=302) | |
|
713 | try: | |
|
714 | for name in VcsSettingsModel.GENERAL_SETTINGS: | |
|
715 | setting = settings.get_setting_by_name(name) | |
|
716 | assert setting.app_settings_value is True | |
|
717 | finally: | |
|
718 | self._cleanup_repo_settings(settings) | |
|
719 | ||
|
720 | def test_svn_settings_are_created( | |
|
721 | self, autologin_user, backend_svn, csrf_token, settings_util): | |
|
722 | repo_name = backend_svn.repo_name | |
|
723 | data = self.FORM_DATA.copy() | |
|
724 | data['new_svn_tag'] = 'svn-tag' | |
|
725 | data['new_svn_branch'] = 'svn-branch' | |
|
726 | data['csrf_token'] = csrf_token | |
|
727 | ||
|
728 | # Create few global settings to make sure that uniqueness validators | |
|
729 | # are not triggered | |
|
730 | settings_util.create_rhodecode_ui( | |
|
731 | VcsSettingsModel.SVN_BRANCH_SECTION, 'svn-branch') | |
|
732 | settings_util.create_rhodecode_ui( | |
|
733 | VcsSettingsModel.SVN_TAG_SECTION, 'svn-tag') | |
|
734 | ||
|
735 | self.app.post( | |
|
736 | url('repo_vcs_settings', repo_name=repo_name), data, status=302) | |
|
737 | settings = SettingsModel(repo=repo_name) | |
|
738 | try: | |
|
739 | svn_branches = settings.get_ui_by_section( | |
|
740 | VcsSettingsModel.SVN_BRANCH_SECTION) | |
|
741 | svn_branch_names = [b.ui_value for b in svn_branches] | |
|
742 | svn_tags = settings.get_ui_by_section( | |
|
743 | VcsSettingsModel.SVN_TAG_SECTION) | |
|
744 | svn_tag_names = [b.ui_value for b in svn_tags] | |
|
745 | assert 'svn-branch' in svn_branch_names | |
|
746 | assert 'svn-tag' in svn_tag_names | |
|
747 | finally: | |
|
748 | self._cleanup_repo_settings(settings) | |
|
749 | ||
|
750 | def test_svn_settings_are_unique( | |
|
751 | self, autologin_user, backend_svn, csrf_token, settings_util): | |
|
752 | repo = backend_svn.repo | |
|
753 | repo_name = repo.repo_name | |
|
754 | data = self.FORM_DATA.copy() | |
|
755 | data['new_svn_tag'] = 'test_tag' | |
|
756 | data['new_svn_branch'] = 'test_branch' | |
|
757 | data['csrf_token'] = csrf_token | |
|
758 | settings_util.create_repo_rhodecode_ui( | |
|
759 | repo, VcsSettingsModel.SVN_BRANCH_SECTION, 'test_branch') | |
|
760 | settings_util.create_repo_rhodecode_ui( | |
|
761 | repo, VcsSettingsModel.SVN_TAG_SECTION, 'test_tag') | |
|
762 | ||
|
763 | response = self.app.post( | |
|
764 | url('repo_vcs_settings', repo_name=repo_name), data, status=200) | |
|
765 | response.mustcontain('Pattern already exists') | |
|
766 | ||
|
767 | def test_svn_settings_with_empty_values_are_not_created( | |
|
768 | self, autologin_user, backend_svn, csrf_token): | |
|
769 | repo_name = backend_svn.repo_name | |
|
770 | data = self.FORM_DATA.copy() | |
|
771 | data['csrf_token'] = csrf_token | |
|
772 | self.app.post( | |
|
773 | url('repo_vcs_settings', repo_name=repo_name), data, status=302) | |
|
774 | settings = SettingsModel(repo=repo_name) | |
|
775 | try: | |
|
776 | svn_branches = settings.get_ui_by_section( | |
|
777 | VcsSettingsModel.SVN_BRANCH_SECTION) | |
|
778 | svn_tags = settings.get_ui_by_section( | |
|
779 | VcsSettingsModel.SVN_TAG_SECTION) | |
|
780 | assert len(svn_branches) == 0 | |
|
781 | assert len(svn_tags) == 0 | |
|
782 | finally: | |
|
783 | self._cleanup_repo_settings(settings) | |
|
784 | ||
|
785 | def test_svn_settings_are_shown_for_svn_repository( | |
|
786 | self, autologin_user, backend_svn, csrf_token): | |
|
787 | repo_name = backend_svn.repo_name | |
|
788 | response = self.app.get( | |
|
789 | url('repo_vcs_settings', repo_name=repo_name), status=200) | |
|
790 | response.mustcontain('Subversion Settings') | |
|
791 | ||
|
792 | @pytest.mark.skip_backends('svn') | |
|
793 | def test_svn_settings_are_not_created_for_not_svn_repository( | |
|
794 | self, autologin_user, backend, csrf_token): | |
|
795 | repo_name = backend.repo_name | |
|
796 | data = self.FORM_DATA.copy() | |
|
797 | data['csrf_token'] = csrf_token | |
|
798 | self.app.post( | |
|
799 | url('repo_vcs_settings', repo_name=repo_name), data, status=302) | |
|
800 | settings = SettingsModel(repo=repo_name) | |
|
801 | try: | |
|
802 | svn_branches = settings.get_ui_by_section( | |
|
803 | VcsSettingsModel.SVN_BRANCH_SECTION) | |
|
804 | svn_tags = settings.get_ui_by_section( | |
|
805 | VcsSettingsModel.SVN_TAG_SECTION) | |
|
806 | assert len(svn_branches) == 0 | |
|
807 | assert len(svn_tags) == 0 | |
|
808 | finally: | |
|
809 | self._cleanup_repo_settings(settings) | |
|
810 | ||
|
811 | @pytest.mark.skip_backends('svn') | |
|
812 | def test_svn_settings_are_shown_only_for_svn_repository( | |
|
813 | self, autologin_user, backend, csrf_token): | |
|
814 | repo_name = backend.repo_name | |
|
815 | response = self.app.get( | |
|
816 | url('repo_vcs_settings', repo_name=repo_name), status=200) | |
|
817 | response.mustcontain(no='Subversion Settings') | |
|
818 | ||
|
819 | def test_hg_settings_are_created( | |
|
820 | self, autologin_user, backend_hg, csrf_token): | |
|
821 | repo_name = backend_hg.repo_name | |
|
822 | data = self.FORM_DATA.copy() | |
|
823 | data['new_svn_tag'] = 'svn-tag' | |
|
824 | data['new_svn_branch'] = 'svn-branch' | |
|
825 | data['csrf_token'] = csrf_token | |
|
826 | self.app.post( | |
|
827 | url('repo_vcs_settings', repo_name=repo_name), data, status=302) | |
|
828 | settings = SettingsModel(repo=repo_name) | |
|
829 | try: | |
|
830 | largefiles_ui = settings.get_ui_by_section_and_key( | |
|
831 | 'extensions', 'largefiles') | |
|
832 | assert largefiles_ui.ui_active is False | |
|
833 | phases_ui = settings.get_ui_by_section_and_key( | |
|
834 | 'phases', 'publish') | |
|
835 | assert str2bool(phases_ui.ui_value) is False | |
|
836 | finally: | |
|
837 | self._cleanup_repo_settings(settings) | |
|
838 | ||
|
839 | def test_hg_settings_are_updated( | |
|
840 | self, autologin_user, backend_hg, csrf_token): | |
|
841 | repo_name = backend_hg.repo_name | |
|
842 | settings = SettingsModel(repo=repo_name) | |
|
843 | settings.create_ui_section_value( | |
|
844 | 'extensions', '', key='largefiles', active=True) | |
|
845 | settings.create_ui_section_value( | |
|
846 | 'phases', '1', key='publish', active=True) | |
|
847 | ||
|
848 | data = self.FORM_DATA.copy() | |
|
849 | data['csrf_token'] = csrf_token | |
|
850 | self.app.post( | |
|
851 | url('repo_vcs_settings', repo_name=repo_name), data, status=302) | |
|
852 | try: | |
|
853 | largefiles_ui = settings.get_ui_by_section_and_key( | |
|
854 | 'extensions', 'largefiles') | |
|
855 | assert largefiles_ui.ui_active is False | |
|
856 | phases_ui = settings.get_ui_by_section_and_key( | |
|
857 | 'phases', 'publish') | |
|
858 | assert str2bool(phases_ui.ui_value) is False | |
|
859 | finally: | |
|
860 | self._cleanup_repo_settings(settings) | |
|
861 | ||
|
862 | def test_hg_settings_are_shown_for_hg_repository( | |
|
863 | self, autologin_user, backend_hg, csrf_token): | |
|
864 | repo_name = backend_hg.repo_name | |
|
865 | response = self.app.get( | |
|
866 | url('repo_vcs_settings', repo_name=repo_name), status=200) | |
|
867 | response.mustcontain('Mercurial Settings') | |
|
868 | ||
|
869 | @pytest.mark.skip_backends('hg') | |
|
870 | def test_hg_settings_are_created_only_for_hg_repository( | |
|
871 | self, autologin_user, backend, csrf_token): | |
|
872 | repo_name = backend.repo_name | |
|
873 | data = self.FORM_DATA.copy() | |
|
874 | data['csrf_token'] = csrf_token | |
|
875 | self.app.post( | |
|
876 | url('repo_vcs_settings', repo_name=repo_name), data, status=302) | |
|
877 | settings = SettingsModel(repo=repo_name) | |
|
878 | try: | |
|
879 | largefiles_ui = settings.get_ui_by_section_and_key( | |
|
880 | 'extensions', 'largefiles') | |
|
881 | assert largefiles_ui is None | |
|
882 | phases_ui = settings.get_ui_by_section_and_key( | |
|
883 | 'phases', 'publish') | |
|
884 | assert phases_ui is None | |
|
885 | finally: | |
|
886 | self._cleanup_repo_settings(settings) | |
|
887 | ||
|
888 | @pytest.mark.skip_backends('hg') | |
|
889 | def test_hg_settings_are_shown_only_for_hg_repository( | |
|
890 | self, autologin_user, backend, csrf_token): | |
|
891 | repo_name = backend.repo_name | |
|
892 | response = self.app.get( | |
|
893 | url('repo_vcs_settings', repo_name=repo_name), status=200) | |
|
894 | response.mustcontain(no='Mercurial Settings') | |
|
895 | ||
|
896 | @pytest.mark.skip_backends('hg') | |
|
897 | def test_hg_settings_are_updated_only_for_hg_repository( | |
|
898 | self, autologin_user, backend, csrf_token): | |
|
899 | repo_name = backend.repo_name | |
|
900 | settings = SettingsModel(repo=repo_name) | |
|
901 | settings.create_ui_section_value( | |
|
902 | 'extensions', '', key='largefiles', active=True) | |
|
903 | settings.create_ui_section_value( | |
|
904 | 'phases', '1', key='publish', active=True) | |
|
905 | ||
|
906 | data = self.FORM_DATA.copy() | |
|
907 | data['csrf_token'] = csrf_token | |
|
908 | self.app.post( | |
|
909 | url('repo_vcs_settings', repo_name=repo_name), data, status=302) | |
|
910 | try: | |
|
911 | largefiles_ui = settings.get_ui_by_section_and_key( | |
|
912 | 'extensions', 'largefiles') | |
|
913 | assert largefiles_ui.ui_active is True | |
|
914 | phases_ui = settings.get_ui_by_section_and_key( | |
|
915 | 'phases', 'publish') | |
|
916 | assert phases_ui.ui_value == '1' | |
|
917 | finally: | |
|
918 | self._cleanup_repo_settings(settings) | |
|
919 | ||
|
920 | def test_per_repo_svn_settings_are_displayed( | |
|
921 | self, autologin_user, backend_svn, settings_util): | |
|
922 | repo = backend_svn.create_repo() | |
|
923 | repo_name = repo.repo_name | |
|
924 | branches = [ | |
|
925 | settings_util.create_repo_rhodecode_ui( | |
|
926 | repo, VcsSettingsModel.SVN_BRANCH_SECTION, | |
|
927 | 'branch_{}'.format(i)) | |
|
928 | for i in range(10)] | |
|
929 | tags = [ | |
|
930 | settings_util.create_repo_rhodecode_ui( | |
|
931 | repo, VcsSettingsModel.SVN_TAG_SECTION, 'tag_{}'.format(i)) | |
|
932 | for i in range(10)] | |
|
933 | ||
|
934 | response = self.app.get( | |
|
935 | url('repo_vcs_settings', repo_name=repo_name), status=200) | |
|
936 | assert_response = AssertResponse(response) | |
|
937 | for branch in branches: | |
|
938 | css_selector = '[name=branch_value_{}]'.format(branch.ui_id) | |
|
939 | element = assert_response.get_element(css_selector) | |
|
940 | assert element.value == branch.ui_value | |
|
941 | for tag in tags: | |
|
942 | css_selector = '[name=tag_ui_value_new_{}]'.format(tag.ui_id) | |
|
943 | element = assert_response.get_element(css_selector) | |
|
944 | assert element.value == tag.ui_value | |
|
945 | ||
|
946 | def test_per_repo_hg_and_pr_settings_are_not_displayed_for_svn( | |
|
947 | self, autologin_user, backend_svn, settings_util): | |
|
948 | repo = backend_svn.create_repo() | |
|
949 | repo_name = repo.repo_name | |
|
950 | response = self.app.get( | |
|
951 | url('repo_vcs_settings', repo_name=repo_name), status=200) | |
|
952 | response.mustcontain(no='<label>Hooks:</label>') | |
|
953 | response.mustcontain(no='<label>Pull Request Settings:</label>') | |
|
954 | ||
|
955 | def test_inherit_global_settings_value_is_saved( | |
|
956 | self, autologin_user, backend, csrf_token): | |
|
957 | repo_name = backend.repo_name | |
|
958 | data = self.FORM_DATA.copy() | |
|
959 | data['csrf_token'] = csrf_token | |
|
960 | data['inherit_global_settings'] = True | |
|
961 | self.app.post( | |
|
962 | url('repo_vcs_settings', repo_name=repo_name), data, status=302) | |
|
963 | ||
|
964 | settings = SettingsModel(repo=repo_name) | |
|
965 | vcs_settings = VcsSettingsModel(repo=repo_name) | |
|
966 | try: | |
|
967 | assert vcs_settings.inherit_global_settings is True | |
|
968 | finally: | |
|
969 | self._cleanup_repo_settings(settings) | |
|
970 | ||
|
971 | def test_repo_cache_is_invalidated_when_settings_are_updated( | |
|
972 | self, autologin_user, backend, csrf_token): | |
|
973 | repo_name = backend.repo_name | |
|
974 | data = self.FORM_DATA.copy() | |
|
975 | data['csrf_token'] = csrf_token | |
|
976 | data['inherit_global_settings'] = True | |
|
977 | settings = SettingsModel(repo=repo_name) | |
|
978 | ||
|
979 | invalidation_patcher = mock.patch( | |
|
980 | 'rhodecode.controllers.admin.repos.ScmModel.mark_for_invalidation') | |
|
981 | with invalidation_patcher as invalidation_mock: | |
|
982 | self.app.post( | |
|
983 | url('repo_vcs_settings', repo_name=repo_name), data, | |
|
984 | status=302) | |
|
985 | try: | |
|
986 | invalidation_mock.assert_called_once_with(repo_name, delete=True) | |
|
987 | finally: | |
|
988 | self._cleanup_repo_settings(settings) | |
|
989 | ||
|
990 | def test_other_settings_not_saved_inherit_global_settings_is_true( | |
|
991 | self, autologin_user, backend, csrf_token): | |
|
992 | repo_name = backend.repo_name | |
|
993 | data = self.FORM_DATA.copy() | |
|
994 | data['csrf_token'] = csrf_token | |
|
995 | data['inherit_global_settings'] = True | |
|
996 | self.app.post( | |
|
997 | url('repo_vcs_settings', repo_name=repo_name), data, status=302) | |
|
998 | ||
|
999 | settings = SettingsModel(repo=repo_name) | |
|
1000 | ui_settings = ( | |
|
1001 | VcsSettingsModel.HOOKS_SETTINGS + VcsSettingsModel.HG_SETTINGS) | |
|
1002 | ||
|
1003 | vcs_settings = [] | |
|
1004 | try: | |
|
1005 | for section, key in ui_settings: | |
|
1006 | ui = settings.get_ui_by_section_and_key(section, key) | |
|
1007 | if ui: | |
|
1008 | vcs_settings.append(ui) | |
|
1009 | vcs_settings.extend(settings.get_ui_by_section( | |
|
1010 | VcsSettingsModel.SVN_BRANCH_SECTION)) | |
|
1011 | vcs_settings.extend(settings.get_ui_by_section( | |
|
1012 | VcsSettingsModel.SVN_TAG_SECTION)) | |
|
1013 | for name in VcsSettingsModel.GENERAL_SETTINGS: | |
|
1014 | setting = settings.get_setting_by_name(name) | |
|
1015 | if setting: | |
|
1016 | vcs_settings.append(setting) | |
|
1017 | assert vcs_settings == [] | |
|
1018 | finally: | |
|
1019 | self._cleanup_repo_settings(settings) | |
|
1020 | ||
|
1021 | def test_delete_svn_branch_and_tag_patterns( | |
|
1022 | self, autologin_user, backend_svn, settings_util, csrf_token): | |
|
1023 | repo = backend_svn.create_repo() | |
|
1024 | repo_name = repo.repo_name | |
|
1025 | branch = settings_util.create_repo_rhodecode_ui( | |
|
1026 | repo, VcsSettingsModel.SVN_BRANCH_SECTION, 'test_branch', | |
|
1027 | cleanup=False) | |
|
1028 | tag = settings_util.create_repo_rhodecode_ui( | |
|
1029 | repo, VcsSettingsModel.SVN_TAG_SECTION, 'test_tag', cleanup=False) | |
|
1030 | data = { | |
|
1031 | '_method': 'delete', | |
|
1032 | 'csrf_token': csrf_token | |
|
1033 | } | |
|
1034 | for id_ in (branch.ui_id, tag.ui_id): | |
|
1035 | data['delete_svn_pattern'] = id_, | |
|
1036 | self.app.post( | |
|
1037 | url('repo_vcs_settings', repo_name=repo_name), data, | |
|
1038 | headers={'X-REQUESTED-WITH': 'XMLHttpRequest', }, status=200) | |
|
1039 | settings = VcsSettingsModel(repo=repo_name) | |
|
1040 | assert settings.get_repo_svn_branch_patterns() == [] | |
|
1041 | ||
|
1042 | def test_delete_svn_branch_requires_repo_admin_permission( | |
|
1043 | self, backend_svn, user_util, settings_util, csrf_token): | |
|
1044 | repo = backend_svn.create_repo() | |
|
1045 | repo_name = repo.repo_name | |
|
1046 | ||
|
1047 | logout_user_session(self.app, csrf_token) | |
|
1048 | session = login_user_session( | |
|
1049 | self.app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS) | |
|
1050 | csrf_token = auth.get_csrf_token(session) | |
|
1051 | ||
|
1052 | repo = Repository.get_by_repo_name(repo_name) | |
|
1053 | user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN) | |
|
1054 | user_util.grant_user_permission_to_repo(repo, user, 'repository.admin') | |
|
1055 | branch = settings_util.create_repo_rhodecode_ui( | |
|
1056 | repo, VcsSettingsModel.SVN_BRANCH_SECTION, 'test_branch', | |
|
1057 | cleanup=False) | |
|
1058 | data = { | |
|
1059 | '_method': 'delete', | |
|
1060 | 'csrf_token': csrf_token, | |
|
1061 | 'delete_svn_pattern': branch.ui_id | |
|
1062 | } | |
|
1063 | self.app.post( | |
|
1064 | url('repo_vcs_settings', repo_name=repo_name), data, | |
|
1065 | headers={'X-REQUESTED-WITH': 'XMLHttpRequest', }, status=200) | |
|
1066 | ||
|
1067 | def test_delete_svn_branch_raises_400_when_not_found( | |
|
1068 | self, autologin_user, backend_svn, settings_util, csrf_token): | |
|
1069 | repo_name = backend_svn.repo_name | |
|
1070 | data = { | |
|
1071 | '_method': 'delete', | |
|
1072 | 'delete_svn_pattern': 123, | |
|
1073 | 'csrf_token': csrf_token | |
|
1074 | } | |
|
1075 | self.app.post( | |
|
1076 | url('repo_vcs_settings', repo_name=repo_name), data, | |
|
1077 | headers={'X-REQUESTED-WITH': 'XMLHttpRequest', }, status=400) | |
|
1078 | ||
|
1079 | def test_delete_svn_branch_raises_400_when_no_id_specified( | |
|
1080 | self, autologin_user, backend_svn, settings_util, csrf_token): | |
|
1081 | repo_name = backend_svn.repo_name | |
|
1082 | data = { | |
|
1083 | '_method': 'delete', | |
|
1084 | 'csrf_token': csrf_token | |
|
1085 | } | |
|
1086 | self.app.post( | |
|
1087 | url('repo_vcs_settings', repo_name=repo_name), data, | |
|
1088 | headers={'X-REQUESTED-WITH': 'XMLHttpRequest', }, status=400) | |
|
1089 | ||
|
1090 | def _cleanup_repo_settings(self, settings_model): | |
|
1091 | cleanup = [] | |
|
1092 | ui_settings = ( | |
|
1093 | VcsSettingsModel.HOOKS_SETTINGS + VcsSettingsModel.HG_SETTINGS) | |
|
1094 | ||
|
1095 | for section, key in ui_settings: | |
|
1096 | ui = settings_model.get_ui_by_section_and_key(section, key) | |
|
1097 | if ui: | |
|
1098 | cleanup.append(ui) | |
|
1099 | ||
|
1100 | cleanup.extend(settings_model.get_ui_by_section( | |
|
1101 | VcsSettingsModel.INHERIT_SETTINGS)) | |
|
1102 | cleanup.extend(settings_model.get_ui_by_section( | |
|
1103 | VcsSettingsModel.SVN_BRANCH_SECTION)) | |
|
1104 | cleanup.extend(settings_model.get_ui_by_section( | |
|
1105 | VcsSettingsModel.SVN_TAG_SECTION)) | |
|
1106 | ||
|
1107 | for name in VcsSettingsModel.GENERAL_SETTINGS: | |
|
1108 | setting = settings_model.get_setting_by_name(name) | |
|
1109 | if setting: | |
|
1110 | cleanup.append(setting) | |
|
1111 | ||
|
1112 | for object_ in cleanup: | |
|
1113 | Session().delete(object_) | |
|
1114 | Session().commit() | |
|
1115 | ||
|
1116 | def assert_repo_value_equals_global_value(self, response, setting): | |
|
1117 | assert_response = AssertResponse(response) | |
|
1118 | global_css_selector = '[name={}_inherited]'.format(setting) | |
|
1119 | repo_css_selector = '[name={}]'.format(setting) | |
|
1120 | repo_element = assert_response.get_element(repo_css_selector) | |
|
1121 | global_element = assert_response.get_element(global_css_selector) | |
|
1122 | assert repo_element.value == global_element.value | |
|
1123 | ||
|
1124 | ||
|
1125 | def _get_permission_for_user(user, repo): | |
|
1126 | perm = UserRepoToPerm.query()\ | |
|
1127 | .filter(UserRepoToPerm.repository == | |
|
1128 | Repository.get_by_repo_name(repo))\ | |
|
1129 | .filter(UserRepoToPerm.user == User.get_by_username(user))\ | |
|
1130 | .all() | |
|
1131 | return perm |
@@ -1,54 +1,54 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import collections |
|
22 | 22 | import logging |
|
23 | 23 | |
|
24 | 24 | |
|
25 | 25 | from pyramid.view import view_config |
|
26 | 26 | |
|
27 | 27 | from rhodecode.apps._base import BaseAppView |
|
28 | 28 | from rhodecode.apps.admin.navigation import navigation_list |
|
29 | 29 | from rhodecode.lib.auth import (LoginRequired, HasPermissionAllDecorator) |
|
30 | 30 | from rhodecode.lib.utils import read_opensource_licenses |
|
31 | 31 | |
|
32 | 32 | log = logging.getLogger(__name__) |
|
33 | 33 | |
|
34 | 34 | |
|
35 | 35 | class OpenSourceLicensesAdminSettingsView(BaseAppView): |
|
36 | 36 | |
|
37 | 37 | def load_default_context(self): |
|
38 | 38 | c = self._get_local_tmpl_context() |
|
39 | 39 | self._register_global_c(c) |
|
40 | 40 | return c |
|
41 | 41 | |
|
42 | 42 | @LoginRequired() |
|
43 | 43 | @HasPermissionAllDecorator('hg.admin') |
|
44 | 44 | @view_config( |
|
45 | 45 | route_name='admin_settings_open_source', request_method='GET', |
|
46 | 46 | renderer='rhodecode:templates/admin/settings/settings.mako') |
|
47 | 47 | def open_source_licenses(self): |
|
48 | 48 | c = self.load_default_context() |
|
49 | 49 | c.active = 'open_source' |
|
50 | 50 | c.navlist = navigation_list(self.request) |
|
51 | c.opensource_licenses = collections.OrderedDict( | |
|
52 | sorted(read_opensource_licenses().items(), key=lambda t: t[0])) | |
|
51 | items = sorted(read_opensource_licenses().items(), key=lambda t: t[0]) | |
|
52 | c.opensource_licenses = collections.OrderedDict(items) | |
|
53 | 53 | |
|
54 | 54 | return self._get_template_context(c) |
@@ -1,258 +1,256 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import logging |
|
22 | import datetime | |
|
23 | 22 | |
|
24 | 23 | from pyramid.httpexceptions import HTTPFound |
|
25 | 24 | from pyramid.view import view_config |
|
26 | 25 | |
|
27 | 26 | from rhodecode.model.scm import UserGroupList |
|
28 | 27 | |
|
29 | 28 | from rhodecode.apps._base import BaseAppView, DataGridAppView |
|
30 | 29 | from rhodecode.lib.auth import ( |
|
31 |
LoginRequired, |
|
|
30 | LoginRequired, NotAnonymous, | |
|
32 | 31 | HasUserGroupPermissionAnyDecorator) |
|
33 | 32 | from rhodecode.lib import helpers as h |
|
34 | 33 | from rhodecode.lib.utils import PartialRenderer |
|
35 |
from rhodecode.lib.utils2 import |
|
|
36 | from rhodecode.model.user_group import UserGroupModel | |
|
34 | from rhodecode.lib.utils2 import safe_unicode | |
|
37 | 35 | from rhodecode.model.db import ( |
|
38 | 36 | joinedload, or_, count, User, UserGroup, UserGroupMember, |
|
39 | 37 | UserGroupRepoToPerm, UserGroupRepoGroupToPerm) |
|
40 | 38 | from rhodecode.model.meta import Session |
|
41 | 39 | |
|
42 | 40 | log = logging.getLogger(__name__) |
|
43 | 41 | |
|
44 | 42 | |
|
45 | 43 | class AdminUserGroupsView(BaseAppView, DataGridAppView): |
|
46 | 44 | |
|
47 | 45 | def load_default_context(self): |
|
48 | 46 | c = self._get_local_tmpl_context() |
|
49 | 47 | self._register_global_c(c) |
|
50 | 48 | return c |
|
51 | 49 | |
|
52 | 50 | # permission check in data loading of |
|
53 | 51 | # `user_groups_list_data` via UserGroupList |
|
54 | 52 | @LoginRequired() |
|
55 | 53 | @NotAnonymous() |
|
56 | 54 | @view_config( |
|
57 | 55 | route_name='user_groups', request_method='GET', |
|
58 | 56 | renderer='rhodecode:templates/admin/user_groups/user_groups.mako') |
|
59 | 57 | def user_groups_list(self): |
|
60 | 58 | c = self.load_default_context() |
|
61 | 59 | return self._get_template_context(c) |
|
62 | 60 | |
|
63 | 61 | # permission check inside |
|
64 | 62 | @LoginRequired() |
|
65 | 63 | @NotAnonymous() |
|
66 | 64 | @view_config( |
|
67 | 65 | route_name='user_groups_data', request_method='GET', |
|
68 | 66 | renderer='json_ext', xhr=True) |
|
69 | 67 | def user_groups_list_data(self): |
|
70 | 68 | column_map = { |
|
71 | 69 | 'active': 'users_group_active', |
|
72 | 70 | 'description': 'user_group_description', |
|
73 | 71 | 'members': 'members_total', |
|
74 | 72 | 'owner': 'user_username', |
|
75 | 73 | 'sync': 'group_data' |
|
76 | 74 | } |
|
77 | 75 | draw, start, limit = self._extract_chunk(self.request) |
|
78 | 76 | search_q, order_by, order_dir = self._extract_ordering( |
|
79 | 77 | self.request, column_map=column_map) |
|
80 | 78 | |
|
81 | 79 | _render = PartialRenderer('data_table/_dt_elements.mako') |
|
82 | 80 | |
|
83 | 81 | def user_group_name(user_group_id, user_group_name): |
|
84 | 82 | return _render("user_group_name", user_group_id, user_group_name) |
|
85 | 83 | |
|
86 | 84 | def user_group_actions(user_group_id, user_group_name): |
|
87 | 85 | return _render("user_group_actions", user_group_id, user_group_name) |
|
88 | 86 | |
|
89 | 87 | def user_profile(username): |
|
90 | 88 | return _render('user_profile', username) |
|
91 | 89 | |
|
92 | 90 | auth_user_group_list = UserGroupList( |
|
93 | 91 | UserGroup.query().all(), perm_set=['usergroup.admin']) |
|
94 | 92 | |
|
95 | 93 | allowed_ids = [] |
|
96 | 94 | for user_group in auth_user_group_list: |
|
97 | 95 | allowed_ids.append(user_group.users_group_id) |
|
98 | 96 | |
|
99 | 97 | user_groups_data_total_count = UserGroup.query()\ |
|
100 | 98 | .filter(UserGroup.users_group_id.in_(allowed_ids))\ |
|
101 | 99 | .count() |
|
102 | 100 | |
|
103 | 101 | member_count = count(UserGroupMember.user_id) |
|
104 | 102 | base_q = Session.query( |
|
105 | 103 | UserGroup.users_group_name, |
|
106 | 104 | UserGroup.user_group_description, |
|
107 | 105 | UserGroup.users_group_active, |
|
108 | 106 | UserGroup.users_group_id, |
|
109 | 107 | UserGroup.group_data, |
|
110 | 108 | User, |
|
111 | 109 | member_count.label('member_count') |
|
112 | 110 | ) \ |
|
113 | 111 | .filter(UserGroup.users_group_id.in_(allowed_ids)) \ |
|
114 | 112 | .outerjoin(UserGroupMember) \ |
|
115 | 113 | .join(User, User.user_id == UserGroup.user_id) \ |
|
116 | 114 | .group_by(UserGroup, User) |
|
117 | 115 | |
|
118 | 116 | if search_q: |
|
119 | 117 | like_expression = u'%{}%'.format(safe_unicode(search_q)) |
|
120 | 118 | base_q = base_q.filter(or_( |
|
121 | 119 | UserGroup.users_group_name.ilike(like_expression), |
|
122 | 120 | )) |
|
123 | 121 | |
|
124 | 122 | user_groups_data_total_filtered_count = base_q.count() |
|
125 | 123 | |
|
126 | 124 | if order_by == 'members_total': |
|
127 | 125 | sort_col = member_count |
|
128 | 126 | elif order_by == 'user_username': |
|
129 | 127 | sort_col = User.username |
|
130 | 128 | else: |
|
131 | 129 | sort_col = getattr(UserGroup, order_by, None) |
|
132 | 130 | |
|
133 | 131 | if isinstance(sort_col, count) or sort_col: |
|
134 | 132 | if order_dir == 'asc': |
|
135 | 133 | sort_col = sort_col.asc() |
|
136 | 134 | else: |
|
137 | 135 | sort_col = sort_col.desc() |
|
138 | 136 | |
|
139 | 137 | base_q = base_q.order_by(sort_col) |
|
140 | 138 | base_q = base_q.offset(start).limit(limit) |
|
141 | 139 | |
|
142 | 140 | # authenticated access to user groups |
|
143 | 141 | auth_user_group_list = base_q.all() |
|
144 | 142 | |
|
145 | 143 | user_groups_data = [] |
|
146 | 144 | for user_gr in auth_user_group_list: |
|
147 | 145 | user_groups_data.append({ |
|
148 | 146 | "users_group_name": user_group_name( |
|
149 | 147 | user_gr.users_group_id, h.escape(user_gr.users_group_name)), |
|
150 | 148 | "name_raw": h.escape(user_gr.users_group_name), |
|
151 | 149 | "description": h.escape(user_gr.user_group_description), |
|
152 | 150 | "members": user_gr.member_count, |
|
153 | 151 | # NOTE(marcink): because of advanced query we |
|
154 | 152 | # need to load it like that |
|
155 | 153 | "sync": UserGroup._load_group_data( |
|
156 | 154 | user_gr.group_data).get('extern_type'), |
|
157 | 155 | "active": h.bool2icon(user_gr.users_group_active), |
|
158 | 156 | "owner": user_profile(user_gr.User.username), |
|
159 | 157 | "action": user_group_actions( |
|
160 | 158 | user_gr.users_group_id, user_gr.users_group_name) |
|
161 | 159 | }) |
|
162 | 160 | |
|
163 | 161 | data = ({ |
|
164 | 162 | 'draw': draw, |
|
165 | 163 | 'data': user_groups_data, |
|
166 | 164 | 'recordsTotal': user_groups_data_total_count, |
|
167 | 165 | 'recordsFiltered': user_groups_data_total_filtered_count, |
|
168 | 166 | }) |
|
169 | 167 | |
|
170 | 168 | return data |
|
171 | 169 | |
|
172 | 170 | @LoginRequired() |
|
173 | 171 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') |
|
174 | 172 | @view_config( |
|
175 | 173 | route_name='user_group_members_data', request_method='GET', |
|
176 | 174 | renderer='json_ext', xhr=True) |
|
177 | 175 | def user_group_members(self): |
|
178 | 176 | """ |
|
179 | 177 | Return members of given user group |
|
180 | 178 | """ |
|
181 | 179 | user_group_id = self.request.matchdict['user_group_id'] |
|
182 | 180 | user_group = UserGroup.get_or_404(user_group_id) |
|
183 | 181 | group_members_obj = sorted((x.user for x in user_group.members), |
|
184 | 182 | key=lambda u: u.username.lower()) |
|
185 | 183 | |
|
186 | 184 | group_members = [ |
|
187 | 185 | { |
|
188 | 186 | 'id': user.user_id, |
|
189 | 187 | 'first_name': user.first_name, |
|
190 | 188 | 'last_name': user.last_name, |
|
191 | 189 | 'username': user.username, |
|
192 | 190 | 'icon_link': h.gravatar_url(user.email, 30), |
|
193 | 191 | 'value_display': h.person(user.email), |
|
194 | 192 | 'value': user.username, |
|
195 | 193 | 'value_type': 'user', |
|
196 | 194 | 'active': user.active, |
|
197 | 195 | } |
|
198 | 196 | for user in group_members_obj |
|
199 | 197 | ] |
|
200 | 198 | |
|
201 | 199 | return { |
|
202 | 200 | 'members': group_members |
|
203 | 201 | } |
|
204 | 202 | |
|
205 | 203 | def _get_perms_summary(self, user_group_id): |
|
206 | 204 | permissions = { |
|
207 | 205 | 'repositories': {}, |
|
208 | 206 | 'repositories_groups': {}, |
|
209 | 207 | } |
|
210 | 208 | ugroup_repo_perms = UserGroupRepoToPerm.query()\ |
|
211 | 209 | .options(joinedload(UserGroupRepoToPerm.permission))\ |
|
212 | 210 | .options(joinedload(UserGroupRepoToPerm.repository))\ |
|
213 | 211 | .filter(UserGroupRepoToPerm.users_group_id == user_group_id)\ |
|
214 | 212 | .all() |
|
215 | 213 | |
|
216 | 214 | for gr in ugroup_repo_perms: |
|
217 | 215 | permissions['repositories'][gr.repository.repo_name] \ |
|
218 | 216 | = gr.permission.permission_name |
|
219 | 217 | |
|
220 | 218 | ugroup_group_perms = UserGroupRepoGroupToPerm.query()\ |
|
221 | 219 | .options(joinedload(UserGroupRepoGroupToPerm.permission))\ |
|
222 | 220 | .options(joinedload(UserGroupRepoGroupToPerm.group))\ |
|
223 | 221 | .filter(UserGroupRepoGroupToPerm.users_group_id == user_group_id)\ |
|
224 | 222 | .all() |
|
225 | 223 | |
|
226 | 224 | for gr in ugroup_group_perms: |
|
227 | 225 | permissions['repositories_groups'][gr.group.group_name] \ |
|
228 | 226 | = gr.permission.permission_name |
|
229 | 227 | return permissions |
|
230 | 228 | |
|
231 | 229 | @LoginRequired() |
|
232 | 230 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') |
|
233 | 231 | @view_config( |
|
234 | 232 | route_name='edit_user_group_perms_summary', request_method='GET', |
|
235 | 233 | renderer='rhodecode:templates/admin/user_groups/user_group_edit.mako') |
|
236 | 234 | def user_group_perms_summary(self): |
|
237 | 235 | c = self.load_default_context() |
|
238 | 236 | |
|
239 | 237 | user_group_id = self.request.matchdict.get('user_group_id') |
|
240 | 238 | c.user_group = UserGroup.get_or_404(user_group_id) |
|
241 | 239 | |
|
242 | 240 | c.active = 'perms_summary' |
|
243 | 241 | |
|
244 | 242 | c.permissions = self._get_perms_summary(c.user_group.users_group_id) |
|
245 | 243 | return self._get_template_context(c) |
|
246 | 244 | |
|
247 | 245 | @LoginRequired() |
|
248 | 246 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') |
|
249 | 247 | @view_config( |
|
250 | 248 | route_name='edit_user_group_perms_summary_json', request_method='GET', |
|
251 | 249 | renderer='json_ext') |
|
252 | 250 | def user_group_perms_summary(self): |
|
253 | 251 | self.load_default_context() |
|
254 | 252 | |
|
255 | 253 | user_group_id = self.request.matchdict.get('user_group_id') |
|
256 | 254 | user_group = UserGroup.get_or_404(user_group_id) |
|
257 | 255 | |
|
258 | 256 | return self._get_perms_summary(user_group.users_group_id) |
@@ -1,387 +1,447 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | from rhodecode.apps._base import add_route_with_slash |
|
21 | 21 | |
|
22 | 22 | |
|
23 | 23 | def includeme(config): |
|
24 | 24 | |
|
25 | 25 | # repo creating checks, special cases that aren't repo routes |
|
26 | 26 | config.add_route( |
|
27 | 27 | name='repo_creating', |
|
28 | 28 | pattern='/{repo_name:.*?[^/]}/repo_creating') |
|
29 | 29 | |
|
30 | 30 | config.add_route( |
|
31 | 31 | name='repo_creating_check', |
|
32 | 32 | pattern='/{repo_name:.*?[^/]}/repo_creating_check') |
|
33 | 33 | |
|
34 | 34 | # Summary |
|
35 | 35 | # NOTE(marcink): one additional route is defined in very bottom, catch |
|
36 | 36 | # all pattern |
|
37 | 37 | config.add_route( |
|
38 | 38 | name='repo_summary_explicit', |
|
39 | 39 | pattern='/{repo_name:.*?[^/]}/summary', repo_route=True) |
|
40 | 40 | config.add_route( |
|
41 | 41 | name='repo_summary_commits', |
|
42 | 42 | pattern='/{repo_name:.*?[^/]}/summary-commits', repo_route=True) |
|
43 | 43 | |
|
44 | 44 | # Commits |
|
45 | 45 | config.add_route( |
|
46 | 46 | name='repo_commit', |
|
47 | 47 | pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}', repo_route=True) |
|
48 | 48 | |
|
49 | 49 | config.add_route( |
|
50 | 50 | name='repo_commit_children', |
|
51 | 51 | pattern='/{repo_name:.*?[^/]}/changeset_children/{commit_id}', repo_route=True) |
|
52 | 52 | |
|
53 | 53 | config.add_route( |
|
54 | 54 | name='repo_commit_parents', |
|
55 | 55 | pattern='/{repo_name:.*?[^/]}/changeset_parents/{commit_id}', repo_route=True) |
|
56 | 56 | |
|
57 | 57 | config.add_route( |
|
58 | 58 | name='repo_commit_raw', |
|
59 | 59 | pattern='/{repo_name:.*?[^/]}/changeset-diff/{commit_id}', repo_route=True) |
|
60 | 60 | |
|
61 | 61 | config.add_route( |
|
62 | 62 | name='repo_commit_patch', |
|
63 | 63 | pattern='/{repo_name:.*?[^/]}/changeset-patch/{commit_id}', repo_route=True) |
|
64 | 64 | |
|
65 | 65 | config.add_route( |
|
66 | 66 | name='repo_commit_download', |
|
67 | 67 | pattern='/{repo_name:.*?[^/]}/changeset-download/{commit_id}', repo_route=True) |
|
68 | 68 | |
|
69 | 69 | config.add_route( |
|
70 | 70 | name='repo_commit_data', |
|
71 | 71 | pattern='/{repo_name:.*?[^/]}/changeset-data/{commit_id}', repo_route=True) |
|
72 | 72 | |
|
73 | 73 | config.add_route( |
|
74 | 74 | name='repo_commit_comment_create', |
|
75 | 75 | pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/create', repo_route=True) |
|
76 | 76 | |
|
77 | 77 | config.add_route( |
|
78 | 78 | name='repo_commit_comment_preview', |
|
79 | 79 | pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/preview', repo_route=True) |
|
80 | 80 | |
|
81 | 81 | config.add_route( |
|
82 | 82 | name='repo_commit_comment_delete', |
|
83 | 83 | pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/{comment_id}/delete', repo_route=True) |
|
84 | 84 | |
|
85 | 85 | # still working url for backward compat. |
|
86 | 86 | config.add_route( |
|
87 | 87 | name='repo_commit_raw_deprecated', |
|
88 | 88 | pattern='/{repo_name:.*?[^/]}/raw-changeset/{commit_id}', repo_route=True) |
|
89 | 89 | |
|
90 | 90 | # Files |
|
91 | 91 | config.add_route( |
|
92 | 92 | name='repo_archivefile', |
|
93 | 93 | pattern='/{repo_name:.*?[^/]}/archive/{fname}', repo_route=True) |
|
94 | 94 | |
|
95 | 95 | config.add_route( |
|
96 | 96 | name='repo_files_diff', |
|
97 | 97 | pattern='/{repo_name:.*?[^/]}/diff/{f_path:.*}', repo_route=True) |
|
98 | 98 | config.add_route( # legacy route to make old links work |
|
99 | 99 | name='repo_files_diff_2way_redirect', |
|
100 | 100 | pattern='/{repo_name:.*?[^/]}/diff-2way/{f_path:.*}', repo_route=True) |
|
101 | 101 | |
|
102 | 102 | config.add_route( |
|
103 | 103 | name='repo_files', |
|
104 | 104 | pattern='/{repo_name:.*?[^/]}/files/{commit_id}/{f_path:.*}', repo_route=True) |
|
105 | 105 | config.add_route( |
|
106 | 106 | name='repo_files:default_path', |
|
107 | 107 | pattern='/{repo_name:.*?[^/]}/files/{commit_id}/', repo_route=True) |
|
108 | 108 | config.add_route( |
|
109 | 109 | name='repo_files:default_commit', |
|
110 | 110 | pattern='/{repo_name:.*?[^/]}/files', repo_route=True) |
|
111 | 111 | |
|
112 | 112 | config.add_route( |
|
113 | 113 | name='repo_files:rendered', |
|
114 | 114 | pattern='/{repo_name:.*?[^/]}/render/{commit_id}/{f_path:.*}', repo_route=True) |
|
115 | 115 | |
|
116 | 116 | config.add_route( |
|
117 | 117 | name='repo_files:annotated', |
|
118 | 118 | pattern='/{repo_name:.*?[^/]}/annotate/{commit_id}/{f_path:.*}', repo_route=True) |
|
119 | 119 | config.add_route( |
|
120 | 120 | name='repo_files:annotated_previous', |
|
121 | 121 | pattern='/{repo_name:.*?[^/]}/annotate-previous/{commit_id}/{f_path:.*}', repo_route=True) |
|
122 | 122 | |
|
123 | 123 | config.add_route( |
|
124 | 124 | name='repo_nodetree_full', |
|
125 | 125 | pattern='/{repo_name:.*?[^/]}/nodetree_full/{commit_id}/{f_path:.*}', repo_route=True) |
|
126 | 126 | config.add_route( |
|
127 | 127 | name='repo_nodetree_full:default_path', |
|
128 | 128 | pattern='/{repo_name:.*?[^/]}/nodetree_full/{commit_id}/', repo_route=True) |
|
129 | 129 | |
|
130 | 130 | config.add_route( |
|
131 | 131 | name='repo_files_nodelist', |
|
132 | 132 | pattern='/{repo_name:.*?[^/]}/nodelist/{commit_id}/{f_path:.*}', repo_route=True) |
|
133 | 133 | |
|
134 | 134 | config.add_route( |
|
135 | 135 | name='repo_file_raw', |
|
136 | 136 | pattern='/{repo_name:.*?[^/]}/raw/{commit_id}/{f_path:.*}', repo_route=True) |
|
137 | 137 | |
|
138 | 138 | config.add_route( |
|
139 | 139 | name='repo_file_download', |
|
140 | 140 | pattern='/{repo_name:.*?[^/]}/download/{commit_id}/{f_path:.*}', repo_route=True) |
|
141 | 141 | config.add_route( # backward compat to keep old links working |
|
142 | 142 | name='repo_file_download:legacy', |
|
143 | 143 | pattern='/{repo_name:.*?[^/]}/rawfile/{commit_id}/{f_path:.*}', |
|
144 | 144 | repo_route=True) |
|
145 | 145 | |
|
146 | 146 | config.add_route( |
|
147 | 147 | name='repo_file_history', |
|
148 | 148 | pattern='/{repo_name:.*?[^/]}/history/{commit_id}/{f_path:.*}', repo_route=True) |
|
149 | 149 | |
|
150 | 150 | config.add_route( |
|
151 | 151 | name='repo_file_authors', |
|
152 | 152 | pattern='/{repo_name:.*?[^/]}/authors/{commit_id}/{f_path:.*}', repo_route=True) |
|
153 | 153 | |
|
154 | 154 | config.add_route( |
|
155 | 155 | name='repo_files_remove_file', |
|
156 | 156 | pattern='/{repo_name:.*?[^/]}/remove_file/{commit_id}/{f_path:.*}', |
|
157 | 157 | repo_route=True) |
|
158 | 158 | config.add_route( |
|
159 | 159 | name='repo_files_delete_file', |
|
160 | 160 | pattern='/{repo_name:.*?[^/]}/delete_file/{commit_id}/{f_path:.*}', |
|
161 | 161 | repo_route=True) |
|
162 | 162 | config.add_route( |
|
163 | 163 | name='repo_files_edit_file', |
|
164 | 164 | pattern='/{repo_name:.*?[^/]}/edit_file/{commit_id}/{f_path:.*}', |
|
165 | 165 | repo_route=True) |
|
166 | 166 | config.add_route( |
|
167 | 167 | name='repo_files_update_file', |
|
168 | 168 | pattern='/{repo_name:.*?[^/]}/update_file/{commit_id}/{f_path:.*}', |
|
169 | 169 | repo_route=True) |
|
170 | 170 | config.add_route( |
|
171 | 171 | name='repo_files_add_file', |
|
172 | 172 | pattern='/{repo_name:.*?[^/]}/add_file/{commit_id}/{f_path:.*}', |
|
173 | 173 | repo_route=True) |
|
174 | 174 | config.add_route( |
|
175 | 175 | name='repo_files_create_file', |
|
176 | 176 | pattern='/{repo_name:.*?[^/]}/create_file/{commit_id}/{f_path:.*}', |
|
177 | 177 | repo_route=True) |
|
178 | 178 | |
|
179 | 179 | # Refs data |
|
180 | 180 | config.add_route( |
|
181 | 181 | name='repo_refs_data', |
|
182 | 182 | pattern='/{repo_name:.*?[^/]}/refs-data', repo_route=True) |
|
183 | 183 | |
|
184 | 184 | config.add_route( |
|
185 | 185 | name='repo_refs_changelog_data', |
|
186 | 186 | pattern='/{repo_name:.*?[^/]}/refs-data-changelog', repo_route=True) |
|
187 | 187 | |
|
188 | 188 | config.add_route( |
|
189 | 189 | name='repo_stats', |
|
190 | 190 | pattern='/{repo_name:.*?[^/]}/repo_stats/{commit_id}', repo_route=True) |
|
191 | 191 | |
|
192 | 192 | # Changelog |
|
193 | 193 | config.add_route( |
|
194 | 194 | name='repo_changelog', |
|
195 | 195 | pattern='/{repo_name:.*?[^/]}/changelog', repo_route=True) |
|
196 | 196 | config.add_route( |
|
197 | 197 | name='repo_changelog_file', |
|
198 | 198 | pattern='/{repo_name:.*?[^/]}/changelog/{commit_id}/{f_path:.*}', repo_route=True) |
|
199 | 199 | config.add_route( |
|
200 | 200 | name='repo_changelog_elements', |
|
201 | 201 | pattern='/{repo_name:.*?[^/]}/changelog_elements', repo_route=True) |
|
202 | 202 | |
|
203 | 203 | # Compare |
|
204 | 204 | config.add_route( |
|
205 | 205 | name='repo_compare_select', |
|
206 | 206 | pattern='/{repo_name:.*?[^/]}/compare', repo_route=True) |
|
207 | 207 | |
|
208 | 208 | config.add_route( |
|
209 | 209 | name='repo_compare', |
|
210 | 210 | pattern='/{repo_name:.*?[^/]}/compare/{source_ref_type}@{source_ref:.*?}...{target_ref_type}@{target_ref:.*?}', repo_route=True) |
|
211 | 211 | |
|
212 | 212 | # Tags |
|
213 | 213 | config.add_route( |
|
214 | 214 | name='tags_home', |
|
215 | 215 | pattern='/{repo_name:.*?[^/]}/tags', repo_route=True) |
|
216 | 216 | |
|
217 | 217 | # Branches |
|
218 | 218 | config.add_route( |
|
219 | 219 | name='branches_home', |
|
220 | 220 | pattern='/{repo_name:.*?[^/]}/branches', repo_route=True) |
|
221 | 221 | |
|
222 | 222 | # Bookmarks |
|
223 | 223 | config.add_route( |
|
224 | 224 | name='bookmarks_home', |
|
225 | 225 | pattern='/{repo_name:.*?[^/]}/bookmarks', repo_route=True) |
|
226 | 226 | |
|
227 | 227 | # Forks |
|
228 | 228 | config.add_route( |
|
229 | 229 | name='repo_fork_new', |
|
230 | 230 | pattern='/{repo_name:.*?[^/]}/fork', repo_route=True, |
|
231 | 231 | repo_accepted_types=['hg', 'git']) |
|
232 | 232 | |
|
233 | 233 | config.add_route( |
|
234 | 234 | name='repo_fork_create', |
|
235 | 235 | pattern='/{repo_name:.*?[^/]}/fork/create', repo_route=True, |
|
236 | 236 | repo_accepted_types=['hg', 'git']) |
|
237 | 237 | |
|
238 | 238 | config.add_route( |
|
239 | 239 | name='repo_forks_show_all', |
|
240 | 240 | pattern='/{repo_name:.*?[^/]}/forks', repo_route=True, |
|
241 | 241 | repo_accepted_types=['hg', 'git']) |
|
242 | 242 | config.add_route( |
|
243 | 243 | name='repo_forks_data', |
|
244 | 244 | pattern='/{repo_name:.*?[^/]}/forks/data', repo_route=True, |
|
245 | 245 | repo_accepted_types=['hg', 'git']) |
|
246 | 246 | |
|
247 | 247 | # Pull Requests |
|
248 | 248 | config.add_route( |
|
249 | 249 | name='pullrequest_show', |
|
250 | 250 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}', |
|
251 | 251 | repo_route=True) |
|
252 | 252 | |
|
253 | 253 | config.add_route( |
|
254 | 254 | name='pullrequest_show_all', |
|
255 | 255 | pattern='/{repo_name:.*?[^/]}/pull-request', |
|
256 | 256 | repo_route=True, repo_accepted_types=['hg', 'git']) |
|
257 | 257 | |
|
258 | 258 | config.add_route( |
|
259 | 259 | name='pullrequest_show_all_data', |
|
260 | 260 | pattern='/{repo_name:.*?[^/]}/pull-request-data', |
|
261 | 261 | repo_route=True, repo_accepted_types=['hg', 'git']) |
|
262 | 262 | |
|
263 | 263 | config.add_route( |
|
264 | 264 | name='pullrequest_repo_refs', |
|
265 | 265 | pattern='/{repo_name:.*?[^/]}/pull-request/refs/{target_repo_name:.*?[^/]}', |
|
266 | 266 | repo_route=True) |
|
267 | 267 | |
|
268 | 268 | config.add_route( |
|
269 | 269 | name='pullrequest_repo_destinations', |
|
270 | 270 | pattern='/{repo_name:.*?[^/]}/pull-request/repo-destinations', |
|
271 | 271 | repo_route=True) |
|
272 | 272 | |
|
273 | 273 | config.add_route( |
|
274 | 274 | name='pullrequest_new', |
|
275 | 275 | pattern='/{repo_name:.*?[^/]}/pull-request/new', |
|
276 | 276 | repo_route=True, repo_accepted_types=['hg', 'git']) |
|
277 | 277 | |
|
278 | 278 | config.add_route( |
|
279 | 279 | name='pullrequest_create', |
|
280 | 280 | pattern='/{repo_name:.*?[^/]}/pull-request/create', |
|
281 | 281 | repo_route=True, repo_accepted_types=['hg', 'git']) |
|
282 | 282 | |
|
283 | 283 | config.add_route( |
|
284 | 284 | name='pullrequest_update', |
|
285 | 285 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/update', |
|
286 | 286 | repo_route=True) |
|
287 | 287 | |
|
288 | 288 | config.add_route( |
|
289 | 289 | name='pullrequest_merge', |
|
290 | 290 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/merge', |
|
291 | 291 | repo_route=True) |
|
292 | 292 | |
|
293 | 293 | config.add_route( |
|
294 | 294 | name='pullrequest_delete', |
|
295 | 295 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/delete', |
|
296 | 296 | repo_route=True) |
|
297 | 297 | |
|
298 | 298 | config.add_route( |
|
299 | 299 | name='pullrequest_comment_create', |
|
300 | 300 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment', |
|
301 | 301 | repo_route=True) |
|
302 | 302 | |
|
303 | 303 | config.add_route( |
|
304 | 304 | name='pullrequest_comment_delete', |
|
305 | 305 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment/{comment_id}/delete', |
|
306 | 306 | repo_route=True, repo_accepted_types=['hg', 'git']) |
|
307 | 307 | |
|
308 | 308 | # Settings |
|
309 | 309 | config.add_route( |
|
310 | 310 | name='edit_repo', |
|
311 | 311 | pattern='/{repo_name:.*?[^/]}/settings', repo_route=True) |
|
312 | 312 | |
|
313 | 313 | # Settings advanced |
|
314 | 314 | config.add_route( |
|
315 | 315 | name='edit_repo_advanced', |
|
316 | 316 | pattern='/{repo_name:.*?[^/]}/settings/advanced', repo_route=True) |
|
317 | 317 | config.add_route( |
|
318 | 318 | name='edit_repo_advanced_delete', |
|
319 | 319 | pattern='/{repo_name:.*?[^/]}/settings/advanced/delete', repo_route=True) |
|
320 | 320 | config.add_route( |
|
321 | 321 | name='edit_repo_advanced_locking', |
|
322 | 322 | pattern='/{repo_name:.*?[^/]}/settings/advanced/locking', repo_route=True) |
|
323 | 323 | config.add_route( |
|
324 | 324 | name='edit_repo_advanced_journal', |
|
325 | 325 | pattern='/{repo_name:.*?[^/]}/settings/advanced/journal', repo_route=True) |
|
326 | 326 | config.add_route( |
|
327 | 327 | name='edit_repo_advanced_fork', |
|
328 | 328 | pattern='/{repo_name:.*?[^/]}/settings/advanced/fork', repo_route=True) |
|
329 | 329 | |
|
330 | 330 | # Caches |
|
331 | 331 | config.add_route( |
|
332 | 332 | name='edit_repo_caches', |
|
333 | 333 | pattern='/{repo_name:.*?[^/]}/settings/caches', repo_route=True) |
|
334 | 334 | |
|
335 | 335 | # Permissions |
|
336 | 336 | config.add_route( |
|
337 | 337 | name='edit_repo_perms', |
|
338 | 338 | pattern='/{repo_name:.*?[^/]}/settings/permissions', repo_route=True) |
|
339 | 339 | |
|
340 | # Repo Review Rules | |
|
340 | # Maintenance | |
|
341 | config.add_route( | |
|
342 | name='edit_repo_maintenance', | |
|
343 | pattern='/{repo_name:.*?[^/]}/settings/maintenance', repo_route=True) | |
|
344 | ||
|
345 | config.add_route( | |
|
346 | name='edit_repo_maintenance_execute', | |
|
347 | pattern='/{repo_name:.*?[^/]}/settings/maintenance/execute', repo_route=True) | |
|
348 | ||
|
349 | # Fields | |
|
350 | config.add_route( | |
|
351 | name='edit_repo_fields', | |
|
352 | pattern='/{repo_name:.*?[^/]}/settings/fields', repo_route=True) | |
|
353 | config.add_route( | |
|
354 | name='edit_repo_fields_create', | |
|
355 | pattern='/{repo_name:.*?[^/]}/settings/fields/create', repo_route=True) | |
|
356 | config.add_route( | |
|
357 | name='edit_repo_fields_delete', | |
|
358 | pattern='/{repo_name:.*?[^/]}/settings/fields/{field_id}/delete', repo_route=True) | |
|
359 | ||
|
360 | # Locking | |
|
361 | config.add_route( | |
|
362 | name='repo_edit_toggle_locking', | |
|
363 | pattern='/{repo_name:.*?[^/]}/settings/toggle_locking', repo_route=True) | |
|
364 | ||
|
365 | # Remote | |
|
366 | config.add_route( | |
|
367 | name='edit_repo_remote', | |
|
368 | pattern='/{repo_name:.*?[^/]}/settings/remote', repo_route=True) | |
|
369 | config.add_route( | |
|
370 | name='edit_repo_remote_pull', | |
|
371 | pattern='/{repo_name:.*?[^/]}/settings/remote/pull', repo_route=True) | |
|
372 | ||
|
373 | ||
|
374 | # Statistics | |
|
375 | config.add_route( | |
|
376 | name='edit_repo_statistics', | |
|
377 | pattern='/{repo_name:.*?[^/]}/settings/statistics', repo_route=True) | |
|
378 | config.add_route( | |
|
379 | name='edit_repo_statistics_reset', | |
|
380 | pattern='/{repo_name:.*?[^/]}/settings/statistics/update', repo_route=True) | |
|
381 | ||
|
382 | # Issue trackers | |
|
383 | config.add_route( | |
|
384 | name='edit_repo_issuetracker', | |
|
385 | pattern='/{repo_name:.*?[^/]}/settings/issue_trackers', repo_route=True) | |
|
386 | config.add_route( | |
|
387 | name='edit_repo_issuetracker_test', | |
|
388 | pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/test', repo_route=True) | |
|
389 | config.add_route( | |
|
390 | name='edit_repo_issuetracker_delete', | |
|
391 | pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/delete', repo_route=True) | |
|
392 | config.add_route( | |
|
393 | name='edit_repo_issuetracker_update', | |
|
394 | pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/update', repo_route=True) | |
|
395 | ||
|
396 | # VCS Settings | |
|
397 | config.add_route( | |
|
398 | name='edit_repo_vcs', | |
|
399 | pattern='/{repo_name:.*?[^/]}/settings/vcs', repo_route=True) | |
|
400 | config.add_route( | |
|
401 | name='edit_repo_vcs_update', | |
|
402 | pattern='/{repo_name:.*?[^/]}/settings/vcs/update', repo_route=True) | |
|
403 | ||
|
404 | # svn pattern | |
|
405 | config.add_route( | |
|
406 | name='edit_repo_vcs_svn_pattern_delete', | |
|
407 | pattern='/{repo_name:.*?[^/]}/settings/vcs/svn_pattern/delete', repo_route=True) | |
|
408 | ||
|
409 | # Repo Review Rules (EE feature) | |
|
341 | 410 | config.add_route( |
|
342 | 411 | name='repo_reviewers', |
|
343 | 412 | pattern='/{repo_name:.*?[^/]}/settings/review/rules', repo_route=True) |
|
344 | 413 | |
|
345 | 414 | config.add_route( |
|
346 | 415 | name='repo_default_reviewers_data', |
|
347 | 416 | pattern='/{repo_name:.*?[^/]}/settings/review/default-reviewers', repo_route=True) |
|
348 | 417 | |
|
349 | # Maintenance | |
|
350 | config.add_route( | |
|
351 | name='repo_maintenance', | |
|
352 | pattern='/{repo_name:.*?[^/]}/settings/maintenance', repo_route=True) | |
|
353 | ||
|
354 | config.add_route( | |
|
355 | name='repo_maintenance_execute', | |
|
356 | pattern='/{repo_name:.*?[^/]}/settings/maintenance/execute', repo_route=True) | |
|
357 | ||
|
358 | 418 | # Strip |
|
359 | 419 | config.add_route( |
|
360 | name='strip', | |
|
420 | name='edit_repo_strip', | |
|
361 | 421 | pattern='/{repo_name:.*?[^/]}/settings/strip', repo_route=True) |
|
362 | 422 | |
|
363 | 423 | config.add_route( |
|
364 | 424 | name='strip_check', |
|
365 | 425 | pattern='/{repo_name:.*?[^/]}/settings/strip_check', repo_route=True) |
|
366 | 426 | |
|
367 | 427 | config.add_route( |
|
368 | 428 | name='strip_execute', |
|
369 | 429 | pattern='/{repo_name:.*?[^/]}/settings/strip_execute', repo_route=True) |
|
370 | 430 | |
|
371 | 431 | # ATOM/RSS Feed |
|
372 | 432 | config.add_route( |
|
373 | 433 | name='rss_feed_home', |
|
374 | 434 | pattern='/{repo_name:.*?[^/]}/feed/rss', repo_route=True) |
|
375 | 435 | |
|
376 | 436 | config.add_route( |
|
377 | 437 | name='atom_feed_home', |
|
378 | 438 | pattern='/{repo_name:.*?[^/]}/feed/atom', repo_route=True) |
|
379 | 439 | |
|
380 | 440 | # NOTE(marcink): needs to be at the end for catch-all |
|
381 | 441 | add_route_with_slash( |
|
382 | 442 | config, |
|
383 | 443 | name='repo_summary', |
|
384 | 444 | pattern='/{repo_name:.*?[^/]}', repo_route=True) |
|
385 | 445 | |
|
386 | 446 | # Scan module for configuration decorators. |
|
387 | 447 | config.scan('.views', ignore='.tests') |
@@ -1,122 +1,148 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import pytest |
|
22 | 22 | |
|
23 | 23 | from rhodecode.lib.utils2 import md5 |
|
24 | 24 | from rhodecode.model.db import Repository |
|
25 | 25 | from rhodecode.model.meta import Session |
|
26 | 26 | from rhodecode.model.settings import SettingsModel, IssueTrackerSettingsModel |
|
27 | from rhodecode.tests import url | |
|
27 | ||
|
28 | ||
|
29 | def route_path(name, params=None, **kwargs): | |
|
30 | import urllib | |
|
31 | ||
|
32 | base_url = { | |
|
33 | 'repo_summary': '/{repo_name}', | |
|
34 | 'edit_repo_issuetracker': '/{repo_name}/settings/issue_trackers', | |
|
35 | 'edit_repo_issuetracker_test': '/{repo_name}/settings/issue_trackers/test', | |
|
36 | 'edit_repo_issuetracker_delete': '/{repo_name}/settings/issue_trackers/delete', | |
|
37 | 'edit_repo_issuetracker_update': '/{repo_name}/settings/issue_trackers/update', | |
|
38 | }[name].format(**kwargs) | |
|
39 | ||
|
40 | if params: | |
|
41 | base_url = '{}?{}'.format(base_url, urllib.urlencode(params)) | |
|
42 | return base_url | |
|
28 | 43 | |
|
29 | 44 | |
|
30 | 45 | @pytest.mark.usefixtures("app") |
|
31 | class TestAdminRepos: | |
|
46 | class TestRepoIssueTracker(object): | |
|
32 | 47 | def test_issuetracker_index(self, autologin_user, backend): |
|
33 | 48 | repo = backend.create_repo() |
|
34 |
response = self.app.get( |
|
|
49 | response = self.app.get(route_path('edit_repo_issuetracker', | |
|
35 | 50 | repo_name=repo.repo_name)) |
|
36 | 51 | assert response.status_code == 200 |
|
37 | 52 | |
|
38 | def test_add_issuetracker_patterns( | |
|
39 | self, autologin_user, backend, csrf_token, request): | |
|
53 | def test_add_and_test_issuetracker_patterns( | |
|
54 | self, autologin_user, backend, csrf_token, request, xhr_header): | |
|
40 | 55 | pattern = 'issuetracker_pat' |
|
41 | 56 | another_pattern = pattern+'1' |
|
42 | post_url = url('repo_issuetracker_save', | |
|
43 |
|
|
|
57 | post_url = route_path( | |
|
58 | 'edit_repo_issuetracker_update', repo_name=backend.repo.repo_name) | |
|
44 | 59 | post_data = { |
|
45 | 60 | 'new_pattern_pattern_0': pattern, |
|
46 | 61 | 'new_pattern_url_0': 'url', |
|
47 | 62 | 'new_pattern_prefix_0': 'prefix', |
|
48 | 63 | 'new_pattern_description_0': 'description', |
|
49 | 64 | 'new_pattern_pattern_1': another_pattern, |
|
50 | 65 | 'new_pattern_url_1': 'url1', |
|
51 | 66 | 'new_pattern_prefix_1': 'prefix1', |
|
52 | 67 | 'new_pattern_description_1': 'description1', |
|
53 | 68 | 'csrf_token': csrf_token |
|
54 | 69 | } |
|
55 | 70 | self.app.post(post_url, post_data, status=302) |
|
56 | 71 | self.settings_model = IssueTrackerSettingsModel(repo=backend.repo) |
|
57 | 72 | settings = self.settings_model.get_repo_settings() |
|
58 | 73 | self.uid = md5(pattern) |
|
59 | 74 | assert settings[self.uid]['pat'] == pattern |
|
60 | 75 | self.another_uid = md5(another_pattern) |
|
61 | 76 | assert settings[self.another_uid]['pat'] == another_pattern |
|
62 | 77 | |
|
78 | # test pattern | |
|
79 | data = {'test_text': 'example of issuetracker_pat replacement', | |
|
80 | 'csrf_token': csrf_token} | |
|
81 | response = self.app.post( | |
|
82 | route_path('edit_repo_issuetracker_test', | |
|
83 | repo_name=backend.repo.repo_name), | |
|
84 | extra_environ=xhr_header, params=data) | |
|
85 | ||
|
86 | assert response.body == \ | |
|
87 | 'example of <a class="issue-tracker-link" href="url">prefix</a> replacement' | |
|
88 | ||
|
63 | 89 | @request.addfinalizer |
|
64 | 90 | def cleanup(): |
|
65 | 91 | self.settings_model.delete_entries(self.uid) |
|
66 | 92 | self.settings_model.delete_entries(self.another_uid) |
|
67 | 93 | |
|
68 | 94 | def test_edit_issuetracker_pattern( |
|
69 | 95 | self, autologin_user, backend, csrf_token, request): |
|
70 | 96 | entry_key = 'issuetracker_pat_' |
|
71 | 97 | pattern = 'issuetracker_pat2' |
|
72 | 98 | old_pattern = 'issuetracker_pat' |
|
73 | 99 | old_uid = md5(old_pattern) |
|
74 | 100 | |
|
75 | 101 | sett = SettingsModel(repo=backend.repo).create_or_update_setting( |
|
76 | 102 | entry_key+old_uid, old_pattern, 'unicode') |
|
77 | 103 | Session().add(sett) |
|
78 | 104 | Session().commit() |
|
79 | post_url = url('repo_issuetracker_save', | |
|
80 |
|
|
|
105 | post_url = route_path( | |
|
106 | 'edit_repo_issuetracker_update', repo_name=backend.repo.repo_name) | |
|
81 | 107 | post_data = { |
|
82 | 108 | 'new_pattern_pattern_0': pattern, |
|
83 | 109 | 'new_pattern_url_0': 'url', |
|
84 | 110 | 'new_pattern_prefix_0': 'prefix', |
|
85 | 111 | 'new_pattern_description_0': 'description', |
|
86 | 112 | 'uid': old_uid, |
|
87 | 113 | 'csrf_token': csrf_token |
|
88 | 114 | } |
|
89 | 115 | self.app.post(post_url, post_data, status=302) |
|
90 | 116 | self.settings_model = IssueTrackerSettingsModel(repo=backend.repo) |
|
91 | 117 | settings = self.settings_model.get_repo_settings() |
|
92 | 118 | self.uid = md5(pattern) |
|
93 | 119 | assert settings[self.uid]['pat'] == pattern |
|
94 | 120 | with pytest.raises(KeyError): |
|
95 | settings[old_uid] | |
|
121 | key = settings[old_uid] | |
|
96 | 122 | |
|
97 | 123 | @request.addfinalizer |
|
98 | 124 | def cleanup(): |
|
99 | 125 | self.settings_model.delete_entries(self.uid) |
|
100 | 126 | |
|
101 | 127 | def test_delete_issuetracker_pattern( |
|
102 | 128 | self, autologin_user, backend, csrf_token, settings_util): |
|
103 | 129 | repo = backend.create_repo() |
|
104 | 130 | repo_name = repo.repo_name |
|
105 | 131 | entry_key = 'issuetracker_pat_' |
|
106 | 132 | pattern = 'issuetracker_pat3' |
|
107 | 133 | uid = md5(pattern) |
|
108 | 134 | settings_util.create_repo_rhodecode_setting( |
|
109 | 135 | repo=backend.repo, name=entry_key+uid, |
|
110 | 136 | value=entry_key, type_='unicode', cleanup=False) |
|
111 | 137 | |
|
112 | 138 | self.app.post( |
|
113 | url('repo_issuetracker_delete', | |
|
139 | route_path( | |
|
140 | 'edit_repo_issuetracker_delete', | |
|
114 | 141 | repo_name=backend.repo.repo_name), |
|
115 | 142 | { |
|
116 | '_method': 'delete', | |
|
117 | 143 | 'uid': uid, |
|
118 | 144 | 'csrf_token': csrf_token |
|
119 | 145 | }, status=302) |
|
120 | 146 | settings = IssueTrackerSettingsModel( |
|
121 | 147 | repo=Repository.get_by_repo_name(repo_name)).get_repo_settings() |
|
122 | 148 | assert 'rhodecode_%s%s' % (entry_key, uid) not in settings |
@@ -1,233 +1,232 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import mock |
|
22 | 22 | import pytest |
|
23 | 23 | |
|
24 | 24 | from rhodecode.lib.utils2 import str2bool |
|
25 | 25 | from rhodecode.lib.vcs.exceptions import RepositoryRequirementError |
|
26 | 26 | from rhodecode.model.db import Repository, UserRepoToPerm, Permission, User |
|
27 | 27 | from rhodecode.model.meta import Session |
|
28 | 28 | from rhodecode.tests import ( |
|
29 |
|
|
|
30 | assert_session_flash) | |
|
29 | TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, assert_session_flash) | |
|
31 | 30 | from rhodecode.tests.fixture import Fixture |
|
32 | 31 | |
|
33 | 32 | fixture = Fixture() |
|
34 | 33 | |
|
35 | 34 | |
|
36 | 35 | def route_path(name, params=None, **kwargs): |
|
37 | 36 | import urllib |
|
38 | 37 | |
|
39 | 38 | base_url = { |
|
40 | 39 | 'edit_repo': '/{repo_name}/settings', |
|
41 | 40 | 'edit_repo_advanced': '/{repo_name}/settings/advanced', |
|
42 | 41 | 'edit_repo_caches': '/{repo_name}/settings/caches', |
|
43 | 42 | 'edit_repo_perms': '/{repo_name}/settings/permissions', |
|
43 | 'edit_repo_vcs': '/{repo_name}/settings/vcs', | |
|
44 | 'edit_repo_issuetracker': '/{repo_name}/settings/issue_trackers', | |
|
45 | 'edit_repo_fields': '/{repo_name}/settings/fields', | |
|
46 | 'edit_repo_remote': '/{repo_name}/settings/remote', | |
|
47 | 'edit_repo_statistics': '/{repo_name}/settings/statistics', | |
|
44 | 48 | }[name].format(**kwargs) |
|
45 | 49 | |
|
46 | 50 | if params: |
|
47 | 51 | base_url = '{}?{}'.format(base_url, urllib.urlencode(params)) |
|
48 | 52 | return base_url |
|
49 | 53 | |
|
50 | 54 | |
|
51 | 55 | def _get_permission_for_user(user, repo): |
|
52 | 56 | perm = UserRepoToPerm.query()\ |
|
53 | 57 | .filter(UserRepoToPerm.repository == |
|
54 | 58 | Repository.get_by_repo_name(repo))\ |
|
55 | 59 | .filter(UserRepoToPerm.user == User.get_by_username(user))\ |
|
56 | 60 | .all() |
|
57 | 61 | return perm |
|
58 | 62 | |
|
59 | 63 | |
|
60 | 64 | @pytest.mark.usefixtures('autologin_user', 'app') |
|
61 | 65 | class TestAdminRepoSettings(object): |
|
62 | 66 | @pytest.mark.parametrize('urlname', [ |
|
63 | 67 | 'edit_repo', |
|
64 | 68 | 'edit_repo_caches', |
|
65 | 69 | 'edit_repo_perms', |
|
66 | 70 | 'edit_repo_advanced', |
|
71 | 'edit_repo_vcs', | |
|
72 | 'edit_repo_issuetracker', | |
|
73 | 'edit_repo_fields', | |
|
74 | 'edit_repo_remote', | |
|
75 | 'edit_repo_statistics', | |
|
67 | 76 | ]) |
|
68 | 77 | def test_show_page(self, urlname, app, backend): |
|
69 | 78 | app.get(route_path(urlname, repo_name=backend.repo_name), status=200) |
|
70 | 79 | |
|
71 | 80 | def test_edit_accessible_when_missing_requirements( |
|
72 | 81 | self, backend_hg, autologin_user): |
|
73 | 82 | scm_patcher = mock.patch.object( |
|
74 | 83 | Repository, 'scm_instance', side_effect=RepositoryRequirementError) |
|
75 | 84 | with scm_patcher: |
|
76 | 85 | self.app.get(route_path('edit_repo', repo_name=backend_hg.repo_name)) |
|
77 | 86 | |
|
78 | @pytest.mark.parametrize('urlname', [ | |
|
79 | 'repo_vcs_settings', | |
|
80 | 'repo_settings_issuetracker', | |
|
81 | 'edit_repo_fields', | |
|
82 | 'edit_repo_remote', | |
|
83 | 'edit_repo_statistics', | |
|
84 | ]) | |
|
85 | def test_show_page_pylons(self, urlname, app): | |
|
86 | app.get(url(urlname, repo_name=HG_REPO)) | |
|
87 | ||
|
88 | 87 | @pytest.mark.parametrize('update_settings', [ |
|
89 | 88 | {'repo_description': 'alter-desc'}, |
|
90 | 89 | {'repo_owner': TEST_USER_REGULAR_LOGIN}, |
|
91 | 90 | {'repo_private': 'true'}, |
|
92 | 91 | {'repo_enable_locking': 'true'}, |
|
93 | 92 | {'repo_enable_downloads': 'true'}, |
|
94 | 93 | ]) |
|
95 | 94 | def test_update_repo_settings(self, update_settings, csrf_token, backend, user_util): |
|
96 | 95 | repo = user_util.create_repo(repo_type=backend.alias) |
|
97 | 96 | repo_name = repo.repo_name |
|
98 | 97 | |
|
99 | 98 | params = fixture._get_repo_create_params( |
|
100 | 99 | csrf_token=csrf_token, |
|
101 | 100 | repo_name=repo_name, |
|
102 | 101 | repo_type=backend.alias, |
|
103 | 102 | repo_owner=TEST_USER_ADMIN_LOGIN, |
|
104 | 103 | repo_description='DESC', |
|
105 | 104 | |
|
106 | 105 | repo_private='false', |
|
107 | 106 | repo_enable_locking='false', |
|
108 | 107 | repo_enable_downloads='false') |
|
109 | 108 | params.update(update_settings) |
|
110 | 109 | self.app.post( |
|
111 | 110 | route_path('edit_repo', repo_name=repo_name), |
|
112 | 111 | params=params, status=302) |
|
113 | 112 | |
|
114 | 113 | repo = Repository.get_by_repo_name(repo_name) |
|
115 | 114 | assert repo.user.username == \ |
|
116 | 115 | update_settings.get('repo_owner', repo.user.username) |
|
117 | 116 | |
|
118 | 117 | assert repo.description == \ |
|
119 | 118 | update_settings.get('repo_description', repo.description) |
|
120 | 119 | |
|
121 | 120 | assert repo.private == \ |
|
122 | 121 | str2bool(update_settings.get( |
|
123 | 122 | 'repo_private', repo.private)) |
|
124 | 123 | |
|
125 | 124 | assert repo.enable_locking == \ |
|
126 | 125 | str2bool(update_settings.get( |
|
127 | 126 | 'repo_enable_locking', repo.enable_locking)) |
|
128 | 127 | |
|
129 | 128 | assert repo.enable_downloads == \ |
|
130 | 129 | str2bool(update_settings.get( |
|
131 | 130 | 'repo_enable_downloads', repo.enable_downloads)) |
|
132 | 131 | |
|
133 | 132 | def test_update_repo_name_via_settings(self, csrf_token, user_util, backend): |
|
134 | 133 | repo = user_util.create_repo(repo_type=backend.alias) |
|
135 | 134 | repo_name = repo.repo_name |
|
136 | 135 | |
|
137 | 136 | repo_group = user_util.create_repo_group() |
|
138 | 137 | repo_group_name = repo_group.group_name |
|
139 | 138 | new_name = repo_group_name + '_' + repo_name |
|
140 | 139 | |
|
141 | 140 | params = fixture._get_repo_create_params( |
|
142 | 141 | csrf_token=csrf_token, |
|
143 | 142 | repo_name=new_name, |
|
144 | 143 | repo_type=backend.alias, |
|
145 | 144 | repo_owner=TEST_USER_ADMIN_LOGIN, |
|
146 | 145 | repo_description='DESC', |
|
147 | 146 | repo_private='false', |
|
148 | 147 | repo_enable_locking='false', |
|
149 | 148 | repo_enable_downloads='false') |
|
150 | 149 | self.app.post( |
|
151 | 150 | route_path('edit_repo', repo_name=repo_name), |
|
152 | 151 | params=params, status=302) |
|
153 | 152 | repo = Repository.get_by_repo_name(new_name) |
|
154 | 153 | assert repo.repo_name == new_name |
|
155 | 154 | |
|
156 | 155 | def test_update_repo_group_via_settings(self, csrf_token, user_util, backend): |
|
157 | 156 | repo = user_util.create_repo(repo_type=backend.alias) |
|
158 | 157 | repo_name = repo.repo_name |
|
159 | 158 | |
|
160 | 159 | repo_group = user_util.create_repo_group() |
|
161 | 160 | repo_group_name = repo_group.group_name |
|
162 | 161 | repo_group_id = repo_group.group_id |
|
163 | 162 | |
|
164 | 163 | new_name = repo_group_name + '/' + repo_name |
|
165 | 164 | params = fixture._get_repo_create_params( |
|
166 | 165 | csrf_token=csrf_token, |
|
167 | 166 | repo_name=repo_name, |
|
168 | 167 | repo_type=backend.alias, |
|
169 | 168 | repo_owner=TEST_USER_ADMIN_LOGIN, |
|
170 | 169 | repo_description='DESC', |
|
171 | 170 | repo_group=repo_group_id, |
|
172 | 171 | repo_private='false', |
|
173 | 172 | repo_enable_locking='false', |
|
174 | 173 | repo_enable_downloads='false') |
|
175 | 174 | self.app.post( |
|
176 | 175 | route_path('edit_repo', repo_name=repo_name), |
|
177 | 176 | params=params, status=302) |
|
178 | 177 | repo = Repository.get_by_repo_name(new_name) |
|
179 | 178 | assert repo.repo_name == new_name |
|
180 | 179 | |
|
181 | 180 | def test_set_private_flag_sets_default_user_permissions_to_none( |
|
182 | 181 | self, autologin_user, backend, csrf_token): |
|
183 | 182 | |
|
184 | 183 | # initially repository perm should be read |
|
185 | 184 | perm = _get_permission_for_user(user='default', repo=backend.repo_name) |
|
186 | 185 | assert len(perm) == 1 |
|
187 | 186 | assert perm[0].permission.permission_name == 'repository.read' |
|
188 | 187 | assert not backend.repo.private |
|
189 | 188 | |
|
190 | 189 | response = self.app.post( |
|
191 | 190 | route_path('edit_repo', repo_name=backend.repo_name), |
|
192 | 191 | params=fixture._get_repo_create_params( |
|
193 | 192 | repo_private='true', |
|
194 | 193 | repo_name=backend.repo_name, |
|
195 | 194 | repo_type=backend.alias, |
|
196 | 195 | repo_owner=TEST_USER_ADMIN_LOGIN, |
|
197 | 196 | csrf_token=csrf_token), status=302) |
|
198 | 197 | |
|
199 | 198 | assert_session_flash( |
|
200 | 199 | response, |
|
201 | 200 | msg='Repository %s updated successfully' % (backend.repo_name)) |
|
202 | 201 | |
|
203 | 202 | repo = Repository.get_by_repo_name(backend.repo_name) |
|
204 | 203 | assert repo.private is True |
|
205 | 204 | |
|
206 | 205 | # now the repo default permission should be None |
|
207 | 206 | perm = _get_permission_for_user(user='default', repo=backend.repo_name) |
|
208 | 207 | assert len(perm) == 1 |
|
209 | 208 | assert perm[0].permission.permission_name == 'repository.none' |
|
210 | 209 | |
|
211 | 210 | response = self.app.post( |
|
212 | 211 | route_path('edit_repo', repo_name=backend.repo_name), |
|
213 | 212 | params=fixture._get_repo_create_params( |
|
214 | 213 | repo_private='false', |
|
215 | 214 | repo_name=backend.repo_name, |
|
216 | 215 | repo_type=backend.alias, |
|
217 | 216 | repo_owner=TEST_USER_ADMIN_LOGIN, |
|
218 | 217 | csrf_token=csrf_token), status=302) |
|
219 | 218 | |
|
220 | 219 | assert_session_flash( |
|
221 | 220 | response, |
|
222 | 221 | msg='Repository %s updated successfully' % (backend.repo_name)) |
|
223 | 222 | assert backend.repo.private is False |
|
224 | 223 | |
|
225 | 224 | # we turn off private now the repo default permission should stay None |
|
226 | 225 | perm = _get_permission_for_user(user='default', repo=backend.repo_name) |
|
227 | 226 | assert len(perm) == 1 |
|
228 | 227 | assert perm[0].permission.permission_name == 'repository.none' |
|
229 | 228 | |
|
230 | 229 | # update this permission back |
|
231 | 230 | perm[0].permission = Permission.get_by_key('repository.read') |
|
232 | 231 | Session().add(perm[0]) |
|
233 | 232 | Session().commit() |
@@ -1,121 +1,104 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import mock |
|
22 | 22 | import pytest |
|
23 | 23 | |
|
24 | 24 | import rhodecode |
|
25 | 25 | from rhodecode.model.db import Repository |
|
26 | 26 | from rhodecode.model.settings import SettingsModel |
|
27 | from rhodecode.tests import url | |
|
28 | 27 | from rhodecode.tests.utils import AssertResponse |
|
29 | 28 | |
|
30 | 29 | |
|
31 | 30 | def route_path(name, params=None, **kwargs): |
|
32 | 31 | import urllib |
|
33 | 32 | |
|
34 | 33 | base_url = { |
|
35 | 34 | 'edit_repo': '/{repo_name}/settings', |
|
35 | 'edit_repo_vcs': '/{repo_name}/settings/vcs', | |
|
36 | 'edit_repo_vcs_update': '/{repo_name}/settings/vcs/update', | |
|
36 | 37 | }[name].format(**kwargs) |
|
37 | 38 | |
|
38 | 39 | if params: |
|
39 | 40 | base_url = '{}?{}'.format(base_url, urllib.urlencode(params)) |
|
40 | 41 | return base_url |
|
41 | 42 | |
|
42 | 43 | |
|
43 | 44 | @pytest.mark.usefixtures('autologin_user', 'app') |
|
44 | 45 | class TestAdminRepoVcsSettings(object): |
|
45 | 46 | |
|
46 | 47 | @pytest.mark.parametrize('setting_name, setting_backends', [ |
|
47 | 48 | ('hg_use_rebase_for_merging', ['hg']), |
|
48 | 49 | ]) |
|
49 | 50 | def test_labs_settings_visible_if_enabled( |
|
50 | 51 | self, setting_name, setting_backends, backend): |
|
51 | 52 | if backend.alias not in setting_backends: |
|
52 | 53 | pytest.skip('Setting not available for backend {}'.format(backend)) |
|
53 | 54 | |
|
54 |
vcs_settings_url = |
|
|
55 |
'repo_vcs |
|
|
55 | vcs_settings_url = route_path( | |
|
56 | 'edit_repo_vcs', repo_name=backend.repo.repo_name) | |
|
56 | 57 | |
|
57 | 58 | with mock.patch.dict( |
|
58 | 59 | rhodecode.CONFIG, {'labs_settings_active': 'true'}): |
|
59 | 60 | response = self.app.get(vcs_settings_url) |
|
60 | 61 | |
|
61 | 62 | assertr = AssertResponse(response) |
|
62 | 63 | assertr.one_element_exists('#rhodecode_{}'.format(setting_name)) |
|
63 | 64 | |
|
64 | 65 | @pytest.mark.parametrize('setting_name, setting_backends', [ |
|
65 | 66 | ('hg_use_rebase_for_merging', ['hg']), |
|
66 | 67 | ]) |
|
67 | def test_labs_settings_not_visible_if_disabled( | |
|
68 | self, setting_name, setting_backends, backend): | |
|
69 | if backend.alias not in setting_backends: | |
|
70 | pytest.skip('Setting not available for backend {}'.format(backend)) | |
|
71 | ||
|
72 | vcs_settings_url = url( | |
|
73 | 'repo_vcs_settings', repo_name=backend.repo.repo_name) | |
|
74 | ||
|
75 | with mock.patch.dict( | |
|
76 | rhodecode.CONFIG, {'labs_settings_active': 'false'}): | |
|
77 | response = self.app.get(vcs_settings_url) | |
|
78 | ||
|
79 | assertr = AssertResponse(response) | |
|
80 | assertr.no_element_exists('#rhodecode_{}'.format(setting_name)) | |
|
81 | ||
|
82 | @pytest.mark.parametrize('setting_name, setting_backends', [ | |
|
83 | ('hg_use_rebase_for_merging', ['hg']), | |
|
84 | ]) | |
|
85 | 68 | def test_update_boolean_settings( |
|
86 | 69 | self, csrf_token, setting_name, setting_backends, backend): |
|
87 | 70 | if backend.alias not in setting_backends: |
|
88 | 71 | pytest.skip('Setting not available for backend {}'.format(backend)) |
|
89 | 72 | |
|
90 | 73 | repo = backend.create_repo() |
|
91 | 74 | repo_name = repo.repo_name |
|
92 | 75 | |
|
93 | 76 | settings_model = SettingsModel(repo=repo) |
|
94 |
vcs_settings_url = |
|
|
95 |
'repo_vcs_ |
|
|
77 | vcs_settings_url = route_path( | |
|
78 | 'edit_repo_vcs_update', repo_name=repo_name) | |
|
96 | 79 | |
|
97 | 80 | self.app.post( |
|
98 | 81 | vcs_settings_url, |
|
99 | 82 | params={ |
|
100 | 83 | 'inherit_global_settings': False, |
|
101 | 84 | 'new_svn_branch': 'dummy-value-for-testing', |
|
102 | 85 | 'new_svn_tag': 'dummy-value-for-testing', |
|
103 | 86 | 'rhodecode_{}'.format(setting_name): 'true', |
|
104 | 87 | 'csrf_token': csrf_token, |
|
105 | 88 | }) |
|
106 | 89 | settings_model = SettingsModel(repo=Repository.get_by_repo_name(repo_name)) |
|
107 | 90 | setting = settings_model.get_setting_by_name(setting_name) |
|
108 | 91 | assert setting.app_settings_value |
|
109 | 92 | |
|
110 | 93 | self.app.post( |
|
111 | 94 | vcs_settings_url, |
|
112 | 95 | params={ |
|
113 | 96 | 'inherit_global_settings': False, |
|
114 | 97 | 'new_svn_branch': 'dummy-value-for-testing', |
|
115 | 98 | 'new_svn_tag': 'dummy-value-for-testing', |
|
116 | 99 | 'rhodecode_{}'.format(setting_name): 'false', |
|
117 | 100 | 'csrf_token': csrf_token, |
|
118 | 101 | }) |
|
119 | 102 | settings_model = SettingsModel(repo=Repository.get_by_repo_name(repo_name)) |
|
120 | 103 | setting = settings_model.get_setting_by_name(setting_name) |
|
121 | 104 | assert not setting.app_settings_value |
@@ -1,557 +1,557 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | |
|
22 | 22 | import logging |
|
23 | 23 | import collections |
|
24 | 24 | |
|
25 | 25 | from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest, HTTPFound |
|
26 | 26 | from pyramid.view import view_config |
|
27 | 27 | from pyramid.renderers import render |
|
28 | 28 | from pyramid.response import Response |
|
29 | 29 | |
|
30 | 30 | from rhodecode.apps._base import RepoAppView |
|
31 | 31 | |
|
32 | 32 | from rhodecode.lib import diffs, codeblocks |
|
33 | 33 | from rhodecode.lib.auth import ( |
|
34 | 34 | LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous, CSRFRequired) |
|
35 | 35 | |
|
36 | 36 | from rhodecode.lib.compat import OrderedDict |
|
37 | 37 | from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError |
|
38 | 38 | import rhodecode.lib.helpers as h |
|
39 |
from rhodecode.lib.utils2 import safe_unicode |
|
|
39 | from rhodecode.lib.utils2 import safe_unicode | |
|
40 | 40 | from rhodecode.lib.vcs.backends.base import EmptyCommit |
|
41 | 41 | from rhodecode.lib.vcs.exceptions import ( |
|
42 | 42 | RepositoryError, CommitDoesNotExistError, NodeDoesNotExistError) |
|
43 | 43 | from rhodecode.model.db import ChangesetComment, ChangesetStatus |
|
44 | 44 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
45 | 45 | from rhodecode.model.comment import CommentsModel |
|
46 | 46 | from rhodecode.model.meta import Session |
|
47 | 47 | |
|
48 | 48 | |
|
49 | 49 | log = logging.getLogger(__name__) |
|
50 | 50 | |
|
51 | 51 | |
|
52 | 52 | def _update_with_GET(params, request): |
|
53 | 53 | for k in ['diff1', 'diff2', 'diff']: |
|
54 | 54 | params[k] += request.GET.getall(k) |
|
55 | 55 | |
|
56 | 56 | |
|
57 | 57 | def get_ignore_ws(fid, request): |
|
58 | 58 | ig_ws_global = request.GET.get('ignorews') |
|
59 | 59 | ig_ws = filter(lambda k: k.startswith('WS'), request.GET.getall(fid)) |
|
60 | 60 | if ig_ws: |
|
61 | 61 | try: |
|
62 | 62 | return int(ig_ws[0].split(':')[-1]) |
|
63 | 63 | except Exception: |
|
64 | 64 | pass |
|
65 | 65 | return ig_ws_global |
|
66 | 66 | |
|
67 | 67 | |
|
68 | 68 | def _ignorews_url(request, fileid=None): |
|
69 | 69 | _ = request.translate |
|
70 | 70 | fileid = str(fileid) if fileid else None |
|
71 | 71 | params = collections.defaultdict(list) |
|
72 | 72 | _update_with_GET(params, request) |
|
73 | 73 | label = _('Show whitespace') |
|
74 | 74 | tooltiplbl = _('Show whitespace for all diffs') |
|
75 | 75 | ig_ws = get_ignore_ws(fileid, request) |
|
76 | 76 | ln_ctx = get_line_ctx(fileid, request) |
|
77 | 77 | |
|
78 | 78 | if ig_ws is None: |
|
79 | 79 | params['ignorews'] += [1] |
|
80 | 80 | label = _('Ignore whitespace') |
|
81 | 81 | tooltiplbl = _('Ignore whitespace for all diffs') |
|
82 | 82 | ctx_key = 'context' |
|
83 | 83 | ctx_val = ln_ctx |
|
84 | 84 | |
|
85 | 85 | # if we have passed in ln_ctx pass it along to our params |
|
86 | 86 | if ln_ctx: |
|
87 | 87 | params[ctx_key] += [ctx_val] |
|
88 | 88 | |
|
89 | 89 | if fileid: |
|
90 | 90 | params['anchor'] = 'a_' + fileid |
|
91 | 91 | return h.link_to(label, request.current_route_path(_query=params), |
|
92 | 92 | title=tooltiplbl, class_='tooltip') |
|
93 | 93 | |
|
94 | 94 | |
|
95 | 95 | def get_line_ctx(fid, request): |
|
96 | 96 | ln_ctx_global = request.GET.get('context') |
|
97 | 97 | if fid: |
|
98 | 98 | ln_ctx = filter(lambda k: k.startswith('C'), request.GET.getall(fid)) |
|
99 | 99 | else: |
|
100 | 100 | _ln_ctx = filter(lambda k: k.startswith('C'), request.GET) |
|
101 | 101 | ln_ctx = request.GET.get(_ln_ctx[0]) if _ln_ctx else ln_ctx_global |
|
102 | 102 | if ln_ctx: |
|
103 | 103 | ln_ctx = [ln_ctx] |
|
104 | 104 | |
|
105 | 105 | if ln_ctx: |
|
106 | 106 | retval = ln_ctx[0].split(':')[-1] |
|
107 | 107 | else: |
|
108 | 108 | retval = ln_ctx_global |
|
109 | 109 | |
|
110 | 110 | try: |
|
111 | 111 | return int(retval) |
|
112 | 112 | except Exception: |
|
113 | 113 | return 3 |
|
114 | 114 | |
|
115 | 115 | |
|
116 | 116 | def _context_url(request, fileid=None): |
|
117 | 117 | """ |
|
118 | 118 | Generates a url for context lines. |
|
119 | 119 | |
|
120 | 120 | :param fileid: |
|
121 | 121 | """ |
|
122 | 122 | |
|
123 | 123 | _ = request.translate |
|
124 | 124 | fileid = str(fileid) if fileid else None |
|
125 | 125 | ig_ws = get_ignore_ws(fileid, request) |
|
126 | 126 | ln_ctx = (get_line_ctx(fileid, request) or 3) * 2 |
|
127 | 127 | |
|
128 | 128 | params = collections.defaultdict(list) |
|
129 | 129 | _update_with_GET(params, request) |
|
130 | 130 | |
|
131 | 131 | if ln_ctx > 0: |
|
132 | 132 | params['context'] += [ln_ctx] |
|
133 | 133 | |
|
134 | 134 | if ig_ws: |
|
135 | 135 | ig_ws_key = 'ignorews' |
|
136 | 136 | ig_ws_val = 1 |
|
137 | 137 | params[ig_ws_key] += [ig_ws_val] |
|
138 | 138 | |
|
139 | 139 | lbl = _('Increase context') |
|
140 | 140 | tooltiplbl = _('Increase context for all diffs') |
|
141 | 141 | |
|
142 | 142 | if fileid: |
|
143 | 143 | params['anchor'] = 'a_' + fileid |
|
144 | 144 | return h.link_to(lbl, request.current_route_path(_query=params), |
|
145 | 145 | title=tooltiplbl, class_='tooltip') |
|
146 | 146 | |
|
147 | 147 | |
|
148 | 148 | class RepoCommitsView(RepoAppView): |
|
149 | 149 | def load_default_context(self): |
|
150 | 150 | c = self._get_local_tmpl_context(include_app_defaults=True) |
|
151 | 151 | |
|
152 | 152 | # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead |
|
153 | 153 | c.repo_info = self.db_repo |
|
154 | 154 | c.rhodecode_repo = self.rhodecode_vcs_repo |
|
155 | 155 | |
|
156 | 156 | self._register_global_c(c) |
|
157 | 157 | return c |
|
158 | 158 | |
|
159 | 159 | def _commit(self, commit_id_range, method): |
|
160 | 160 | _ = self.request.translate |
|
161 | 161 | c = self.load_default_context() |
|
162 | 162 | c.ignorews_url = _ignorews_url |
|
163 | 163 | c.context_url = _context_url |
|
164 | 164 | c.fulldiff = self.request.GET.get('fulldiff') |
|
165 | 165 | |
|
166 | 166 | # fetch global flags of ignore ws or context lines |
|
167 | 167 | context_lcl = get_line_ctx('', self.request) |
|
168 | 168 | ign_whitespace_lcl = get_ignore_ws('', self.request) |
|
169 | 169 | |
|
170 | 170 | # diff_limit will cut off the whole diff if the limit is applied |
|
171 | 171 | # otherwise it will just hide the big files from the front-end |
|
172 | 172 | diff_limit = c.visual.cut_off_limit_diff |
|
173 | 173 | file_limit = c.visual.cut_off_limit_file |
|
174 | 174 | |
|
175 | 175 | # get ranges of commit ids if preset |
|
176 | 176 | commit_range = commit_id_range.split('...')[:2] |
|
177 | 177 | |
|
178 | 178 | try: |
|
179 | 179 | pre_load = ['affected_files', 'author', 'branch', 'date', |
|
180 | 180 | 'message', 'parents'] |
|
181 | 181 | |
|
182 | 182 | if len(commit_range) == 2: |
|
183 | 183 | commits = self.rhodecode_vcs_repo.get_commits( |
|
184 | 184 | start_id=commit_range[0], end_id=commit_range[1], |
|
185 | 185 | pre_load=pre_load) |
|
186 | 186 | commits = list(commits) |
|
187 | 187 | else: |
|
188 | 188 | commits = [self.rhodecode_vcs_repo.get_commit( |
|
189 | 189 | commit_id=commit_id_range, pre_load=pre_load)] |
|
190 | 190 | |
|
191 | 191 | c.commit_ranges = commits |
|
192 | 192 | if not c.commit_ranges: |
|
193 | 193 | raise RepositoryError( |
|
194 | 194 | 'The commit range returned an empty result') |
|
195 | 195 | except CommitDoesNotExistError: |
|
196 | 196 | msg = _('No such commit exists for this repository') |
|
197 | 197 | h.flash(msg, category='error') |
|
198 | 198 | raise HTTPNotFound() |
|
199 | 199 | except Exception: |
|
200 | 200 | log.exception("General failure") |
|
201 | 201 | raise HTTPNotFound() |
|
202 | 202 | |
|
203 | 203 | c.changes = OrderedDict() |
|
204 | 204 | c.lines_added = 0 |
|
205 | 205 | c.lines_deleted = 0 |
|
206 | 206 | |
|
207 | 207 | # auto collapse if we have more than limit |
|
208 | 208 | collapse_limit = diffs.DiffProcessor._collapse_commits_over |
|
209 | 209 | c.collapse_all_commits = len(c.commit_ranges) > collapse_limit |
|
210 | 210 | |
|
211 | 211 | c.commit_statuses = ChangesetStatus.STATUSES |
|
212 | 212 | c.inline_comments = [] |
|
213 | 213 | c.files = [] |
|
214 | 214 | |
|
215 | 215 | c.statuses = [] |
|
216 | 216 | c.comments = [] |
|
217 | 217 | c.unresolved_comments = [] |
|
218 | 218 | if len(c.commit_ranges) == 1: |
|
219 | 219 | commit = c.commit_ranges[0] |
|
220 | 220 | c.comments = CommentsModel().get_comments( |
|
221 | 221 | self.db_repo.repo_id, |
|
222 | 222 | revision=commit.raw_id) |
|
223 | 223 | c.statuses.append(ChangesetStatusModel().get_status( |
|
224 | 224 | self.db_repo.repo_id, commit.raw_id)) |
|
225 | 225 | # comments from PR |
|
226 | 226 | statuses = ChangesetStatusModel().get_statuses( |
|
227 | 227 | self.db_repo.repo_id, commit.raw_id, |
|
228 | 228 | with_revisions=True) |
|
229 | 229 | prs = set(st.pull_request for st in statuses |
|
230 | 230 | if st.pull_request is not None) |
|
231 | 231 | # from associated statuses, check the pull requests, and |
|
232 | 232 | # show comments from them |
|
233 | 233 | for pr in prs: |
|
234 | 234 | c.comments.extend(pr.comments) |
|
235 | 235 | |
|
236 | 236 | c.unresolved_comments = CommentsModel()\ |
|
237 | 237 | .get_commit_unresolved_todos(commit.raw_id) |
|
238 | 238 | |
|
239 | 239 | diff = None |
|
240 | 240 | # Iterate over ranges (default commit view is always one commit) |
|
241 | 241 | for commit in c.commit_ranges: |
|
242 | 242 | c.changes[commit.raw_id] = [] |
|
243 | 243 | |
|
244 | 244 | commit2 = commit |
|
245 | 245 | commit1 = commit.parents[0] if commit.parents else EmptyCommit() |
|
246 | 246 | |
|
247 | 247 | _diff = self.rhodecode_vcs_repo.get_diff( |
|
248 | 248 | commit1, commit2, |
|
249 | 249 | ignore_whitespace=ign_whitespace_lcl, context=context_lcl) |
|
250 | 250 | diff_processor = diffs.DiffProcessor( |
|
251 | 251 | _diff, format='newdiff', diff_limit=diff_limit, |
|
252 | 252 | file_limit=file_limit, show_full_diff=c.fulldiff) |
|
253 | 253 | |
|
254 | 254 | commit_changes = OrderedDict() |
|
255 | 255 | if method == 'show': |
|
256 | 256 | _parsed = diff_processor.prepare() |
|
257 | 257 | c.limited_diff = isinstance(_parsed, diffs.LimitedDiffContainer) |
|
258 | 258 | |
|
259 | 259 | _parsed = diff_processor.prepare() |
|
260 | 260 | |
|
261 | 261 | def _node_getter(commit): |
|
262 | 262 | def get_node(fname): |
|
263 | 263 | try: |
|
264 | 264 | return commit.get_node(fname) |
|
265 | 265 | except NodeDoesNotExistError: |
|
266 | 266 | return None |
|
267 | 267 | return get_node |
|
268 | 268 | |
|
269 | 269 | inline_comments = CommentsModel().get_inline_comments( |
|
270 | 270 | self.db_repo.repo_id, revision=commit.raw_id) |
|
271 | 271 | c.inline_cnt = CommentsModel().get_inline_comments_count( |
|
272 | 272 | inline_comments) |
|
273 | 273 | |
|
274 | 274 | diffset = codeblocks.DiffSet( |
|
275 | 275 | repo_name=self.db_repo_name, |
|
276 | 276 | source_node_getter=_node_getter(commit1), |
|
277 | 277 | target_node_getter=_node_getter(commit2), |
|
278 | 278 | comments=inline_comments) |
|
279 | 279 | diffset = diffset.render_patchset( |
|
280 | 280 | _parsed, commit1.raw_id, commit2.raw_id) |
|
281 | 281 | |
|
282 | 282 | c.changes[commit.raw_id] = diffset |
|
283 | 283 | else: |
|
284 | 284 | # downloads/raw we only need RAW diff nothing else |
|
285 | 285 | diff = diff_processor.as_raw() |
|
286 | 286 | c.changes[commit.raw_id] = [None, None, None, None, diff, None, None] |
|
287 | 287 | |
|
288 | 288 | # sort comments by how they were generated |
|
289 | 289 | c.comments = sorted(c.comments, key=lambda x: x.comment_id) |
|
290 | 290 | |
|
291 | 291 | if len(c.commit_ranges) == 1: |
|
292 | 292 | c.commit = c.commit_ranges[0] |
|
293 | 293 | c.parent_tmpl = ''.join( |
|
294 | 294 | '# Parent %s\n' % x.raw_id for x in c.commit.parents) |
|
295 | 295 | |
|
296 | 296 | if method == 'download': |
|
297 | 297 | response = Response(diff) |
|
298 | 298 | response.content_type = 'text/plain' |
|
299 | 299 | response.content_disposition = ( |
|
300 | 300 | 'attachment; filename=%s.diff' % commit_id_range[:12]) |
|
301 | 301 | return response |
|
302 | 302 | elif method == 'patch': |
|
303 | 303 | c.diff = safe_unicode(diff) |
|
304 | 304 | patch = render( |
|
305 | 305 | 'rhodecode:templates/changeset/patch_changeset.mako', |
|
306 | 306 | self._get_template_context(c), self.request) |
|
307 | 307 | response = Response(patch) |
|
308 | 308 | response.content_type = 'text/plain' |
|
309 | 309 | return response |
|
310 | 310 | elif method == 'raw': |
|
311 | 311 | response = Response(diff) |
|
312 | 312 | response.content_type = 'text/plain' |
|
313 | 313 | return response |
|
314 | 314 | elif method == 'show': |
|
315 | 315 | if len(c.commit_ranges) == 1: |
|
316 | 316 | html = render( |
|
317 | 317 | 'rhodecode:templates/changeset/changeset.mako', |
|
318 | 318 | self._get_template_context(c), self.request) |
|
319 | 319 | return Response(html) |
|
320 | 320 | else: |
|
321 | 321 | c.ancestor = None |
|
322 | 322 | c.target_repo = self.db_repo |
|
323 | 323 | html = render( |
|
324 | 324 | 'rhodecode:templates/changeset/changeset_range.mako', |
|
325 | 325 | self._get_template_context(c), self.request) |
|
326 | 326 | return Response(html) |
|
327 | 327 | |
|
328 | 328 | raise HTTPBadRequest() |
|
329 | 329 | |
|
330 | 330 | @LoginRequired() |
|
331 | 331 | @HasRepoPermissionAnyDecorator( |
|
332 | 332 | 'repository.read', 'repository.write', 'repository.admin') |
|
333 | 333 | @view_config( |
|
334 | 334 | route_name='repo_commit', request_method='GET', |
|
335 | 335 | renderer=None) |
|
336 | 336 | def repo_commit_show(self): |
|
337 | 337 | commit_id = self.request.matchdict['commit_id'] |
|
338 | 338 | return self._commit(commit_id, method='show') |
|
339 | 339 | |
|
340 | 340 | @LoginRequired() |
|
341 | 341 | @HasRepoPermissionAnyDecorator( |
|
342 | 342 | 'repository.read', 'repository.write', 'repository.admin') |
|
343 | 343 | @view_config( |
|
344 | 344 | route_name='repo_commit_raw', request_method='GET', |
|
345 | 345 | renderer=None) |
|
346 | 346 | @view_config( |
|
347 | 347 | route_name='repo_commit_raw_deprecated', request_method='GET', |
|
348 | 348 | renderer=None) |
|
349 | 349 | def repo_commit_raw(self): |
|
350 | 350 | commit_id = self.request.matchdict['commit_id'] |
|
351 | 351 | return self._commit(commit_id, method='raw') |
|
352 | 352 | |
|
353 | 353 | @LoginRequired() |
|
354 | 354 | @HasRepoPermissionAnyDecorator( |
|
355 | 355 | 'repository.read', 'repository.write', 'repository.admin') |
|
356 | 356 | @view_config( |
|
357 | 357 | route_name='repo_commit_patch', request_method='GET', |
|
358 | 358 | renderer=None) |
|
359 | 359 | def repo_commit_patch(self): |
|
360 | 360 | commit_id = self.request.matchdict['commit_id'] |
|
361 | 361 | return self._commit(commit_id, method='patch') |
|
362 | 362 | |
|
363 | 363 | @LoginRequired() |
|
364 | 364 | @HasRepoPermissionAnyDecorator( |
|
365 | 365 | 'repository.read', 'repository.write', 'repository.admin') |
|
366 | 366 | @view_config( |
|
367 | 367 | route_name='repo_commit_download', request_method='GET', |
|
368 | 368 | renderer=None) |
|
369 | 369 | def repo_commit_download(self): |
|
370 | 370 | commit_id = self.request.matchdict['commit_id'] |
|
371 | 371 | return self._commit(commit_id, method='download') |
|
372 | 372 | |
|
373 | 373 | @LoginRequired() |
|
374 | 374 | @NotAnonymous() |
|
375 | 375 | @HasRepoPermissionAnyDecorator( |
|
376 | 376 | 'repository.read', 'repository.write', 'repository.admin') |
|
377 | 377 | @CSRFRequired() |
|
378 | 378 | @view_config( |
|
379 | 379 | route_name='repo_commit_comment_create', request_method='POST', |
|
380 | 380 | renderer='json_ext') |
|
381 | 381 | def repo_commit_comment_create(self): |
|
382 | 382 | _ = self.request.translate |
|
383 | 383 | commit_id = self.request.matchdict['commit_id'] |
|
384 | 384 | |
|
385 | 385 | c = self.load_default_context() |
|
386 | 386 | status = self.request.POST.get('changeset_status', None) |
|
387 | 387 | text = self.request.POST.get('text') |
|
388 | 388 | comment_type = self.request.POST.get('comment_type') |
|
389 | 389 | resolves_comment_id = self.request.POST.get('resolves_comment_id', None) |
|
390 | 390 | |
|
391 | 391 | if status: |
|
392 | 392 | text = text or (_('Status change %(transition_icon)s %(status)s') |
|
393 | 393 | % {'transition_icon': '>', |
|
394 | 394 | 'status': ChangesetStatus.get_status_lbl(status)}) |
|
395 | 395 | |
|
396 | 396 | multi_commit_ids = [] |
|
397 | 397 | for _commit_id in self.request.POST.get('commit_ids', '').split(','): |
|
398 | 398 | if _commit_id not in ['', None, EmptyCommit.raw_id]: |
|
399 | 399 | if _commit_id not in multi_commit_ids: |
|
400 | 400 | multi_commit_ids.append(_commit_id) |
|
401 | 401 | |
|
402 | 402 | commit_ids = multi_commit_ids or [commit_id] |
|
403 | 403 | |
|
404 | 404 | comment = None |
|
405 | 405 | for current_id in filter(None, commit_ids): |
|
406 | 406 | comment = CommentsModel().create( |
|
407 | 407 | text=text, |
|
408 | 408 | repo=self.db_repo.repo_id, |
|
409 | 409 | user=self._rhodecode_db_user.user_id, |
|
410 | 410 | commit_id=current_id, |
|
411 | 411 | f_path=self.request.POST.get('f_path'), |
|
412 | 412 | line_no=self.request.POST.get('line'), |
|
413 | 413 | status_change=(ChangesetStatus.get_status_lbl(status) |
|
414 | 414 | if status else None), |
|
415 | 415 | status_change_type=status, |
|
416 | 416 | comment_type=comment_type, |
|
417 | 417 | resolves_comment_id=resolves_comment_id |
|
418 | 418 | ) |
|
419 | 419 | |
|
420 | 420 | # get status if set ! |
|
421 | 421 | if status: |
|
422 | 422 | # if latest status was from pull request and it's closed |
|
423 | 423 | # disallow changing status ! |
|
424 | 424 | # dont_allow_on_closed_pull_request = True ! |
|
425 | 425 | |
|
426 | 426 | try: |
|
427 | 427 | ChangesetStatusModel().set_status( |
|
428 | 428 | self.db_repo.repo_id, |
|
429 | 429 | status, |
|
430 | 430 | self._rhodecode_db_user.user_id, |
|
431 | 431 | comment, |
|
432 | 432 | revision=current_id, |
|
433 | 433 | dont_allow_on_closed_pull_request=True |
|
434 | 434 | ) |
|
435 | 435 | except StatusChangeOnClosedPullRequestError: |
|
436 | 436 | msg = _('Changing the status of a commit associated with ' |
|
437 | 437 | 'a closed pull request is not allowed') |
|
438 | 438 | log.exception(msg) |
|
439 | 439 | h.flash(msg, category='warning') |
|
440 | 440 | raise HTTPFound(h.route_path( |
|
441 | 441 | 'repo_commit', repo_name=self.db_repo_name, |
|
442 | 442 | commit_id=current_id)) |
|
443 | 443 | |
|
444 | 444 | # finalize, commit and redirect |
|
445 | 445 | Session().commit() |
|
446 | 446 | |
|
447 | 447 | data = { |
|
448 | 448 | 'target_id': h.safeid(h.safe_unicode( |
|
449 | 449 | self.request.POST.get('f_path'))), |
|
450 | 450 | } |
|
451 | 451 | if comment: |
|
452 | 452 | c.co = comment |
|
453 | 453 | rendered_comment = render( |
|
454 | 454 | 'rhodecode:templates/changeset/changeset_comment_block.mako', |
|
455 | 455 | self._get_template_context(c), self.request) |
|
456 | 456 | |
|
457 | 457 | data.update(comment.get_dict()) |
|
458 | 458 | data.update({'rendered_text': rendered_comment}) |
|
459 | 459 | |
|
460 | 460 | return data |
|
461 | 461 | |
|
462 | 462 | @LoginRequired() |
|
463 | 463 | @NotAnonymous() |
|
464 | 464 | @HasRepoPermissionAnyDecorator( |
|
465 | 465 | 'repository.read', 'repository.write', 'repository.admin') |
|
466 | 466 | @CSRFRequired() |
|
467 | 467 | @view_config( |
|
468 | 468 | route_name='repo_commit_comment_preview', request_method='POST', |
|
469 | 469 | renderer='string', xhr=True) |
|
470 | 470 | def repo_commit_comment_preview(self): |
|
471 | 471 | # Technically a CSRF token is not needed as no state changes with this |
|
472 | 472 | # call. However, as this is a POST is better to have it, so automated |
|
473 | 473 | # tools don't flag it as potential CSRF. |
|
474 | 474 | # Post is required because the payload could be bigger than the maximum |
|
475 | 475 | # allowed by GET. |
|
476 | 476 | |
|
477 | 477 | text = self.request.POST.get('text') |
|
478 | 478 | renderer = self.request.POST.get('renderer') or 'rst' |
|
479 | 479 | if text: |
|
480 | 480 | return h.render(text, renderer=renderer, mentions=True) |
|
481 | 481 | return '' |
|
482 | 482 | |
|
483 | 483 | @LoginRequired() |
|
484 | 484 | @NotAnonymous() |
|
485 | 485 | @HasRepoPermissionAnyDecorator( |
|
486 | 486 | 'repository.read', 'repository.write', 'repository.admin') |
|
487 | 487 | @CSRFRequired() |
|
488 | 488 | @view_config( |
|
489 | 489 | route_name='repo_commit_comment_delete', request_method='POST', |
|
490 | 490 | renderer='json_ext') |
|
491 | 491 | def repo_commit_comment_delete(self): |
|
492 | 492 | commit_id = self.request.matchdict['commit_id'] |
|
493 | 493 | comment_id = self.request.matchdict['comment_id'] |
|
494 | 494 | |
|
495 | 495 | comment = ChangesetComment.get_or_404(comment_id) |
|
496 | 496 | if not comment: |
|
497 | 497 | log.debug('Comment with id:%s not found, skipping', comment_id) |
|
498 | 498 | # comment already deleted in another call probably |
|
499 | 499 | return True |
|
500 | 500 | |
|
501 | 501 | is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name) |
|
502 | 502 | super_admin = h.HasPermissionAny('hg.admin')() |
|
503 | 503 | comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id) |
|
504 | 504 | is_repo_comment = comment.repo.repo_name == self.db_repo_name |
|
505 | 505 | comment_repo_admin = is_repo_admin and is_repo_comment |
|
506 | 506 | |
|
507 | 507 | if super_admin or comment_owner or comment_repo_admin: |
|
508 | 508 | CommentsModel().delete(comment=comment, user=self._rhodecode_db_user) |
|
509 | 509 | Session().commit() |
|
510 | 510 | return True |
|
511 | 511 | else: |
|
512 | 512 | log.warning('No permissions for user %s to delete comment_id: %s', |
|
513 | 513 | self._rhodecode_db_user, comment_id) |
|
514 | 514 | raise HTTPNotFound() |
|
515 | 515 | |
|
516 | 516 | @LoginRequired() |
|
517 | 517 | @HasRepoPermissionAnyDecorator( |
|
518 | 518 | 'repository.read', 'repository.write', 'repository.admin') |
|
519 | 519 | @view_config( |
|
520 | 520 | route_name='repo_commit_data', request_method='GET', |
|
521 | 521 | renderer='json_ext', xhr=True) |
|
522 | 522 | def repo_commit_data(self): |
|
523 | 523 | commit_id = self.request.matchdict['commit_id'] |
|
524 | 524 | self.load_default_context() |
|
525 | 525 | |
|
526 | 526 | try: |
|
527 | 527 | return self.rhodecode_vcs_repo.get_commit(commit_id=commit_id) |
|
528 | 528 | except CommitDoesNotExistError as e: |
|
529 | 529 | return EmptyCommit(message=str(e)) |
|
530 | 530 | |
|
531 | 531 | @LoginRequired() |
|
532 | 532 | @HasRepoPermissionAnyDecorator( |
|
533 | 533 | 'repository.read', 'repository.write', 'repository.admin') |
|
534 | 534 | @view_config( |
|
535 | 535 | route_name='repo_commit_children', request_method='GET', |
|
536 | 536 | renderer='json_ext', xhr=True) |
|
537 | 537 | def repo_commit_children(self): |
|
538 | 538 | commit_id = self.request.matchdict['commit_id'] |
|
539 | 539 | self.load_default_context() |
|
540 | 540 | |
|
541 | 541 | commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id) |
|
542 | 542 | result = {"results": commit.children} |
|
543 | 543 | return result |
|
544 | 544 | |
|
545 | 545 | @LoginRequired() |
|
546 | 546 | @HasRepoPermissionAnyDecorator( |
|
547 | 547 | 'repository.read', 'repository.write', 'repository.admin') |
|
548 | 548 | @view_config( |
|
549 | 549 | route_name='repo_commit_parents', request_method='GET', |
|
550 | 550 | renderer='json_ext') |
|
551 | 551 | def repo_commit_parents(self): |
|
552 | 552 | commit_id = self.request.matchdict['commit_id'] |
|
553 | 553 | self.load_default_context() |
|
554 | 554 | |
|
555 | 555 | commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id) |
|
556 | 556 | result = {"results": commit.parents} |
|
557 | 557 | return result |
@@ -1,203 +1,202 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2017-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import pytz |
|
22 | 22 | import logging |
|
23 | 23 | |
|
24 | 24 | from beaker.cache import cache_region |
|
25 | 25 | from pyramid.view import view_config |
|
26 | 26 | from pyramid.response import Response |
|
27 | 27 | from webhelpers.feedgenerator import Rss201rev2Feed, Atom1Feed |
|
28 | 28 | |
|
29 | 29 | from rhodecode.apps._base import RepoAppView |
|
30 | 30 | from rhodecode.lib import audit_logger |
|
31 | 31 | from rhodecode.lib import helpers as h |
|
32 | from rhodecode.lib.auth import (LoginRequired, HasRepoPermissionAnyDecorator, | |
|
33 | NotAnonymous, CSRFRequired) | |
|
32 | from rhodecode.lib.auth import ( | |
|
33 | LoginRequired, HasRepoPermissionAnyDecorator) | |
|
34 | 34 | from rhodecode.lib.diffs import DiffProcessor, LimitedDiffContainer |
|
35 | from rhodecode.lib.ext_json import json | |
|
36 | 35 | from rhodecode.lib.utils2 import str2bool, safe_int |
|
37 | 36 | from rhodecode.model.db import UserApiKeys, CacheKey |
|
38 | 37 | |
|
39 | 38 | log = logging.getLogger(__name__) |
|
40 | 39 | |
|
41 | 40 | |
|
42 | 41 | class RepoFeedView(RepoAppView): |
|
43 | 42 | def load_default_context(self): |
|
44 | 43 | c = self._get_local_tmpl_context() |
|
45 | 44 | |
|
46 | 45 | # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead |
|
47 | 46 | c.repo_info = self.db_repo |
|
48 | 47 | |
|
49 | 48 | self._register_global_c(c) |
|
50 | 49 | self._load_defaults() |
|
51 | 50 | return c |
|
52 | 51 | |
|
53 | 52 | def _get_config(self): |
|
54 | 53 | import rhodecode |
|
55 | 54 | config = rhodecode.CONFIG |
|
56 | 55 | |
|
57 | 56 | return { |
|
58 | 57 | 'language': 'en-us', |
|
59 | 58 | 'feed_ttl': '5', # TTL of feed, |
|
60 | 59 | 'feed_include_diff': |
|
61 | 60 | str2bool(config.get('rss_include_diff', False)), |
|
62 | 61 | 'feed_items_per_page': |
|
63 | 62 | safe_int(config.get('rss_items_per_page', 20)), |
|
64 | 63 | 'feed_diff_limit': |
|
65 | 64 | # we need to protect from parsing huge diffs here other way |
|
66 | 65 | # we can kill the server |
|
67 | 66 | safe_int(config.get('rss_cut_off_limit', 32 * 1024)), |
|
68 | 67 | } |
|
69 | 68 | |
|
70 | 69 | def _load_defaults(self): |
|
71 | 70 | _ = self.request.translate |
|
72 | 71 | config = self._get_config() |
|
73 | 72 | # common values for feeds |
|
74 | 73 | self.description = _('Changes on %s repository') |
|
75 | 74 | self.title = self.title = _('%s %s feed') % (self.db_repo_name, '%s') |
|
76 | 75 | self.language = config["language"] |
|
77 | 76 | self.ttl = config["feed_ttl"] |
|
78 | 77 | self.feed_include_diff = config['feed_include_diff'] |
|
79 | 78 | self.feed_diff_limit = config['feed_diff_limit'] |
|
80 | 79 | self.feed_items_per_page = config['feed_items_per_page'] |
|
81 | 80 | |
|
82 | 81 | def _changes(self, commit): |
|
83 | 82 | diff_processor = DiffProcessor( |
|
84 | 83 | commit.diff(), diff_limit=self.feed_diff_limit) |
|
85 | 84 | _parsed = diff_processor.prepare(inline_diff=False) |
|
86 | 85 | limited_diff = isinstance(_parsed, LimitedDiffContainer) |
|
87 | 86 | |
|
88 | 87 | return _parsed, limited_diff |
|
89 | 88 | |
|
90 | 89 | def _get_title(self, commit): |
|
91 | 90 | return h.shorter(commit.message, 160) |
|
92 | 91 | |
|
93 | 92 | def _get_description(self, commit): |
|
94 | 93 | _renderer = self.request.get_partial_renderer( |
|
95 | 94 | 'feed/atom_feed_entry.mako') |
|
96 | 95 | parsed_diff, limited_diff = self._changes(commit) |
|
97 | 96 | return _renderer( |
|
98 | 97 | 'body', |
|
99 | 98 | commit=commit, |
|
100 | 99 | parsed_diff=parsed_diff, |
|
101 | 100 | limited_diff=limited_diff, |
|
102 | 101 | feed_include_diff=self.feed_include_diff, |
|
103 | 102 | ) |
|
104 | 103 | |
|
105 | 104 | def _set_timezone(self, date, tzinfo=pytz.utc): |
|
106 | 105 | if not getattr(date, "tzinfo", None): |
|
107 | 106 | date.replace(tzinfo=tzinfo) |
|
108 | 107 | return date |
|
109 | 108 | |
|
110 | 109 | def _get_commits(self): |
|
111 | 110 | return list(self.rhodecode_vcs_repo[-self.feed_items_per_page:]) |
|
112 | 111 | |
|
113 | 112 | @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED]) |
|
114 | 113 | @HasRepoPermissionAnyDecorator( |
|
115 | 114 | 'repository.read', 'repository.write', 'repository.admin') |
|
116 | 115 | @view_config( |
|
117 | 116 | route_name='atom_feed_home', request_method='GET', |
|
118 | 117 | renderer=None) |
|
119 | 118 | def atom(self): |
|
120 | 119 | """ |
|
121 | 120 | Produce an atom-1.0 feed via feedgenerator module |
|
122 | 121 | """ |
|
123 | 122 | self.load_default_context() |
|
124 | 123 | |
|
125 | 124 | @cache_region('long_term') |
|
126 | 125 | def _generate_feed(cache_key): |
|
127 | 126 | feed = Atom1Feed( |
|
128 | 127 | title=self.title % self.db_repo_name, |
|
129 | 128 | link=h.route_url('repo_summary', repo_name=self.db_repo_name), |
|
130 | 129 | description=self.description % self.db_repo_name, |
|
131 | 130 | language=self.language, |
|
132 | 131 | ttl=self.ttl |
|
133 | 132 | ) |
|
134 | 133 | |
|
135 | 134 | for commit in reversed(self._get_commits()): |
|
136 | 135 | date = self._set_timezone(commit.date) |
|
137 | 136 | feed.add_item( |
|
138 | 137 | title=self._get_title(commit), |
|
139 | 138 | author_name=commit.author, |
|
140 | 139 | description=self._get_description(commit), |
|
141 | 140 | link=h.route_url( |
|
142 | 141 | 'repo_commit', repo_name=self.db_repo_name, |
|
143 | 142 | commit_id=commit.raw_id), |
|
144 | 143 | pubdate=date,) |
|
145 | 144 | |
|
146 | 145 | return feed.mime_type, feed.writeString('utf-8') |
|
147 | 146 | |
|
148 | 147 | invalidator_context = CacheKey.repo_context_cache( |
|
149 | 148 | _generate_feed, self.db_repo_name, CacheKey.CACHE_TYPE_ATOM) |
|
150 | 149 | |
|
151 | 150 | with invalidator_context as context: |
|
152 | 151 | context.invalidate() |
|
153 | 152 | mime_type, feed = context.compute() |
|
154 | 153 | |
|
155 | 154 | response = Response(feed) |
|
156 | 155 | response.content_type = mime_type |
|
157 | 156 | return response |
|
158 | 157 | |
|
159 | 158 | @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED]) |
|
160 | 159 | @HasRepoPermissionAnyDecorator( |
|
161 | 160 | 'repository.read', 'repository.write', 'repository.admin') |
|
162 | 161 | @view_config( |
|
163 | 162 | route_name='rss_feed_home', request_method='GET', |
|
164 | 163 | renderer=None) |
|
165 | 164 | def rss(self): |
|
166 | 165 | """ |
|
167 | 166 | Produce an rss2 feed via feedgenerator module |
|
168 | 167 | """ |
|
169 | 168 | self.load_default_context() |
|
170 | 169 | |
|
171 | 170 | @cache_region('long_term') |
|
172 | 171 | def _generate_feed(cache_key): |
|
173 | 172 | feed = Rss201rev2Feed( |
|
174 | 173 | title=self.title % self.db_repo_name, |
|
175 | 174 | link=h.route_url('repo_summary', repo_name=self.db_repo_name), |
|
176 | 175 | description=self.description % self.db_repo_name, |
|
177 | 176 | language=self.language, |
|
178 | 177 | ttl=self.ttl |
|
179 | 178 | ) |
|
180 | 179 | |
|
181 | 180 | for commit in reversed(self._get_commits()): |
|
182 | 181 | date = self._set_timezone(commit.date) |
|
183 | 182 | feed.add_item( |
|
184 | 183 | title=self._get_title(commit), |
|
185 | 184 | author_name=commit.author, |
|
186 | 185 | description=self._get_description(commit), |
|
187 | 186 | link=h.route_url( |
|
188 | 187 | 'repo_commit', repo_name=self.db_repo_name, |
|
189 | 188 | commit_id=commit.raw_id), |
|
190 | 189 | pubdate=date,) |
|
191 | 190 | |
|
192 | 191 | return feed.mime_type, feed.writeString('utf-8') |
|
193 | 192 | |
|
194 | 193 | invalidator_context = CacheKey.repo_context_cache( |
|
195 | 194 | _generate_feed, self.db_repo_name, CacheKey.CACHE_TYPE_RSS) |
|
196 | 195 | |
|
197 | 196 | with invalidator_context as context: |
|
198 | 197 | context.invalidate() |
|
199 | 198 | mime_type, feed = context.compute() |
|
200 | 199 | |
|
201 | 200 | response = Response(feed) |
|
202 | 201 | response.content_type = mime_type |
|
203 | 202 | return response |
@@ -1,68 +1,67 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2011-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import logging |
|
22 | 22 | |
|
23 | 23 | from pyramid.view import view_config |
|
24 | 24 | |
|
25 | 25 | from rhodecode.apps._base import RepoAppView |
|
26 |
from rhodecode.lib.auth import |
|
|
27 | NotAnonymous) | |
|
26 | from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator | |
|
28 | 27 | from rhodecode.lib import repo_maintenance |
|
29 | 28 | |
|
30 | 29 | log = logging.getLogger(__name__) |
|
31 | 30 | |
|
32 | 31 | |
|
33 | 32 | class RepoMaintenanceView(RepoAppView): |
|
34 | 33 | def load_default_context(self): |
|
35 | 34 | c = self._get_local_tmpl_context() |
|
36 | 35 | |
|
37 | 36 | # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead |
|
38 | 37 | c.repo_info = self.db_repo |
|
39 | 38 | |
|
40 | 39 | self._register_global_c(c) |
|
41 | 40 | return c |
|
42 | 41 | |
|
43 | 42 | @LoginRequired() |
|
44 | 43 | @HasRepoPermissionAnyDecorator('repository.admin') |
|
45 | 44 | @view_config( |
|
46 | route_name='repo_maintenance', request_method='GET', | |
|
45 | route_name='edit_repo_maintenance', request_method='GET', | |
|
47 | 46 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') |
|
48 | 47 | def repo_maintenance(self): |
|
49 | 48 | c = self.load_default_context() |
|
50 | 49 | c.active = 'maintenance' |
|
51 | 50 | maintenance = repo_maintenance.RepoMaintenance() |
|
52 | 51 | c.executable_tasks = maintenance.get_tasks_for_repo(self.db_repo) |
|
53 | 52 | return self._get_template_context(c) |
|
54 | 53 | |
|
55 | 54 | @LoginRequired() |
|
56 | 55 | @HasRepoPermissionAnyDecorator('repository.admin') |
|
57 | 56 | @view_config( |
|
58 | route_name='repo_maintenance_execute', request_method='GET', | |
|
57 | route_name='edit_repo_maintenance_execute', request_method='GET', | |
|
59 | 58 | renderer='json', xhr=True) |
|
60 | 59 | def repo_maintenance_execute(self): |
|
61 | 60 | c = self.load_default_context() |
|
62 | 61 | c.active = 'maintenance' |
|
63 | 62 | _ = self.request.translate |
|
64 | 63 | |
|
65 | 64 | maintenance = repo_maintenance.RepoMaintenance() |
|
66 | 65 | executed_types = maintenance.execute(self.db_repo) |
|
67 | 66 | |
|
68 | 67 | return executed_types |
@@ -1,98 +1,92 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2011-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import logging |
|
22 | 22 | |
|
23 | import deform | |
|
24 | 23 | from pyramid.httpexceptions import HTTPFound |
|
25 | 24 | from pyramid.view import view_config |
|
26 | 25 | |
|
27 | 26 | from rhodecode.apps._base import RepoAppView |
|
28 | from rhodecode.forms import RcForm | |
|
29 | 27 | from rhodecode.lib import helpers as h |
|
30 | 28 | from rhodecode.lib import audit_logger |
|
31 | 29 | from rhodecode.lib.auth import ( |
|
32 | LoginRequired, HasRepoPermissionAnyDecorator, | |
|
33 | HasRepoPermissionAllDecorator, CSRFRequired) | |
|
34 | from rhodecode.model.db import RepositoryField, RepoGroup | |
|
30 | LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired) | |
|
35 | 31 | from rhodecode.model.forms import RepoPermsForm |
|
36 | 32 | from rhodecode.model.meta import Session |
|
37 | 33 | from rhodecode.model.repo import RepoModel |
|
38 | from rhodecode.model.scm import RepoGroupList, ScmModel | |
|
39 | from rhodecode.model.validation_schema.schemas import repo_schema | |
|
40 | 34 | |
|
41 | 35 | log = logging.getLogger(__name__) |
|
42 | 36 | |
|
43 | 37 | |
|
44 | 38 | class RepoSettingsPermissionsView(RepoAppView): |
|
45 | 39 | |
|
46 | 40 | def load_default_context(self): |
|
47 | 41 | c = self._get_local_tmpl_context() |
|
48 | 42 | |
|
49 | 43 | # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead |
|
50 | 44 | c.repo_info = self.db_repo |
|
51 | 45 | |
|
52 | 46 | self._register_global_c(c) |
|
53 | 47 | return c |
|
54 | 48 | |
|
55 | 49 | @LoginRequired() |
|
56 | 50 | @HasRepoPermissionAnyDecorator('repository.admin') |
|
57 | 51 | @view_config( |
|
58 | 52 | route_name='edit_repo_perms', request_method='GET', |
|
59 | 53 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') |
|
60 | 54 | def edit_permissions(self): |
|
61 | 55 | c = self.load_default_context() |
|
62 | 56 | c.active = 'permissions' |
|
63 | 57 | return self._get_template_context(c) |
|
64 | 58 | |
|
65 | 59 | @LoginRequired() |
|
66 |
@HasRepoPermissionA |
|
|
60 | @HasRepoPermissionAnyDecorator('repository.admin') | |
|
67 | 61 | @CSRFRequired() |
|
68 | 62 | @view_config( |
|
69 | 63 | route_name='edit_repo_perms', request_method='POST', |
|
70 | 64 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') |
|
71 | 65 | def edit_permissions_update(self): |
|
72 | 66 | _ = self.request.translate |
|
73 | 67 | c = self.load_default_context() |
|
74 | 68 | c.active = 'permissions' |
|
75 | 69 | data = self.request.POST |
|
76 | 70 | # store private flag outside of HTML to verify if we can modify |
|
77 |
# default user permissions, prevents submi |
|
|
71 | # default user permissions, prevents submission of FAKE post data | |
|
78 | 72 | # into the form for private repos |
|
79 | 73 | data['repo_private'] = self.db_repo.private |
|
80 | 74 | form = RepoPermsForm()().to_python(data) |
|
81 | 75 | changes = RepoModel().update_permissions( |
|
82 | 76 | self.db_repo_name, form['perm_additions'], form['perm_updates'], |
|
83 | 77 | form['perm_deletions']) |
|
84 | 78 | |
|
85 | 79 | action_data = { |
|
86 | 80 | 'added': changes['added'], |
|
87 | 81 | 'updated': changes['updated'], |
|
88 | 82 | 'deleted': changes['deleted'], |
|
89 | 83 | } |
|
90 | 84 | audit_logger.store_web( |
|
91 | 85 | 'repo.edit.permissions', action_data=action_data, |
|
92 | 86 | user=self._rhodecode_user, repo=self.db_repo) |
|
93 | 87 | |
|
94 | 88 | Session().commit() |
|
95 | 89 | h.flash(_('Repository permissions updated'), category='success') |
|
96 | 90 | |
|
97 | 91 | raise HTTPFound( |
|
98 |
|
|
|
92 | h.route_path('edit_repo_perms', repo_name=self.db_repo_name)) |
@@ -1,179 +1,254 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2011-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import logging |
|
22 | 22 | |
|
23 | 23 | import deform |
|
24 | 24 | from pyramid.httpexceptions import HTTPFound |
|
25 | 25 | from pyramid.view import view_config |
|
26 | 26 | |
|
27 | 27 | from rhodecode.apps._base import RepoAppView |
|
28 | 28 | from rhodecode.forms import RcForm |
|
29 | 29 | from rhodecode.lib import helpers as h |
|
30 | 30 | from rhodecode.lib import audit_logger |
|
31 | 31 | from rhodecode.lib.auth import ( |
|
32 | LoginRequired, HasRepoPermissionAnyDecorator, | |
|
33 | HasRepoPermissionAllDecorator, CSRFRequired) | |
|
34 | from rhodecode.model.db import RepositoryField, RepoGroup | |
|
32 | LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired) | |
|
33 | from rhodecode.model.db import RepositoryField, RepoGroup, Repository | |
|
35 | 34 | from rhodecode.model.meta import Session |
|
36 | 35 | from rhodecode.model.repo import RepoModel |
|
37 | 36 | from rhodecode.model.scm import RepoGroupList, ScmModel |
|
38 | 37 | from rhodecode.model.validation_schema.schemas import repo_schema |
|
39 | 38 | |
|
40 | 39 | log = logging.getLogger(__name__) |
|
41 | 40 | |
|
42 | 41 | |
|
43 | 42 | class RepoSettingsView(RepoAppView): |
|
44 | 43 | |
|
45 | 44 | def load_default_context(self): |
|
46 | 45 | c = self._get_local_tmpl_context() |
|
47 | 46 | |
|
48 | 47 | # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead |
|
49 | 48 | c.repo_info = self.db_repo |
|
50 | 49 | |
|
51 | 50 | acl_groups = RepoGroupList( |
|
52 | 51 | RepoGroup.query().all(), |
|
53 | 52 | perm_set=['group.write', 'group.admin']) |
|
54 | 53 | c.repo_groups = RepoGroup.groups_choices(groups=acl_groups) |
|
55 | 54 | c.repo_groups_choices = map(lambda k: k[0], c.repo_groups) |
|
56 | 55 | |
|
57 | 56 | # in case someone no longer have a group.write access to a repository |
|
58 | 57 | # pre fill the list with this entry, we don't care if this is the same |
|
59 | 58 | # but it will allow saving repo data properly. |
|
60 | 59 | repo_group = self.db_repo.group |
|
61 | 60 | if repo_group and repo_group.group_id not in c.repo_groups_choices: |
|
62 | 61 | c.repo_groups_choices.append(repo_group.group_id) |
|
63 | 62 | c.repo_groups.append(RepoGroup._generate_choice(repo_group)) |
|
64 | 63 | |
|
65 | 64 | if c.repository_requirements_missing or self.rhodecode_vcs_repo is None: |
|
66 | 65 | # we might be in missing requirement state, so we load things |
|
67 | 66 | # without touching scm_instance() |
|
68 | 67 | c.landing_revs_choices, c.landing_revs = \ |
|
69 | 68 | ScmModel().get_repo_landing_revs() |
|
70 | 69 | else: |
|
71 | 70 | c.landing_revs_choices, c.landing_revs = \ |
|
72 | 71 | ScmModel().get_repo_landing_revs(self.db_repo) |
|
73 | 72 | |
|
74 | 73 | c.personal_repo_group = c.auth_user.personal_repo_group |
|
75 | 74 | c.repo_fields = RepositoryField.query()\ |
|
76 | 75 | .filter(RepositoryField.repository == self.db_repo).all() |
|
77 | 76 | |
|
78 | 77 | self._register_global_c(c) |
|
79 | 78 | return c |
|
80 | 79 | |
|
81 | 80 | def _get_schema(self, c, old_values=None): |
|
82 | 81 | return repo_schema.RepoSettingsSchema().bind( |
|
83 | 82 | repo_type=self.db_repo.repo_type, |
|
84 | 83 | repo_type_options=[self.db_repo.repo_type], |
|
85 | 84 | repo_ref_options=c.landing_revs_choices, |
|
86 | 85 | repo_ref_items=c.landing_revs, |
|
87 | 86 | repo_repo_group_options=c.repo_groups_choices, |
|
88 | 87 | repo_repo_group_items=c.repo_groups, |
|
89 | 88 | # user caller |
|
90 | 89 | user=self._rhodecode_user, |
|
91 | 90 | old_values=old_values |
|
92 | 91 | ) |
|
93 | 92 | |
|
94 | 93 | @LoginRequired() |
|
95 | 94 | @HasRepoPermissionAnyDecorator('repository.admin') |
|
96 | 95 | @view_config( |
|
97 | 96 | route_name='edit_repo', request_method='GET', |
|
98 | 97 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') |
|
99 | 98 | def edit_settings(self): |
|
100 | 99 | c = self.load_default_context() |
|
101 | 100 | c.active = 'settings' |
|
102 | 101 | |
|
103 | 102 | defaults = RepoModel()._get_defaults(self.db_repo_name) |
|
104 | 103 | defaults['repo_owner'] = defaults['user'] |
|
105 | 104 | defaults['repo_landing_commit_ref'] = defaults['repo_landing_rev'] |
|
106 | 105 | |
|
107 | 106 | schema = self._get_schema(c) |
|
108 | 107 | c.form = RcForm(schema, appstruct=defaults) |
|
109 | 108 | return self._get_template_context(c) |
|
110 | 109 | |
|
111 | 110 | @LoginRequired() |
|
112 |
@HasRepoPermissionA |
|
|
111 | @HasRepoPermissionAnyDecorator('repository.admin') | |
|
113 | 112 | @CSRFRequired() |
|
114 | 113 | @view_config( |
|
115 | 114 | route_name='edit_repo', request_method='POST', |
|
116 | 115 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') |
|
117 | 116 | def edit_settings_update(self): |
|
118 | 117 | _ = self.request.translate |
|
119 | 118 | c = self.load_default_context() |
|
120 | 119 | c.active = 'settings' |
|
121 | 120 | old_repo_name = self.db_repo_name |
|
122 | 121 | |
|
123 | 122 | old_values = self.db_repo.get_api_data() |
|
124 | 123 | schema = self._get_schema(c, old_values=old_values) |
|
125 | 124 | |
|
126 | 125 | c.form = RcForm(schema) |
|
127 | 126 | pstruct = self.request.POST.items() |
|
128 | 127 | pstruct.append(('repo_type', self.db_repo.repo_type)) |
|
129 | 128 | try: |
|
130 | 129 | schema_data = c.form.validate(pstruct) |
|
131 | 130 | except deform.ValidationFailure as err_form: |
|
132 | 131 | return self._get_template_context(c) |
|
133 | 132 | |
|
134 | 133 | # data is now VALID, proceed with updates |
|
135 | 134 | # save validated data back into the updates dict |
|
136 | 135 | validated_updates = dict( |
|
137 | 136 | repo_name=schema_data['repo_group']['repo_name_without_group'], |
|
138 | 137 | repo_group=schema_data['repo_group']['repo_group_id'], |
|
139 | 138 | |
|
140 | 139 | user=schema_data['repo_owner'], |
|
141 | 140 | repo_description=schema_data['repo_description'], |
|
142 | 141 | repo_private=schema_data['repo_private'], |
|
143 | 142 | clone_uri=schema_data['repo_clone_uri'], |
|
144 | 143 | repo_landing_rev=schema_data['repo_landing_commit_ref'], |
|
145 | 144 | repo_enable_statistics=schema_data['repo_enable_statistics'], |
|
146 | 145 | repo_enable_locking=schema_data['repo_enable_locking'], |
|
147 | 146 | repo_enable_downloads=schema_data['repo_enable_downloads'], |
|
148 | 147 | ) |
|
149 | 148 | # detect if CLONE URI changed, if we get OLD means we keep old values |
|
150 | 149 | if schema_data['repo_clone_uri_change'] == 'OLD': |
|
151 | 150 | validated_updates['clone_uri'] = self.db_repo.clone_uri |
|
152 | 151 | |
|
153 | 152 | # use the new full name for redirect |
|
154 | 153 | new_repo_name = schema_data['repo_group']['repo_name_with_group'] |
|
155 | 154 | |
|
156 | 155 | # save extra fields into our validated data |
|
157 | 156 | for key, value in pstruct: |
|
158 | 157 | if key.startswith(RepositoryField.PREFIX): |
|
159 | 158 | validated_updates[key] = value |
|
160 | 159 | |
|
161 | 160 | try: |
|
162 | 161 | RepoModel().update(self.db_repo, **validated_updates) |
|
163 | 162 | ScmModel().mark_for_invalidation(new_repo_name) |
|
164 | 163 | |
|
165 | 164 | audit_logger.store_web( |
|
166 | 165 | 'repo.edit', action_data={'old_data': old_values}, |
|
167 | 166 | user=self._rhodecode_user, repo=self.db_repo) |
|
168 | 167 | |
|
169 | 168 | Session().commit() |
|
170 | 169 | |
|
171 | 170 | h.flash(_('Repository {} updated successfully').format( |
|
172 | 171 | old_repo_name), category='success') |
|
173 | 172 | except Exception: |
|
174 | 173 | log.exception("Exception during update of repository") |
|
175 | 174 | h.flash(_('Error occurred during update of repository {}').format( |
|
176 | 175 | old_repo_name), category='error') |
|
177 | 176 | |
|
178 | 177 | raise HTTPFound( |
|
179 |
|
|
|
178 | h.route_path('edit_repo', repo_name=new_repo_name)) | |
|
179 | ||
|
180 | @LoginRequired() | |
|
181 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') | |
|
182 | @view_config( | |
|
183 | route_name='repo_edit_toggle_locking', request_method='GET', | |
|
184 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') | |
|
185 | def toggle_locking(self): | |
|
186 | """ | |
|
187 | Toggle locking of repository by simple GET call to url | |
|
188 | """ | |
|
189 | _ = self.request.translate | |
|
190 | repo = self.db_repo | |
|
191 | ||
|
192 | try: | |
|
193 | if repo.enable_locking: | |
|
194 | if repo.locked[0]: | |
|
195 | Repository.unlock(repo) | |
|
196 | action = _('Unlocked') | |
|
197 | else: | |
|
198 | Repository.lock( | |
|
199 | repo, self._rhodecode_user.user_id, | |
|
200 | lock_reason=Repository.LOCK_WEB) | |
|
201 | action = _('Locked') | |
|
202 | ||
|
203 | h.flash(_('Repository has been %s') % action, | |
|
204 | category='success') | |
|
205 | except Exception: | |
|
206 | log.exception("Exception during unlocking") | |
|
207 | h.flash(_('An error occurred during unlocking'), | |
|
208 | category='error') | |
|
209 | raise HTTPFound( | |
|
210 | h.route_path('repo_summary', repo_name=self.db_repo_name)) | |
|
211 | ||
|
212 | @LoginRequired() | |
|
213 | @HasRepoPermissionAnyDecorator('repository.admin') | |
|
214 | @view_config( | |
|
215 | route_name='edit_repo_statistics', request_method='GET', | |
|
216 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') | |
|
217 | def edit_statistics_form(self): | |
|
218 | c = self.load_default_context() | |
|
219 | ||
|
220 | if self.db_repo.stats: | |
|
221 | # this is on what revision we ended up so we add +1 for count | |
|
222 | last_rev = self.db_repo.stats.stat_on_revision + 1 | |
|
223 | else: | |
|
224 | last_rev = 0 | |
|
225 | ||
|
226 | c.active = 'statistics' | |
|
227 | c.stats_revision = last_rev | |
|
228 | c.repo_last_rev = self.rhodecode_vcs_repo.count() | |
|
229 | ||
|
230 | if last_rev == 0 or c.repo_last_rev == 0: | |
|
231 | c.stats_percentage = 0 | |
|
232 | else: | |
|
233 | c.stats_percentage = '%.2f' % ( | |
|
234 | (float((last_rev)) / c.repo_last_rev) * 100) | |
|
235 | return self._get_template_context(c) | |
|
236 | ||
|
237 | @LoginRequired() | |
|
238 | @HasRepoPermissionAnyDecorator('repository.admin') | |
|
239 | @CSRFRequired() | |
|
240 | @view_config( | |
|
241 | route_name='edit_repo_statistics_reset', request_method='POST', | |
|
242 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') | |
|
243 | def repo_statistics_reset(self): | |
|
244 | _ = self.request.translate | |
|
245 | ||
|
246 | try: | |
|
247 | RepoModel().delete_stats(self.db_repo_name) | |
|
248 | Session().commit() | |
|
249 | except Exception: | |
|
250 | log.exception('Edit statistics failure') | |
|
251 | h.flash(_('An error occurred during deletion of repository stats'), | |
|
252 | category='error') | |
|
253 | raise HTTPFound( | |
|
254 | h.route_path('edit_repo_statistics', repo_name=self.db_repo_name)) |
@@ -1,226 +1,226 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2011-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import logging |
|
22 | 22 | |
|
23 | 23 | from pyramid.view import view_config |
|
24 | 24 | from pyramid.httpexceptions import HTTPFound |
|
25 | 25 | |
|
26 | 26 | from rhodecode.apps._base import RepoAppView |
|
27 | 27 | from rhodecode.lib import helpers as h |
|
28 | 28 | from rhodecode.lib import audit_logger |
|
29 | 29 | from rhodecode.lib.auth import ( |
|
30 | 30 | LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired) |
|
31 | 31 | from rhodecode.lib.exceptions import AttachedForksError |
|
32 | 32 | from rhodecode.lib.utils2 import safe_int |
|
33 | 33 | from rhodecode.lib.vcs import RepositoryError |
|
34 | 34 | from rhodecode.model.db import Session, UserFollowing, User, Repository |
|
35 | 35 | from rhodecode.model.repo import RepoModel |
|
36 | 36 | from rhodecode.model.scm import ScmModel |
|
37 | 37 | |
|
38 | 38 | log = logging.getLogger(__name__) |
|
39 | 39 | |
|
40 | 40 | |
|
41 | 41 | class RepoSettingsView(RepoAppView): |
|
42 | 42 | |
|
43 | 43 | def load_default_context(self): |
|
44 | 44 | c = self._get_local_tmpl_context() |
|
45 | 45 | |
|
46 | 46 | # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead |
|
47 | 47 | c.repo_info = self.db_repo |
|
48 | 48 | |
|
49 | 49 | self._register_global_c(c) |
|
50 | 50 | return c |
|
51 | 51 | |
|
52 | 52 | @LoginRequired() |
|
53 | 53 | @HasRepoPermissionAnyDecorator('repository.admin') |
|
54 | 54 | @view_config( |
|
55 | 55 | route_name='edit_repo_advanced', request_method='GET', |
|
56 | 56 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') |
|
57 | 57 | def edit_advanced(self): |
|
58 | 58 | c = self.load_default_context() |
|
59 | 59 | c.active = 'advanced' |
|
60 | 60 | |
|
61 | 61 | c.default_user_id = User.get_default_user().user_id |
|
62 | 62 | c.in_public_journal = UserFollowing.query() \ |
|
63 | 63 | .filter(UserFollowing.user_id == c.default_user_id) \ |
|
64 |
.filter(UserFollowing.follows_repository == |
|
|
64 | .filter(UserFollowing.follows_repository == self.db_repo).scalar() | |
|
65 | 65 | |
|
66 | 66 | c.has_origin_repo_read_perm = False |
|
67 | 67 | if self.db_repo.fork: |
|
68 | 68 | c.has_origin_repo_read_perm = h.HasRepoPermissionAny( |
|
69 | 69 | 'repository.write', 'repository.read', 'repository.admin')( |
|
70 | 70 | self.db_repo.fork.repo_name, 'repo set as fork page') |
|
71 | 71 | |
|
72 | 72 | return self._get_template_context(c) |
|
73 | 73 | |
|
74 | 74 | @LoginRequired() |
|
75 | 75 | @HasRepoPermissionAnyDecorator('repository.admin') |
|
76 | 76 | @CSRFRequired() |
|
77 | 77 | @view_config( |
|
78 | 78 | route_name='edit_repo_advanced_delete', request_method='POST', |
|
79 | 79 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') |
|
80 | 80 | def edit_advanced_delete(self): |
|
81 | 81 | """ |
|
82 | 82 | Deletes the repository, or shows warnings if deletion is not possible |
|
83 | 83 | because of attached forks or other errors. |
|
84 | 84 | """ |
|
85 | 85 | _ = self.request.translate |
|
86 | 86 | handle_forks = self.request.POST.get('forks', None) |
|
87 | 87 | |
|
88 | 88 | try: |
|
89 | 89 | _forks = self.db_repo.forks.count() |
|
90 | 90 | if _forks and handle_forks: |
|
91 | 91 | if handle_forks == 'detach_forks': |
|
92 | 92 | handle_forks = 'detach' |
|
93 | 93 | h.flash(_('Detached %s forks') % _forks, category='success') |
|
94 | 94 | elif handle_forks == 'delete_forks': |
|
95 | 95 | handle_forks = 'delete' |
|
96 | 96 | h.flash(_('Deleted %s forks') % _forks, category='success') |
|
97 | 97 | |
|
98 | 98 | old_data = self.db_repo.get_api_data() |
|
99 | 99 | RepoModel().delete(self.db_repo, forks=handle_forks) |
|
100 | 100 | |
|
101 | 101 | repo = audit_logger.RepoWrap(repo_id=None, |
|
102 | 102 | repo_name=self.db_repo.repo_name) |
|
103 | 103 | audit_logger.store_web( |
|
104 | 104 | 'repo.delete', action_data={'old_data': old_data}, |
|
105 | 105 | user=self._rhodecode_user, repo=repo) |
|
106 | 106 | |
|
107 | 107 | ScmModel().mark_for_invalidation(self.db_repo_name, delete=True) |
|
108 | 108 | h.flash( |
|
109 | 109 | _('Deleted repository `%s`') % self.db_repo_name, |
|
110 | 110 | category='success') |
|
111 | 111 | Session().commit() |
|
112 | 112 | except AttachedForksError: |
|
113 | 113 | repo_advanced_url = h.route_path( |
|
114 | 114 | 'edit_repo_advanced', repo_name=self.db_repo_name, |
|
115 | 115 | _anchor='advanced-delete') |
|
116 | 116 | delete_anchor = h.link_to(_('detach or delete'), repo_advanced_url) |
|
117 | 117 | h.flash(_('Cannot delete `{repo}` it still contains attached forks. ' |
|
118 | 118 | 'Try using {delete_or_detach} option.') |
|
119 | 119 | .format(repo=self.db_repo_name, delete_or_detach=delete_anchor), |
|
120 | 120 | category='warning') |
|
121 | 121 | |
|
122 | 122 | # redirect to advanced for forks handle action ? |
|
123 | 123 | raise HTTPFound(repo_advanced_url) |
|
124 | 124 | |
|
125 | 125 | except Exception: |
|
126 | 126 | log.exception("Exception during deletion of repository") |
|
127 | 127 | h.flash(_('An error occurred during deletion of `%s`') |
|
128 | 128 | % self.db_repo_name, category='error') |
|
129 | 129 | # redirect to advanced for more deletion options |
|
130 | 130 | raise HTTPFound( |
|
131 | 131 | h.route_path('edit_repo_advanced', repo_name=self.db_repo_name), |
|
132 | 132 | _anchor='advanced-delete') |
|
133 | 133 | |
|
134 | 134 | raise HTTPFound(h.route_path('home')) |
|
135 | 135 | |
|
136 | 136 | @LoginRequired() |
|
137 | 137 | @HasRepoPermissionAnyDecorator('repository.admin') |
|
138 | 138 | @CSRFRequired() |
|
139 | 139 | @view_config( |
|
140 | 140 | route_name='edit_repo_advanced_journal', request_method='POST', |
|
141 | 141 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') |
|
142 | 142 | def edit_advanced_journal(self): |
|
143 | 143 | """ |
|
144 | 144 | Set's this repository to be visible in public journal, |
|
145 | 145 | in other words making default user to follow this repo |
|
146 | 146 | """ |
|
147 | 147 | _ = self.request.translate |
|
148 | 148 | |
|
149 | 149 | try: |
|
150 | 150 | user_id = User.get_default_user().user_id |
|
151 | 151 | ScmModel().toggle_following_repo(self.db_repo.repo_id, user_id) |
|
152 | 152 | h.flash(_('Updated repository visibility in public journal'), |
|
153 | 153 | category='success') |
|
154 | 154 | Session().commit() |
|
155 | 155 | except Exception: |
|
156 | 156 | h.flash(_('An error occurred during setting this ' |
|
157 | 157 | 'repository in public journal'), |
|
158 | 158 | category='error') |
|
159 | 159 | |
|
160 | 160 | raise HTTPFound( |
|
161 | 161 | h.route_path('edit_repo_advanced', repo_name=self.db_repo_name)) |
|
162 | 162 | |
|
163 | 163 | @LoginRequired() |
|
164 | 164 | @HasRepoPermissionAnyDecorator('repository.admin') |
|
165 | 165 | @CSRFRequired() |
|
166 | 166 | @view_config( |
|
167 | 167 | route_name='edit_repo_advanced_fork', request_method='POST', |
|
168 | 168 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') |
|
169 | 169 | def edit_advanced_fork(self): |
|
170 | 170 | """ |
|
171 | 171 | Mark given repository as a fork of another |
|
172 | 172 | """ |
|
173 | 173 | _ = self.request.translate |
|
174 | 174 | |
|
175 | 175 | new_fork_id = self.request.POST.get('id_fork_of') |
|
176 | 176 | try: |
|
177 | 177 | |
|
178 | 178 | if new_fork_id and not new_fork_id.isdigit(): |
|
179 | 179 | log.error('Given fork id %s is not an INT', new_fork_id) |
|
180 | 180 | |
|
181 | 181 | fork_id = safe_int(new_fork_id) |
|
182 | 182 | repo = ScmModel().mark_as_fork( |
|
183 | 183 | self.db_repo_name, fork_id, self._rhodecode_user.user_id) |
|
184 | 184 | fork = repo.fork.repo_name if repo.fork else _('Nothing') |
|
185 | 185 | Session().commit() |
|
186 | 186 | h.flash(_('Marked repo %s as fork of %s') % (self.db_repo_name, fork), |
|
187 | 187 | category='success') |
|
188 | 188 | except RepositoryError as e: |
|
189 | 189 | log.exception("Repository Error occurred") |
|
190 | 190 | h.flash(str(e), category='error') |
|
191 | 191 | except Exception as e: |
|
192 | 192 | log.exception("Exception while editing fork") |
|
193 | 193 | h.flash(_('An error occurred during this operation'), |
|
194 | 194 | category='error') |
|
195 | 195 | |
|
196 | 196 | raise HTTPFound( |
|
197 | 197 | h.route_path('edit_repo_advanced', repo_name=self.db_repo_name)) |
|
198 | 198 | |
|
199 | 199 | @LoginRequired() |
|
200 | 200 | @HasRepoPermissionAnyDecorator('repository.admin') |
|
201 | 201 | @CSRFRequired() |
|
202 | 202 | @view_config( |
|
203 | 203 | route_name='edit_repo_advanced_locking', request_method='POST', |
|
204 | 204 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') |
|
205 | 205 | def edit_advanced_locking(self): |
|
206 | 206 | """ |
|
207 | 207 | Toggle locking of repository |
|
208 | 208 | """ |
|
209 | 209 | _ = self.request.translate |
|
210 | 210 | set_lock = self.request.POST.get('set_lock') |
|
211 | 211 | set_unlock = self.request.POST.get('set_unlock') |
|
212 | 212 | |
|
213 | 213 | try: |
|
214 | 214 | if set_lock: |
|
215 | 215 | Repository.lock(self.db_repo, self._rhodecode_user.user_id, |
|
216 | 216 | lock_reason=Repository.LOCK_WEB) |
|
217 | 217 | h.flash(_('Locked repository'), category='success') |
|
218 | 218 | elif set_unlock: |
|
219 | 219 | Repository.unlock(self.db_repo) |
|
220 | 220 | h.flash(_('Unlocked repository'), category='success') |
|
221 | 221 | except Exception as e: |
|
222 | 222 | log.exception("Exception during unlocking") |
|
223 | 223 | h.flash(_('An error occurred during unlocking'), category='error') |
|
224 | 224 | |
|
225 | 225 | raise HTTPFound( |
|
226 | 226 | h.route_path('edit_repo_advanced', repo_name=self.db_repo_name)) |
@@ -1,116 +1,116 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2017-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import logging |
|
22 | 22 | from pyramid.view import view_config |
|
23 | 23 | |
|
24 | 24 | from rhodecode.apps._base import RepoAppView |
|
25 | 25 | from rhodecode.lib import audit_logger |
|
26 | 26 | from rhodecode.lib import helpers as h |
|
27 | from rhodecode.lib.auth import (LoginRequired, HasRepoPermissionAnyDecorator, | |
|
28 | NotAnonymous, CSRFRequired) | |
|
27 | from rhodecode.lib.auth import ( | |
|
28 | LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired) | |
|
29 | 29 | from rhodecode.lib.ext_json import json |
|
30 | 30 | |
|
31 | 31 | log = logging.getLogger(__name__) |
|
32 | 32 | |
|
33 | 33 | |
|
34 | 34 | class StripView(RepoAppView): |
|
35 | 35 | def load_default_context(self): |
|
36 | 36 | c = self._get_local_tmpl_context() |
|
37 | 37 | |
|
38 | 38 | # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead |
|
39 | 39 | c.repo_info = self.db_repo |
|
40 | 40 | |
|
41 | 41 | self._register_global_c(c) |
|
42 | 42 | return c |
|
43 | 43 | |
|
44 | 44 | @LoginRequired() |
|
45 | 45 | @HasRepoPermissionAnyDecorator('repository.admin') |
|
46 | 46 | @view_config( |
|
47 | route_name='strip', request_method='GET', | |
|
47 | route_name='edit_repo_strip', request_method='GET', | |
|
48 | 48 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') |
|
49 | 49 | def strip(self): |
|
50 | 50 | c = self.load_default_context() |
|
51 | 51 | c.active = 'strip' |
|
52 | 52 | c.strip_limit = 10 |
|
53 | 53 | |
|
54 | 54 | return self._get_template_context(c) |
|
55 | 55 | |
|
56 | 56 | @LoginRequired() |
|
57 | 57 | @HasRepoPermissionAnyDecorator('repository.admin') |
|
58 | 58 | @CSRFRequired() |
|
59 | 59 | @view_config( |
|
60 | 60 | route_name='strip_check', request_method='POST', |
|
61 | 61 | renderer='json', xhr=True) |
|
62 | 62 | def strip_check(self): |
|
63 | 63 | from rhodecode.lib.vcs.backends.base import EmptyCommit |
|
64 | 64 | data = {} |
|
65 | 65 | rp = self.request.POST |
|
66 | 66 | for i in range(1, 11): |
|
67 | 67 | chset = 'changeset_id-%d' % (i,) |
|
68 | 68 | check = rp.get(chset) |
|
69 | 69 | |
|
70 | 70 | if check: |
|
71 | 71 | data[i] = self.db_repo.get_changeset(rp[chset]) |
|
72 | 72 | if isinstance(data[i], EmptyCommit): |
|
73 | 73 | data[i] = {'rev': None, 'commit': h.escape(rp[chset])} |
|
74 | 74 | else: |
|
75 | 75 | data[i] = {'rev': data[i].raw_id, 'branch': data[i].branch, |
|
76 | 76 | 'author': data[i].author, |
|
77 | 77 | 'comment': data[i].message} |
|
78 | 78 | else: |
|
79 | 79 | break |
|
80 | 80 | return data |
|
81 | 81 | |
|
82 | 82 | @LoginRequired() |
|
83 | 83 | @HasRepoPermissionAnyDecorator('repository.admin') |
|
84 | 84 | @CSRFRequired() |
|
85 | 85 | @view_config( |
|
86 | 86 | route_name='strip_execute', request_method='POST', |
|
87 | 87 | renderer='json', xhr=True) |
|
88 | 88 | def strip_execute(self): |
|
89 | 89 | from rhodecode.model.scm import ScmModel |
|
90 | 90 | |
|
91 | 91 | c = self.load_default_context() |
|
92 | 92 | user = self._rhodecode_user |
|
93 | 93 | rp = self.request.POST |
|
94 | 94 | data = {} |
|
95 | 95 | for idx in rp: |
|
96 | 96 | commit = json.loads(rp[idx]) |
|
97 | 97 | # If someone put two times the same branch |
|
98 | 98 | if commit['branch'] in data.keys(): |
|
99 | 99 | continue |
|
100 | 100 | try: |
|
101 | 101 | ScmModel().strip( |
|
102 |
repo= |
|
|
102 | repo=self.db_repo, | |
|
103 | 103 | commit_id=commit['rev'], branch=commit['branch']) |
|
104 | 104 | log.info('Stripped commit %s from repo `%s` by %s' % ( |
|
105 |
commit['rev'], |
|
|
105 | commit['rev'], self.db_repo_name, user)) | |
|
106 | 106 | data[commit['rev']] = True |
|
107 | 107 | |
|
108 | 108 | audit_logger.store_web( |
|
109 | 109 | 'repo.commit.strip', action_data={'commit_id': commit['rev']}, |
|
110 | 110 | repo=self.db_repo, user=self._rhodecode_user, commit=True) |
|
111 | 111 | |
|
112 | 112 | except Exception as e: |
|
113 | 113 | data[commit['rev']] = False |
|
114 | 114 | log.debug('Stripped commit %s from repo `%s` failed by %s, exeption %s' % ( |
|
115 | 115 | commit['rev'], self.db_repo_name, user, e.message)) |
|
116 | 116 | return data |
@@ -1,370 +1,372 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2011-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import logging |
|
22 | 22 | import string |
|
23 | 23 | |
|
24 | 24 | from pyramid.view import view_config |
|
25 | ||
|
26 | 25 | from beaker.cache import cache_region |
|
27 | 26 | |
|
28 | ||
|
29 | 27 | from rhodecode.controllers import utils |
|
30 | ||
|
31 | 28 | from rhodecode.apps._base import RepoAppView |
|
32 | 29 | from rhodecode.config.conf import (LANGUAGES_EXTENSIONS_MAP) |
|
33 | 30 | from rhodecode.lib import caches, helpers as h |
|
34 | 31 | from rhodecode.lib.helpers import RepoPage |
|
35 | 32 | from rhodecode.lib.utils2 import safe_str, safe_int |
|
36 | 33 | from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator |
|
37 | 34 | from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links |
|
38 | 35 | from rhodecode.lib.ext_json import json |
|
39 | 36 | from rhodecode.lib.vcs.backends.base import EmptyCommit |
|
40 | 37 | from rhodecode.lib.vcs.exceptions import CommitError, EmptyRepositoryError |
|
41 | 38 | from rhodecode.model.db import Statistics, CacheKey, User |
|
42 | 39 | from rhodecode.model.meta import Session |
|
43 | 40 | from rhodecode.model.repo import ReadmeFinder |
|
44 | 41 | from rhodecode.model.scm import ScmModel |
|
45 | 42 | |
|
46 | 43 | log = logging.getLogger(__name__) |
|
47 | 44 | |
|
48 | 45 | |
|
49 | 46 | class RepoSummaryView(RepoAppView): |
|
50 | 47 | |
|
51 | 48 | def load_default_context(self): |
|
52 | 49 | c = self._get_local_tmpl_context(include_app_defaults=True) |
|
53 | 50 | |
|
54 | 51 | # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead |
|
55 | 52 | c.repo_info = self.db_repo |
|
56 | 53 | c.rhodecode_repo = None |
|
57 | 54 | if not c.repository_requirements_missing: |
|
58 | 55 | c.rhodecode_repo = self.rhodecode_vcs_repo |
|
59 | 56 | |
|
60 | 57 | self._register_global_c(c) |
|
61 | 58 | return c |
|
62 | 59 | |
|
63 | 60 | def _get_readme_data(self, db_repo, default_renderer): |
|
64 | 61 | repo_name = db_repo.repo_name |
|
65 | 62 | log.debug('Looking for README file') |
|
66 | 63 | |
|
67 | 64 | @cache_region('long_term') |
|
68 | 65 | def _generate_readme(cache_key): |
|
69 | 66 | readme_data = None |
|
70 | 67 | readme_node = None |
|
71 | 68 | readme_filename = None |
|
72 | 69 | commit = self._get_landing_commit_or_none(db_repo) |
|
73 | 70 | if commit: |
|
74 | 71 | log.debug("Searching for a README file.") |
|
75 | 72 | readme_node = ReadmeFinder(default_renderer).search(commit) |
|
76 | 73 | if readme_node: |
|
77 |
relative_url = |
|
|
74 | relative_urls = { | |
|
75 | 'raw': h.route_path( | |
|
78 | 76 | 'repo_file_raw', repo_name=repo_name, |
|
79 | commit_id=commit.raw_id, f_path=readme_node.path) | |
|
77 | commit_id=commit.raw_id, f_path=readme_node.path), | |
|
78 | 'standard': h.route_path( | |
|
79 | 'repo_files', repo_name=repo_name, | |
|
80 | commit_id=commit.raw_id, f_path=readme_node.path), | |
|
81 | } | |
|
80 | 82 | readme_data = self._render_readme_or_none( |
|
81 | commit, readme_node, relative_url) | |
|
83 | commit, readme_node, relative_urls) | |
|
82 | 84 | readme_filename = readme_node.path |
|
83 | 85 | return readme_data, readme_filename |
|
84 | 86 | |
|
85 | 87 | invalidator_context = CacheKey.repo_context_cache( |
|
86 | 88 | _generate_readme, repo_name, CacheKey.CACHE_TYPE_README) |
|
87 | 89 | |
|
88 | 90 | with invalidator_context as context: |
|
89 | 91 | context.invalidate() |
|
90 | 92 | computed = context.compute() |
|
91 | 93 | |
|
92 | 94 | return computed |
|
93 | 95 | |
|
94 | 96 | def _get_landing_commit_or_none(self, db_repo): |
|
95 | 97 | log.debug("Getting the landing commit.") |
|
96 | 98 | try: |
|
97 | 99 | commit = db_repo.get_landing_commit() |
|
98 | 100 | if not isinstance(commit, EmptyCommit): |
|
99 | 101 | return commit |
|
100 | 102 | else: |
|
101 | 103 | log.debug("Repository is empty, no README to render.") |
|
102 | 104 | except CommitError: |
|
103 | 105 | log.exception( |
|
104 | 106 | "Problem getting commit when trying to render the README.") |
|
105 | 107 | |
|
106 | def _render_readme_or_none(self, commit, readme_node, relative_url): | |
|
108 | def _render_readme_or_none(self, commit, readme_node, relative_urls): | |
|
107 | 109 | log.debug( |
|
108 | 110 | 'Found README file `%s` rendering...', readme_node.path) |
|
109 | 111 | renderer = MarkupRenderer() |
|
110 | 112 | try: |
|
111 | 113 | html_source = renderer.render( |
|
112 | 114 | readme_node.content, filename=readme_node.path) |
|
113 | if relative_url: | |
|
114 | return relative_links(html_source, relative_url) | |
|
115 | if relative_urls: | |
|
116 | return relative_links(html_source, relative_urls) | |
|
115 | 117 | return html_source |
|
116 | 118 | except Exception: |
|
117 | 119 | log.exception( |
|
118 | 120 | "Exception while trying to render the README") |
|
119 | 121 | |
|
120 | 122 | def _load_commits_context(self, c): |
|
121 | 123 | p = safe_int(self.request.GET.get('page'), 1) |
|
122 | 124 | size = safe_int(self.request.GET.get('size'), 10) |
|
123 | 125 | |
|
124 | 126 | def url_generator(**kw): |
|
125 | 127 | query_params = { |
|
126 | 128 | 'size': size |
|
127 | 129 | } |
|
128 | 130 | query_params.update(kw) |
|
129 | 131 | return h.route_path( |
|
130 | 132 | 'repo_summary_commits', |
|
131 | 133 | repo_name=c.rhodecode_db_repo.repo_name, _query=query_params) |
|
132 | 134 | |
|
133 | 135 | pre_load = ['author', 'branch', 'date', 'message'] |
|
134 | 136 | try: |
|
135 | 137 | collection = self.rhodecode_vcs_repo.get_commits(pre_load=pre_load) |
|
136 | 138 | except EmptyRepositoryError: |
|
137 | 139 | collection = self.rhodecode_vcs_repo |
|
138 | 140 | |
|
139 | 141 | c.repo_commits = RepoPage( |
|
140 | 142 | collection, page=p, items_per_page=size, url=url_generator) |
|
141 | 143 | page_ids = [x.raw_id for x in c.repo_commits] |
|
142 | 144 | c.comments = self.db_repo.get_comments(page_ids) |
|
143 | 145 | c.statuses = self.db_repo.statuses(page_ids) |
|
144 | 146 | |
|
145 | 147 | @LoginRequired() |
|
146 | 148 | @HasRepoPermissionAnyDecorator( |
|
147 | 149 | 'repository.read', 'repository.write', 'repository.admin') |
|
148 | 150 | @view_config( |
|
149 | 151 | route_name='repo_summary_commits', request_method='GET', |
|
150 | 152 | renderer='rhodecode:templates/summary/summary_commits.mako') |
|
151 | 153 | def summary_commits(self): |
|
152 | 154 | c = self.load_default_context() |
|
153 | 155 | self._load_commits_context(c) |
|
154 | 156 | return self._get_template_context(c) |
|
155 | 157 | |
|
156 | 158 | @LoginRequired() |
|
157 | 159 | @HasRepoPermissionAnyDecorator( |
|
158 | 160 | 'repository.read', 'repository.write', 'repository.admin') |
|
159 | 161 | @view_config( |
|
160 | 162 | route_name='repo_summary', request_method='GET', |
|
161 | 163 | renderer='rhodecode:templates/summary/summary.mako') |
|
162 | 164 | @view_config( |
|
163 | 165 | route_name='repo_summary_slash', request_method='GET', |
|
164 | 166 | renderer='rhodecode:templates/summary/summary.mako') |
|
165 | 167 | @view_config( |
|
166 | 168 | route_name='repo_summary_explicit', request_method='GET', |
|
167 | 169 | renderer='rhodecode:templates/summary/summary.mako') |
|
168 | 170 | def summary(self): |
|
169 | 171 | c = self.load_default_context() |
|
170 | 172 | |
|
171 | 173 | # Prepare the clone URL |
|
172 | 174 | username = '' |
|
173 | 175 | if self._rhodecode_user.username != User.DEFAULT_USER: |
|
174 | 176 | username = safe_str(self._rhodecode_user.username) |
|
175 | 177 | |
|
176 | 178 | _def_clone_uri = _def_clone_uri_by_id = c.clone_uri_tmpl |
|
177 | 179 | if '{repo}' in _def_clone_uri: |
|
178 | 180 | _def_clone_uri_by_id = _def_clone_uri.replace( |
|
179 | 181 | '{repo}', '_{repoid}') |
|
180 | 182 | elif '{repoid}' in _def_clone_uri: |
|
181 | 183 | _def_clone_uri_by_id = _def_clone_uri.replace( |
|
182 | 184 | '_{repoid}', '{repo}') |
|
183 | 185 | |
|
184 | 186 | c.clone_repo_url = self.db_repo.clone_url( |
|
185 | 187 | user=username, uri_tmpl=_def_clone_uri) |
|
186 | 188 | c.clone_repo_url_id = self.db_repo.clone_url( |
|
187 | 189 | user=username, uri_tmpl=_def_clone_uri_by_id) |
|
188 | 190 | |
|
189 | 191 | # If enabled, get statistics data |
|
190 | 192 | |
|
191 | 193 | c.show_stats = bool(self.db_repo.enable_statistics) |
|
192 | 194 | |
|
193 | 195 | stats = Session().query(Statistics) \ |
|
194 | 196 | .filter(Statistics.repository == self.db_repo) \ |
|
195 | 197 | .scalar() |
|
196 | 198 | |
|
197 | 199 | c.stats_percentage = 0 |
|
198 | 200 | |
|
199 | 201 | if stats and stats.languages: |
|
200 | 202 | c.no_data = False is self.db_repo.enable_statistics |
|
201 | 203 | lang_stats_d = json.loads(stats.languages) |
|
202 | 204 | |
|
203 | 205 | # Sort first by decreasing count and second by the file extension, |
|
204 | 206 | # so we have a consistent output. |
|
205 | 207 | lang_stats_items = sorted(lang_stats_d.iteritems(), |
|
206 | 208 | key=lambda k: (-k[1], k[0]))[:10] |
|
207 | 209 | lang_stats = [(x, {"count": y, |
|
208 | 210 | "desc": LANGUAGES_EXTENSIONS_MAP.get(x)}) |
|
209 | 211 | for x, y in lang_stats_items] |
|
210 | 212 | |
|
211 | 213 | c.trending_languages = json.dumps(lang_stats) |
|
212 | 214 | else: |
|
213 | 215 | c.no_data = True |
|
214 | 216 | c.trending_languages = json.dumps({}) |
|
215 | 217 | |
|
216 | 218 | scm_model = ScmModel() |
|
217 | 219 | c.enable_downloads = self.db_repo.enable_downloads |
|
218 | 220 | c.repository_followers = scm_model.get_followers(self.db_repo) |
|
219 | 221 | c.repository_forks = scm_model.get_forks(self.db_repo) |
|
220 | 222 | c.repository_is_user_following = scm_model.is_following_repo( |
|
221 | 223 | self.db_repo_name, self._rhodecode_user.user_id) |
|
222 | 224 | |
|
223 | 225 | # first interaction with the VCS instance after here... |
|
224 | 226 | if c.repository_requirements_missing: |
|
225 | 227 | self.request.override_renderer = \ |
|
226 | 228 | 'rhodecode:templates/summary/missing_requirements.mako' |
|
227 | 229 | return self._get_template_context(c) |
|
228 | 230 | |
|
229 | 231 | c.readme_data, c.readme_file = \ |
|
230 | 232 | self._get_readme_data(self.db_repo, c.visual.default_renderer) |
|
231 | 233 | |
|
232 | 234 | # loads the summary commits template context |
|
233 | 235 | self._load_commits_context(c) |
|
234 | 236 | |
|
235 | 237 | return self._get_template_context(c) |
|
236 | 238 | |
|
237 | 239 | def get_request_commit_id(self): |
|
238 | 240 | return self.request.matchdict['commit_id'] |
|
239 | 241 | |
|
240 | 242 | @LoginRequired() |
|
241 | 243 | @HasRepoPermissionAnyDecorator( |
|
242 | 244 | 'repository.read', 'repository.write', 'repository.admin') |
|
243 | 245 | @view_config( |
|
244 | 246 | route_name='repo_stats', request_method='GET', |
|
245 | 247 | renderer='json_ext') |
|
246 | 248 | def repo_stats(self): |
|
247 | 249 | commit_id = self.get_request_commit_id() |
|
248 | 250 | |
|
249 | 251 | _namespace = caches.get_repo_namespace_key( |
|
250 | 252 | caches.SUMMARY_STATS, self.db_repo_name) |
|
251 | 253 | show_stats = bool(self.db_repo.enable_statistics) |
|
252 | 254 | cache_manager = caches.get_cache_manager( |
|
253 | 255 | 'repo_cache_long', _namespace) |
|
254 | 256 | _cache_key = caches.compute_key_from_params( |
|
255 | 257 | self.db_repo_name, commit_id, show_stats) |
|
256 | 258 | |
|
257 | 259 | def compute_stats(): |
|
258 | 260 | code_stats = {} |
|
259 | 261 | size = 0 |
|
260 | 262 | try: |
|
261 | 263 | scm_instance = self.db_repo.scm_instance() |
|
262 | 264 | commit = scm_instance.get_commit(commit_id) |
|
263 | 265 | |
|
264 | 266 | for node in commit.get_filenodes_generator(): |
|
265 | 267 | size += node.size |
|
266 | 268 | if not show_stats: |
|
267 | 269 | continue |
|
268 | 270 | ext = string.lower(node.extension) |
|
269 | 271 | ext_info = LANGUAGES_EXTENSIONS_MAP.get(ext) |
|
270 | 272 | if ext_info: |
|
271 | 273 | if ext in code_stats: |
|
272 | 274 | code_stats[ext]['count'] += 1 |
|
273 | 275 | else: |
|
274 | 276 | code_stats[ext] = {"count": 1, "desc": ext_info} |
|
275 | 277 | except EmptyRepositoryError: |
|
276 | 278 | pass |
|
277 | 279 | return {'size': h.format_byte_size_binary(size), |
|
278 | 280 | 'code_stats': code_stats} |
|
279 | 281 | |
|
280 | 282 | stats = cache_manager.get(_cache_key, createfunc=compute_stats) |
|
281 | 283 | return stats |
|
282 | 284 | |
|
283 | 285 | @LoginRequired() |
|
284 | 286 | @HasRepoPermissionAnyDecorator( |
|
285 | 287 | 'repository.read', 'repository.write', 'repository.admin') |
|
286 | 288 | @view_config( |
|
287 | 289 | route_name='repo_refs_data', request_method='GET', |
|
288 | 290 | renderer='json_ext') |
|
289 | 291 | def repo_refs_data(self): |
|
290 | 292 | _ = self.request.translate |
|
291 | 293 | self.load_default_context() |
|
292 | 294 | |
|
293 | 295 | repo = self.rhodecode_vcs_repo |
|
294 | 296 | refs_to_create = [ |
|
295 | 297 | (_("Branch"), repo.branches, 'branch'), |
|
296 | 298 | (_("Tag"), repo.tags, 'tag'), |
|
297 | 299 | (_("Bookmark"), repo.bookmarks, 'book'), |
|
298 | 300 | ] |
|
299 | 301 | res = self._create_reference_data( |
|
300 | 302 | repo, self.db_repo_name, refs_to_create) |
|
301 | 303 | data = { |
|
302 | 304 | 'more': False, |
|
303 | 305 | 'results': res |
|
304 | 306 | } |
|
305 | 307 | return data |
|
306 | 308 | |
|
307 | 309 | @LoginRequired() |
|
308 | 310 | @HasRepoPermissionAnyDecorator( |
|
309 | 311 | 'repository.read', 'repository.write', 'repository.admin') |
|
310 | 312 | @view_config( |
|
311 | 313 | route_name='repo_refs_changelog_data', request_method='GET', |
|
312 | 314 | renderer='json_ext') |
|
313 | 315 | def repo_refs_changelog_data(self): |
|
314 | 316 | _ = self.request.translate |
|
315 | 317 | self.load_default_context() |
|
316 | 318 | |
|
317 | 319 | repo = self.rhodecode_vcs_repo |
|
318 | 320 | |
|
319 | 321 | refs_to_create = [ |
|
320 | 322 | (_("Branches"), repo.branches, 'branch'), |
|
321 | 323 | (_("Closed branches"), repo.branches_closed, 'branch_closed'), |
|
322 | 324 | # TODO: enable when vcs can handle bookmarks filters |
|
323 | 325 | # (_("Bookmarks"), repo.bookmarks, "book"), |
|
324 | 326 | ] |
|
325 | 327 | res = self._create_reference_data( |
|
326 | 328 | repo, self.db_repo_name, refs_to_create) |
|
327 | 329 | data = { |
|
328 | 330 | 'more': False, |
|
329 | 331 | 'results': res |
|
330 | 332 | } |
|
331 | 333 | return data |
|
332 | 334 | |
|
333 | 335 | def _create_reference_data(self, repo, full_repo_name, refs_to_create): |
|
334 | 336 | format_ref_id = utils.get_format_ref_id(repo) |
|
335 | 337 | |
|
336 | 338 | result = [] |
|
337 | 339 | for title, refs, ref_type in refs_to_create: |
|
338 | 340 | if refs: |
|
339 | 341 | result.append({ |
|
340 | 342 | 'text': title, |
|
341 | 343 | 'children': self._create_reference_items( |
|
342 | 344 | repo, full_repo_name, refs, ref_type, |
|
343 | 345 | format_ref_id), |
|
344 | 346 | }) |
|
345 | 347 | return result |
|
346 | 348 | |
|
347 | 349 | def _create_reference_items(self, repo, full_repo_name, refs, ref_type, |
|
348 | 350 | format_ref_id): |
|
349 | 351 | result = [] |
|
350 | 352 | is_svn = h.is_svn(repo) |
|
351 | 353 | for ref_name, raw_id in refs.iteritems(): |
|
352 | 354 | files_url = self._create_files_url( |
|
353 | 355 | repo, full_repo_name, ref_name, raw_id, is_svn) |
|
354 | 356 | result.append({ |
|
355 | 357 | 'text': ref_name, |
|
356 | 358 | 'id': format_ref_id(ref_name, raw_id), |
|
357 | 359 | 'raw_id': raw_id, |
|
358 | 360 | 'type': ref_type, |
|
359 | 361 | 'files_url': files_url, |
|
360 | 362 | }) |
|
361 | 363 | return result |
|
362 | 364 | |
|
363 | 365 | def _create_files_url(self, repo, full_repo_name, ref_name, raw_id, is_svn): |
|
364 | 366 | use_commit_id = '/' in ref_name or is_svn |
|
365 | 367 | return h.route_path( |
|
366 | 368 | 'repo_files', |
|
367 | 369 | repo_name=full_repo_name, |
|
368 | 370 | f_path=ref_name if is_svn else '', |
|
369 | 371 | commit_id=raw_id if use_commit_id else ref_name, |
|
370 | 372 | _query=dict(at=ref_name)) |
@@ -1,180 +1,182 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Pylons environment configuration |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import os |
|
26 | 26 | import logging |
|
27 | 27 | import rhodecode |
|
28 | 28 | import platform |
|
29 | 29 | import re |
|
30 | 30 | import io |
|
31 | 31 | |
|
32 | 32 | from mako.lookup import TemplateLookup |
|
33 | 33 | from pylons.configuration import PylonsConfig |
|
34 | 34 | from pylons.error import handle_mako_error |
|
35 | 35 | from pyramid.settings import asbool |
|
36 | 36 | |
|
37 | 37 | # ------------------------------------------------------------------------------ |
|
38 | 38 | # CELERY magic until refactor - issue #4163 - import order matters here: |
|
39 | 39 | from rhodecode.lib import celerypylons # this must be first, celerypylons |
|
40 | 40 | # sets config settings upon import |
|
41 | 41 | |
|
42 | 42 | import rhodecode.integrations # any modules using celery task |
|
43 | 43 | # decorators should be added afterwards: |
|
44 | 44 | # ------------------------------------------------------------------------------ |
|
45 | 45 | |
|
46 | 46 | from rhodecode.lib import app_globals |
|
47 | 47 | from rhodecode.config import utils |
|
48 | 48 | from rhodecode.config.routing import make_map |
|
49 | 49 | from rhodecode.config.jsroutes import generate_jsroutes_content |
|
50 | 50 | |
|
51 | 51 | from rhodecode.lib import helpers |
|
52 | 52 | from rhodecode.lib.auth import set_available_permissions |
|
53 | 53 | from rhodecode.lib.utils import ( |
|
54 | 54 | repo2db_mapper, make_db_config, set_rhodecode_config, |
|
55 | 55 | load_rcextensions) |
|
56 | 56 | from rhodecode.lib.utils2 import str2bool, aslist |
|
57 | 57 | from rhodecode.lib.vcs import connect_vcs, start_vcs_server |
|
58 | 58 | from rhodecode.model.scm import ScmModel |
|
59 | 59 | |
|
60 | 60 | log = logging.getLogger(__name__) |
|
61 | 61 | |
|
62 | 62 | def load_environment(global_conf, app_conf, initial=False, |
|
63 | 63 | test_env=None, test_index=None): |
|
64 | 64 | """ |
|
65 | 65 | Configure the Pylons environment via the ``pylons.config`` |
|
66 | 66 | object |
|
67 | 67 | """ |
|
68 | 68 | config = PylonsConfig() |
|
69 | 69 | |
|
70 | 70 | |
|
71 | 71 | # Pylons paths |
|
72 | 72 | root = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) |
|
73 | 73 | paths = { |
|
74 | 74 | 'root': root, |
|
75 | 75 | 'controllers': os.path.join(root, 'controllers'), |
|
76 | 76 | 'static_files': os.path.join(root, 'public'), |
|
77 | 77 | 'templates': [os.path.join(root, 'templates')], |
|
78 | 78 | } |
|
79 | 79 | |
|
80 | 80 | # Initialize config with the basic options |
|
81 | 81 | config.init_app(global_conf, app_conf, package='rhodecode', paths=paths) |
|
82 | 82 | |
|
83 | 83 | # store some globals into rhodecode |
|
84 | 84 | rhodecode.CELERY_ENABLED = str2bool(config['app_conf'].get('use_celery')) |
|
85 | 85 | rhodecode.CELERY_EAGER = str2bool( |
|
86 | 86 | config['app_conf'].get('celery.always.eager')) |
|
87 | 87 | |
|
88 | 88 | config['routes.map'] = make_map(config) |
|
89 | 89 | |
|
90 | 90 | config['pylons.app_globals'] = app_globals.Globals(config) |
|
91 | 91 | config['pylons.h'] = helpers |
|
92 | 92 | rhodecode.CONFIG = config |
|
93 | 93 | |
|
94 | 94 | load_rcextensions(root_path=config['here']) |
|
95 | 95 | |
|
96 | 96 | # Setup cache object as early as possible |
|
97 | 97 | import pylons |
|
98 | 98 | pylons.cache._push_object(config['pylons.app_globals'].cache) |
|
99 | 99 | |
|
100 | 100 | # Create the Mako TemplateLookup, with the default auto-escaping |
|
101 | 101 | config['pylons.app_globals'].mako_lookup = TemplateLookup( |
|
102 | 102 | directories=paths['templates'], |
|
103 | 103 | error_handler=handle_mako_error, |
|
104 | 104 | module_directory=os.path.join(app_conf['cache_dir'], 'templates'), |
|
105 | 105 | input_encoding='utf-8', default_filters=['escape'], |
|
106 | 106 | imports=['from webhelpers.html import escape']) |
|
107 | 107 | |
|
108 | 108 | # sets the c attribute access when don't existing attribute are accessed |
|
109 | 109 | config['pylons.strict_tmpl_context'] = True |
|
110 | 110 | |
|
111 | 111 | # configure channelstream |
|
112 | 112 | config['channelstream_config'] = { |
|
113 | 113 | 'enabled': asbool(config.get('channelstream.enabled', False)), |
|
114 | 114 | 'server': config.get('channelstream.server'), |
|
115 | 115 | 'secret': config.get('channelstream.secret') |
|
116 | 116 | } |
|
117 | 117 | |
|
118 | 118 | set_available_permissions(config) |
|
119 | 119 | db_cfg = make_db_config(clear_session=True) |
|
120 | 120 | |
|
121 | 121 | repos_path = list(db_cfg.items('paths'))[0][1] |
|
122 | 122 | config['base_path'] = repos_path |
|
123 | 123 | |
|
124 | 124 | # store db config also in main global CONFIG |
|
125 | 125 | set_rhodecode_config(config) |
|
126 | 126 | |
|
127 | 127 | # configure instance id |
|
128 | 128 | utils.set_instance_id(config) |
|
129 | 129 | |
|
130 | 130 | # CONFIGURATION OPTIONS HERE (note: all config options will override |
|
131 | 131 | # any Pylons config options) |
|
132 | 132 | |
|
133 | 133 | # store config reference into our module to skip import magic of pylons |
|
134 | 134 | rhodecode.CONFIG.update(config) |
|
135 | 135 | |
|
136 | 136 | return config |
|
137 | 137 | |
|
138 | 138 | |
|
139 | 139 | def load_pyramid_environment(global_config, settings): |
|
140 | 140 | # Some parts of the code expect a merge of global and app settings. |
|
141 | 141 | settings_merged = global_config.copy() |
|
142 | 142 | settings_merged.update(settings) |
|
143 | 143 | |
|
144 | 144 | # Store the settings to make them available to other modules. |
|
145 | 145 | rhodecode.PYRAMID_SETTINGS = settings_merged |
|
146 | # NOTE(marcink): needs to be enabled after full port to pyramid | |
|
147 | # rhodecode.CONFIG = config | |
|
146 | 148 | |
|
147 | 149 | # If this is a test run we prepare the test environment like |
|
148 | 150 | # creating a test database, test search index and test repositories. |
|
149 | 151 | # This has to be done before the database connection is initialized. |
|
150 | 152 | if settings['is_test']: |
|
151 | 153 | rhodecode.is_test = True |
|
152 | 154 | rhodecode.disable_error_handler = True |
|
153 | 155 | |
|
154 | 156 | utils.initialize_test_environment(settings_merged) |
|
155 | 157 | |
|
156 | 158 | # Initialize the database connection. |
|
157 | 159 | utils.initialize_database(settings_merged) |
|
158 | 160 | |
|
159 | 161 | # Limit backends to `vcs.backends` from configuration |
|
160 | 162 | for alias in rhodecode.BACKENDS.keys(): |
|
161 | 163 | if alias not in settings['vcs.backends']: |
|
162 | 164 | del rhodecode.BACKENDS[alias] |
|
163 | 165 | log.info('Enabled VCS backends: %s', rhodecode.BACKENDS.keys()) |
|
164 | 166 | |
|
165 | 167 | # initialize vcs client and optionally run the server if enabled |
|
166 | 168 | vcs_server_uri = settings['vcs.server'] |
|
167 | 169 | vcs_server_enabled = settings['vcs.server.enable'] |
|
168 | 170 | start_server = ( |
|
169 | 171 | settings['vcs.start_server'] and |
|
170 | 172 | not int(os.environ.get('RC_VCSSERVER_TEST_DISABLE', '0'))) |
|
171 | 173 | |
|
172 | 174 | if vcs_server_enabled and start_server: |
|
173 | 175 | log.info("Starting vcsserver") |
|
174 | 176 | start_vcs_server(server_and_port=vcs_server_uri, |
|
175 | 177 | protocol=utils.get_vcs_server_protocol(settings), |
|
176 | 178 | log_level=settings['vcs.server.log_level']) |
|
177 | 179 | |
|
178 | 180 | utils.configure_vcs(settings) |
|
179 | 181 | if vcs_server_enabled: |
|
180 | 182 | connect_vcs(vcs_server_uri, utils.get_vcs_server_protocol(settings)) |
@@ -1,486 +1,393 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Routes configuration |
|
23 | 23 | |
|
24 | 24 | The more specific and detailed routes should be defined first so they |
|
25 | 25 | may take precedent over the more generic routes. For more information |
|
26 | 26 | refer to the routes manual at http://routes.groovie.org/docs/ |
|
27 | 27 | |
|
28 | 28 | IMPORTANT: if you change any routing here, make sure to take a look at lib/base.py |
|
29 | 29 | and _route_name variable which uses some of stored naming here to do redirects. |
|
30 | 30 | """ |
|
31 | 31 | import os |
|
32 | 32 | import re |
|
33 | 33 | from routes import Mapper |
|
34 | 34 | |
|
35 | 35 | # prefix for non repository related links needs to be prefixed with `/` |
|
36 | 36 | ADMIN_PREFIX = '/_admin' |
|
37 | 37 | STATIC_FILE_PREFIX = '/_static' |
|
38 | 38 | |
|
39 | 39 | # Default requirements for URL parts |
|
40 | 40 | URL_NAME_REQUIREMENTS = { |
|
41 | 41 | # group name can have a slash in them, but they must not end with a slash |
|
42 | 42 | 'group_name': r'.*?[^/]', |
|
43 | 43 | 'repo_group_name': r'.*?[^/]', |
|
44 | 44 | # repo names can have a slash in them, but they must not end with a slash |
|
45 | 45 | 'repo_name': r'.*?[^/]', |
|
46 | 46 | # file path eats up everything at the end |
|
47 | 47 | 'f_path': r'.*', |
|
48 | 48 | # reference types |
|
49 | 49 | 'source_ref_type': '(branch|book|tag|rev|\%\(source_ref_type\)s)', |
|
50 | 50 | 'target_ref_type': '(branch|book|tag|rev|\%\(target_ref_type\)s)', |
|
51 | 51 | } |
|
52 | 52 | |
|
53 | 53 | |
|
54 | 54 | class JSRoutesMapper(Mapper): |
|
55 | 55 | """ |
|
56 | 56 | Wrapper for routes.Mapper to make pyroutes compatible url definitions |
|
57 | 57 | """ |
|
58 | 58 | _named_route_regex = re.compile(r'^[a-z-_0-9A-Z]+$') |
|
59 | 59 | _argument_prog = re.compile('\{(.*?)\}|:\((.*)\)') |
|
60 | 60 | def __init__(self, *args, **kw): |
|
61 | 61 | super(JSRoutesMapper, self).__init__(*args, **kw) |
|
62 | 62 | self._jsroutes = [] |
|
63 | 63 | |
|
64 | 64 | def connect(self, *args, **kw): |
|
65 | 65 | """ |
|
66 | 66 | Wrapper for connect to take an extra argument jsroute=True |
|
67 | 67 | |
|
68 | 68 | :param jsroute: boolean, if True will add the route to the pyroutes list |
|
69 | 69 | """ |
|
70 | 70 | if kw.pop('jsroute', False): |
|
71 | 71 | if not self._named_route_regex.match(args[0]): |
|
72 | 72 | raise Exception('only named routes can be added to pyroutes') |
|
73 | 73 | self._jsroutes.append(args[0]) |
|
74 | 74 | |
|
75 | 75 | super(JSRoutesMapper, self).connect(*args, **kw) |
|
76 | 76 | |
|
77 | 77 | def _extract_route_information(self, route): |
|
78 | 78 | """ |
|
79 | 79 | Convert a route into tuple(name, path, args), eg: |
|
80 | 80 | ('show_user', '/profile/%(username)s', ['username']) |
|
81 | 81 | """ |
|
82 | 82 | routepath = route.routepath |
|
83 | 83 | def replace(matchobj): |
|
84 | 84 | if matchobj.group(1): |
|
85 | 85 | return "%%(%s)s" % matchobj.group(1).split(':')[0] |
|
86 | 86 | else: |
|
87 | 87 | return "%%(%s)s" % matchobj.group(2) |
|
88 | 88 | |
|
89 | 89 | routepath = self._argument_prog.sub(replace, routepath) |
|
90 | 90 | return ( |
|
91 | 91 | route.name, |
|
92 | 92 | routepath, |
|
93 | 93 | [(arg[0].split(':')[0] if arg[0] != '' else arg[1]) |
|
94 | 94 | for arg in self._argument_prog.findall(route.routepath)] |
|
95 | 95 | ) |
|
96 | 96 | |
|
97 | 97 | def jsroutes(self): |
|
98 | 98 | """ |
|
99 | 99 | Return a list of pyroutes.js compatible routes |
|
100 | 100 | """ |
|
101 | 101 | for route_name in self._jsroutes: |
|
102 | 102 | yield self._extract_route_information(self._routenames[route_name]) |
|
103 | 103 | |
|
104 | 104 | |
|
105 | 105 | def make_map(config): |
|
106 | 106 | """Create, configure and return the routes Mapper""" |
|
107 | 107 | rmap = JSRoutesMapper( |
|
108 | 108 | directory=config['pylons.paths']['controllers'], |
|
109 | 109 | always_scan=config['debug']) |
|
110 | 110 | rmap.minimization = False |
|
111 | 111 | rmap.explicit = False |
|
112 | 112 | |
|
113 | 113 | from rhodecode.lib.utils2 import str2bool |
|
114 | 114 | from rhodecode.model import repo, repo_group |
|
115 | 115 | |
|
116 | 116 | def check_repo(environ, match_dict): |
|
117 | 117 | """ |
|
118 | 118 | check for valid repository for proper 404 handling |
|
119 | 119 | |
|
120 | 120 | :param environ: |
|
121 | 121 | :param match_dict: |
|
122 | 122 | """ |
|
123 | 123 | repo_name = match_dict.get('repo_name') |
|
124 | 124 | |
|
125 | 125 | if match_dict.get('f_path'): |
|
126 | 126 | # fix for multiple initial slashes that causes errors |
|
127 | 127 | match_dict['f_path'] = match_dict['f_path'].lstrip('/') |
|
128 | 128 | repo_model = repo.RepoModel() |
|
129 | 129 | by_name_match = repo_model.get_by_repo_name(repo_name) |
|
130 | 130 | # if we match quickly from database, short circuit the operation, |
|
131 | 131 | # and validate repo based on the type. |
|
132 | 132 | if by_name_match: |
|
133 | 133 | return True |
|
134 | 134 | |
|
135 | 135 | by_id_match = repo_model.get_repo_by_id(repo_name) |
|
136 | 136 | if by_id_match: |
|
137 | 137 | repo_name = by_id_match.repo_name |
|
138 | 138 | match_dict['repo_name'] = repo_name |
|
139 | 139 | return True |
|
140 | 140 | |
|
141 | 141 | return False |
|
142 | 142 | |
|
143 | 143 | def check_group(environ, match_dict): |
|
144 | 144 | """ |
|
145 | 145 | check for valid repository group path for proper 404 handling |
|
146 | 146 | |
|
147 | 147 | :param environ: |
|
148 | 148 | :param match_dict: |
|
149 | 149 | """ |
|
150 | 150 | repo_group_name = match_dict.get('group_name') |
|
151 | 151 | repo_group_model = repo_group.RepoGroupModel() |
|
152 | 152 | by_name_match = repo_group_model.get_by_group_name(repo_group_name) |
|
153 | 153 | if by_name_match: |
|
154 | 154 | return True |
|
155 | 155 | |
|
156 | 156 | return False |
|
157 | 157 | |
|
158 | 158 | def check_user_group(environ, match_dict): |
|
159 | 159 | """ |
|
160 | 160 | check for valid user group for proper 404 handling |
|
161 | 161 | |
|
162 | 162 | :param environ: |
|
163 | 163 | :param match_dict: |
|
164 | 164 | """ |
|
165 | 165 | return True |
|
166 | 166 | |
|
167 | 167 | def check_int(environ, match_dict): |
|
168 | 168 | return match_dict.get('id').isdigit() |
|
169 | 169 | |
|
170 | 170 | |
|
171 | 171 | #========================================================================== |
|
172 | 172 | # CUSTOM ROUTES HERE |
|
173 | 173 | #========================================================================== |
|
174 | 174 | |
|
175 | 175 | # ping and pylons error test |
|
176 | 176 | rmap.connect('ping', '%s/ping' % (ADMIN_PREFIX,), controller='home', action='ping') |
|
177 | 177 | rmap.connect('error_test', '%s/error_test' % (ADMIN_PREFIX,), controller='home', action='error_test') |
|
178 | 178 | |
|
179 | # ADMIN REPOSITORY ROUTES | |
|
180 | with rmap.submapper(path_prefix=ADMIN_PREFIX, | |
|
181 | controller='admin/repos') as m: | |
|
182 | m.connect('repos', '/repos', | |
|
183 | action='create', conditions={'method': ['POST']}) | |
|
184 | m.connect('repos', '/repos', | |
|
185 | action='index', conditions={'method': ['GET']}) | |
|
186 | m.connect('new_repo', '/create_repository', jsroute=True, | |
|
187 | action='create_repository', conditions={'method': ['GET']}) | |
|
188 | m.connect('delete_repo', '/repos/{repo_name}', | |
|
189 | action='delete', conditions={'method': ['DELETE']}, | |
|
190 | requirements=URL_NAME_REQUIREMENTS) | |
|
191 | m.connect('repo', '/repos/{repo_name}', | |
|
192 | action='show', conditions={'method': ['GET'], | |
|
193 | 'function': check_repo}, | |
|
194 | requirements=URL_NAME_REQUIREMENTS) | |
|
195 | ||
|
196 | 179 | # ADMIN REPOSITORY GROUPS ROUTES |
|
197 | 180 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
198 | 181 | controller='admin/repo_groups') as m: |
|
199 | 182 | m.connect('repo_groups', '/repo_groups', |
|
200 | 183 | action='create', conditions={'method': ['POST']}) |
|
201 | 184 | m.connect('repo_groups', '/repo_groups', |
|
202 | 185 | action='index', conditions={'method': ['GET']}) |
|
203 | 186 | m.connect('new_repo_group', '/repo_groups/new', |
|
204 | 187 | action='new', conditions={'method': ['GET']}) |
|
205 | 188 | m.connect('update_repo_group', '/repo_groups/{group_name}', |
|
206 | 189 | action='update', conditions={'method': ['PUT'], |
|
207 | 190 | 'function': check_group}, |
|
208 | 191 | requirements=URL_NAME_REQUIREMENTS) |
|
209 | 192 | |
|
210 | 193 | # EXTRAS REPO GROUP ROUTES |
|
211 | 194 | m.connect('edit_repo_group', '/repo_groups/{group_name}/edit', |
|
212 | 195 | action='edit', |
|
213 | 196 | conditions={'method': ['GET'], 'function': check_group}, |
|
214 | 197 | requirements=URL_NAME_REQUIREMENTS) |
|
215 | 198 | m.connect('edit_repo_group', '/repo_groups/{group_name}/edit', |
|
216 | 199 | action='edit', |
|
217 | 200 | conditions={'method': ['PUT'], 'function': check_group}, |
|
218 | 201 | requirements=URL_NAME_REQUIREMENTS) |
|
219 | 202 | |
|
220 | 203 | m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced', |
|
221 | 204 | action='edit_repo_group_advanced', |
|
222 | 205 | conditions={'method': ['GET'], 'function': check_group}, |
|
223 | 206 | requirements=URL_NAME_REQUIREMENTS) |
|
224 | 207 | m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced', |
|
225 | 208 | action='edit_repo_group_advanced', |
|
226 | 209 | conditions={'method': ['PUT'], 'function': check_group}, |
|
227 | 210 | requirements=URL_NAME_REQUIREMENTS) |
|
228 | 211 | |
|
229 | 212 | m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions', |
|
230 | 213 | action='edit_repo_group_perms', |
|
231 | 214 | conditions={'method': ['GET'], 'function': check_group}, |
|
232 | 215 | requirements=URL_NAME_REQUIREMENTS) |
|
233 | 216 | m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions', |
|
234 | 217 | action='update_perms', |
|
235 | 218 | conditions={'method': ['PUT'], 'function': check_group}, |
|
236 | 219 | requirements=URL_NAME_REQUIREMENTS) |
|
237 | 220 | |
|
238 | 221 | m.connect('delete_repo_group', '/repo_groups/{group_name}', |
|
239 | 222 | action='delete', conditions={'method': ['DELETE'], |
|
240 | 223 | 'function': check_group}, |
|
241 | 224 | requirements=URL_NAME_REQUIREMENTS) |
|
242 | 225 | |
|
243 | 226 | # ADMIN USER ROUTES |
|
244 | 227 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
245 | 228 | controller='admin/users') as m: |
|
246 | 229 | m.connect('users', '/users', |
|
247 | 230 | action='create', conditions={'method': ['POST']}) |
|
248 | 231 | m.connect('new_user', '/users/new', |
|
249 | 232 | action='new', conditions={'method': ['GET']}) |
|
250 | 233 | m.connect('update_user', '/users/{user_id}', |
|
251 | 234 | action='update', conditions={'method': ['PUT']}) |
|
252 | 235 | m.connect('delete_user', '/users/{user_id}', |
|
253 | 236 | action='delete', conditions={'method': ['DELETE']}) |
|
254 | 237 | m.connect('edit_user', '/users/{user_id}/edit', |
|
255 | 238 | action='edit', conditions={'method': ['GET']}, jsroute=True) |
|
256 | 239 | m.connect('user', '/users/{user_id}', |
|
257 | 240 | action='show', conditions={'method': ['GET']}) |
|
258 | 241 | m.connect('force_password_reset_user', '/users/{user_id}/password_reset', |
|
259 | 242 | action='reset_password', conditions={'method': ['POST']}) |
|
260 | 243 | m.connect('create_personal_repo_group', '/users/{user_id}/create_repo_group', |
|
261 | 244 | action='create_personal_repo_group', conditions={'method': ['POST']}) |
|
262 | 245 | |
|
263 | 246 | # EXTRAS USER ROUTES |
|
264 | 247 | m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced', |
|
265 | 248 | action='edit_advanced', conditions={'method': ['GET']}) |
|
266 | 249 | m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced', |
|
267 | 250 | action='update_advanced', conditions={'method': ['PUT']}) |
|
268 | 251 | |
|
269 | 252 | m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions', |
|
270 | 253 | action='edit_global_perms', conditions={'method': ['GET']}) |
|
271 | 254 | m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions', |
|
272 | 255 | action='update_global_perms', conditions={'method': ['PUT']}) |
|
273 | 256 | |
|
274 | 257 | # ADMIN USER GROUPS REST ROUTES |
|
275 | 258 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
276 | 259 | controller='admin/user_groups') as m: |
|
277 | 260 | m.connect('users_groups', '/user_groups', |
|
278 | 261 | action='create', conditions={'method': ['POST']}) |
|
279 | 262 | m.connect('new_users_group', '/user_groups/new', |
|
280 | 263 | action='new', conditions={'method': ['GET']}) |
|
281 | 264 | m.connect('update_users_group', '/user_groups/{user_group_id}', |
|
282 | 265 | action='update', conditions={'method': ['PUT']}) |
|
283 | 266 | m.connect('delete_users_group', '/user_groups/{user_group_id}', |
|
284 | 267 | action='delete', conditions={'method': ['DELETE']}) |
|
285 | 268 | m.connect('edit_users_group', '/user_groups/{user_group_id}/edit', |
|
286 | 269 | action='edit', conditions={'method': ['GET']}, |
|
287 | 270 | function=check_user_group) |
|
288 | 271 | |
|
289 | 272 | # EXTRAS USER GROUP ROUTES |
|
290 | 273 | m.connect('edit_user_group_global_perms', |
|
291 | 274 | '/user_groups/{user_group_id}/edit/global_permissions', |
|
292 | 275 | action='edit_global_perms', conditions={'method': ['GET']}) |
|
293 | 276 | m.connect('edit_user_group_global_perms', |
|
294 | 277 | '/user_groups/{user_group_id}/edit/global_permissions', |
|
295 | 278 | action='update_global_perms', conditions={'method': ['PUT']}) |
|
296 | 279 | |
|
297 | 280 | m.connect('edit_user_group_perms', |
|
298 | 281 | '/user_groups/{user_group_id}/edit/permissions', |
|
299 | 282 | action='edit_perms', conditions={'method': ['GET']}) |
|
300 | 283 | m.connect('edit_user_group_perms', |
|
301 | 284 | '/user_groups/{user_group_id}/edit/permissions', |
|
302 | 285 | action='update_perms', conditions={'method': ['PUT']}) |
|
303 | 286 | |
|
304 | 287 | m.connect('edit_user_group_advanced', |
|
305 | 288 | '/user_groups/{user_group_id}/edit/advanced', |
|
306 | 289 | action='edit_advanced', conditions={'method': ['GET']}) |
|
307 | 290 | |
|
308 | 291 | m.connect('edit_user_group_advanced_sync', |
|
309 | 292 | '/user_groups/{user_group_id}/edit/advanced/sync', |
|
310 | 293 | action='edit_advanced_set_synchronization', conditions={'method': ['POST']}) |
|
311 | 294 | |
|
312 | 295 | # ADMIN DEFAULTS REST ROUTES |
|
313 | 296 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
314 | 297 | controller='admin/defaults') as m: |
|
315 | 298 | m.connect('admin_defaults_repositories', '/defaults/repositories', |
|
316 | 299 | action='update_repository_defaults', conditions={'method': ['POST']}) |
|
317 | 300 | m.connect('admin_defaults_repositories', '/defaults/repositories', |
|
318 | 301 | action='index', conditions={'method': ['GET']}) |
|
319 | 302 | |
|
320 | 303 | # ADMIN SETTINGS ROUTES |
|
321 | 304 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
322 | 305 | controller='admin/settings') as m: |
|
323 | 306 | |
|
324 | 307 | # default |
|
325 | 308 | m.connect('admin_settings', '/settings', |
|
326 | 309 | action='settings_global_update', |
|
327 | 310 | conditions={'method': ['POST']}) |
|
328 | 311 | m.connect('admin_settings', '/settings', |
|
329 | 312 | action='settings_global', conditions={'method': ['GET']}) |
|
330 | 313 | |
|
331 | 314 | m.connect('admin_settings_vcs', '/settings/vcs', |
|
332 | 315 | action='settings_vcs_update', |
|
333 | 316 | conditions={'method': ['POST']}) |
|
334 | 317 | m.connect('admin_settings_vcs', '/settings/vcs', |
|
335 | 318 | action='settings_vcs', |
|
336 | 319 | conditions={'method': ['GET']}) |
|
337 | 320 | m.connect('admin_settings_vcs', '/settings/vcs', |
|
338 | 321 | action='delete_svn_pattern', |
|
339 | 322 | conditions={'method': ['DELETE']}) |
|
340 | 323 | |
|
341 | 324 | m.connect('admin_settings_mapping', '/settings/mapping', |
|
342 | 325 | action='settings_mapping_update', |
|
343 | 326 | conditions={'method': ['POST']}) |
|
344 | 327 | m.connect('admin_settings_mapping', '/settings/mapping', |
|
345 | 328 | action='settings_mapping', conditions={'method': ['GET']}) |
|
346 | 329 | |
|
347 | 330 | m.connect('admin_settings_global', '/settings/global', |
|
348 | 331 | action='settings_global_update', |
|
349 | 332 | conditions={'method': ['POST']}) |
|
350 | 333 | m.connect('admin_settings_global', '/settings/global', |
|
351 | 334 | action='settings_global', conditions={'method': ['GET']}) |
|
352 | 335 | |
|
353 | 336 | m.connect('admin_settings_visual', '/settings/visual', |
|
354 | 337 | action='settings_visual_update', |
|
355 | 338 | conditions={'method': ['POST']}) |
|
356 | 339 | m.connect('admin_settings_visual', '/settings/visual', |
|
357 | 340 | action='settings_visual', conditions={'method': ['GET']}) |
|
358 | 341 | |
|
359 | 342 | m.connect('admin_settings_issuetracker', |
|
360 | 343 | '/settings/issue-tracker', action='settings_issuetracker', |
|
361 | 344 | conditions={'method': ['GET']}) |
|
362 | 345 | m.connect('admin_settings_issuetracker_save', |
|
363 | 346 | '/settings/issue-tracker/save', |
|
364 | 347 | action='settings_issuetracker_save', |
|
365 | 348 | conditions={'method': ['POST']}) |
|
366 | 349 | m.connect('admin_issuetracker_test', '/settings/issue-tracker/test', |
|
367 | 350 | action='settings_issuetracker_test', |
|
368 | 351 | conditions={'method': ['POST']}) |
|
369 | 352 | m.connect('admin_issuetracker_delete', |
|
370 | 353 | '/settings/issue-tracker/delete', |
|
371 | 354 | action='settings_issuetracker_delete', |
|
372 | 355 | conditions={'method': ['DELETE']}) |
|
373 | 356 | |
|
374 | 357 | m.connect('admin_settings_email', '/settings/email', |
|
375 | 358 | action='settings_email_update', |
|
376 | 359 | conditions={'method': ['POST']}) |
|
377 | 360 | m.connect('admin_settings_email', '/settings/email', |
|
378 | 361 | action='settings_email', conditions={'method': ['GET']}) |
|
379 | 362 | |
|
380 | 363 | m.connect('admin_settings_hooks', '/settings/hooks', |
|
381 | 364 | action='settings_hooks_update', |
|
382 | 365 | conditions={'method': ['POST', 'DELETE']}) |
|
383 | 366 | m.connect('admin_settings_hooks', '/settings/hooks', |
|
384 | 367 | action='settings_hooks', conditions={'method': ['GET']}) |
|
385 | 368 | |
|
386 | 369 | m.connect('admin_settings_search', '/settings/search', |
|
387 | 370 | action='settings_search', conditions={'method': ['GET']}) |
|
388 | 371 | |
|
389 | 372 | m.connect('admin_settings_supervisor', '/settings/supervisor', |
|
390 | 373 | action='settings_supervisor', conditions={'method': ['GET']}) |
|
391 | 374 | m.connect('admin_settings_supervisor_log', '/settings/supervisor/{procid}/log', |
|
392 | 375 | action='settings_supervisor_log', conditions={'method': ['GET']}) |
|
393 | 376 | |
|
394 | 377 | m.connect('admin_settings_labs', '/settings/labs', |
|
395 | 378 | action='settings_labs_update', |
|
396 | 379 | conditions={'method': ['POST']}) |
|
397 | 380 | m.connect('admin_settings_labs', '/settings/labs', |
|
398 | 381 | action='settings_labs', conditions={'method': ['GET']}) |
|
399 | 382 | |
|
400 | 383 | # ADMIN MY ACCOUNT |
|
401 | 384 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
402 | 385 | controller='admin/my_account') as m: |
|
403 | 386 | |
|
404 | 387 | # NOTE(marcink): this needs to be kept for password force flag to be |
|
405 | 388 | # handled in pylons controllers, remove after full migration to pyramid |
|
406 | 389 | m.connect('my_account_password', '/my_account/password', |
|
407 | 390 | action='my_account_password', conditions={'method': ['GET']}) |
|
408 | 391 | |
|
409 | #========================================================================== | |
|
410 | # REPOSITORY ROUTES | |
|
411 | #========================================================================== | |
|
412 | ||
|
413 | # repo edit options | |
|
414 | rmap.connect('edit_repo_fields', '/{repo_name}/settings/fields', | |
|
415 | controller='admin/repos', action='edit_fields', | |
|
416 | conditions={'method': ['GET'], 'function': check_repo}, | |
|
417 | requirements=URL_NAME_REQUIREMENTS) | |
|
418 | rmap.connect('create_repo_fields', '/{repo_name}/settings/fields/new', | |
|
419 | controller='admin/repos', action='create_repo_field', | |
|
420 | conditions={'method': ['PUT'], 'function': check_repo}, | |
|
421 | requirements=URL_NAME_REQUIREMENTS) | |
|
422 | rmap.connect('delete_repo_fields', '/{repo_name}/settings/fields/{field_id}', | |
|
423 | controller='admin/repos', action='delete_repo_field', | |
|
424 | conditions={'method': ['DELETE'], 'function': check_repo}, | |
|
425 | requirements=URL_NAME_REQUIREMENTS) | |
|
426 | ||
|
427 | rmap.connect('toggle_locking', '/{repo_name}/settings/advanced/locking_toggle', | |
|
428 | controller='admin/repos', action='toggle_locking', | |
|
429 | conditions={'method': ['GET'], 'function': check_repo}, | |
|
430 | requirements=URL_NAME_REQUIREMENTS) | |
|
431 | ||
|
432 | rmap.connect('edit_repo_remote', '/{repo_name}/settings/remote', | |
|
433 | controller='admin/repos', action='edit_remote_form', | |
|
434 | conditions={'method': ['GET'], 'function': check_repo}, | |
|
435 | requirements=URL_NAME_REQUIREMENTS) | |
|
436 | rmap.connect('edit_repo_remote', '/{repo_name}/settings/remote', | |
|
437 | controller='admin/repos', action='edit_remote', | |
|
438 | conditions={'method': ['PUT'], 'function': check_repo}, | |
|
439 | requirements=URL_NAME_REQUIREMENTS) | |
|
440 | ||
|
441 | rmap.connect('edit_repo_statistics', '/{repo_name}/settings/statistics', | |
|
442 | controller='admin/repos', action='edit_statistics_form', | |
|
443 | conditions={'method': ['GET'], 'function': check_repo}, | |
|
444 | requirements=URL_NAME_REQUIREMENTS) | |
|
445 | rmap.connect('edit_repo_statistics', '/{repo_name}/settings/statistics', | |
|
446 | controller='admin/repos', action='edit_statistics', | |
|
447 | conditions={'method': ['PUT'], 'function': check_repo}, | |
|
448 | requirements=URL_NAME_REQUIREMENTS) | |
|
449 | rmap.connect('repo_settings_issuetracker', | |
|
450 | '/{repo_name}/settings/issue-tracker', | |
|
451 | controller='admin/repos', action='repo_issuetracker', | |
|
452 | conditions={'method': ['GET'], 'function': check_repo}, | |
|
453 | requirements=URL_NAME_REQUIREMENTS) | |
|
454 | rmap.connect('repo_issuetracker_test', | |
|
455 | '/{repo_name}/settings/issue-tracker/test', | |
|
456 | controller='admin/repos', action='repo_issuetracker_test', | |
|
457 | conditions={'method': ['POST'], 'function': check_repo}, | |
|
458 | requirements=URL_NAME_REQUIREMENTS) | |
|
459 | rmap.connect('repo_issuetracker_delete', | |
|
460 | '/{repo_name}/settings/issue-tracker/delete', | |
|
461 | controller='admin/repos', action='repo_issuetracker_delete', | |
|
462 | conditions={'method': ['DELETE'], 'function': check_repo}, | |
|
463 | requirements=URL_NAME_REQUIREMENTS) | |
|
464 | rmap.connect('repo_issuetracker_save', | |
|
465 | '/{repo_name}/settings/issue-tracker/save', | |
|
466 | controller='admin/repos', action='repo_issuetracker_save', | |
|
467 | conditions={'method': ['POST'], 'function': check_repo}, | |
|
468 | requirements=URL_NAME_REQUIREMENTS) | |
|
469 | rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs', | |
|
470 | controller='admin/repos', action='repo_settings_vcs_update', | |
|
471 | conditions={'method': ['POST'], 'function': check_repo}, | |
|
472 | requirements=URL_NAME_REQUIREMENTS) | |
|
473 | rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs', | |
|
474 | controller='admin/repos', action='repo_settings_vcs', | |
|
475 | conditions={'method': ['GET'], 'function': check_repo}, | |
|
476 | requirements=URL_NAME_REQUIREMENTS) | |
|
477 | rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs', | |
|
478 | controller='admin/repos', action='repo_delete_svn_pattern', | |
|
479 | conditions={'method': ['DELETE'], 'function': check_repo}, | |
|
480 | requirements=URL_NAME_REQUIREMENTS) | |
|
481 | rmap.connect('repo_pullrequest_settings', '/{repo_name}/settings/pullrequest', | |
|
482 | controller='admin/repos', action='repo_settings_pullrequest', | |
|
483 | conditions={'method': ['GET', 'POST'], 'function': check_repo}, | |
|
484 | requirements=URL_NAME_REQUIREMENTS) | |
|
485 | 392 | |
|
486 | 393 | return rmap |
@@ -1,697 +1,697 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | |
|
22 | 22 | """ |
|
23 | 23 | settings controller for rhodecode admin |
|
24 | 24 | """ |
|
25 | 25 | |
|
26 | 26 | import collections |
|
27 | 27 | import logging |
|
28 | 28 | |
|
29 | 29 | import datetime |
|
30 | 30 | import formencode |
|
31 | 31 | from formencode import htmlfill |
|
32 | 32 | from pylons import request, tmpl_context as c, url, config |
|
33 | 33 | from pylons.controllers.util import redirect |
|
34 | 34 | from pylons.i18n.translation import _ |
|
35 | from pylons.decorators import jsonify | |
|
35 | 36 | from pyramid.threadlocal import get_current_registry |
|
36 | 37 | from webob.exc import HTTPBadRequest |
|
37 | 38 | |
|
38 | 39 | import rhodecode |
|
39 | 40 | from rhodecode.apps.admin.navigation import navigation_list |
|
40 | 41 | from rhodecode.apps.svn_support.config_keys import generate_config |
|
41 | 42 | from rhodecode.lib import auth |
|
42 | 43 | from rhodecode.lib import helpers as h |
|
43 | 44 | from rhodecode.lib.auth import LoginRequired, HasPermissionAllDecorator |
|
44 | 45 | from rhodecode.lib.base import BaseController, render |
|
45 | 46 | from rhodecode.lib.celerylib import tasks, run_task |
|
46 | 47 | from rhodecode.lib.utils import repo2db_mapper |
|
47 | 48 | from rhodecode.lib.utils2 import ( |
|
48 | 49 | str2bool, safe_unicode, AttributeDict, safe_int) |
|
49 | 50 | from rhodecode.lib.compat import OrderedDict |
|
50 | from rhodecode.lib.utils import jsonify | |
|
51 | 51 | |
|
52 | 52 | from rhodecode.model.db import RhodeCodeUi, Repository |
|
53 | 53 | from rhodecode.model.forms import ApplicationSettingsForm, \ |
|
54 | 54 | ApplicationUiSettingsForm, ApplicationVisualisationForm, \ |
|
55 | 55 | LabsSettingsForm, IssueTrackerPatternsForm |
|
56 | 56 | from rhodecode.model.repo_group import RepoGroupModel |
|
57 | 57 | |
|
58 | 58 | from rhodecode.model.scm import ScmModel |
|
59 | 59 | from rhodecode.model.notification import EmailNotificationModel |
|
60 | 60 | from rhodecode.model.meta import Session |
|
61 | 61 | from rhodecode.model.settings import ( |
|
62 | 62 | IssueTrackerSettingsModel, VcsSettingsModel, SettingNotFound, |
|
63 | 63 | SettingsModel) |
|
64 | 64 | |
|
65 | 65 | from rhodecode.model.supervisor import SupervisorModel, SUPERVISOR_MASTER |
|
66 | 66 | |
|
67 | 67 | |
|
68 | 68 | log = logging.getLogger(__name__) |
|
69 | 69 | |
|
70 | 70 | |
|
71 | 71 | class SettingsController(BaseController): |
|
72 | 72 | """REST Controller styled on the Atom Publishing Protocol""" |
|
73 | 73 | # To properly map this controller, ensure your config/routing.py |
|
74 | 74 | # file has a resource setup: |
|
75 | 75 | # map.resource('setting', 'settings', controller='admin/settings', |
|
76 | 76 | # path_prefix='/admin', name_prefix='admin_') |
|
77 | 77 | |
|
78 | 78 | @LoginRequired() |
|
79 | 79 | def __before__(self): |
|
80 | 80 | super(SettingsController, self).__before__() |
|
81 | 81 | c.labs_active = str2bool( |
|
82 | 82 | rhodecode.CONFIG.get('labs_settings_active', 'true')) |
|
83 | 83 | c.navlist = navigation_list(request) |
|
84 | 84 | |
|
85 | 85 | def _get_ui_settings(self): |
|
86 | 86 | ret = RhodeCodeUi.query().all() |
|
87 | 87 | |
|
88 | 88 | if not ret: |
|
89 | 89 | raise Exception('Could not get application ui settings !') |
|
90 | 90 | settings = {} |
|
91 | 91 | for each in ret: |
|
92 | 92 | k = each.ui_key |
|
93 | 93 | v = each.ui_value |
|
94 | 94 | if k == '/': |
|
95 | 95 | k = 'root_path' |
|
96 | 96 | |
|
97 | 97 | if k in ['push_ssl', 'publish', 'enabled']: |
|
98 | 98 | v = str2bool(v) |
|
99 | 99 | |
|
100 | 100 | if k.find('.') != -1: |
|
101 | 101 | k = k.replace('.', '_') |
|
102 | 102 | |
|
103 | 103 | if each.ui_section in ['hooks', 'extensions']: |
|
104 | 104 | v = each.ui_active |
|
105 | 105 | |
|
106 | 106 | settings[each.ui_section + '_' + k] = v |
|
107 | 107 | return settings |
|
108 | 108 | |
|
109 | 109 | @HasPermissionAllDecorator('hg.admin') |
|
110 | 110 | @auth.CSRFRequired() |
|
111 | 111 | @jsonify |
|
112 | 112 | def delete_svn_pattern(self): |
|
113 | 113 | if not request.is_xhr: |
|
114 | 114 | raise HTTPBadRequest() |
|
115 | 115 | |
|
116 | 116 | delete_pattern_id = request.POST.get('delete_svn_pattern') |
|
117 | 117 | model = VcsSettingsModel() |
|
118 | 118 | try: |
|
119 | 119 | model.delete_global_svn_pattern(delete_pattern_id) |
|
120 | 120 | except SettingNotFound: |
|
121 | 121 | raise HTTPBadRequest() |
|
122 | 122 | |
|
123 | 123 | Session().commit() |
|
124 | 124 | return True |
|
125 | 125 | |
|
126 | 126 | @HasPermissionAllDecorator('hg.admin') |
|
127 | 127 | @auth.CSRFRequired() |
|
128 | 128 | def settings_vcs_update(self): |
|
129 | 129 | """POST /admin/settings: All items in the collection""" |
|
130 | 130 | # url('admin_settings_vcs') |
|
131 | 131 | c.active = 'vcs' |
|
132 | 132 | |
|
133 | 133 | model = VcsSettingsModel() |
|
134 | 134 | c.svn_branch_patterns = model.get_global_svn_branch_patterns() |
|
135 | 135 | c.svn_tag_patterns = model.get_global_svn_tag_patterns() |
|
136 | 136 | |
|
137 | 137 | # TODO: Replace with request.registry after migrating to pyramid. |
|
138 | 138 | pyramid_settings = get_current_registry().settings |
|
139 | 139 | c.svn_proxy_generate_config = pyramid_settings[generate_config] |
|
140 | 140 | |
|
141 | 141 | application_form = ApplicationUiSettingsForm()() |
|
142 | 142 | |
|
143 | 143 | try: |
|
144 | 144 | form_result = application_form.to_python(dict(request.POST)) |
|
145 | 145 | except formencode.Invalid as errors: |
|
146 | 146 | h.flash( |
|
147 | 147 | _("Some form inputs contain invalid data."), |
|
148 | 148 | category='error') |
|
149 | 149 | return htmlfill.render( |
|
150 | 150 | render('admin/settings/settings.mako'), |
|
151 | 151 | defaults=errors.value, |
|
152 | 152 | errors=errors.error_dict or {}, |
|
153 | 153 | prefix_error=False, |
|
154 | 154 | encoding="UTF-8", |
|
155 | 155 | force_defaults=False |
|
156 | 156 | ) |
|
157 | 157 | |
|
158 | 158 | try: |
|
159 | 159 | if c.visual.allow_repo_location_change: |
|
160 | 160 | model.update_global_path_setting( |
|
161 | 161 | form_result['paths_root_path']) |
|
162 | 162 | |
|
163 | 163 | model.update_global_ssl_setting(form_result['web_push_ssl']) |
|
164 | 164 | model.update_global_hook_settings(form_result) |
|
165 | 165 | |
|
166 | 166 | model.create_or_update_global_svn_settings(form_result) |
|
167 | 167 | model.create_or_update_global_hg_settings(form_result) |
|
168 | 168 | model.create_or_update_global_git_settings(form_result) |
|
169 | 169 | model.create_or_update_global_pr_settings(form_result) |
|
170 | 170 | except Exception: |
|
171 | 171 | log.exception("Exception while updating settings") |
|
172 | 172 | h.flash(_('Error occurred during updating ' |
|
173 | 173 | 'application settings'), category='error') |
|
174 | 174 | else: |
|
175 | 175 | Session().commit() |
|
176 | 176 | h.flash(_('Updated VCS settings'), category='success') |
|
177 | 177 | return redirect(url('admin_settings_vcs')) |
|
178 | 178 | |
|
179 | 179 | return htmlfill.render( |
|
180 | 180 | render('admin/settings/settings.mako'), |
|
181 | 181 | defaults=self._form_defaults(), |
|
182 | 182 | encoding="UTF-8", |
|
183 | 183 | force_defaults=False) |
|
184 | 184 | |
|
185 | 185 | @HasPermissionAllDecorator('hg.admin') |
|
186 | 186 | def settings_vcs(self): |
|
187 | 187 | """GET /admin/settings: All items in the collection""" |
|
188 | 188 | # url('admin_settings_vcs') |
|
189 | 189 | c.active = 'vcs' |
|
190 | 190 | model = VcsSettingsModel() |
|
191 | 191 | c.svn_branch_patterns = model.get_global_svn_branch_patterns() |
|
192 | 192 | c.svn_tag_patterns = model.get_global_svn_tag_patterns() |
|
193 | 193 | |
|
194 | 194 | # TODO: Replace with request.registry after migrating to pyramid. |
|
195 | 195 | pyramid_settings = get_current_registry().settings |
|
196 | 196 | c.svn_proxy_generate_config = pyramid_settings[generate_config] |
|
197 | 197 | |
|
198 | 198 | defaults = self._form_defaults() |
|
199 | 199 | |
|
200 | 200 | model.create_largeobjects_dirs_if_needed(defaults['paths_root_path']) |
|
201 | 201 | return htmlfill.render( |
|
202 | 202 | render('admin/settings/settings.mako'), |
|
203 | 203 | defaults=defaults, |
|
204 | 204 | encoding="UTF-8", |
|
205 | 205 | force_defaults=False) |
|
206 | 206 | |
|
207 | 207 | @HasPermissionAllDecorator('hg.admin') |
|
208 | 208 | @auth.CSRFRequired() |
|
209 | 209 | def settings_mapping_update(self): |
|
210 | 210 | """POST /admin/settings/mapping: All items in the collection""" |
|
211 | 211 | # url('admin_settings_mapping') |
|
212 | 212 | c.active = 'mapping' |
|
213 | 213 | rm_obsolete = request.POST.get('destroy', False) |
|
214 | 214 | invalidate_cache = request.POST.get('invalidate', False) |
|
215 | 215 | log.debug( |
|
216 | 216 | 'rescanning repo location with destroy obsolete=%s', rm_obsolete) |
|
217 | 217 | |
|
218 | 218 | if invalidate_cache: |
|
219 | 219 | log.debug('invalidating all repositories cache') |
|
220 | 220 | for repo in Repository.get_all(): |
|
221 | 221 | ScmModel().mark_for_invalidation(repo.repo_name, delete=True) |
|
222 | 222 | |
|
223 | 223 | filesystem_repos = ScmModel().repo_scan() |
|
224 | 224 | added, removed = repo2db_mapper(filesystem_repos, rm_obsolete) |
|
225 | 225 | _repr = lambda l: ', '.join(map(safe_unicode, l)) or '-' |
|
226 | 226 | h.flash(_('Repositories successfully ' |
|
227 | 227 | 'rescanned added: %s ; removed: %s') % |
|
228 | 228 | (_repr(added), _repr(removed)), |
|
229 | 229 | category='success') |
|
230 | 230 | return redirect(url('admin_settings_mapping')) |
|
231 | 231 | |
|
232 | 232 | @HasPermissionAllDecorator('hg.admin') |
|
233 | 233 | def settings_mapping(self): |
|
234 | 234 | """GET /admin/settings/mapping: All items in the collection""" |
|
235 | 235 | # url('admin_settings_mapping') |
|
236 | 236 | c.active = 'mapping' |
|
237 | 237 | |
|
238 | 238 | return htmlfill.render( |
|
239 | 239 | render('admin/settings/settings.mako'), |
|
240 | 240 | defaults=self._form_defaults(), |
|
241 | 241 | encoding="UTF-8", |
|
242 | 242 | force_defaults=False) |
|
243 | 243 | |
|
244 | 244 | @HasPermissionAllDecorator('hg.admin') |
|
245 | 245 | @auth.CSRFRequired() |
|
246 | 246 | def settings_global_update(self): |
|
247 | 247 | """POST /admin/settings/global: All items in the collection""" |
|
248 | 248 | # url('admin_settings_global') |
|
249 | 249 | c.active = 'global' |
|
250 | 250 | c.personal_repo_group_default_pattern = RepoGroupModel()\ |
|
251 | 251 | .get_personal_group_name_pattern() |
|
252 | 252 | application_form = ApplicationSettingsForm()() |
|
253 | 253 | try: |
|
254 | 254 | form_result = application_form.to_python(dict(request.POST)) |
|
255 | 255 | except formencode.Invalid as errors: |
|
256 | 256 | return htmlfill.render( |
|
257 | 257 | render('admin/settings/settings.mako'), |
|
258 | 258 | defaults=errors.value, |
|
259 | 259 | errors=errors.error_dict or {}, |
|
260 | 260 | prefix_error=False, |
|
261 | 261 | encoding="UTF-8", |
|
262 | 262 | force_defaults=False) |
|
263 | 263 | |
|
264 | 264 | try: |
|
265 | 265 | settings = [ |
|
266 | 266 | ('title', 'rhodecode_title', 'unicode'), |
|
267 | 267 | ('realm', 'rhodecode_realm', 'unicode'), |
|
268 | 268 | ('pre_code', 'rhodecode_pre_code', 'unicode'), |
|
269 | 269 | ('post_code', 'rhodecode_post_code', 'unicode'), |
|
270 | 270 | ('captcha_public_key', 'rhodecode_captcha_public_key', 'unicode'), |
|
271 | 271 | ('captcha_private_key', 'rhodecode_captcha_private_key', 'unicode'), |
|
272 | 272 | ('create_personal_repo_group', 'rhodecode_create_personal_repo_group', 'bool'), |
|
273 | 273 | ('personal_repo_group_pattern', 'rhodecode_personal_repo_group_pattern', 'unicode'), |
|
274 | 274 | ] |
|
275 | 275 | for setting, form_key, type_ in settings: |
|
276 | 276 | sett = SettingsModel().create_or_update_setting( |
|
277 | 277 | setting, form_result[form_key], type_) |
|
278 | 278 | Session().add(sett) |
|
279 | 279 | |
|
280 | 280 | Session().commit() |
|
281 | 281 | SettingsModel().invalidate_settings_cache() |
|
282 | 282 | h.flash(_('Updated application settings'), category='success') |
|
283 | 283 | except Exception: |
|
284 | 284 | log.exception("Exception while updating application settings") |
|
285 | 285 | h.flash( |
|
286 | 286 | _('Error occurred during updating application settings'), |
|
287 | 287 | category='error') |
|
288 | 288 | |
|
289 | 289 | return redirect(url('admin_settings_global')) |
|
290 | 290 | |
|
291 | 291 | @HasPermissionAllDecorator('hg.admin') |
|
292 | 292 | def settings_global(self): |
|
293 | 293 | """GET /admin/settings/global: All items in the collection""" |
|
294 | 294 | # url('admin_settings_global') |
|
295 | 295 | c.active = 'global' |
|
296 | 296 | c.personal_repo_group_default_pattern = RepoGroupModel()\ |
|
297 | 297 | .get_personal_group_name_pattern() |
|
298 | 298 | |
|
299 | 299 | return htmlfill.render( |
|
300 | 300 | render('admin/settings/settings.mako'), |
|
301 | 301 | defaults=self._form_defaults(), |
|
302 | 302 | encoding="UTF-8", |
|
303 | 303 | force_defaults=False) |
|
304 | 304 | |
|
305 | 305 | @HasPermissionAllDecorator('hg.admin') |
|
306 | 306 | @auth.CSRFRequired() |
|
307 | 307 | def settings_visual_update(self): |
|
308 | 308 | """POST /admin/settings/visual: All items in the collection""" |
|
309 | 309 | # url('admin_settings_visual') |
|
310 | 310 | c.active = 'visual' |
|
311 | 311 | application_form = ApplicationVisualisationForm()() |
|
312 | 312 | try: |
|
313 | 313 | form_result = application_form.to_python(dict(request.POST)) |
|
314 | 314 | except formencode.Invalid as errors: |
|
315 | 315 | return htmlfill.render( |
|
316 | 316 | render('admin/settings/settings.mako'), |
|
317 | 317 | defaults=errors.value, |
|
318 | 318 | errors=errors.error_dict or {}, |
|
319 | 319 | prefix_error=False, |
|
320 | 320 | encoding="UTF-8", |
|
321 | 321 | force_defaults=False |
|
322 | 322 | ) |
|
323 | 323 | |
|
324 | 324 | try: |
|
325 | 325 | settings = [ |
|
326 | 326 | ('show_public_icon', 'rhodecode_show_public_icon', 'bool'), |
|
327 | 327 | ('show_private_icon', 'rhodecode_show_private_icon', 'bool'), |
|
328 | 328 | ('stylify_metatags', 'rhodecode_stylify_metatags', 'bool'), |
|
329 | 329 | ('repository_fields', 'rhodecode_repository_fields', 'bool'), |
|
330 | 330 | ('dashboard_items', 'rhodecode_dashboard_items', 'int'), |
|
331 | 331 | ('admin_grid_items', 'rhodecode_admin_grid_items', 'int'), |
|
332 | 332 | ('show_version', 'rhodecode_show_version', 'bool'), |
|
333 | 333 | ('use_gravatar', 'rhodecode_use_gravatar', 'bool'), |
|
334 | 334 | ('markup_renderer', 'rhodecode_markup_renderer', 'unicode'), |
|
335 | 335 | ('gravatar_url', 'rhodecode_gravatar_url', 'unicode'), |
|
336 | 336 | ('clone_uri_tmpl', 'rhodecode_clone_uri_tmpl', 'unicode'), |
|
337 | 337 | ('support_url', 'rhodecode_support_url', 'unicode'), |
|
338 | 338 | ('show_revision_number', 'rhodecode_show_revision_number', 'bool'), |
|
339 | 339 | ('show_sha_length', 'rhodecode_show_sha_length', 'int'), |
|
340 | 340 | ] |
|
341 | 341 | for setting, form_key, type_ in settings: |
|
342 | 342 | sett = SettingsModel().create_or_update_setting( |
|
343 | 343 | setting, form_result[form_key], type_) |
|
344 | 344 | Session().add(sett) |
|
345 | 345 | |
|
346 | 346 | Session().commit() |
|
347 | 347 | SettingsModel().invalidate_settings_cache() |
|
348 | 348 | h.flash(_('Updated visualisation settings'), category='success') |
|
349 | 349 | except Exception: |
|
350 | 350 | log.exception("Exception updating visualization settings") |
|
351 | 351 | h.flash(_('Error occurred during updating ' |
|
352 | 352 | 'visualisation settings'), |
|
353 | 353 | category='error') |
|
354 | 354 | |
|
355 | 355 | return redirect(url('admin_settings_visual')) |
|
356 | 356 | |
|
357 | 357 | @HasPermissionAllDecorator('hg.admin') |
|
358 | 358 | def settings_visual(self): |
|
359 | 359 | """GET /admin/settings/visual: All items in the collection""" |
|
360 | 360 | # url('admin_settings_visual') |
|
361 | 361 | c.active = 'visual' |
|
362 | 362 | |
|
363 | 363 | return htmlfill.render( |
|
364 | 364 | render('admin/settings/settings.mako'), |
|
365 | 365 | defaults=self._form_defaults(), |
|
366 | 366 | encoding="UTF-8", |
|
367 | 367 | force_defaults=False) |
|
368 | 368 | |
|
369 | 369 | @HasPermissionAllDecorator('hg.admin') |
|
370 | 370 | @auth.CSRFRequired() |
|
371 | 371 | def settings_issuetracker_test(self): |
|
372 | 372 | if request.is_xhr: |
|
373 | 373 | return h.urlify_commit_message( |
|
374 | 374 | request.POST.get('test_text', ''), |
|
375 | 375 | 'repo_group/test_repo1') |
|
376 | 376 | else: |
|
377 | 377 | raise HTTPBadRequest() |
|
378 | 378 | |
|
379 | 379 | @HasPermissionAllDecorator('hg.admin') |
|
380 | 380 | @auth.CSRFRequired() |
|
381 | 381 | def settings_issuetracker_delete(self): |
|
382 | 382 | uid = request.POST.get('uid') |
|
383 | 383 | IssueTrackerSettingsModel().delete_entries(uid) |
|
384 | 384 | h.flash(_('Removed issue tracker entry'), category='success') |
|
385 | 385 | return redirect(url('admin_settings_issuetracker')) |
|
386 | 386 | |
|
387 | 387 | @HasPermissionAllDecorator('hg.admin') |
|
388 | 388 | def settings_issuetracker(self): |
|
389 | 389 | """GET /admin/settings/issue-tracker: All items in the collection""" |
|
390 | 390 | # url('admin_settings_issuetracker') |
|
391 | 391 | c.active = 'issuetracker' |
|
392 | 392 | defaults = SettingsModel().get_all_settings() |
|
393 | 393 | |
|
394 | 394 | entry_key = 'rhodecode_issuetracker_pat_' |
|
395 | 395 | |
|
396 | 396 | c.issuetracker_entries = {} |
|
397 | 397 | for k, v in defaults.items(): |
|
398 | 398 | if k.startswith(entry_key): |
|
399 | 399 | uid = k[len(entry_key):] |
|
400 | 400 | c.issuetracker_entries[uid] = None |
|
401 | 401 | |
|
402 | 402 | for uid in c.issuetracker_entries: |
|
403 | 403 | c.issuetracker_entries[uid] = AttributeDict({ |
|
404 | 404 | 'pat': defaults.get('rhodecode_issuetracker_pat_' + uid), |
|
405 | 405 | 'url': defaults.get('rhodecode_issuetracker_url_' + uid), |
|
406 | 406 | 'pref': defaults.get('rhodecode_issuetracker_pref_' + uid), |
|
407 | 407 | 'desc': defaults.get('rhodecode_issuetracker_desc_' + uid), |
|
408 | 408 | }) |
|
409 | 409 | |
|
410 | 410 | return render('admin/settings/settings.mako') |
|
411 | 411 | |
|
412 | 412 | @HasPermissionAllDecorator('hg.admin') |
|
413 | 413 | @auth.CSRFRequired() |
|
414 | 414 | def settings_issuetracker_save(self): |
|
415 | 415 | settings_model = IssueTrackerSettingsModel() |
|
416 | 416 | |
|
417 | 417 | form = IssueTrackerPatternsForm()().to_python(request.POST) |
|
418 | 418 | if form: |
|
419 | 419 | for uid in form.get('delete_patterns', []): |
|
420 | 420 | settings_model.delete_entries(uid) |
|
421 | 421 | |
|
422 | 422 | for pattern in form.get('patterns', []): |
|
423 | 423 | for setting, value, type_ in pattern: |
|
424 | 424 | sett = settings_model.create_or_update_setting( |
|
425 | 425 | setting, value, type_) |
|
426 | 426 | Session().add(sett) |
|
427 | 427 | |
|
428 | 428 | Session().commit() |
|
429 | 429 | |
|
430 | 430 | SettingsModel().invalidate_settings_cache() |
|
431 | 431 | h.flash(_('Updated issue tracker entries'), category='success') |
|
432 | 432 | return redirect(url('admin_settings_issuetracker')) |
|
433 | 433 | |
|
434 | 434 | @HasPermissionAllDecorator('hg.admin') |
|
435 | 435 | @auth.CSRFRequired() |
|
436 | 436 | def settings_email_update(self): |
|
437 | 437 | """POST /admin/settings/email: All items in the collection""" |
|
438 | 438 | # url('admin_settings_email') |
|
439 | 439 | c.active = 'email' |
|
440 | 440 | |
|
441 | 441 | test_email = request.POST.get('test_email') |
|
442 | 442 | |
|
443 | 443 | if not test_email: |
|
444 | 444 | h.flash(_('Please enter email address'), category='error') |
|
445 | 445 | return redirect(url('admin_settings_email')) |
|
446 | 446 | |
|
447 | 447 | email_kwargs = { |
|
448 | 448 | 'date': datetime.datetime.now(), |
|
449 | 449 | 'user': c.rhodecode_user, |
|
450 | 450 | 'rhodecode_version': c.rhodecode_version |
|
451 | 451 | } |
|
452 | 452 | |
|
453 | 453 | (subject, headers, email_body, |
|
454 | 454 | email_body_plaintext) = EmailNotificationModel().render_email( |
|
455 | 455 | EmailNotificationModel.TYPE_EMAIL_TEST, **email_kwargs) |
|
456 | 456 | |
|
457 | 457 | recipients = [test_email] if test_email else None |
|
458 | 458 | |
|
459 | 459 | run_task(tasks.send_email, recipients, subject, |
|
460 | 460 | email_body_plaintext, email_body) |
|
461 | 461 | |
|
462 | 462 | h.flash(_('Send email task created'), category='success') |
|
463 | 463 | return redirect(url('admin_settings_email')) |
|
464 | 464 | |
|
465 | 465 | @HasPermissionAllDecorator('hg.admin') |
|
466 | 466 | def settings_email(self): |
|
467 | 467 | """GET /admin/settings/email: All items in the collection""" |
|
468 | 468 | # url('admin_settings_email') |
|
469 | 469 | c.active = 'email' |
|
470 | 470 | c.rhodecode_ini = rhodecode.CONFIG |
|
471 | 471 | |
|
472 | 472 | return htmlfill.render( |
|
473 | 473 | render('admin/settings/settings.mako'), |
|
474 | 474 | defaults=self._form_defaults(), |
|
475 | 475 | encoding="UTF-8", |
|
476 | 476 | force_defaults=False) |
|
477 | 477 | |
|
478 | 478 | @HasPermissionAllDecorator('hg.admin') |
|
479 | 479 | @auth.CSRFRequired() |
|
480 | 480 | def settings_hooks_update(self): |
|
481 | 481 | """POST or DELETE /admin/settings/hooks: All items in the collection""" |
|
482 | 482 | # url('admin_settings_hooks') |
|
483 | 483 | c.active = 'hooks' |
|
484 | 484 | if c.visual.allow_custom_hooks_settings: |
|
485 | 485 | ui_key = request.POST.get('new_hook_ui_key') |
|
486 | 486 | ui_value = request.POST.get('new_hook_ui_value') |
|
487 | 487 | |
|
488 | 488 | hook_id = request.POST.get('hook_id') |
|
489 | 489 | new_hook = False |
|
490 | 490 | |
|
491 | 491 | model = SettingsModel() |
|
492 | 492 | try: |
|
493 | 493 | if ui_value and ui_key: |
|
494 | 494 | model.create_or_update_hook(ui_key, ui_value) |
|
495 | 495 | h.flash(_('Added new hook'), category='success') |
|
496 | 496 | new_hook = True |
|
497 | 497 | elif hook_id: |
|
498 | 498 | RhodeCodeUi.delete(hook_id) |
|
499 | 499 | Session().commit() |
|
500 | 500 | |
|
501 | 501 | # check for edits |
|
502 | 502 | update = False |
|
503 | 503 | _d = request.POST.dict_of_lists() |
|
504 | 504 | for k, v in zip(_d.get('hook_ui_key', []), |
|
505 | 505 | _d.get('hook_ui_value_new', [])): |
|
506 | 506 | model.create_or_update_hook(k, v) |
|
507 | 507 | update = True |
|
508 | 508 | |
|
509 | 509 | if update and not new_hook: |
|
510 | 510 | h.flash(_('Updated hooks'), category='success') |
|
511 | 511 | Session().commit() |
|
512 | 512 | except Exception: |
|
513 | 513 | log.exception("Exception during hook creation") |
|
514 | 514 | h.flash(_('Error occurred during hook creation'), |
|
515 | 515 | category='error') |
|
516 | 516 | |
|
517 | 517 | return redirect(url('admin_settings_hooks')) |
|
518 | 518 | |
|
519 | 519 | @HasPermissionAllDecorator('hg.admin') |
|
520 | 520 | def settings_hooks(self): |
|
521 | 521 | """GET /admin/settings/hooks: All items in the collection""" |
|
522 | 522 | # url('admin_settings_hooks') |
|
523 | 523 | c.active = 'hooks' |
|
524 | 524 | |
|
525 | 525 | model = SettingsModel() |
|
526 | 526 | c.hooks = model.get_builtin_hooks() |
|
527 | 527 | c.custom_hooks = model.get_custom_hooks() |
|
528 | 528 | |
|
529 | 529 | return htmlfill.render( |
|
530 | 530 | render('admin/settings/settings.mako'), |
|
531 | 531 | defaults=self._form_defaults(), |
|
532 | 532 | encoding="UTF-8", |
|
533 | 533 | force_defaults=False) |
|
534 | 534 | |
|
535 | 535 | @HasPermissionAllDecorator('hg.admin') |
|
536 | 536 | def settings_search(self): |
|
537 | 537 | """GET /admin/settings/search: All items in the collection""" |
|
538 | 538 | # url('admin_settings_search') |
|
539 | 539 | c.active = 'search' |
|
540 | 540 | |
|
541 | 541 | from rhodecode.lib.index import searcher_from_config |
|
542 | 542 | searcher = searcher_from_config(config) |
|
543 | 543 | c.statistics = searcher.statistics() |
|
544 | 544 | |
|
545 | 545 | return render('admin/settings/settings.mako') |
|
546 | 546 | |
|
547 | 547 | @HasPermissionAllDecorator('hg.admin') |
|
548 | 548 | def settings_supervisor(self): |
|
549 | 549 | c.rhodecode_ini = rhodecode.CONFIG |
|
550 | 550 | c.active = 'supervisor' |
|
551 | 551 | |
|
552 | 552 | c.supervisor_procs = OrderedDict([ |
|
553 | 553 | (SUPERVISOR_MASTER, {}), |
|
554 | 554 | ]) |
|
555 | 555 | |
|
556 | 556 | c.log_size = 10240 |
|
557 | 557 | supervisor = SupervisorModel() |
|
558 | 558 | |
|
559 | 559 | _connection = supervisor.get_connection( |
|
560 | 560 | c.rhodecode_ini.get('supervisor.uri')) |
|
561 | 561 | c.connection_error = None |
|
562 | 562 | try: |
|
563 | 563 | _connection.supervisor.getAllProcessInfo() |
|
564 | 564 | except Exception as e: |
|
565 | 565 | c.connection_error = str(e) |
|
566 | 566 | log.exception("Exception reading supervisor data") |
|
567 | 567 | return render('admin/settings/settings.mako') |
|
568 | 568 | |
|
569 | 569 | groupid = c.rhodecode_ini.get('supervisor.group_id') |
|
570 | 570 | |
|
571 | 571 | # feed our group processes to the main |
|
572 | 572 | for proc in supervisor.get_group_processes(_connection, groupid): |
|
573 | 573 | c.supervisor_procs[proc['name']] = {} |
|
574 | 574 | |
|
575 | 575 | for k in c.supervisor_procs.keys(): |
|
576 | 576 | try: |
|
577 | 577 | # master process info |
|
578 | 578 | if k == SUPERVISOR_MASTER: |
|
579 | 579 | _data = supervisor.get_master_state(_connection) |
|
580 | 580 | _data['name'] = 'supervisor master' |
|
581 | 581 | _data['description'] = 'pid %s, id: %s, ver: %s' % ( |
|
582 | 582 | _data['pid'], _data['id'], _data['ver']) |
|
583 | 583 | c.supervisor_procs[k] = _data |
|
584 | 584 | else: |
|
585 | 585 | procid = groupid + ":" + k |
|
586 | 586 | c.supervisor_procs[k] = supervisor.get_process_info(_connection, procid) |
|
587 | 587 | except Exception as e: |
|
588 | 588 | log.exception("Exception reading supervisor data") |
|
589 | 589 | c.supervisor_procs[k] = {'_rhodecode_error': str(e)} |
|
590 | 590 | |
|
591 | 591 | return render('admin/settings/settings.mako') |
|
592 | 592 | |
|
593 | 593 | @HasPermissionAllDecorator('hg.admin') |
|
594 | 594 | def settings_supervisor_log(self, procid): |
|
595 | 595 | import rhodecode |
|
596 | 596 | c.rhodecode_ini = rhodecode.CONFIG |
|
597 | 597 | c.active = 'supervisor_tail' |
|
598 | 598 | |
|
599 | 599 | supervisor = SupervisorModel() |
|
600 | 600 | _connection = supervisor.get_connection(c.rhodecode_ini.get('supervisor.uri')) |
|
601 | 601 | groupid = c.rhodecode_ini.get('supervisor.group_id') |
|
602 | 602 | procid = groupid + ":" + procid if procid != SUPERVISOR_MASTER else procid |
|
603 | 603 | |
|
604 | 604 | c.log_size = 10240 |
|
605 | 605 | offset = abs(safe_int(request.GET.get('offset', c.log_size))) * -1 |
|
606 | 606 | c.log = supervisor.read_process_log(_connection, procid, offset, 0) |
|
607 | 607 | |
|
608 | 608 | return render('admin/settings/settings.mako') |
|
609 | 609 | |
|
610 | 610 | @HasPermissionAllDecorator('hg.admin') |
|
611 | 611 | @auth.CSRFRequired() |
|
612 | 612 | def settings_labs_update(self): |
|
613 | 613 | """POST /admin/settings/labs: All items in the collection""" |
|
614 | 614 | # url('admin_settings/labs', method={'POST'}) |
|
615 | 615 | c.active = 'labs' |
|
616 | 616 | |
|
617 | 617 | application_form = LabsSettingsForm()() |
|
618 | 618 | try: |
|
619 | 619 | form_result = application_form.to_python(dict(request.POST)) |
|
620 | 620 | except formencode.Invalid as errors: |
|
621 | 621 | h.flash( |
|
622 | 622 | _('Some form inputs contain invalid data.'), |
|
623 | 623 | category='error') |
|
624 | 624 | return htmlfill.render( |
|
625 | 625 | render('admin/settings/settings.mako'), |
|
626 | 626 | defaults=errors.value, |
|
627 | 627 | errors=errors.error_dict or {}, |
|
628 | 628 | prefix_error=False, |
|
629 | 629 | encoding='UTF-8', |
|
630 | 630 | force_defaults=False |
|
631 | 631 | ) |
|
632 | 632 | |
|
633 | 633 | try: |
|
634 | 634 | session = Session() |
|
635 | 635 | for setting in _LAB_SETTINGS: |
|
636 | 636 | setting_name = setting.key[len('rhodecode_'):] |
|
637 | 637 | sett = SettingsModel().create_or_update_setting( |
|
638 | 638 | setting_name, form_result[setting.key], setting.type) |
|
639 | 639 | session.add(sett) |
|
640 | 640 | |
|
641 | 641 | except Exception: |
|
642 | 642 | log.exception('Exception while updating lab settings') |
|
643 | 643 | h.flash(_('Error occurred during updating labs settings'), |
|
644 | 644 | category='error') |
|
645 | 645 | else: |
|
646 | 646 | Session().commit() |
|
647 | 647 | SettingsModel().invalidate_settings_cache() |
|
648 | 648 | h.flash(_('Updated Labs settings'), category='success') |
|
649 | 649 | return redirect(url('admin_settings_labs')) |
|
650 | 650 | |
|
651 | 651 | return htmlfill.render( |
|
652 | 652 | render('admin/settings/settings.mako'), |
|
653 | 653 | defaults=self._form_defaults(), |
|
654 | 654 | encoding='UTF-8', |
|
655 | 655 | force_defaults=False) |
|
656 | 656 | |
|
657 | 657 | @HasPermissionAllDecorator('hg.admin') |
|
658 | 658 | def settings_labs(self): |
|
659 | 659 | """GET /admin/settings/labs: All items in the collection""" |
|
660 | 660 | # url('admin_settings_labs') |
|
661 | 661 | if not c.labs_active: |
|
662 | 662 | redirect(url('admin_settings')) |
|
663 | 663 | |
|
664 | 664 | c.active = 'labs' |
|
665 | 665 | c.lab_settings = _LAB_SETTINGS |
|
666 | 666 | |
|
667 | 667 | return htmlfill.render( |
|
668 | 668 | render('admin/settings/settings.mako'), |
|
669 | 669 | defaults=self._form_defaults(), |
|
670 | 670 | encoding='UTF-8', |
|
671 | 671 | force_defaults=False) |
|
672 | 672 | |
|
673 | 673 | def _form_defaults(self): |
|
674 | 674 | defaults = SettingsModel().get_all_settings() |
|
675 | 675 | defaults.update(self._get_ui_settings()) |
|
676 | 676 | |
|
677 | 677 | defaults.update({ |
|
678 | 678 | 'new_svn_branch': '', |
|
679 | 679 | 'new_svn_tag': '', |
|
680 | 680 | }) |
|
681 | 681 | return defaults |
|
682 | 682 | |
|
683 | 683 | |
|
684 | 684 | # :param key: name of the setting including the 'rhodecode_' prefix |
|
685 | 685 | # :param type: the RhodeCodeSetting type to use. |
|
686 | 686 | # :param group: the i18ned group in which we should dispaly this setting |
|
687 | 687 | # :param label: the i18ned label we should display for this setting |
|
688 | 688 | # :param help: the i18ned help we should dispaly for this setting |
|
689 | 689 | LabSetting = collections.namedtuple( |
|
690 | 690 | 'LabSetting', ('key', 'type', 'group', 'label', 'help')) |
|
691 | 691 | |
|
692 | 692 | |
|
693 | 693 | # This list has to be kept in sync with the form |
|
694 | 694 | # rhodecode.model.forms.LabsSettingsForm. |
|
695 | 695 | _LAB_SETTINGS = [ |
|
696 | 696 | |
|
697 | 697 | ] |
@@ -1,419 +1,413 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2011-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | User Groups crud controller for pylons |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import logging |
|
26 | 26 | import formencode |
|
27 | 27 | |
|
28 | 28 | import peppercorn |
|
29 | 29 | from formencode import htmlfill |
|
30 | 30 | from pylons import request, tmpl_context as c, url, config |
|
31 | 31 | from pylons.controllers.util import redirect |
|
32 | 32 | from pylons.i18n.translation import _ |
|
33 | 33 | |
|
34 | from sqlalchemy.orm import joinedload | |
|
35 | ||
|
36 | 34 | from rhodecode.lib import auth |
|
37 | 35 | from rhodecode.lib import helpers as h |
|
38 | 36 | from rhodecode.lib import audit_logger |
|
39 | from rhodecode.lib.ext_json import json | |
|
40 | 37 | from rhodecode.lib.exceptions import UserGroupAssignedException,\ |
|
41 | 38 | RepoGroupAssignmentError |
|
42 | from rhodecode.lib.utils import jsonify | |
|
43 | 39 | from rhodecode.lib.utils2 import safe_unicode, str2bool, safe_int |
|
44 | 40 | from rhodecode.lib.auth import ( |
|
45 | 41 | LoginRequired, NotAnonymous, HasUserGroupPermissionAnyDecorator, |
|
46 | 42 | HasPermissionAnyDecorator) |
|
47 | 43 | from rhodecode.lib.base import BaseController, render |
|
48 | 44 | from rhodecode.model.permission import PermissionModel |
|
49 | from rhodecode.model.scm import UserGroupList | |
|
50 | 45 | from rhodecode.model.user_group import UserGroupModel |
|
51 |
from rhodecode.model.db import |
|
|
52 | User, UserGroup, UserGroupRepoToPerm, UserGroupRepoGroupToPerm) | |
|
46 | from rhodecode.model.db import User, UserGroup | |
|
53 | 47 | from rhodecode.model.forms import ( |
|
54 | 48 | UserGroupForm, UserGroupPermsForm, UserIndividualPermissionsForm, |
|
55 | 49 | UserPermissionsForm) |
|
56 | 50 | from rhodecode.model.meta import Session |
|
57 | 51 | |
|
58 | 52 | |
|
59 | 53 | log = logging.getLogger(__name__) |
|
60 | 54 | |
|
61 | 55 | |
|
62 | 56 | class UserGroupsController(BaseController): |
|
63 | 57 | """REST Controller styled on the Atom Publishing Protocol""" |
|
64 | 58 | |
|
65 | 59 | @LoginRequired() |
|
66 | 60 | def __before__(self): |
|
67 | 61 | super(UserGroupsController, self).__before__() |
|
68 | 62 | c.available_permissions = config['available_permissions'] |
|
69 | 63 | PermissionModel().set_global_permission_choices(c, gettext_translator=_) |
|
70 | 64 | |
|
71 | 65 | def __load_data(self, user_group_id): |
|
72 | 66 | c.group_members_obj = [x.user for x in c.user_group.members] |
|
73 | 67 | c.group_members_obj.sort(key=lambda u: u.username.lower()) |
|
74 | 68 | c.group_members = [(x.user_id, x.username) for x in c.group_members_obj] |
|
75 | 69 | |
|
76 | 70 | def __load_defaults(self, user_group_id): |
|
77 | 71 | """ |
|
78 | 72 | Load defaults settings for edit, and update |
|
79 | 73 | |
|
80 | 74 | :param user_group_id: |
|
81 | 75 | """ |
|
82 | 76 | user_group = UserGroup.get_or_404(user_group_id) |
|
83 | 77 | data = user_group.get_dict() |
|
84 | 78 | # fill owner |
|
85 | 79 | if user_group.user: |
|
86 | 80 | data.update({'user': user_group.user.username}) |
|
87 | 81 | else: |
|
88 | 82 | replacement_user = User.get_first_super_admin().username |
|
89 | 83 | data.update({'user': replacement_user}) |
|
90 | 84 | return data |
|
91 | 85 | |
|
92 | 86 | def _revoke_perms_on_yourself(self, form_result): |
|
93 | 87 | _updates = filter(lambda u: c.rhodecode_user.user_id == int(u[0]), |
|
94 | 88 | form_result['perm_updates']) |
|
95 | 89 | _additions = filter(lambda u: c.rhodecode_user.user_id == int(u[0]), |
|
96 | 90 | form_result['perm_additions']) |
|
97 | 91 | _deletions = filter(lambda u: c.rhodecode_user.user_id == int(u[0]), |
|
98 | 92 | form_result['perm_deletions']) |
|
99 | 93 | admin_perm = 'usergroup.admin' |
|
100 | 94 | if _updates and _updates[0][1] != admin_perm or \ |
|
101 | 95 | _additions and _additions[0][1] != admin_perm or \ |
|
102 | 96 | _deletions and _deletions[0][1] != admin_perm: |
|
103 | 97 | return True |
|
104 | 98 | return False |
|
105 | 99 | |
|
106 | 100 | @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true') |
|
107 | 101 | @auth.CSRFRequired() |
|
108 | 102 | def create(self): |
|
109 | 103 | |
|
110 | 104 | users_group_form = UserGroupForm()() |
|
111 | 105 | try: |
|
112 | 106 | form_result = users_group_form.to_python(dict(request.POST)) |
|
113 | 107 | user_group = UserGroupModel().create( |
|
114 | 108 | name=form_result['users_group_name'], |
|
115 | 109 | description=form_result['user_group_description'], |
|
116 | 110 | owner=c.rhodecode_user.user_id, |
|
117 | 111 | active=form_result['users_group_active']) |
|
118 | 112 | Session().flush() |
|
119 | 113 | creation_data = user_group.get_api_data() |
|
120 | 114 | user_group_name = form_result['users_group_name'] |
|
121 | 115 | |
|
122 | 116 | audit_logger.store_web( |
|
123 | 117 | 'user_group.create', action_data={'data': creation_data}, |
|
124 | 118 | user=c.rhodecode_user) |
|
125 | 119 | |
|
126 | 120 | user_group_link = h.link_to( |
|
127 | 121 | h.escape(user_group_name), |
|
128 | 122 | url('edit_users_group', user_group_id=user_group.users_group_id)) |
|
129 | 123 | h.flash(h.literal(_('Created user group %(user_group_link)s') |
|
130 | 124 | % {'user_group_link': user_group_link}), |
|
131 | 125 | category='success') |
|
132 | 126 | Session().commit() |
|
133 | 127 | except formencode.Invalid as errors: |
|
134 | 128 | return htmlfill.render( |
|
135 | 129 | render('admin/user_groups/user_group_add.mako'), |
|
136 | 130 | defaults=errors.value, |
|
137 | 131 | errors=errors.error_dict or {}, |
|
138 | 132 | prefix_error=False, |
|
139 | 133 | encoding="UTF-8", |
|
140 | 134 | force_defaults=False) |
|
141 | 135 | except Exception: |
|
142 | 136 | log.exception("Exception creating user group") |
|
143 | 137 | h.flash(_('Error occurred during creation of user group %s') \ |
|
144 | 138 | % request.POST.get('users_group_name'), category='error') |
|
145 | 139 | |
|
146 | 140 | return redirect( |
|
147 | 141 | url('edit_users_group', user_group_id=user_group.users_group_id)) |
|
148 | 142 | |
|
149 | 143 | @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true') |
|
150 | 144 | def new(self): |
|
151 | 145 | """GET /user_groups/new: Form to create a new item""" |
|
152 | 146 | # url('new_users_group') |
|
153 | 147 | return render('admin/user_groups/user_group_add.mako') |
|
154 | 148 | |
|
155 | 149 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') |
|
156 | 150 | @auth.CSRFRequired() |
|
157 | 151 | def update(self, user_group_id): |
|
158 | 152 | |
|
159 | 153 | user_group_id = safe_int(user_group_id) |
|
160 | 154 | c.user_group = UserGroup.get_or_404(user_group_id) |
|
161 | 155 | c.active = 'settings' |
|
162 | 156 | self.__load_data(user_group_id) |
|
163 | 157 | |
|
164 | 158 | users_group_form = UserGroupForm( |
|
165 | 159 | edit=True, old_data=c.user_group.get_dict(), allow_disabled=True)() |
|
166 | 160 | |
|
167 | 161 | old_values = c.user_group.get_api_data() |
|
168 | 162 | try: |
|
169 | 163 | form_result = users_group_form.to_python(request.POST) |
|
170 | 164 | pstruct = peppercorn.parse(request.POST.items()) |
|
171 | 165 | form_result['users_group_members'] = pstruct['user_group_members'] |
|
172 | 166 | |
|
173 | 167 | user_group, added_members, removed_members = \ |
|
174 | 168 | UserGroupModel().update(c.user_group, form_result) |
|
175 | 169 | updated_user_group = form_result['users_group_name'] |
|
176 | 170 | |
|
177 | 171 | audit_logger.store_web( |
|
178 | 172 | 'user_group.edit', action_data={'old_data': old_values}, |
|
179 | 173 | user=c.rhodecode_user) |
|
180 | 174 | |
|
181 | 175 | # TODO(marcink): use added/removed to set user_group.edit.member.add |
|
182 | 176 | |
|
183 | 177 | h.flash(_('Updated user group %s') % updated_user_group, |
|
184 | 178 | category='success') |
|
185 | 179 | Session().commit() |
|
186 | 180 | except formencode.Invalid as errors: |
|
187 | 181 | defaults = errors.value |
|
188 | 182 | e = errors.error_dict or {} |
|
189 | 183 | |
|
190 | 184 | return htmlfill.render( |
|
191 | 185 | render('admin/user_groups/user_group_edit.mako'), |
|
192 | 186 | defaults=defaults, |
|
193 | 187 | errors=e, |
|
194 | 188 | prefix_error=False, |
|
195 | 189 | encoding="UTF-8", |
|
196 | 190 | force_defaults=False) |
|
197 | 191 | except Exception: |
|
198 | 192 | log.exception("Exception during update of user group") |
|
199 | 193 | h.flash(_('Error occurred during update of user group %s') |
|
200 | 194 | % request.POST.get('users_group_name'), category='error') |
|
201 | 195 | |
|
202 | 196 | return redirect(url('edit_users_group', user_group_id=user_group_id)) |
|
203 | 197 | |
|
204 | 198 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') |
|
205 | 199 | @auth.CSRFRequired() |
|
206 | 200 | def delete(self, user_group_id): |
|
207 | 201 | user_group_id = safe_int(user_group_id) |
|
208 | 202 | c.user_group = UserGroup.get_or_404(user_group_id) |
|
209 | 203 | force = str2bool(request.POST.get('force')) |
|
210 | 204 | |
|
211 | 205 | old_values = c.user_group.get_api_data() |
|
212 | 206 | try: |
|
213 | 207 | UserGroupModel().delete(c.user_group, force=force) |
|
214 | 208 | audit_logger.store_web( |
|
215 | 209 | 'user.delete', action_data={'old_data': old_values}, |
|
216 | 210 | user=c.rhodecode_user) |
|
217 | 211 | Session().commit() |
|
218 | 212 | h.flash(_('Successfully deleted user group'), category='success') |
|
219 | 213 | except UserGroupAssignedException as e: |
|
220 | 214 | h.flash(str(e), category='error') |
|
221 | 215 | except Exception: |
|
222 | 216 | log.exception("Exception during deletion of user group") |
|
223 | 217 | h.flash(_('An error occurred during deletion of user group'), |
|
224 | 218 | category='error') |
|
225 | 219 | return redirect(url('users_groups')) |
|
226 | 220 | |
|
227 | 221 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') |
|
228 | 222 | def edit(self, user_group_id): |
|
229 | 223 | """GET /user_groups/user_group_id/edit: Form to edit an existing item""" |
|
230 | 224 | # url('edit_users_group', user_group_id=ID) |
|
231 | 225 | |
|
232 | 226 | user_group_id = safe_int(user_group_id) |
|
233 | 227 | c.user_group = UserGroup.get_or_404(user_group_id) |
|
234 | 228 | c.active = 'settings' |
|
235 | 229 | self.__load_data(user_group_id) |
|
236 | 230 | |
|
237 | 231 | defaults = self.__load_defaults(user_group_id) |
|
238 | 232 | |
|
239 | 233 | return htmlfill.render( |
|
240 | 234 | render('admin/user_groups/user_group_edit.mako'), |
|
241 | 235 | defaults=defaults, |
|
242 | 236 | encoding="UTF-8", |
|
243 | 237 | force_defaults=False |
|
244 | 238 | ) |
|
245 | 239 | |
|
246 | 240 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') |
|
247 | 241 | def edit_perms(self, user_group_id): |
|
248 | 242 | user_group_id = safe_int(user_group_id) |
|
249 | 243 | c.user_group = UserGroup.get_or_404(user_group_id) |
|
250 | 244 | c.active = 'perms' |
|
251 | 245 | |
|
252 | 246 | defaults = {} |
|
253 | 247 | # fill user group users |
|
254 | 248 | for p in c.user_group.user_user_group_to_perm: |
|
255 | 249 | defaults.update({'u_perm_%s' % p.user.user_id: |
|
256 | 250 | p.permission.permission_name}) |
|
257 | 251 | |
|
258 | 252 | for p in c.user_group.user_group_user_group_to_perm: |
|
259 | 253 | defaults.update({'g_perm_%s' % p.user_group.users_group_id: |
|
260 | 254 | p.permission.permission_name}) |
|
261 | 255 | |
|
262 | 256 | return htmlfill.render( |
|
263 | 257 | render('admin/user_groups/user_group_edit.mako'), |
|
264 | 258 | defaults=defaults, |
|
265 | 259 | encoding="UTF-8", |
|
266 | 260 | force_defaults=False |
|
267 | 261 | ) |
|
268 | 262 | |
|
269 | 263 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') |
|
270 | 264 | @auth.CSRFRequired() |
|
271 | 265 | def update_perms(self, user_group_id): |
|
272 | 266 | """ |
|
273 | 267 | grant permission for given usergroup |
|
274 | 268 | |
|
275 | 269 | :param user_group_id: |
|
276 | 270 | """ |
|
277 | 271 | user_group_id = safe_int(user_group_id) |
|
278 | 272 | c.user_group = UserGroup.get_or_404(user_group_id) |
|
279 | 273 | form = UserGroupPermsForm()().to_python(request.POST) |
|
280 | 274 | |
|
281 | 275 | if not c.rhodecode_user.is_admin: |
|
282 | 276 | if self._revoke_perms_on_yourself(form): |
|
283 | 277 | msg = _('Cannot change permission for yourself as admin') |
|
284 | 278 | h.flash(msg, category='warning') |
|
285 | 279 | return redirect(url('edit_user_group_perms', user_group_id=user_group_id)) |
|
286 | 280 | |
|
287 | 281 | try: |
|
288 | 282 | UserGroupModel().update_permissions(user_group_id, |
|
289 | 283 | form['perm_additions'], form['perm_updates'], form['perm_deletions']) |
|
290 | 284 | except RepoGroupAssignmentError: |
|
291 | 285 | h.flash(_('Target group cannot be the same'), category='error') |
|
292 | 286 | return redirect(url('edit_user_group_perms', user_group_id=user_group_id)) |
|
293 | 287 | |
|
294 | 288 | # TODO(marcink): implement global permissions |
|
295 | 289 | # audit_log.store_web('user_group.edit.permissions') |
|
296 | 290 | Session().commit() |
|
297 | 291 | h.flash(_('User Group permissions updated'), category='success') |
|
298 | 292 | return redirect(url('edit_user_group_perms', user_group_id=user_group_id)) |
|
299 | 293 | |
|
300 | 294 | |
|
301 | 295 | |
|
302 | 296 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') |
|
303 | 297 | def edit_global_perms(self, user_group_id): |
|
304 | 298 | user_group_id = safe_int(user_group_id) |
|
305 | 299 | c.user_group = UserGroup.get_or_404(user_group_id) |
|
306 | 300 | c.active = 'global_perms' |
|
307 | 301 | |
|
308 | 302 | c.default_user = User.get_default_user() |
|
309 | 303 | defaults = c.user_group.get_dict() |
|
310 | 304 | defaults.update(c.default_user.get_default_perms(suffix='_inherited')) |
|
311 | 305 | defaults.update(c.user_group.get_default_perms()) |
|
312 | 306 | |
|
313 | 307 | return htmlfill.render( |
|
314 | 308 | render('admin/user_groups/user_group_edit.mako'), |
|
315 | 309 | defaults=defaults, |
|
316 | 310 | encoding="UTF-8", |
|
317 | 311 | force_defaults=False |
|
318 | 312 | ) |
|
319 | 313 | |
|
320 | 314 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') |
|
321 | 315 | @auth.CSRFRequired() |
|
322 | 316 | def update_global_perms(self, user_group_id): |
|
323 | 317 | user_group_id = safe_int(user_group_id) |
|
324 | 318 | user_group = UserGroup.get_or_404(user_group_id) |
|
325 | 319 | c.active = 'global_perms' |
|
326 | 320 | |
|
327 | 321 | try: |
|
328 | 322 | # first stage that verifies the checkbox |
|
329 | 323 | _form = UserIndividualPermissionsForm() |
|
330 | 324 | form_result = _form.to_python(dict(request.POST)) |
|
331 | 325 | inherit_perms = form_result['inherit_default_permissions'] |
|
332 | 326 | user_group.inherit_default_permissions = inherit_perms |
|
333 | 327 | Session().add(user_group) |
|
334 | 328 | |
|
335 | 329 | if not inherit_perms: |
|
336 | 330 | # only update the individual ones if we un check the flag |
|
337 | 331 | _form = UserPermissionsForm( |
|
338 | 332 | [x[0] for x in c.repo_create_choices], |
|
339 | 333 | [x[0] for x in c.repo_create_on_write_choices], |
|
340 | 334 | [x[0] for x in c.repo_group_create_choices], |
|
341 | 335 | [x[0] for x in c.user_group_create_choices], |
|
342 | 336 | [x[0] for x in c.fork_choices], |
|
343 | 337 | [x[0] for x in c.inherit_default_permission_choices])() |
|
344 | 338 | |
|
345 | 339 | form_result = _form.to_python(dict(request.POST)) |
|
346 | 340 | form_result.update({'perm_user_group_id': user_group.users_group_id}) |
|
347 | 341 | |
|
348 | 342 | PermissionModel().update_user_group_permissions(form_result) |
|
349 | 343 | |
|
350 | 344 | Session().commit() |
|
351 | 345 | h.flash(_('User Group global permissions updated successfully'), |
|
352 | 346 | category='success') |
|
353 | 347 | |
|
354 | 348 | except formencode.Invalid as errors: |
|
355 | 349 | defaults = errors.value |
|
356 | 350 | c.user_group = user_group |
|
357 | 351 | return htmlfill.render( |
|
358 | 352 | render('admin/user_groups/user_group_edit.mako'), |
|
359 | 353 | defaults=defaults, |
|
360 | 354 | errors=errors.error_dict or {}, |
|
361 | 355 | prefix_error=False, |
|
362 | 356 | encoding="UTF-8", |
|
363 | 357 | force_defaults=False) |
|
364 | 358 | except Exception: |
|
365 | 359 | log.exception("Exception during permissions saving") |
|
366 | 360 | h.flash(_('An error occurred during permissions saving'), |
|
367 | 361 | category='error') |
|
368 | 362 | |
|
369 | 363 | return redirect(url('edit_user_group_global_perms', user_group_id=user_group_id)) |
|
370 | 364 | |
|
371 | 365 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') |
|
372 | 366 | def edit_advanced(self, user_group_id): |
|
373 | 367 | user_group_id = safe_int(user_group_id) |
|
374 | 368 | c.user_group = UserGroup.get_or_404(user_group_id) |
|
375 | 369 | c.active = 'advanced' |
|
376 | 370 | c.group_members_obj = sorted( |
|
377 | 371 | (x.user for x in c.user_group.members), |
|
378 | 372 | key=lambda u: u.username.lower()) |
|
379 | 373 | |
|
380 | 374 | c.group_to_repos = sorted( |
|
381 | 375 | (x.repository for x in c.user_group.users_group_repo_to_perm), |
|
382 | 376 | key=lambda u: u.repo_name.lower()) |
|
383 | 377 | |
|
384 | 378 | c.group_to_repo_groups = sorted( |
|
385 | 379 | (x.group for x in c.user_group.users_group_repo_group_to_perm), |
|
386 | 380 | key=lambda u: u.group_name.lower()) |
|
387 | 381 | |
|
388 | 382 | return render('admin/user_groups/user_group_edit.mako') |
|
389 | 383 | |
|
390 | 384 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') |
|
391 | 385 | def edit_advanced_set_synchronization(self, user_group_id): |
|
392 | 386 | user_group_id = safe_int(user_group_id) |
|
393 | 387 | user_group = UserGroup.get_or_404(user_group_id) |
|
394 | 388 | |
|
395 | 389 | existing = user_group.group_data.get('extern_type') |
|
396 | 390 | |
|
397 | 391 | if existing: |
|
398 | 392 | new_state = user_group.group_data |
|
399 | 393 | new_state['extern_type'] = None |
|
400 | 394 | else: |
|
401 | 395 | new_state = user_group.group_data |
|
402 | 396 | new_state['extern_type'] = 'manual' |
|
403 | 397 | new_state['extern_type_set_by'] = c.rhodecode_user.username |
|
404 | 398 | |
|
405 | 399 | try: |
|
406 | 400 | user_group.group_data = new_state |
|
407 | 401 | Session().add(user_group) |
|
408 | 402 | Session().commit() |
|
409 | 403 | |
|
410 | 404 | h.flash(_('User Group synchronization updated successfully'), |
|
411 | 405 | category='success') |
|
412 | 406 | except Exception: |
|
413 | 407 | log.exception("Exception during sync settings saving") |
|
414 | 408 | h.flash(_('An error occurred during synchronization update'), |
|
415 | 409 | category='error') |
|
416 | 410 | |
|
417 | 411 | return redirect( |
|
418 | 412 | url('edit_user_group_advanced', user_group_id=user_group_id)) |
|
419 | 413 |
@@ -1,712 +1,617 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | The base Controller API |
|
23 | 23 | Provides the BaseController class for subclassing. And usage in different |
|
24 | 24 | controllers |
|
25 | 25 | """ |
|
26 | 26 | |
|
27 | 27 | import logging |
|
28 | 28 | import socket |
|
29 | 29 | |
|
30 | 30 | import markupsafe |
|
31 | 31 | import ipaddress |
|
32 | 32 | import pyramid.threadlocal |
|
33 | 33 | |
|
34 | 34 | from paste.auth.basic import AuthBasicAuthenticator |
|
35 | 35 | from paste.httpexceptions import HTTPUnauthorized, HTTPForbidden, get_exception |
|
36 | 36 | from paste.httpheaders import WWW_AUTHENTICATE, AUTHORIZATION |
|
37 | from pylons import tmpl_context as c, request, url | |
|
38 | from pylons.controllers import WSGIController | |
|
39 | from pylons.controllers.util import redirect | |
|
40 | from pylons.i18n import translation | |
|
41 | # marcink: don't remove this import | |
|
42 | from pylons.templating import render_mako, pylons_globals, literal, cached_template | |
|
43 | from pylons.i18n.translation import _ | |
|
44 | from webob.exc import HTTPFound | |
|
45 | ||
|
46 | 37 | |
|
47 | 38 | import rhodecode |
|
48 | 39 | from rhodecode.authentication.base import VCS_TYPE |
|
49 | 40 | from rhodecode.lib import auth, utils2 |
|
50 | 41 | from rhodecode.lib import helpers as h |
|
51 | 42 | from rhodecode.lib.auth import AuthUser, CookieStoreWrapper |
|
52 | 43 | from rhodecode.lib.exceptions import UserCreationError |
|
53 | 44 | from rhodecode.lib.utils import ( |
|
54 | 45 | get_repo_slug, set_rhodecode_config, password_changed, |
|
55 | 46 | get_enabled_hook_classes) |
|
56 | 47 | from rhodecode.lib.utils2 import ( |
|
57 | 48 | str2bool, safe_unicode, AttributeDict, safe_int, md5, aslist) |
|
58 | from rhodecode.lib.vcs.exceptions import RepositoryRequirementError | |
|
59 | 49 | from rhodecode.model import meta |
|
60 | 50 | from rhodecode.model.db import Repository, User, ChangesetComment |
|
61 | 51 | from rhodecode.model.notification import NotificationModel |
|
62 | 52 | from rhodecode.model.scm import ScmModel |
|
63 | 53 | from rhodecode.model.settings import VcsSettingsModel, SettingsModel |
|
64 | 54 | |
|
55 | # NOTE(marcink): remove after base controller is no longer required | |
|
56 | from pylons.controllers import WSGIController | |
|
57 | from pylons.i18n import translation | |
|
65 | 58 | |
|
66 | 59 | log = logging.getLogger(__name__) |
|
67 | 60 | |
|
68 | 61 | |
|
69 | 62 | # hack to make the migration to pyramid easier |
|
70 | 63 | def render(template_name, extra_vars=None, cache_key=None, |
|
71 | 64 | cache_type=None, cache_expire=None): |
|
72 | 65 | """Render a template with Mako |
|
73 | 66 | |
|
74 | 67 | Accepts the cache options ``cache_key``, ``cache_type``, and |
|
75 | 68 | ``cache_expire``. |
|
76 | 69 | |
|
77 | 70 | """ |
|
71 | from pylons.templating import literal | |
|
72 | from pylons.templating import cached_template, pylons_globals | |
|
73 | ||
|
78 | 74 | # Create a render callable for the cache function |
|
79 | 75 | def render_template(): |
|
80 | 76 | # Pull in extra vars if needed |
|
81 | 77 | globs = extra_vars or {} |
|
82 | 78 | |
|
83 | 79 | # Second, get the globals |
|
84 | 80 | globs.update(pylons_globals()) |
|
85 | 81 | |
|
86 | 82 | globs['_ungettext'] = globs['ungettext'] |
|
87 | 83 | # Grab a template reference |
|
88 | 84 | template = globs['app_globals'].mako_lookup.get_template(template_name) |
|
89 | 85 | |
|
90 | 86 | return literal(template.render_unicode(**globs)) |
|
91 | 87 | |
|
92 | 88 | return cached_template(template_name, render_template, cache_key=cache_key, |
|
93 | 89 | cache_type=cache_type, cache_expire=cache_expire) |
|
94 | 90 | |
|
95 | 91 | def _filter_proxy(ip): |
|
96 | 92 | """ |
|
97 | 93 | Passed in IP addresses in HEADERS can be in a special format of multiple |
|
98 | 94 | ips. Those comma separated IPs are passed from various proxies in the |
|
99 | 95 | chain of request processing. The left-most being the original client. |
|
100 | 96 | We only care about the first IP which came from the org. client. |
|
101 | 97 | |
|
102 | 98 | :param ip: ip string from headers |
|
103 | 99 | """ |
|
104 | 100 | if ',' in ip: |
|
105 | 101 | _ips = ip.split(',') |
|
106 | 102 | _first_ip = _ips[0].strip() |
|
107 | 103 | log.debug('Got multiple IPs %s, using %s', ','.join(_ips), _first_ip) |
|
108 | 104 | return _first_ip |
|
109 | 105 | return ip |
|
110 | 106 | |
|
111 | 107 | |
|
112 | 108 | def _filter_port(ip): |
|
113 | 109 | """ |
|
114 | 110 | Removes a port from ip, there are 4 main cases to handle here. |
|
115 | 111 | - ipv4 eg. 127.0.0.1 |
|
116 | 112 | - ipv6 eg. ::1 |
|
117 | 113 | - ipv4+port eg. 127.0.0.1:8080 |
|
118 | 114 | - ipv6+port eg. [::1]:8080 |
|
119 | 115 | |
|
120 | 116 | :param ip: |
|
121 | 117 | """ |
|
122 | 118 | def is_ipv6(ip_addr): |
|
123 | 119 | if hasattr(socket, 'inet_pton'): |
|
124 | 120 | try: |
|
125 | 121 | socket.inet_pton(socket.AF_INET6, ip_addr) |
|
126 | 122 | except socket.error: |
|
127 | 123 | return False |
|
128 | 124 | else: |
|
129 | 125 | # fallback to ipaddress |
|
130 | 126 | try: |
|
131 | 127 | ipaddress.IPv6Address(safe_unicode(ip_addr)) |
|
132 | 128 | except Exception: |
|
133 | 129 | return False |
|
134 | 130 | return True |
|
135 | 131 | |
|
136 | 132 | if ':' not in ip: # must be ipv4 pure ip |
|
137 | 133 | return ip |
|
138 | 134 | |
|
139 | 135 | if '[' in ip and ']' in ip: # ipv6 with port |
|
140 | 136 | return ip.split(']')[0][1:].lower() |
|
141 | 137 | |
|
142 | 138 | # must be ipv6 or ipv4 with port |
|
143 | 139 | if is_ipv6(ip): |
|
144 | 140 | return ip |
|
145 | 141 | else: |
|
146 | 142 | ip, _port = ip.split(':')[:2] # means ipv4+port |
|
147 | 143 | return ip |
|
148 | 144 | |
|
149 | 145 | |
|
150 | 146 | def get_ip_addr(environ): |
|
151 | 147 | proxy_key = 'HTTP_X_REAL_IP' |
|
152 | 148 | proxy_key2 = 'HTTP_X_FORWARDED_FOR' |
|
153 | 149 | def_key = 'REMOTE_ADDR' |
|
154 | 150 | _filters = lambda x: _filter_port(_filter_proxy(x)) |
|
155 | 151 | |
|
156 | 152 | ip = environ.get(proxy_key) |
|
157 | 153 | if ip: |
|
158 | 154 | return _filters(ip) |
|
159 | 155 | |
|
160 | 156 | ip = environ.get(proxy_key2) |
|
161 | 157 | if ip: |
|
162 | 158 | return _filters(ip) |
|
163 | 159 | |
|
164 | 160 | ip = environ.get(def_key, '0.0.0.0') |
|
165 | 161 | return _filters(ip) |
|
166 | 162 | |
|
167 | 163 | |
|
168 | 164 | def get_server_ip_addr(environ, log_errors=True): |
|
169 | 165 | hostname = environ.get('SERVER_NAME') |
|
170 | 166 | try: |
|
171 | 167 | return socket.gethostbyname(hostname) |
|
172 | 168 | except Exception as e: |
|
173 | 169 | if log_errors: |
|
174 | 170 | # in some cases this lookup is not possible, and we don't want to |
|
175 | 171 | # make it an exception in logs |
|
176 | 172 | log.exception('Could not retrieve server ip address: %s', e) |
|
177 | 173 | return hostname |
|
178 | 174 | |
|
179 | 175 | |
|
180 | 176 | def get_server_port(environ): |
|
181 | 177 | return environ.get('SERVER_PORT') |
|
182 | 178 | |
|
183 | 179 | |
|
184 | 180 | def get_access_path(environ): |
|
185 | 181 | path = environ.get('PATH_INFO') |
|
186 | 182 | org_req = environ.get('pylons.original_request') |
|
187 | 183 | if org_req: |
|
188 | 184 | path = org_req.environ.get('PATH_INFO') |
|
189 | 185 | return path |
|
190 | 186 | |
|
191 | 187 | |
|
192 | 188 | def get_user_agent(environ): |
|
193 | 189 | return environ.get('HTTP_USER_AGENT') |
|
194 | 190 | |
|
195 | 191 | |
|
196 | 192 | def vcs_operation_context( |
|
197 | 193 | environ, repo_name, username, action, scm, check_locking=True, |
|
198 | 194 | is_shadow_repo=False): |
|
199 | 195 | """ |
|
200 | 196 | Generate the context for a vcs operation, e.g. push or pull. |
|
201 | 197 | |
|
202 | 198 | This context is passed over the layers so that hooks triggered by the |
|
203 | 199 | vcs operation know details like the user, the user's IP address etc. |
|
204 | 200 | |
|
205 | 201 | :param check_locking: Allows to switch of the computation of the locking |
|
206 | 202 | data. This serves mainly the need of the simplevcs middleware to be |
|
207 | 203 | able to disable this for certain operations. |
|
208 | 204 | |
|
209 | 205 | """ |
|
210 | 206 | # Tri-state value: False: unlock, None: nothing, True: lock |
|
211 | 207 | make_lock = None |
|
212 | 208 | locked_by = [None, None, None] |
|
213 | 209 | is_anonymous = username == User.DEFAULT_USER |
|
214 | 210 | if not is_anonymous and check_locking: |
|
215 | 211 | log.debug('Checking locking on repository "%s"', repo_name) |
|
216 | 212 | user = User.get_by_username(username) |
|
217 | 213 | repo = Repository.get_by_repo_name(repo_name) |
|
218 | 214 | make_lock, __, locked_by = repo.get_locking_state( |
|
219 | 215 | action, user.user_id) |
|
220 | 216 | |
|
221 | 217 | settings_model = VcsSettingsModel(repo=repo_name) |
|
222 | 218 | ui_settings = settings_model.get_ui_settings() |
|
223 | 219 | |
|
224 | 220 | extras = { |
|
225 | 221 | 'ip': get_ip_addr(environ), |
|
226 | 222 | 'username': username, |
|
227 | 223 | 'action': action, |
|
228 | 224 | 'repository': repo_name, |
|
229 | 225 | 'scm': scm, |
|
230 | 226 | 'config': rhodecode.CONFIG['__file__'], |
|
231 | 227 | 'make_lock': make_lock, |
|
232 | 228 | 'locked_by': locked_by, |
|
233 | 229 | 'server_url': utils2.get_server_url(environ), |
|
234 | 230 | 'user_agent': get_user_agent(environ), |
|
235 | 231 | 'hooks': get_enabled_hook_classes(ui_settings), |
|
236 | 232 | 'is_shadow_repo': is_shadow_repo, |
|
237 | 233 | } |
|
238 | 234 | return extras |
|
239 | 235 | |
|
240 | 236 | |
|
241 | 237 | class BasicAuth(AuthBasicAuthenticator): |
|
242 | 238 | |
|
243 | 239 | def __init__(self, realm, authfunc, registry, auth_http_code=None, |
|
244 | 240 | initial_call_detection=False, acl_repo_name=None): |
|
245 | 241 | self.realm = realm |
|
246 | 242 | self.initial_call = initial_call_detection |
|
247 | 243 | self.authfunc = authfunc |
|
248 | 244 | self.registry = registry |
|
249 | 245 | self.acl_repo_name = acl_repo_name |
|
250 | 246 | self._rc_auth_http_code = auth_http_code |
|
251 | 247 | |
|
252 | 248 | def _get_response_from_code(self, http_code): |
|
253 | 249 | try: |
|
254 | 250 | return get_exception(safe_int(http_code)) |
|
255 | 251 | except Exception: |
|
256 | 252 | log.exception('Failed to fetch response for code %s' % http_code) |
|
257 | 253 | return HTTPForbidden |
|
258 | 254 | |
|
259 | 255 | def build_authentication(self): |
|
260 | 256 | head = WWW_AUTHENTICATE.tuples('Basic realm="%s"' % self.realm) |
|
261 | 257 | if self._rc_auth_http_code and not self.initial_call: |
|
262 | 258 | # return alternative HTTP code if alternative http return code |
|
263 | 259 | # is specified in RhodeCode config, but ONLY if it's not the |
|
264 | 260 | # FIRST call |
|
265 | 261 | custom_response_klass = self._get_response_from_code( |
|
266 | 262 | self._rc_auth_http_code) |
|
267 | 263 | return custom_response_klass(headers=head) |
|
268 | 264 | return HTTPUnauthorized(headers=head) |
|
269 | 265 | |
|
270 | 266 | def authenticate(self, environ): |
|
271 | 267 | authorization = AUTHORIZATION(environ) |
|
272 | 268 | if not authorization: |
|
273 | 269 | return self.build_authentication() |
|
274 | 270 | (authmeth, auth) = authorization.split(' ', 1) |
|
275 | 271 | if 'basic' != authmeth.lower(): |
|
276 | 272 | return self.build_authentication() |
|
277 | 273 | auth = auth.strip().decode('base64') |
|
278 | 274 | _parts = auth.split(':', 1) |
|
279 | 275 | if len(_parts) == 2: |
|
280 | 276 | username, password = _parts |
|
281 | 277 | if self.authfunc( |
|
282 | 278 | username, password, environ, VCS_TYPE, |
|
283 | 279 | registry=self.registry, acl_repo_name=self.acl_repo_name): |
|
284 | 280 | return username |
|
285 | 281 | if username and password: |
|
286 | 282 | # we mark that we actually executed authentication once, at |
|
287 | 283 | # that point we can use the alternative auth code |
|
288 | 284 | self.initial_call = False |
|
289 | 285 | |
|
290 | 286 | return self.build_authentication() |
|
291 | 287 | |
|
292 | 288 | __call__ = authenticate |
|
293 | 289 | |
|
294 | 290 | |
|
295 | 291 | def calculate_version_hash(config): |
|
296 | 292 | return md5( |
|
297 | 293 | config.get('beaker.session.secret', '') + |
|
298 | 294 | rhodecode.__version__)[:8] |
|
299 | 295 | |
|
300 | 296 | |
|
301 | 297 | def get_current_lang(request): |
|
302 | 298 | # NOTE(marcink): remove after pyramid move |
|
303 | 299 | try: |
|
304 | 300 | return translation.get_lang()[0] |
|
305 | 301 | except: |
|
306 | 302 | pass |
|
307 | 303 | |
|
308 | 304 | return getattr(request, '_LOCALE_', request.locale_name) |
|
309 | 305 | |
|
310 | 306 | |
|
311 | 307 | def attach_context_attributes(context, request, user_id): |
|
312 | 308 | """ |
|
313 | 309 | Attach variables into template context called `c`, please note that |
|
314 | 310 | request could be pylons or pyramid request in here. |
|
315 | 311 | """ |
|
316 | 312 | # NOTE(marcink): remove check after pyramid migration |
|
317 | 313 | if hasattr(request, 'registry'): |
|
318 | 314 | config = request.registry.settings |
|
319 | 315 | else: |
|
320 | 316 | from pylons import config |
|
321 | 317 | |
|
322 | 318 | rc_config = SettingsModel().get_all_settings(cache=True) |
|
323 | 319 | |
|
324 | 320 | context.rhodecode_version = rhodecode.__version__ |
|
325 | 321 | context.rhodecode_edition = config.get('rhodecode.edition') |
|
326 | 322 | # unique secret + version does not leak the version but keep consistency |
|
327 | 323 | context.rhodecode_version_hash = calculate_version_hash(config) |
|
328 | 324 | |
|
329 | 325 | # Default language set for the incoming request |
|
330 | 326 | context.language = get_current_lang(request) |
|
331 | 327 | |
|
332 | 328 | # Visual options |
|
333 | 329 | context.visual = AttributeDict({}) |
|
334 | 330 | |
|
335 | 331 | # DB stored Visual Items |
|
336 | 332 | context.visual.show_public_icon = str2bool( |
|
337 | 333 | rc_config.get('rhodecode_show_public_icon')) |
|
338 | 334 | context.visual.show_private_icon = str2bool( |
|
339 | 335 | rc_config.get('rhodecode_show_private_icon')) |
|
340 | 336 | context.visual.stylify_metatags = str2bool( |
|
341 | 337 | rc_config.get('rhodecode_stylify_metatags')) |
|
342 | 338 | context.visual.dashboard_items = safe_int( |
|
343 | 339 | rc_config.get('rhodecode_dashboard_items', 100)) |
|
344 | 340 | context.visual.admin_grid_items = safe_int( |
|
345 | 341 | rc_config.get('rhodecode_admin_grid_items', 100)) |
|
346 | 342 | context.visual.repository_fields = str2bool( |
|
347 | 343 | rc_config.get('rhodecode_repository_fields')) |
|
348 | 344 | context.visual.show_version = str2bool( |
|
349 | 345 | rc_config.get('rhodecode_show_version')) |
|
350 | 346 | context.visual.use_gravatar = str2bool( |
|
351 | 347 | rc_config.get('rhodecode_use_gravatar')) |
|
352 | 348 | context.visual.gravatar_url = rc_config.get('rhodecode_gravatar_url') |
|
353 | 349 | context.visual.default_renderer = rc_config.get( |
|
354 | 350 | 'rhodecode_markup_renderer', 'rst') |
|
355 | 351 | context.visual.comment_types = ChangesetComment.COMMENT_TYPES |
|
356 | 352 | context.visual.rhodecode_support_url = \ |
|
357 | 353 | rc_config.get('rhodecode_support_url') or h.route_url('rhodecode_support') |
|
358 | 354 | |
|
359 | 355 | context.visual.affected_files_cut_off = 60 |
|
360 | 356 | |
|
361 | 357 | context.pre_code = rc_config.get('rhodecode_pre_code') |
|
362 | 358 | context.post_code = rc_config.get('rhodecode_post_code') |
|
363 | 359 | context.rhodecode_name = rc_config.get('rhodecode_title') |
|
364 | 360 | context.default_encodings = aslist(config.get('default_encoding'), sep=',') |
|
365 | 361 | # if we have specified default_encoding in the request, it has more |
|
366 | 362 | # priority |
|
367 | 363 | if request.GET.get('default_encoding'): |
|
368 | 364 | context.default_encodings.insert(0, request.GET.get('default_encoding')) |
|
369 | 365 | context.clone_uri_tmpl = rc_config.get('rhodecode_clone_uri_tmpl') |
|
370 | 366 | |
|
371 | 367 | # INI stored |
|
372 | 368 | context.labs_active = str2bool( |
|
373 | 369 | config.get('labs_settings_active', 'false')) |
|
374 | 370 | context.visual.allow_repo_location_change = str2bool( |
|
375 | 371 | config.get('allow_repo_location_change', True)) |
|
376 | 372 | context.visual.allow_custom_hooks_settings = str2bool( |
|
377 | 373 | config.get('allow_custom_hooks_settings', True)) |
|
378 | 374 | context.debug_style = str2bool(config.get('debug_style', False)) |
|
379 | 375 | |
|
380 | 376 | context.rhodecode_instanceid = config.get('instance_id') |
|
381 | 377 | |
|
382 | 378 | context.visual.cut_off_limit_diff = safe_int( |
|
383 | 379 | config.get('cut_off_limit_diff')) |
|
384 | 380 | context.visual.cut_off_limit_file = safe_int( |
|
385 | 381 | config.get('cut_off_limit_file')) |
|
386 | 382 | |
|
387 | 383 | # AppEnlight |
|
388 | 384 | context.appenlight_enabled = str2bool(config.get('appenlight', 'false')) |
|
389 | 385 | context.appenlight_api_public_key = config.get( |
|
390 | 386 | 'appenlight.api_public_key', '') |
|
391 | 387 | context.appenlight_server_url = config.get('appenlight.server_url', '') |
|
392 | 388 | |
|
393 | 389 | # JS template context |
|
394 | 390 | context.template_context = { |
|
395 | 391 | 'repo_name': None, |
|
396 | 392 | 'repo_type': None, |
|
397 | 393 | 'repo_landing_commit': None, |
|
398 | 394 | 'rhodecode_user': { |
|
399 | 395 | 'username': None, |
|
400 | 396 | 'email': None, |
|
401 | 397 | 'notification_status': False |
|
402 | 398 | }, |
|
403 | 399 | 'visual': { |
|
404 | 400 | 'default_renderer': None |
|
405 | 401 | }, |
|
406 | 402 | 'commit_data': { |
|
407 | 403 | 'commit_id': None |
|
408 | 404 | }, |
|
409 | 405 | 'pull_request_data': {'pull_request_id': None}, |
|
410 | 406 | 'timeago': { |
|
411 | 407 | 'refresh_time': 120 * 1000, |
|
412 | 408 | 'cutoff_limit': 1000 * 60 * 60 * 24 * 7 |
|
413 | 409 | }, |
|
414 | 'pylons_dispatch': { | |
|
415 | # 'controller': request.environ['pylons.routes_dict']['controller'], | |
|
416 | # 'action': request.environ['pylons.routes_dict']['action'], | |
|
417 | }, | |
|
418 | 410 | 'pyramid_dispatch': { |
|
419 | 411 | |
|
420 | 412 | }, |
|
421 | 413 | 'extra': {'plugins': {}} |
|
422 | 414 | } |
|
423 | 415 | # END CONFIG VARS |
|
424 | 416 | |
|
425 | 417 | # TODO: This dosn't work when called from pylons compatibility tween. |
|
426 | 418 | # Fix this and remove it from base controller. |
|
427 | 419 | # context.repo_name = get_repo_slug(request) # can be empty |
|
428 | 420 | |
|
429 | 421 | diffmode = 'sideside' |
|
430 | 422 | if request.GET.get('diffmode'): |
|
431 | 423 | if request.GET['diffmode'] == 'unified': |
|
432 | 424 | diffmode = 'unified' |
|
433 | 425 | elif request.session.get('diffmode'): |
|
434 | 426 | diffmode = request.session['diffmode'] |
|
435 | 427 | |
|
436 | 428 | context.diffmode = diffmode |
|
437 | 429 | |
|
438 | 430 | if request.session.get('diffmode') != diffmode: |
|
439 | 431 | request.session['diffmode'] = diffmode |
|
440 | 432 | |
|
441 | 433 | context.csrf_token = auth.get_csrf_token(session=request.session) |
|
442 | 434 | context.backends = rhodecode.BACKENDS.keys() |
|
443 | 435 | context.backends.sort() |
|
444 | 436 | context.unread_notifications = NotificationModel().get_unread_cnt_for_user(user_id) |
|
445 | 437 | |
|
446 | 438 | # NOTE(marcink): when migrated to pyramid we don't need to set this anymore, |
|
447 | 439 | # given request will ALWAYS be pyramid one |
|
448 | 440 | pyramid_request = pyramid.threadlocal.get_current_request() |
|
449 | 441 | context.pyramid_request = pyramid_request |
|
450 | 442 | |
|
451 | 443 | # web case |
|
452 | 444 | if hasattr(pyramid_request, 'user'): |
|
453 | 445 | context.auth_user = pyramid_request.user |
|
454 | 446 | context.rhodecode_user = pyramid_request.user |
|
455 | 447 | |
|
456 | 448 | # api case |
|
457 | 449 | if hasattr(pyramid_request, 'rpc_user'): |
|
458 | 450 | context.auth_user = pyramid_request.rpc_user |
|
459 | 451 | context.rhodecode_user = pyramid_request.rpc_user |
|
460 | 452 | |
|
461 | 453 | # attach the whole call context to the request |
|
462 | 454 | request.call_context = context |
|
463 | 455 | |
|
464 | 456 | |
|
465 | 457 | def get_auth_user(request): |
|
466 | 458 | environ = request.environ |
|
467 | 459 | session = request.session |
|
468 | 460 | |
|
469 | 461 | ip_addr = get_ip_addr(environ) |
|
470 | 462 | # make sure that we update permissions each time we call controller |
|
471 | 463 | _auth_token = (request.GET.get('auth_token', '') or |
|
472 | 464 | request.GET.get('api_key', '')) |
|
473 | 465 | |
|
474 | 466 | if _auth_token: |
|
475 | 467 | # when using API_KEY we assume user exists, and |
|
476 | 468 | # doesn't need auth based on cookies. |
|
477 | 469 | auth_user = AuthUser(api_key=_auth_token, ip_addr=ip_addr) |
|
478 | 470 | authenticated = False |
|
479 | 471 | else: |
|
480 | 472 | cookie_store = CookieStoreWrapper(session.get('rhodecode_user')) |
|
481 | 473 | try: |
|
482 | 474 | auth_user = AuthUser(user_id=cookie_store.get('user_id', None), |
|
483 | 475 | ip_addr=ip_addr) |
|
484 | 476 | except UserCreationError as e: |
|
485 | 477 | h.flash(e, 'error') |
|
486 | 478 | # container auth or other auth functions that create users |
|
487 | 479 | # on the fly can throw this exception signaling that there's |
|
488 | 480 | # issue with user creation, explanation should be provided |
|
489 | 481 | # in Exception itself. We then create a simple blank |
|
490 | 482 | # AuthUser |
|
491 | 483 | auth_user = AuthUser(ip_addr=ip_addr) |
|
492 | 484 | |
|
493 | 485 | if password_changed(auth_user, session): |
|
494 | 486 | session.invalidate() |
|
495 | 487 | cookie_store = CookieStoreWrapper(session.get('rhodecode_user')) |
|
496 | 488 | auth_user = AuthUser(ip_addr=ip_addr) |
|
497 | 489 | |
|
498 | 490 | authenticated = cookie_store.get('is_authenticated') |
|
499 | 491 | |
|
500 | 492 | if not auth_user.is_authenticated and auth_user.is_user_object: |
|
501 | 493 | # user is not authenticated and not empty |
|
502 | 494 | auth_user.set_authenticated(authenticated) |
|
503 | 495 | |
|
504 | 496 | return auth_user |
|
505 | 497 | |
|
506 | 498 | |
|
507 | 499 | class BaseController(WSGIController): |
|
508 | 500 | |
|
509 | 501 | def __before__(self): |
|
510 | 502 | """ |
|
511 | 503 | __before__ is called before controller methods and after __call__ |
|
512 | 504 | """ |
|
513 | 505 | # on each call propagate settings calls into global settings. |
|
514 | 506 | from pylons import config |
|
507 | from pylons import tmpl_context as c, request, url | |
|
515 | 508 | set_rhodecode_config(config) |
|
516 | 509 | attach_context_attributes(c, request, self._rhodecode_user.user_id) |
|
517 | 510 | |
|
518 | 511 | # TODO: Remove this when fixed in attach_context_attributes() |
|
519 | 512 | c.repo_name = get_repo_slug(request) # can be empty |
|
520 | 513 | |
|
521 | 514 | self.cut_off_limit_diff = safe_int(config.get('cut_off_limit_diff')) |
|
522 | 515 | self.cut_off_limit_file = safe_int(config.get('cut_off_limit_file')) |
|
523 | 516 | self.sa = meta.Session |
|
524 | 517 | self.scm_model = ScmModel(self.sa) |
|
525 | 518 | |
|
526 | 519 | # set user language |
|
527 | 520 | user_lang = getattr(c.pyramid_request, '_LOCALE_', None) |
|
528 | 521 | if user_lang: |
|
529 | 522 | translation.set_lang(user_lang) |
|
530 | 523 | log.debug('set language to %s for user %s', |
|
531 | 524 | user_lang, self._rhodecode_user) |
|
532 | 525 | |
|
533 | 526 | def _dispatch_redirect(self, with_url, environ, start_response): |
|
527 | from webob.exc import HTTPFound | |
|
534 | 528 | resp = HTTPFound(with_url) |
|
535 | 529 | environ['SCRIPT_NAME'] = '' # handle prefix middleware |
|
536 | 530 | environ['PATH_INFO'] = with_url |
|
537 | 531 | return resp(environ, start_response) |
|
538 | 532 | |
|
539 | 533 | def __call__(self, environ, start_response): |
|
540 | 534 | """Invoke the Controller""" |
|
541 | 535 | # WSGIController.__call__ dispatches to the Controller method |
|
542 | 536 | # the request is routed to. This routing information is |
|
543 | 537 | # available in environ['pylons.routes_dict'] |
|
544 | 538 | from rhodecode.lib import helpers as h |
|
539 | from pylons import tmpl_context as c, request, url | |
|
545 | 540 | |
|
546 | 541 | # Provide the Pylons context to Pyramid's debugtoolbar if it asks |
|
547 | 542 | if environ.get('debugtoolbar.wants_pylons_context', False): |
|
548 | 543 | environ['debugtoolbar.pylons_context'] = c._current_obj() |
|
549 | 544 | |
|
550 | 545 | _route_name = '.'.join([environ['pylons.routes_dict']['controller'], |
|
551 | 546 | environ['pylons.routes_dict']['action']]) |
|
552 | 547 | |
|
553 | 548 | self.rc_config = SettingsModel().get_all_settings(cache=True) |
|
554 | 549 | self.ip_addr = get_ip_addr(environ) |
|
555 | 550 | |
|
556 | 551 | # The rhodecode auth user is looked up and passed through the |
|
557 | 552 | # environ by the pylons compatibility tween in pyramid. |
|
558 | 553 | # So we can just grab it from there. |
|
559 | 554 | auth_user = environ['rc_auth_user'] |
|
560 | 555 | |
|
561 | 556 | # set globals for auth user |
|
562 | 557 | request.user = auth_user |
|
563 | 558 | self._rhodecode_user = auth_user |
|
564 | 559 | |
|
565 | 560 | log.info('IP: %s User: %s accessed %s [%s]' % ( |
|
566 | 561 | self.ip_addr, auth_user, safe_unicode(get_access_path(environ)), |
|
567 | 562 | _route_name) |
|
568 | 563 | ) |
|
569 | 564 | |
|
570 | 565 | user_obj = auth_user.get_instance() |
|
571 | 566 | if user_obj and user_obj.user_data.get('force_password_change'): |
|
572 | 567 | h.flash('You are required to change your password', 'warning', |
|
573 | 568 | ignore_duplicate=True) |
|
574 | 569 | return self._dispatch_redirect( |
|
575 | 570 | url('my_account_password'), environ, start_response) |
|
576 | 571 | |
|
577 | 572 | return WSGIController.__call__(self, environ, start_response) |
|
578 | 573 | |
|
579 | 574 | |
|
580 | 575 | def h_filter(s): |
|
581 | 576 | """ |
|
582 | 577 | Custom filter for Mako templates. Mako by standard uses `markupsafe.escape` |
|
583 | 578 | we wrap this with additional functionality that converts None to empty |
|
584 | 579 | strings |
|
585 | 580 | """ |
|
586 | 581 | if s is None: |
|
587 | 582 | return markupsafe.Markup() |
|
588 | 583 | return markupsafe.escape(s) |
|
589 | 584 | |
|
590 | 585 | |
|
591 | 586 | def add_events_routes(config): |
|
592 | 587 | """ |
|
593 | 588 | Adds routing that can be used in events. Because some events are triggered |
|
594 | 589 | outside of pyramid context, we need to bootstrap request with some |
|
595 | 590 | routing registered |
|
596 | 591 | """ |
|
597 | 592 | config.add_route(name='home', pattern='/') |
|
598 | 593 | |
|
599 | 594 | config.add_route(name='repo_summary', pattern='/{repo_name}') |
|
600 | 595 | config.add_route(name='repo_summary_explicit', pattern='/{repo_name}/summary') |
|
601 | 596 | config.add_route(name='repo_group_home', pattern='/{repo_group_name}') |
|
602 | 597 | |
|
603 | 598 | config.add_route(name='pullrequest_show', |
|
604 | 599 | pattern='/{repo_name}/pull-request/{pull_request_id}') |
|
605 | 600 | config.add_route(name='pull_requests_global', |
|
606 | 601 | pattern='/pull-request/{pull_request_id}') |
|
607 | 602 | |
|
608 | 603 | config.add_route(name='repo_commit', |
|
609 | 604 | pattern='/{repo_name}/changeset/{commit_id}') |
|
610 | 605 | config.add_route(name='repo_files', |
|
611 | 606 | pattern='/{repo_name}/files/{commit_id}/{f_path}') |
|
612 | 607 | |
|
613 | 608 | |
|
614 | 609 | def bootstrap_request(**kwargs): |
|
615 | 610 | import pyramid.testing |
|
616 | 611 | request = pyramid.testing.DummyRequest(**kwargs) |
|
617 | 612 | request.application_url = kwargs.pop('application_url', 'http://example.com') |
|
618 | 613 | request.host = kwargs.pop('host', 'example.com:80') |
|
619 | 614 | request.domain = kwargs.pop('domain', 'example.com') |
|
620 | 615 | |
|
621 | 616 | config = pyramid.testing.setUp(request=request) |
|
622 | 617 | add_events_routes(config) |
|
623 | ||
|
624 | ||
|
625 | class BaseRepoController(BaseController): | |
|
626 | """ | |
|
627 | Base class for controllers responsible for loading all needed data for | |
|
628 | repository loaded items are | |
|
629 | ||
|
630 | c.rhodecode_repo: instance of scm repository | |
|
631 | c.rhodecode_db_repo: instance of db | |
|
632 | c.repository_requirements_missing: shows that repository specific data | |
|
633 | could not be displayed due to the missing requirements | |
|
634 | c.repository_pull_requests: show number of open pull requests | |
|
635 | """ | |
|
636 | ||
|
637 | def __before__(self): | |
|
638 | super(BaseRepoController, self).__before__() | |
|
639 | if c.repo_name: # extracted from routes | |
|
640 | db_repo = Repository.get_by_repo_name(c.repo_name) | |
|
641 | if not db_repo: | |
|
642 | return | |
|
643 | ||
|
644 | log.debug( | |
|
645 | 'Found repository in database %s with state `%s`', | |
|
646 | safe_unicode(db_repo), safe_unicode(db_repo.repo_state)) | |
|
647 | route = getattr(request.environ.get('routes.route'), 'name', '') | |
|
648 | ||
|
649 | # allow to delete repos that are somehow damages in filesystem | |
|
650 | if route in ['delete_repo']: | |
|
651 | return | |
|
652 | ||
|
653 | if db_repo.repo_state in [Repository.STATE_PENDING]: | |
|
654 | if route in ['repo_creating_home']: | |
|
655 | return | |
|
656 | check_url = url('repo_creating_home', repo_name=c.repo_name) | |
|
657 | return redirect(check_url) | |
|
658 | ||
|
659 | self.rhodecode_db_repo = db_repo | |
|
660 | ||
|
661 | missing_requirements = False | |
|
662 | try: | |
|
663 | self.rhodecode_repo = self.rhodecode_db_repo.scm_instance() | |
|
664 | except RepositoryRequirementError as e: | |
|
665 | missing_requirements = True | |
|
666 | self._handle_missing_requirements(e) | |
|
667 | ||
|
668 | if self.rhodecode_repo is None and not missing_requirements: | |
|
669 | log.error('%s this repository is present in database but it ' | |
|
670 | 'cannot be created as an scm instance', c.repo_name) | |
|
671 | ||
|
672 | h.flash(_( | |
|
673 | "The repository at %(repo_name)s cannot be located.") % | |
|
674 | {'repo_name': c.repo_name}, | |
|
675 | category='error', ignore_duplicate=True) | |
|
676 | redirect(h.route_path('home')) | |
|
677 | ||
|
678 | # update last change according to VCS data | |
|
679 | if not missing_requirements: | |
|
680 | commit = db_repo.get_commit( | |
|
681 | pre_load=["author", "date", "message", "parents"]) | |
|
682 | db_repo.update_commit_cache(commit) | |
|
683 | ||
|
684 | # Prepare context | |
|
685 | c.rhodecode_db_repo = db_repo | |
|
686 | c.rhodecode_repo = self.rhodecode_repo | |
|
687 | c.repository_requirements_missing = missing_requirements | |
|
688 | ||
|
689 | self._update_global_counters(self.scm_model, db_repo) | |
|
690 | ||
|
691 | def _update_global_counters(self, scm_model, db_repo): | |
|
692 | """ | |
|
693 | Base variables that are exposed to every page of repository | |
|
694 | """ | |
|
695 | c.repository_pull_requests = scm_model.get_pull_requests(db_repo) | |
|
696 | ||
|
697 | def _handle_missing_requirements(self, error): | |
|
698 | self.rhodecode_repo = None | |
|
699 | log.error( | |
|
700 | 'Requirements are missing for repository %s: %s', | |
|
701 | c.repo_name, error.message) | |
|
702 | ||
|
703 | summary_url = h.route_path('repo_summary', repo_name=c.repo_name) | |
|
704 | statistics_url = url('edit_repo_statistics', repo_name=c.repo_name) | |
|
705 | settings_update_url = url('repo', repo_name=c.repo_name) | |
|
706 | path = request.path | |
|
707 | should_redirect = ( | |
|
708 | path not in (summary_url, settings_update_url) | |
|
709 | and '/settings' not in path or path == statistics_url | |
|
710 | ) | |
|
711 | if should_redirect: | |
|
712 | redirect(summary_url) |
@@ -1,997 +1,973 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Utilities library for RhodeCode |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import datetime |
|
26 | 26 | import decorator |
|
27 | 27 | import json |
|
28 | 28 | import logging |
|
29 | 29 | import os |
|
30 | 30 | import re |
|
31 | 31 | import shutil |
|
32 | 32 | import tempfile |
|
33 | 33 | import traceback |
|
34 | 34 | import tarfile |
|
35 | 35 | import warnings |
|
36 | 36 | import hashlib |
|
37 | 37 | from os.path import join as jn |
|
38 | 38 | |
|
39 | 39 | import paste |
|
40 | 40 | import pkg_resources |
|
41 | 41 | from paste.script.command import Command, BadCommand |
|
42 | 42 | from webhelpers.text import collapse, remove_formatting, strip_tags |
|
43 | 43 | from mako import exceptions |
|
44 | 44 | from pyramid.threadlocal import get_current_registry |
|
45 | 45 | from pyramid.request import Request |
|
46 | 46 | |
|
47 | 47 | from rhodecode.lib.fakemod import create_module |
|
48 | 48 | from rhodecode.lib.vcs.backends.base import Config |
|
49 | 49 | from rhodecode.lib.vcs.exceptions import VCSError |
|
50 | 50 | from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend |
|
51 | 51 | from rhodecode.lib.utils2 import ( |
|
52 | 52 | safe_str, safe_unicode, get_current_rhodecode_user, md5) |
|
53 | 53 | from rhodecode.model import meta |
|
54 | 54 | from rhodecode.model.db import ( |
|
55 | 55 | Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup) |
|
56 | 56 | from rhodecode.model.meta import Session |
|
57 | 57 | |
|
58 | 58 | |
|
59 | 59 | log = logging.getLogger(__name__) |
|
60 | 60 | |
|
61 | 61 | REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*') |
|
62 | 62 | |
|
63 | 63 | # String which contains characters that are not allowed in slug names for |
|
64 | 64 | # repositories or repository groups. It is properly escaped to use it in |
|
65 | 65 | # regular expressions. |
|
66 | 66 | SLUG_BAD_CHARS = re.escape('`?=[]\;\'"<>,/~!@#$%^&*()+{}|:') |
|
67 | 67 | |
|
68 | 68 | # Regex that matches forbidden characters in repo/group slugs. |
|
69 | 69 | SLUG_BAD_CHAR_RE = re.compile('[{}]'.format(SLUG_BAD_CHARS)) |
|
70 | 70 | |
|
71 | 71 | # Regex that matches allowed characters in repo/group slugs. |
|
72 | 72 | SLUG_GOOD_CHAR_RE = re.compile('[^{}]'.format(SLUG_BAD_CHARS)) |
|
73 | 73 | |
|
74 | 74 | # Regex that matches whole repo/group slugs. |
|
75 | 75 | SLUG_RE = re.compile('[^{}]+'.format(SLUG_BAD_CHARS)) |
|
76 | 76 | |
|
77 | 77 | _license_cache = None |
|
78 | 78 | |
|
79 | 79 | |
|
80 | 80 | def repo_name_slug(value): |
|
81 | 81 | """ |
|
82 | 82 | Return slug of name of repository |
|
83 | 83 | This function is called on each creation/modification |
|
84 | 84 | of repository to prevent bad names in repo |
|
85 | 85 | """ |
|
86 | 86 | replacement_char = '-' |
|
87 | 87 | |
|
88 | 88 | slug = remove_formatting(value) |
|
89 | 89 | slug = SLUG_BAD_CHAR_RE.sub('', slug) |
|
90 | 90 | slug = re.sub('[\s]+', '-', slug) |
|
91 | 91 | slug = collapse(slug, replacement_char) |
|
92 | 92 | return slug |
|
93 | 93 | |
|
94 | 94 | |
|
95 | 95 | #============================================================================== |
|
96 | 96 | # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS |
|
97 | 97 | #============================================================================== |
|
98 | 98 | def get_repo_slug(request): |
|
99 | 99 | _repo = '' |
|
100 | 100 | if isinstance(request, Request): |
|
101 | 101 | if hasattr(request, 'db_repo'): |
|
102 | 102 | # if our requests has set db reference use it for name, this |
|
103 | 103 | # translates the example.com/_<id> into proper repo names |
|
104 | 104 | _repo = request.db_repo.repo_name |
|
105 | 105 | elif getattr(request, 'matchdict', None): |
|
106 | 106 | # pyramid |
|
107 | 107 | _repo = request.matchdict.get('repo_name') |
|
108 | 108 | |
|
109 | 109 | # TODO(marcink): remove after pylons migration... |
|
110 | 110 | if not _repo: |
|
111 | 111 | _repo = request.environ['pylons.routes_dict'].get('repo_name') |
|
112 | 112 | |
|
113 | 113 | if _repo: |
|
114 | 114 | _repo = _repo.rstrip('/') |
|
115 | 115 | return _repo |
|
116 | 116 | |
|
117 | 117 | |
|
118 | 118 | def get_repo_group_slug(request): |
|
119 | 119 | _group = '' |
|
120 | 120 | if isinstance(request, Request): |
|
121 | 121 | if hasattr(request, 'db_repo_group'): |
|
122 | 122 | # if our requests has set db reference use it for name, this |
|
123 | 123 | # translates the example.com/_<id> into proper repo group names |
|
124 | 124 | _group = request.db_repo_group.group_name |
|
125 | 125 | elif getattr(request, 'matchdict', None): |
|
126 | 126 | # pyramid |
|
127 | 127 | _group = request.matchdict.get('repo_group_name') |
|
128 | 128 | |
|
129 | 129 | # TODO(marcink): remove after pylons migration... |
|
130 | 130 | if not _group: |
|
131 | 131 | _group = request.environ['pylons.routes_dict'].get('group_name') |
|
132 | 132 | |
|
133 | 133 | if _group: |
|
134 | 134 | _group = _group.rstrip('/') |
|
135 | 135 | return _group |
|
136 | 136 | |
|
137 | 137 | |
|
138 | 138 | def get_user_group_slug(request): |
|
139 | 139 | if isinstance(request, Request) and getattr(request, 'matchdict', None): |
|
140 | 140 | # pyramid |
|
141 | 141 | _group = request.matchdict.get('user_group_id') |
|
142 | 142 | else: |
|
143 | 143 | _group = request.environ['pylons.routes_dict'].get('user_group_id') |
|
144 | 144 | |
|
145 | 145 | try: |
|
146 | 146 | _group = UserGroup.get(_group) |
|
147 | 147 | if _group: |
|
148 | 148 | _group = _group.users_group_name |
|
149 | 149 | except Exception: |
|
150 | 150 | log.exception('Failed to get user group by id') |
|
151 | 151 | # catch all failures here |
|
152 | 152 | return None |
|
153 | 153 | |
|
154 | 154 | return _group |
|
155 | 155 | |
|
156 | 156 | |
|
157 | 157 | def get_filesystem_repos(path, recursive=False, skip_removed_repos=True): |
|
158 | 158 | """ |
|
159 | 159 | Scans given path for repos and return (name,(type,path)) tuple |
|
160 | 160 | |
|
161 | 161 | :param path: path to scan for repositories |
|
162 | 162 | :param recursive: recursive search and return names with subdirs in front |
|
163 | 163 | """ |
|
164 | 164 | |
|
165 | 165 | # remove ending slash for better results |
|
166 | 166 | path = path.rstrip(os.sep) |
|
167 | 167 | log.debug('now scanning in %s location recursive:%s...', path, recursive) |
|
168 | 168 | |
|
169 | 169 | def _get_repos(p): |
|
170 | 170 | dirpaths = _get_dirpaths(p) |
|
171 | 171 | if not _is_dir_writable(p): |
|
172 | 172 | log.warning('repo path without write access: %s', p) |
|
173 | 173 | |
|
174 | 174 | for dirpath in dirpaths: |
|
175 | 175 | if os.path.isfile(os.path.join(p, dirpath)): |
|
176 | 176 | continue |
|
177 | 177 | cur_path = os.path.join(p, dirpath) |
|
178 | 178 | |
|
179 | 179 | # skip removed repos |
|
180 | 180 | if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath): |
|
181 | 181 | continue |
|
182 | 182 | |
|
183 | 183 | #skip .<somethin> dirs |
|
184 | 184 | if dirpath.startswith('.'): |
|
185 | 185 | continue |
|
186 | 186 | |
|
187 | 187 | try: |
|
188 | 188 | scm_info = get_scm(cur_path) |
|
189 | 189 | yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info |
|
190 | 190 | except VCSError: |
|
191 | 191 | if not recursive: |
|
192 | 192 | continue |
|
193 | 193 | #check if this dir containts other repos for recursive scan |
|
194 | 194 | rec_path = os.path.join(p, dirpath) |
|
195 | 195 | if os.path.isdir(rec_path): |
|
196 | 196 | for inner_scm in _get_repos(rec_path): |
|
197 | 197 | yield inner_scm |
|
198 | 198 | |
|
199 | 199 | return _get_repos(path) |
|
200 | 200 | |
|
201 | 201 | |
|
202 | 202 | def _get_dirpaths(p): |
|
203 | 203 | try: |
|
204 | 204 | # OS-independable way of checking if we have at least read-only |
|
205 | 205 | # access or not. |
|
206 | 206 | dirpaths = os.listdir(p) |
|
207 | 207 | except OSError: |
|
208 | 208 | log.warning('ignoring repo path without read access: %s', p) |
|
209 | 209 | return [] |
|
210 | 210 | |
|
211 | 211 | # os.listpath has a tweak: If a unicode is passed into it, then it tries to |
|
212 | 212 | # decode paths and suddenly returns unicode objects itself. The items it |
|
213 | 213 | # cannot decode are returned as strings and cause issues. |
|
214 | 214 | # |
|
215 | 215 | # Those paths are ignored here until a solid solution for path handling has |
|
216 | 216 | # been built. |
|
217 | 217 | expected_type = type(p) |
|
218 | 218 | |
|
219 | 219 | def _has_correct_type(item): |
|
220 | 220 | if type(item) is not expected_type: |
|
221 | 221 | log.error( |
|
222 | 222 | u"Ignoring path %s since it cannot be decoded into unicode.", |
|
223 | 223 | # Using "repr" to make sure that we see the byte value in case |
|
224 | 224 | # of support. |
|
225 | 225 | repr(item)) |
|
226 | 226 | return False |
|
227 | 227 | return True |
|
228 | 228 | |
|
229 | 229 | dirpaths = [item for item in dirpaths if _has_correct_type(item)] |
|
230 | 230 | |
|
231 | 231 | return dirpaths |
|
232 | 232 | |
|
233 | 233 | |
|
234 | 234 | def _is_dir_writable(path): |
|
235 | 235 | """ |
|
236 | 236 | Probe if `path` is writable. |
|
237 | 237 | |
|
238 | 238 | Due to trouble on Cygwin / Windows, this is actually probing if it is |
|
239 | 239 | possible to create a file inside of `path`, stat does not produce reliable |
|
240 | 240 | results in this case. |
|
241 | 241 | """ |
|
242 | 242 | try: |
|
243 | 243 | with tempfile.TemporaryFile(dir=path): |
|
244 | 244 | pass |
|
245 | 245 | except OSError: |
|
246 | 246 | return False |
|
247 | 247 | return True |
|
248 | 248 | |
|
249 | 249 | |
|
250 | 250 | def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None): |
|
251 | 251 | """ |
|
252 | 252 | Returns True if given path is a valid repository False otherwise. |
|
253 | 253 | If expect_scm param is given also, compare if given scm is the same |
|
254 | 254 | as expected from scm parameter. If explicit_scm is given don't try to |
|
255 | 255 | detect the scm, just use the given one to check if repo is valid |
|
256 | 256 | |
|
257 | 257 | :param repo_name: |
|
258 | 258 | :param base_path: |
|
259 | 259 | :param expect_scm: |
|
260 | 260 | :param explicit_scm: |
|
261 | 261 | |
|
262 | 262 | :return True: if given path is a valid repository |
|
263 | 263 | """ |
|
264 | 264 | full_path = os.path.join(safe_str(base_path), safe_str(repo_name)) |
|
265 | 265 | log.debug('Checking if `%s` is a valid path for repository. ' |
|
266 | 266 | 'Explicit type: %s', repo_name, explicit_scm) |
|
267 | 267 | |
|
268 | 268 | try: |
|
269 | 269 | if explicit_scm: |
|
270 | 270 | detected_scms = [get_scm_backend(explicit_scm)] |
|
271 | 271 | else: |
|
272 | 272 | detected_scms = get_scm(full_path) |
|
273 | 273 | |
|
274 | 274 | if expect_scm: |
|
275 | 275 | return detected_scms[0] == expect_scm |
|
276 | 276 | log.debug('path: %s is an vcs object:%s', full_path, detected_scms) |
|
277 | 277 | return True |
|
278 | 278 | except VCSError: |
|
279 | 279 | log.debug('path: %s is not a valid repo !', full_path) |
|
280 | 280 | return False |
|
281 | 281 | |
|
282 | 282 | |
|
283 | 283 | def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False): |
|
284 | 284 | """ |
|
285 | 285 | Returns True if given path is a repository group, False otherwise |
|
286 | 286 | |
|
287 | 287 | :param repo_name: |
|
288 | 288 | :param base_path: |
|
289 | 289 | """ |
|
290 | 290 | full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name)) |
|
291 | 291 | log.debug('Checking if `%s` is a valid path for repository group', |
|
292 | 292 | repo_group_name) |
|
293 | 293 | |
|
294 | 294 | # check if it's not a repo |
|
295 | 295 | if is_valid_repo(repo_group_name, base_path): |
|
296 | 296 | log.debug('Repo called %s exist, it is not a valid ' |
|
297 | 297 | 'repo group' % repo_group_name) |
|
298 | 298 | return False |
|
299 | 299 | |
|
300 | 300 | try: |
|
301 | 301 | # we need to check bare git repos at higher level |
|
302 | 302 | # since we might match branches/hooks/info/objects or possible |
|
303 | 303 | # other things inside bare git repo |
|
304 | 304 | scm_ = get_scm(os.path.dirname(full_path)) |
|
305 | 305 | log.debug('path: %s is a vcs object:%s, not valid ' |
|
306 | 306 | 'repo group' % (full_path, scm_)) |
|
307 | 307 | return False |
|
308 | 308 | except VCSError: |
|
309 | 309 | pass |
|
310 | 310 | |
|
311 | 311 | # check if it's a valid path |
|
312 | 312 | if skip_path_check or os.path.isdir(full_path): |
|
313 | 313 | log.debug('path: %s is a valid repo group !', full_path) |
|
314 | 314 | return True |
|
315 | 315 | |
|
316 | 316 | log.debug('path: %s is not a valid repo group !', full_path) |
|
317 | 317 | return False |
|
318 | 318 | |
|
319 | 319 | |
|
320 | 320 | def ask_ok(prompt, retries=4, complaint='[y]es or [n]o please!'): |
|
321 | 321 | while True: |
|
322 | 322 | ok = raw_input(prompt) |
|
323 | 323 | if ok.lower() in ('y', 'ye', 'yes'): |
|
324 | 324 | return True |
|
325 | 325 | if ok.lower() in ('n', 'no', 'nop', 'nope'): |
|
326 | 326 | return False |
|
327 | 327 | retries = retries - 1 |
|
328 | 328 | if retries < 0: |
|
329 | 329 | raise IOError |
|
330 | 330 | print(complaint) |
|
331 | 331 | |
|
332 | 332 | # propagated from mercurial documentation |
|
333 | 333 | ui_sections = [ |
|
334 | 334 | 'alias', 'auth', |
|
335 | 335 | 'decode/encode', 'defaults', |
|
336 | 336 | 'diff', 'email', |
|
337 | 337 | 'extensions', 'format', |
|
338 | 338 | 'merge-patterns', 'merge-tools', |
|
339 | 339 | 'hooks', 'http_proxy', |
|
340 | 340 | 'smtp', 'patch', |
|
341 | 341 | 'paths', 'profiling', |
|
342 | 342 | 'server', 'trusted', |
|
343 | 343 | 'ui', 'web', ] |
|
344 | 344 | |
|
345 | 345 | |
|
346 | 346 | def config_data_from_db(clear_session=True, repo=None): |
|
347 | 347 | """ |
|
348 | 348 | Read the configuration data from the database and return configuration |
|
349 | 349 | tuples. |
|
350 | 350 | """ |
|
351 | 351 | from rhodecode.model.settings import VcsSettingsModel |
|
352 | 352 | |
|
353 | 353 | config = [] |
|
354 | 354 | |
|
355 | 355 | sa = meta.Session() |
|
356 | 356 | settings_model = VcsSettingsModel(repo=repo, sa=sa) |
|
357 | 357 | |
|
358 | 358 | ui_settings = settings_model.get_ui_settings() |
|
359 | 359 | |
|
360 | 360 | for setting in ui_settings: |
|
361 | 361 | if setting.active: |
|
362 | 362 | log.debug( |
|
363 | 363 | 'settings ui from db: [%s] %s=%s', |
|
364 | 364 | setting.section, setting.key, setting.value) |
|
365 | 365 | config.append(( |
|
366 | 366 | safe_str(setting.section), safe_str(setting.key), |
|
367 | 367 | safe_str(setting.value))) |
|
368 | 368 | if setting.key == 'push_ssl': |
|
369 | 369 | # force set push_ssl requirement to False, rhodecode |
|
370 | 370 | # handles that |
|
371 | 371 | config.append(( |
|
372 | 372 | safe_str(setting.section), safe_str(setting.key), False)) |
|
373 | 373 | if clear_session: |
|
374 | 374 | meta.Session.remove() |
|
375 | 375 | |
|
376 | 376 | # TODO: mikhail: probably it makes no sense to re-read hooks information. |
|
377 | 377 | # It's already there and activated/deactivated |
|
378 | 378 | skip_entries = [] |
|
379 | 379 | enabled_hook_classes = get_enabled_hook_classes(ui_settings) |
|
380 | 380 | if 'pull' not in enabled_hook_classes: |
|
381 | 381 | skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL)) |
|
382 | 382 | if 'push' not in enabled_hook_classes: |
|
383 | 383 | skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH)) |
|
384 | 384 | skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRETX_PUSH)) |
|
385 | 385 | skip_entries.append(('hooks', RhodeCodeUi.HOOK_PUSH_KEY)) |
|
386 | 386 | |
|
387 | 387 | config = [entry for entry in config if entry[:2] not in skip_entries] |
|
388 | 388 | |
|
389 | 389 | return config |
|
390 | 390 | |
|
391 | 391 | |
|
392 | 392 | def make_db_config(clear_session=True, repo=None): |
|
393 | 393 | """ |
|
394 | 394 | Create a :class:`Config` instance based on the values in the database. |
|
395 | 395 | """ |
|
396 | 396 | config = Config() |
|
397 | 397 | config_data = config_data_from_db(clear_session=clear_session, repo=repo) |
|
398 | 398 | for section, option, value in config_data: |
|
399 | 399 | config.set(section, option, value) |
|
400 | 400 | return config |
|
401 | 401 | |
|
402 | 402 | |
|
403 | 403 | def get_enabled_hook_classes(ui_settings): |
|
404 | 404 | """ |
|
405 | 405 | Return the enabled hook classes. |
|
406 | 406 | |
|
407 | 407 | :param ui_settings: List of ui_settings as returned |
|
408 | 408 | by :meth:`VcsSettingsModel.get_ui_settings` |
|
409 | 409 | |
|
410 | 410 | :return: a list with the enabled hook classes. The order is not guaranteed. |
|
411 | 411 | :rtype: list |
|
412 | 412 | """ |
|
413 | 413 | enabled_hooks = [] |
|
414 | 414 | active_hook_keys = [ |
|
415 | 415 | key for section, key, value, active in ui_settings |
|
416 | 416 | if section == 'hooks' and active] |
|
417 | 417 | |
|
418 | 418 | hook_names = { |
|
419 | 419 | RhodeCodeUi.HOOK_PUSH: 'push', |
|
420 | 420 | RhodeCodeUi.HOOK_PULL: 'pull', |
|
421 | 421 | RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size' |
|
422 | 422 | } |
|
423 | 423 | |
|
424 | 424 | for key in active_hook_keys: |
|
425 | 425 | hook = hook_names.get(key) |
|
426 | 426 | if hook: |
|
427 | 427 | enabled_hooks.append(hook) |
|
428 | 428 | |
|
429 | 429 | return enabled_hooks |
|
430 | 430 | |
|
431 | 431 | |
|
432 | 432 | def set_rhodecode_config(config): |
|
433 | 433 | """ |
|
434 | 434 | Updates pylons config with new settings from database |
|
435 | 435 | |
|
436 | 436 | :param config: |
|
437 | 437 | """ |
|
438 | 438 | from rhodecode.model.settings import SettingsModel |
|
439 | 439 | app_settings = SettingsModel().get_all_settings() |
|
440 | 440 | |
|
441 | 441 | for k, v in app_settings.items(): |
|
442 | 442 | config[k] = v |
|
443 | 443 | |
|
444 | 444 | |
|
445 | 445 | def get_rhodecode_realm(): |
|
446 | 446 | """ |
|
447 | 447 | Return the rhodecode realm from database. |
|
448 | 448 | """ |
|
449 | 449 | from rhodecode.model.settings import SettingsModel |
|
450 | 450 | realm = SettingsModel().get_setting_by_name('realm') |
|
451 | 451 | return safe_str(realm.app_settings_value) |
|
452 | 452 | |
|
453 | 453 | |
|
454 | 454 | def get_rhodecode_base_path(): |
|
455 | 455 | """ |
|
456 | 456 | Returns the base path. The base path is the filesystem path which points |
|
457 | 457 | to the repository store. |
|
458 | 458 | """ |
|
459 | 459 | from rhodecode.model.settings import SettingsModel |
|
460 | 460 | paths_ui = SettingsModel().get_ui_by_section_and_key('paths', '/') |
|
461 | 461 | return safe_str(paths_ui.ui_value) |
|
462 | 462 | |
|
463 | 463 | |
|
464 | 464 | def map_groups(path): |
|
465 | 465 | """ |
|
466 | 466 | Given a full path to a repository, create all nested groups that this |
|
467 | 467 | repo is inside. This function creates parent-child relationships between |
|
468 | 468 | groups and creates default perms for all new groups. |
|
469 | 469 | |
|
470 | 470 | :param paths: full path to repository |
|
471 | 471 | """ |
|
472 | 472 | from rhodecode.model.repo_group import RepoGroupModel |
|
473 | 473 | sa = meta.Session() |
|
474 | 474 | groups = path.split(Repository.NAME_SEP) |
|
475 | 475 | parent = None |
|
476 | 476 | group = None |
|
477 | 477 | |
|
478 | 478 | # last element is repo in nested groups structure |
|
479 | 479 | groups = groups[:-1] |
|
480 | 480 | rgm = RepoGroupModel(sa) |
|
481 | 481 | owner = User.get_first_super_admin() |
|
482 | 482 | for lvl, group_name in enumerate(groups): |
|
483 | 483 | group_name = '/'.join(groups[:lvl] + [group_name]) |
|
484 | 484 | group = RepoGroup.get_by_group_name(group_name) |
|
485 | 485 | desc = '%s group' % group_name |
|
486 | 486 | |
|
487 | 487 | # skip folders that are now removed repos |
|
488 | 488 | if REMOVED_REPO_PAT.match(group_name): |
|
489 | 489 | break |
|
490 | 490 | |
|
491 | 491 | if group is None: |
|
492 | 492 | log.debug('creating group level: %s group_name: %s', |
|
493 | 493 | lvl, group_name) |
|
494 | 494 | group = RepoGroup(group_name, parent) |
|
495 | 495 | group.group_description = desc |
|
496 | 496 | group.user = owner |
|
497 | 497 | sa.add(group) |
|
498 | 498 | perm_obj = rgm._create_default_perms(group) |
|
499 | 499 | sa.add(perm_obj) |
|
500 | 500 | sa.flush() |
|
501 | 501 | |
|
502 | 502 | parent = group |
|
503 | 503 | return group |
|
504 | 504 | |
|
505 | 505 | |
|
506 | 506 | def repo2db_mapper(initial_repo_list, remove_obsolete=False): |
|
507 | 507 | """ |
|
508 | 508 | maps all repos given in initial_repo_list, non existing repositories |
|
509 | 509 | are created, if remove_obsolete is True it also checks for db entries |
|
510 | 510 | that are not in initial_repo_list and removes them. |
|
511 | 511 | |
|
512 | 512 | :param initial_repo_list: list of repositories found by scanning methods |
|
513 | 513 | :param remove_obsolete: check for obsolete entries in database |
|
514 | 514 | """ |
|
515 | 515 | from rhodecode.model.repo import RepoModel |
|
516 | 516 | from rhodecode.model.scm import ScmModel |
|
517 | 517 | from rhodecode.model.repo_group import RepoGroupModel |
|
518 | 518 | from rhodecode.model.settings import SettingsModel |
|
519 | 519 | |
|
520 | 520 | sa = meta.Session() |
|
521 | 521 | repo_model = RepoModel() |
|
522 | 522 | user = User.get_first_super_admin() |
|
523 | 523 | added = [] |
|
524 | 524 | |
|
525 | 525 | # creation defaults |
|
526 | 526 | defs = SettingsModel().get_default_repo_settings(strip_prefix=True) |
|
527 | 527 | enable_statistics = defs.get('repo_enable_statistics') |
|
528 | 528 | enable_locking = defs.get('repo_enable_locking') |
|
529 | 529 | enable_downloads = defs.get('repo_enable_downloads') |
|
530 | 530 | private = defs.get('repo_private') |
|
531 | 531 | |
|
532 | 532 | for name, repo in initial_repo_list.items(): |
|
533 | 533 | group = map_groups(name) |
|
534 | 534 | unicode_name = safe_unicode(name) |
|
535 | 535 | db_repo = repo_model.get_by_repo_name(unicode_name) |
|
536 | 536 | # found repo that is on filesystem not in RhodeCode database |
|
537 | 537 | if not db_repo: |
|
538 | 538 | log.info('repository %s not found, creating now', name) |
|
539 | 539 | added.append(name) |
|
540 | 540 | desc = (repo.description |
|
541 | 541 | if repo.description != 'unknown' |
|
542 | 542 | else '%s repository' % name) |
|
543 | 543 | |
|
544 | 544 | db_repo = repo_model._create_repo( |
|
545 | 545 | repo_name=name, |
|
546 | 546 | repo_type=repo.alias, |
|
547 | 547 | description=desc, |
|
548 | 548 | repo_group=getattr(group, 'group_id', None), |
|
549 | 549 | owner=user, |
|
550 | 550 | enable_locking=enable_locking, |
|
551 | 551 | enable_downloads=enable_downloads, |
|
552 | 552 | enable_statistics=enable_statistics, |
|
553 | 553 | private=private, |
|
554 | 554 | state=Repository.STATE_CREATED |
|
555 | 555 | ) |
|
556 | 556 | sa.commit() |
|
557 | 557 | # we added that repo just now, and make sure we updated server info |
|
558 | 558 | if db_repo.repo_type == 'git': |
|
559 | 559 | git_repo = db_repo.scm_instance() |
|
560 | 560 | # update repository server-info |
|
561 | 561 | log.debug('Running update server info') |
|
562 | 562 | git_repo._update_server_info() |
|
563 | 563 | |
|
564 | 564 | db_repo.update_commit_cache() |
|
565 | 565 | |
|
566 | 566 | config = db_repo._config |
|
567 | 567 | config.set('extensions', 'largefiles', '') |
|
568 | 568 | ScmModel().install_hooks( |
|
569 | 569 | db_repo.scm_instance(config=config), |
|
570 | 570 | repo_type=db_repo.repo_type) |
|
571 | 571 | |
|
572 | 572 | removed = [] |
|
573 | 573 | if remove_obsolete: |
|
574 | 574 | # remove from database those repositories that are not in the filesystem |
|
575 | 575 | for repo in sa.query(Repository).all(): |
|
576 | 576 | if repo.repo_name not in initial_repo_list.keys(): |
|
577 | 577 | log.debug("Removing non-existing repository found in db `%s`", |
|
578 | 578 | repo.repo_name) |
|
579 | 579 | try: |
|
580 | 580 | RepoModel(sa).delete(repo, forks='detach', fs_remove=False) |
|
581 | 581 | sa.commit() |
|
582 | 582 | removed.append(repo.repo_name) |
|
583 | 583 | except Exception: |
|
584 | 584 | # don't hold further removals on error |
|
585 | 585 | log.error(traceback.format_exc()) |
|
586 | 586 | sa.rollback() |
|
587 | 587 | |
|
588 | 588 | def splitter(full_repo_name): |
|
589 | 589 | _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1) |
|
590 | 590 | gr_name = None |
|
591 | 591 | if len(_parts) == 2: |
|
592 | 592 | gr_name = _parts[0] |
|
593 | 593 | return gr_name |
|
594 | 594 | |
|
595 | 595 | initial_repo_group_list = [splitter(x) for x in |
|
596 | 596 | initial_repo_list.keys() if splitter(x)] |
|
597 | 597 | |
|
598 | 598 | # remove from database those repository groups that are not in the |
|
599 | 599 | # filesystem due to parent child relationships we need to delete them |
|
600 | 600 | # in a specific order of most nested first |
|
601 | 601 | all_groups = [x.group_name for x in sa.query(RepoGroup).all()] |
|
602 | 602 | nested_sort = lambda gr: len(gr.split('/')) |
|
603 | 603 | for group_name in sorted(all_groups, key=nested_sort, reverse=True): |
|
604 | 604 | if group_name not in initial_repo_group_list: |
|
605 | 605 | repo_group = RepoGroup.get_by_group_name(group_name) |
|
606 | 606 | if (repo_group.children.all() or |
|
607 | 607 | not RepoGroupModel().check_exist_filesystem( |
|
608 | 608 | group_name=group_name, exc_on_failure=False)): |
|
609 | 609 | continue |
|
610 | 610 | |
|
611 | 611 | log.info( |
|
612 | 612 | 'Removing non-existing repository group found in db `%s`', |
|
613 | 613 | group_name) |
|
614 | 614 | try: |
|
615 | 615 | RepoGroupModel(sa).delete(group_name, fs_remove=False) |
|
616 | 616 | sa.commit() |
|
617 | 617 | removed.append(group_name) |
|
618 | 618 | except Exception: |
|
619 | 619 | # don't hold further removals on error |
|
620 | 620 | log.exception( |
|
621 | 621 | 'Unable to remove repository group `%s`', |
|
622 | 622 | group_name) |
|
623 | 623 | sa.rollback() |
|
624 | 624 | raise |
|
625 | 625 | |
|
626 | 626 | return added, removed |
|
627 | 627 | |
|
628 | 628 | |
|
629 | 629 | def get_default_cache_settings(settings): |
|
630 | 630 | cache_settings = {} |
|
631 | 631 | for key in settings.keys(): |
|
632 | 632 | for prefix in ['beaker.cache.', 'cache.']: |
|
633 | 633 | if key.startswith(prefix): |
|
634 | 634 | name = key.split(prefix)[1].strip() |
|
635 | 635 | cache_settings[name] = settings[key].strip() |
|
636 | 636 | return cache_settings |
|
637 | 637 | |
|
638 | 638 | |
|
639 | 639 | # set cache regions for beaker so celery can utilise it |
|
640 | 640 | def add_cache(settings): |
|
641 | 641 | from rhodecode.lib import caches |
|
642 | 642 | cache_settings = {'regions': None} |
|
643 | 643 | # main cache settings used as default ... |
|
644 | 644 | cache_settings.update(get_default_cache_settings(settings)) |
|
645 | 645 | |
|
646 | 646 | if cache_settings['regions']: |
|
647 | 647 | for region in cache_settings['regions'].split(','): |
|
648 | 648 | region = region.strip() |
|
649 | 649 | region_settings = {} |
|
650 | 650 | for key, value in cache_settings.items(): |
|
651 | 651 | if key.startswith(region): |
|
652 | 652 | region_settings[key.split('.')[1]] = value |
|
653 | 653 | |
|
654 | 654 | caches.configure_cache_region( |
|
655 | 655 | region, region_settings, cache_settings) |
|
656 | 656 | |
|
657 | 657 | |
|
658 | 658 | def load_rcextensions(root_path): |
|
659 | 659 | import rhodecode |
|
660 | 660 | from rhodecode.config import conf |
|
661 | 661 | |
|
662 | 662 | path = os.path.join(root_path, 'rcextensions', '__init__.py') |
|
663 | 663 | if os.path.isfile(path): |
|
664 | 664 | rcext = create_module('rc', path) |
|
665 | 665 | EXT = rhodecode.EXTENSIONS = rcext |
|
666 | 666 | log.debug('Found rcextensions now loading %s...', rcext) |
|
667 | 667 | |
|
668 | 668 | # Additional mappings that are not present in the pygments lexers |
|
669 | 669 | conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {})) |
|
670 | 670 | |
|
671 | 671 | # auto check if the module is not missing any data, set to default if is |
|
672 | 672 | # this will help autoupdate new feature of rcext module |
|
673 | 673 | #from rhodecode.config import rcextensions |
|
674 | 674 | #for k in dir(rcextensions): |
|
675 | 675 | # if not k.startswith('_') and not hasattr(EXT, k): |
|
676 | 676 | # setattr(EXT, k, getattr(rcextensions, k)) |
|
677 | 677 | |
|
678 | 678 | |
|
679 | 679 | def get_custom_lexer(extension): |
|
680 | 680 | """ |
|
681 | 681 | returns a custom lexer if it is defined in rcextensions module, or None |
|
682 | 682 | if there's no custom lexer defined |
|
683 | 683 | """ |
|
684 | 684 | import rhodecode |
|
685 | 685 | from pygments import lexers |
|
686 | 686 | |
|
687 | 687 | # custom override made by RhodeCode |
|
688 | 688 | if extension in ['mako']: |
|
689 | 689 | return lexers.get_lexer_by_name('html+mako') |
|
690 | 690 | |
|
691 | 691 | # check if we didn't define this extension as other lexer |
|
692 | 692 | extensions = rhodecode.EXTENSIONS and getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None) |
|
693 | 693 | if extensions and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS: |
|
694 | 694 | _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension] |
|
695 | 695 | return lexers.get_lexer_by_name(_lexer_name) |
|
696 | 696 | |
|
697 | 697 | |
|
698 | 698 | #============================================================================== |
|
699 | 699 | # TEST FUNCTIONS AND CREATORS |
|
700 | 700 | #============================================================================== |
|
701 | 701 | def create_test_index(repo_location, config): |
|
702 | 702 | """ |
|
703 | 703 | Makes default test index. |
|
704 | 704 | """ |
|
705 | 705 | import rc_testdata |
|
706 | 706 | |
|
707 | 707 | rc_testdata.extract_search_index( |
|
708 | 708 | 'vcs_search_index', os.path.dirname(config['search.location'])) |
|
709 | 709 | |
|
710 | 710 | |
|
711 | 711 | def create_test_directory(test_path): |
|
712 | 712 | """ |
|
713 | 713 | Create test directory if it doesn't exist. |
|
714 | 714 | """ |
|
715 | 715 | if not os.path.isdir(test_path): |
|
716 | 716 | log.debug('Creating testdir %s', test_path) |
|
717 | 717 | os.makedirs(test_path) |
|
718 | 718 | |
|
719 | 719 | |
|
720 | 720 | def create_test_database(test_path, config): |
|
721 | 721 | """ |
|
722 | 722 | Makes a fresh database. |
|
723 | 723 | """ |
|
724 | 724 | from rhodecode.lib.db_manage import DbManage |
|
725 | 725 | |
|
726 | 726 | # PART ONE create db |
|
727 | 727 | dbconf = config['sqlalchemy.db1.url'] |
|
728 | 728 | log.debug('making test db %s', dbconf) |
|
729 | 729 | |
|
730 | 730 | dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'], |
|
731 | 731 | tests=True, cli_args={'force_ask': True}) |
|
732 | 732 | dbmanage.create_tables(override=True) |
|
733 | 733 | dbmanage.set_db_version() |
|
734 | 734 | # for tests dynamically set new root paths based on generated content |
|
735 | 735 | dbmanage.create_settings(dbmanage.config_prompt(test_path)) |
|
736 | 736 | dbmanage.create_default_user() |
|
737 | 737 | dbmanage.create_test_admin_and_users() |
|
738 | 738 | dbmanage.create_permissions() |
|
739 | 739 | dbmanage.populate_default_permissions() |
|
740 | 740 | Session().commit() |
|
741 | 741 | |
|
742 | 742 | |
|
743 | 743 | def create_test_repositories(test_path, config): |
|
744 | 744 | """ |
|
745 | 745 | Creates test repositories in the temporary directory. Repositories are |
|
746 | 746 | extracted from archives within the rc_testdata package. |
|
747 | 747 | """ |
|
748 | 748 | import rc_testdata |
|
749 | 749 | from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO |
|
750 | 750 | |
|
751 | 751 | log.debug('making test vcs repositories') |
|
752 | 752 | |
|
753 | 753 | idx_path = config['search.location'] |
|
754 | 754 | data_path = config['cache_dir'] |
|
755 | 755 | |
|
756 | 756 | # clean index and data |
|
757 | 757 | if idx_path and os.path.exists(idx_path): |
|
758 | 758 | log.debug('remove %s', idx_path) |
|
759 | 759 | shutil.rmtree(idx_path) |
|
760 | 760 | |
|
761 | 761 | if data_path and os.path.exists(data_path): |
|
762 | 762 | log.debug('remove %s', data_path) |
|
763 | 763 | shutil.rmtree(data_path) |
|
764 | 764 | |
|
765 | 765 | rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO)) |
|
766 | 766 | rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO)) |
|
767 | 767 | |
|
768 | 768 | # Note: Subversion is in the process of being integrated with the system, |
|
769 | 769 | # until we have a properly packed version of the test svn repository, this |
|
770 | 770 | # tries to copy over the repo from a package "rc_testdata" |
|
771 | 771 | svn_repo_path = rc_testdata.get_svn_repo_archive() |
|
772 | 772 | with tarfile.open(svn_repo_path) as tar: |
|
773 | 773 | tar.extractall(jn(test_path, SVN_REPO)) |
|
774 | 774 | |
|
775 | 775 | |
|
776 | 776 | #============================================================================== |
|
777 | 777 | # PASTER COMMANDS |
|
778 | 778 | #============================================================================== |
|
779 | 779 | class BasePasterCommand(Command): |
|
780 | 780 | """ |
|
781 | 781 | Abstract Base Class for paster commands. |
|
782 | 782 | |
|
783 | 783 | The celery commands are somewhat aggressive about loading |
|
784 | 784 | celery.conf, and since our module sets the `CELERY_LOADER` |
|
785 | 785 | environment variable to our loader, we have to bootstrap a bit and |
|
786 | 786 | make sure we've had a chance to load the pylons config off of the |
|
787 | 787 | command line, otherwise everything fails. |
|
788 | 788 | """ |
|
789 | 789 | min_args = 1 |
|
790 | 790 | min_args_error = "Please provide a paster config file as an argument." |
|
791 | 791 | takes_config_file = 1 |
|
792 | 792 | requires_config_file = True |
|
793 | 793 | |
|
794 | 794 | def notify_msg(self, msg, log=False): |
|
795 | 795 | """Make a notification to user, additionally if logger is passed |
|
796 | 796 | it logs this action using given logger |
|
797 | 797 | |
|
798 | 798 | :param msg: message that will be printed to user |
|
799 | 799 | :param log: logging instance, to use to additionally log this message |
|
800 | 800 | |
|
801 | 801 | """ |
|
802 | 802 | if log and isinstance(log, logging): |
|
803 | 803 | log(msg) |
|
804 | 804 | |
|
805 | 805 | def run(self, args): |
|
806 | 806 | """ |
|
807 | 807 | Overrides Command.run |
|
808 | 808 | |
|
809 | 809 | Checks for a config file argument and loads it. |
|
810 | 810 | """ |
|
811 | 811 | if len(args) < self.min_args: |
|
812 | 812 | raise BadCommand( |
|
813 | 813 | self.min_args_error % {'min_args': self.min_args, |
|
814 | 814 | 'actual_args': len(args)}) |
|
815 | 815 | |
|
816 | 816 | # Decrement because we're going to lob off the first argument. |
|
817 | 817 | # @@ This is hacky |
|
818 | 818 | self.min_args -= 1 |
|
819 | 819 | self.bootstrap_config(args[0]) |
|
820 | 820 | self.update_parser() |
|
821 | 821 | return super(BasePasterCommand, self).run(args[1:]) |
|
822 | 822 | |
|
823 | 823 | def update_parser(self): |
|
824 | 824 | """ |
|
825 | 825 | Abstract method. Allows for the class' parser to be updated |
|
826 | 826 | before the superclass' `run` method is called. Necessary to |
|
827 | 827 | allow options/arguments to be passed through to the underlying |
|
828 | 828 | celery command. |
|
829 | 829 | """ |
|
830 | 830 | raise NotImplementedError("Abstract Method.") |
|
831 | 831 | |
|
832 | 832 | def bootstrap_config(self, conf): |
|
833 | 833 | """ |
|
834 | 834 | Loads the pylons configuration. |
|
835 | 835 | """ |
|
836 | 836 | from pylons import config as pylonsconfig |
|
837 | 837 | |
|
838 | 838 | self.path_to_ini_file = os.path.realpath(conf) |
|
839 | 839 | conf = paste.deploy.appconfig('config:' + self.path_to_ini_file) |
|
840 | 840 | pylonsconfig.init_app(conf.global_conf, conf.local_conf) |
|
841 | 841 | |
|
842 | 842 | def _init_session(self): |
|
843 | 843 | """ |
|
844 | 844 | Inits SqlAlchemy Session |
|
845 | 845 | """ |
|
846 | 846 | logging.config.fileConfig(self.path_to_ini_file) |
|
847 | 847 | from pylons import config |
|
848 | 848 | from rhodecode.config.utils import initialize_database |
|
849 | 849 | |
|
850 | 850 | # get to remove repos !! |
|
851 | 851 | add_cache(config) |
|
852 | 852 | initialize_database(config) |
|
853 | 853 | |
|
854 | 854 | |
|
855 | @decorator.decorator | |
|
856 | def jsonify(func, *args, **kwargs): | |
|
857 | """Action decorator that formats output for JSON | |
|
858 | ||
|
859 | Given a function that will return content, this decorator will turn | |
|
860 | the result into JSON, with a content-type of 'application/json' and | |
|
861 | output it. | |
|
862 | ||
|
863 | """ | |
|
864 | from pylons.decorators.util import get_pylons | |
|
865 | from rhodecode.lib.ext_json import json | |
|
866 | pylons = get_pylons(args) | |
|
867 | pylons.response.headers['Content-Type'] = 'application/json; charset=utf-8' | |
|
868 | data = func(*args, **kwargs) | |
|
869 | if isinstance(data, (list, tuple)): | |
|
870 | msg = "JSON responses with Array envelopes are susceptible to " \ | |
|
871 | "cross-site data leak attacks, see " \ | |
|
872 | "http://wiki.pylonshq.com/display/pylonsfaq/Warnings" | |
|
873 | warnings.warn(msg, Warning, 2) | |
|
874 | log.warning(msg) | |
|
875 | log.debug("Returning JSON wrapped action output") | |
|
876 | return json.dumps(data, encoding='utf-8') | |
|
877 | ||
|
878 | ||
|
879 | 855 | class PartialRenderer(object): |
|
880 | 856 | """ |
|
881 | 857 | Partial renderer used to render chunks of html used in datagrids |
|
882 | 858 | use like:: |
|
883 | 859 | |
|
884 | 860 | _render = PartialRenderer('data_table/_dt_elements.mako') |
|
885 | 861 | _render('quick_menu', args, kwargs) |
|
886 | 862 | PartialRenderer.h, |
|
887 | 863 | c, |
|
888 | 864 | _, |
|
889 | 865 | ungettext |
|
890 | 866 | are the template stuff initialized inside and can be re-used later |
|
891 | 867 | |
|
892 | 868 | :param tmpl_name: template path relate to /templates/ dir |
|
893 | 869 | """ |
|
894 | 870 | |
|
895 | 871 | def __init__(self, tmpl_name): |
|
896 | 872 | import rhodecode |
|
897 | 873 | from pylons import request, tmpl_context as c |
|
898 | 874 | from pylons.i18n.translation import _, ungettext |
|
899 | 875 | from rhodecode.lib import helpers as h |
|
900 | 876 | |
|
901 | 877 | self.tmpl_name = tmpl_name |
|
902 | 878 | self.rhodecode = rhodecode |
|
903 | 879 | self.c = c |
|
904 | 880 | self._ = _ |
|
905 | 881 | self.ungettext = ungettext |
|
906 | 882 | self.h = h |
|
907 | 883 | self.request = request |
|
908 | 884 | |
|
909 | 885 | def _mako_lookup(self): |
|
910 | 886 | _tmpl_lookup = self.rhodecode.CONFIG['pylons.app_globals'].mako_lookup |
|
911 | 887 | return _tmpl_lookup.get_template(self.tmpl_name) |
|
912 | 888 | |
|
913 | 889 | def _update_kwargs_for_render(self, kwargs): |
|
914 | 890 | """ |
|
915 | 891 | Inject params required for Mako rendering |
|
916 | 892 | """ |
|
917 | 893 | _kwargs = { |
|
918 | 894 | '_': self._, |
|
919 | 895 | 'h': self.h, |
|
920 | 896 | 'c': self.c, |
|
921 | 897 | 'request': self.request, |
|
922 | 898 | '_ungettext': self.ungettext, |
|
923 | 899 | } |
|
924 | 900 | _kwargs.update(kwargs) |
|
925 | 901 | return _kwargs |
|
926 | 902 | |
|
927 | 903 | def _render_with_exc(self, render_func, args, kwargs): |
|
928 | 904 | try: |
|
929 | 905 | return render_func.render(*args, **kwargs) |
|
930 | 906 | except: |
|
931 | 907 | log.error(exceptions.text_error_template().render()) |
|
932 | 908 | raise |
|
933 | 909 | |
|
934 | 910 | def _get_template(self, template_obj, def_name): |
|
935 | 911 | if def_name: |
|
936 | 912 | tmpl = template_obj.get_def(def_name) |
|
937 | 913 | else: |
|
938 | 914 | tmpl = template_obj |
|
939 | 915 | return tmpl |
|
940 | 916 | |
|
941 | 917 | def render(self, def_name, *args, **kwargs): |
|
942 | 918 | lookup_obj = self._mako_lookup() |
|
943 | 919 | tmpl = self._get_template(lookup_obj, def_name=def_name) |
|
944 | 920 | kwargs = self._update_kwargs_for_render(kwargs) |
|
945 | 921 | return self._render_with_exc(tmpl, args, kwargs) |
|
946 | 922 | |
|
947 | 923 | def __call__(self, tmpl, *args, **kwargs): |
|
948 | 924 | return self.render(tmpl, *args, **kwargs) |
|
949 | 925 | |
|
950 | 926 | |
|
951 | 927 | def password_changed(auth_user, session): |
|
952 | 928 | # Never report password change in case of default user or anonymous user. |
|
953 | 929 | if auth_user.username == User.DEFAULT_USER or auth_user.user_id is None: |
|
954 | 930 | return False |
|
955 | 931 | |
|
956 | 932 | password_hash = md5(auth_user.password) if auth_user.password else None |
|
957 | 933 | rhodecode_user = session.get('rhodecode_user', {}) |
|
958 | 934 | session_password_hash = rhodecode_user.get('password', '') |
|
959 | 935 | return password_hash != session_password_hash |
|
960 | 936 | |
|
961 | 937 | |
|
962 | 938 | def read_opensource_licenses(): |
|
963 | 939 | global _license_cache |
|
964 | 940 | |
|
965 | 941 | if not _license_cache: |
|
966 | 942 | licenses = pkg_resources.resource_string( |
|
967 | 943 | 'rhodecode', 'config/licenses.json') |
|
968 | 944 | _license_cache = json.loads(licenses) |
|
969 | 945 | |
|
970 | 946 | return _license_cache |
|
971 | 947 | |
|
972 | 948 | |
|
973 | 949 | def get_registry(request): |
|
974 | 950 | """ |
|
975 | 951 | Utility to get the pyramid registry from a request. During migration to |
|
976 | 952 | pyramid we sometimes want to use the pyramid registry from pylons context. |
|
977 | 953 | Therefore this utility returns `request.registry` for pyramid requests and |
|
978 | 954 | uses `get_current_registry()` for pylons requests. |
|
979 | 955 | """ |
|
980 | 956 | try: |
|
981 | 957 | return request.registry |
|
982 | 958 | except AttributeError: |
|
983 | 959 | return get_current_registry() |
|
984 | 960 | |
|
985 | 961 | |
|
986 | 962 | def generate_platform_uuid(): |
|
987 | 963 | """ |
|
988 | 964 | Generates platform UUID based on it's name |
|
989 | 965 | """ |
|
990 | 966 | import platform |
|
991 | 967 | |
|
992 | 968 | try: |
|
993 | 969 | uuid_list = [platform.platform()] |
|
994 | 970 | return hashlib.sha256(':'.join(uuid_list)).hexdigest() |
|
995 | 971 | except Exception as e: |
|
996 | 972 | log.error('Failed to generate host uuid: %s' % e) |
|
997 | 973 | return 'UNDEFINED' |
@@ -1,1029 +1,1025 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | """ | |
|
22 | Repository model for rhodecode | |
|
23 | """ | |
|
24 | ||
|
25 | 21 | import logging |
|
26 | 22 | import os |
|
27 | 23 | import re |
|
28 | 24 | import shutil |
|
29 | 25 | import time |
|
30 | 26 | import traceback |
|
31 | 27 | import datetime |
|
32 | 28 | |
|
33 | 29 | from pyramid.threadlocal import get_current_request |
|
34 | 30 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
35 | 31 | |
|
36 | 32 | from rhodecode import events |
|
37 | 33 | from rhodecode.lib import helpers as h |
|
38 | 34 | from rhodecode.lib.auth import HasUserGroupPermissionAny |
|
39 | 35 | from rhodecode.lib.caching_query import FromCache |
|
40 | 36 | from rhodecode.lib.exceptions import AttachedForksError |
|
41 | 37 | from rhodecode.lib.hooks_base import log_delete_repository |
|
42 | 38 | from rhodecode.lib.utils import make_db_config |
|
43 | 39 | from rhodecode.lib.utils2 import ( |
|
44 | 40 | safe_str, safe_unicode, remove_prefix, obfuscate_url_pw, |
|
45 | 41 | get_current_rhodecode_user, safe_int, datetime_to_time, action_logger_generic) |
|
46 | 42 | from rhodecode.lib.vcs.backends import get_backend |
|
47 | 43 | from rhodecode.model import BaseModel |
|
48 | 44 | from rhodecode.model.db import (_hash_key, |
|
49 | 45 | Repository, UserRepoToPerm, UserGroupRepoToPerm, UserRepoGroupToPerm, |
|
50 | 46 | UserGroupRepoGroupToPerm, User, Permission, Statistics, UserGroup, |
|
51 | 47 | RepoGroup, RepositoryField) |
|
52 | 48 | |
|
53 | 49 | from rhodecode.model.settings import VcsSettingsModel |
|
54 | 50 | |
|
55 | 51 | |
|
56 | 52 | log = logging.getLogger(__name__) |
|
57 | 53 | |
|
58 | 54 | |
|
59 | 55 | class RepoModel(BaseModel): |
|
60 | 56 | |
|
61 | 57 | cls = Repository |
|
62 | 58 | |
|
63 | 59 | def _get_user_group(self, users_group): |
|
64 | 60 | return self._get_instance(UserGroup, users_group, |
|
65 | 61 | callback=UserGroup.get_by_group_name) |
|
66 | 62 | |
|
67 | 63 | def _get_repo_group(self, repo_group): |
|
68 | 64 | return self._get_instance(RepoGroup, repo_group, |
|
69 | 65 | callback=RepoGroup.get_by_group_name) |
|
70 | 66 | |
|
71 | 67 | def _create_default_perms(self, repository, private): |
|
72 | 68 | # create default permission |
|
73 | 69 | default = 'repository.read' |
|
74 | 70 | def_user = User.get_default_user() |
|
75 | 71 | for p in def_user.user_perms: |
|
76 | 72 | if p.permission.permission_name.startswith('repository.'): |
|
77 | 73 | default = p.permission.permission_name |
|
78 | 74 | break |
|
79 | 75 | |
|
80 | 76 | default_perm = 'repository.none' if private else default |
|
81 | 77 | |
|
82 | 78 | repo_to_perm = UserRepoToPerm() |
|
83 | 79 | repo_to_perm.permission = Permission.get_by_key(default_perm) |
|
84 | 80 | |
|
85 | 81 | repo_to_perm.repository = repository |
|
86 | 82 | repo_to_perm.user_id = def_user.user_id |
|
87 | 83 | |
|
88 | 84 | return repo_to_perm |
|
89 | 85 | |
|
90 | 86 | @LazyProperty |
|
91 | 87 | def repos_path(self): |
|
92 | 88 | """ |
|
93 | 89 | Gets the repositories root path from database |
|
94 | 90 | """ |
|
95 | 91 | settings_model = VcsSettingsModel(sa=self.sa) |
|
96 | 92 | return settings_model.get_repos_location() |
|
97 | 93 | |
|
98 | 94 | def get(self, repo_id, cache=False): |
|
99 | 95 | repo = self.sa.query(Repository) \ |
|
100 | 96 | .filter(Repository.repo_id == repo_id) |
|
101 | 97 | |
|
102 | 98 | if cache: |
|
103 | 99 | repo = repo.options( |
|
104 | 100 | FromCache("sql_cache_short", "get_repo_%s" % repo_id)) |
|
105 | 101 | return repo.scalar() |
|
106 | 102 | |
|
107 | 103 | def get_repo(self, repository): |
|
108 | 104 | return self._get_repo(repository) |
|
109 | 105 | |
|
110 | 106 | def get_by_repo_name(self, repo_name, cache=False): |
|
111 | 107 | repo = self.sa.query(Repository) \ |
|
112 | 108 | .filter(Repository.repo_name == repo_name) |
|
113 | 109 | |
|
114 | 110 | if cache: |
|
115 | 111 | name_key = _hash_key(repo_name) |
|
116 | 112 | repo = repo.options( |
|
117 | 113 | FromCache("sql_cache_short", "get_repo_%s" % name_key)) |
|
118 | 114 | return repo.scalar() |
|
119 | 115 | |
|
120 | 116 | def _extract_id_from_repo_name(self, repo_name): |
|
121 | 117 | if repo_name.startswith('/'): |
|
122 | 118 | repo_name = repo_name.lstrip('/') |
|
123 | 119 | by_id_match = re.match(r'^_(\d{1,})', repo_name) |
|
124 | 120 | if by_id_match: |
|
125 | 121 | return by_id_match.groups()[0] |
|
126 | 122 | |
|
127 | 123 | def get_repo_by_id(self, repo_name): |
|
128 | 124 | """ |
|
129 | 125 | Extracts repo_name by id from special urls. |
|
130 | 126 | Example url is _11/repo_name |
|
131 | 127 | |
|
132 | 128 | :param repo_name: |
|
133 | 129 | :return: repo object if matched else None |
|
134 | 130 | """ |
|
135 | 131 | |
|
136 | 132 | try: |
|
137 | 133 | _repo_id = self._extract_id_from_repo_name(repo_name) |
|
138 | 134 | if _repo_id: |
|
139 | 135 | return self.get(_repo_id) |
|
140 | 136 | except Exception: |
|
141 | 137 | log.exception('Failed to extract repo_name from URL') |
|
142 | 138 | |
|
143 | 139 | return None |
|
144 | 140 | |
|
145 | 141 | def get_repos_for_root(self, root, traverse=False): |
|
146 | 142 | if traverse: |
|
147 | 143 | like_expression = u'{}%'.format(safe_unicode(root)) |
|
148 | 144 | repos = Repository.query().filter( |
|
149 | 145 | Repository.repo_name.like(like_expression)).all() |
|
150 | 146 | else: |
|
151 | 147 | if root and not isinstance(root, RepoGroup): |
|
152 | 148 | raise ValueError( |
|
153 | 149 | 'Root must be an instance ' |
|
154 | 150 | 'of RepoGroup, got:{} instead'.format(type(root))) |
|
155 | 151 | repos = Repository.query().filter(Repository.group == root).all() |
|
156 | 152 | return repos |
|
157 | 153 | |
|
158 | 154 | def get_url(self, repo, request=None, permalink=False): |
|
159 | 155 | if not request: |
|
160 | 156 | request = get_current_request() |
|
161 | 157 | |
|
162 | 158 | if not request: |
|
163 | 159 | return |
|
164 | 160 | |
|
165 | 161 | if permalink: |
|
166 | 162 | return request.route_url( |
|
167 | 163 | 'repo_summary', repo_name=safe_str(repo.repo_id)) |
|
168 | 164 | else: |
|
169 | 165 | return request.route_url( |
|
170 | 166 | 'repo_summary', repo_name=safe_str(repo.repo_name)) |
|
171 | 167 | |
|
172 | 168 | def get_commit_url(self, repo, commit_id, request=None, permalink=False): |
|
173 | 169 | if not request: |
|
174 | 170 | request = get_current_request() |
|
175 | 171 | |
|
176 | 172 | if not request: |
|
177 | 173 | return |
|
178 | 174 | |
|
179 | 175 | if permalink: |
|
180 | 176 | return request.route_url( |
|
181 | 177 | 'repo_commit', repo_name=safe_str(repo.repo_id), |
|
182 | 178 | commit_id=commit_id) |
|
183 | 179 | |
|
184 | 180 | else: |
|
185 | 181 | return request.route_url( |
|
186 | 182 | 'repo_commit', repo_name=safe_str(repo.repo_name), |
|
187 | 183 | commit_id=commit_id) |
|
188 | 184 | |
|
189 | 185 | @classmethod |
|
190 | 186 | def update_repoinfo(cls, repositories=None): |
|
191 | 187 | if not repositories: |
|
192 | 188 | repositories = Repository.getAll() |
|
193 | 189 | for repo in repositories: |
|
194 | 190 | repo.update_commit_cache() |
|
195 | 191 | |
|
196 | 192 | def get_repos_as_dict(self, repo_list=None, admin=False, |
|
197 | 193 | super_user_actions=False): |
|
198 | 194 | _render = get_current_request().get_partial_renderer( |
|
199 | 195 | 'data_table/_dt_elements.mako') |
|
200 | 196 | c = _render.get_call_context() |
|
201 | 197 | |
|
202 | 198 | def quick_menu(repo_name): |
|
203 | 199 | return _render('quick_menu', repo_name) |
|
204 | 200 | |
|
205 | 201 | def repo_lnk(name, rtype, rstate, private, fork_of): |
|
206 | 202 | return _render('repo_name', name, rtype, rstate, private, fork_of, |
|
207 | 203 | short_name=not admin, admin=False) |
|
208 | 204 | |
|
209 | 205 | def last_change(last_change): |
|
210 | 206 | if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo: |
|
211 | 207 | last_change = last_change + datetime.timedelta(seconds= |
|
212 | 208 | (datetime.datetime.now() - datetime.datetime.utcnow()).seconds) |
|
213 | 209 | return _render("last_change", last_change) |
|
214 | 210 | |
|
215 | 211 | def rss_lnk(repo_name): |
|
216 | 212 | return _render("rss", repo_name) |
|
217 | 213 | |
|
218 | 214 | def atom_lnk(repo_name): |
|
219 | 215 | return _render("atom", repo_name) |
|
220 | 216 | |
|
221 | 217 | def last_rev(repo_name, cs_cache): |
|
222 | 218 | return _render('revision', repo_name, cs_cache.get('revision'), |
|
223 | 219 | cs_cache.get('raw_id'), cs_cache.get('author'), |
|
224 | 220 | cs_cache.get('message')) |
|
225 | 221 | |
|
226 | 222 | def desc(desc): |
|
227 | 223 | if c.visual.stylify_metatags: |
|
228 | 224 | desc = h.urlify_text(h.escaped_stylize(desc)) |
|
229 | 225 | else: |
|
230 | 226 | desc = h.urlify_text(h.html_escape(desc)) |
|
231 | 227 | |
|
232 | 228 | return _render('repo_desc', desc) |
|
233 | 229 | |
|
234 | 230 | def state(repo_state): |
|
235 | 231 | return _render("repo_state", repo_state) |
|
236 | 232 | |
|
237 | 233 | def repo_actions(repo_name): |
|
238 | 234 | return _render('repo_actions', repo_name, super_user_actions) |
|
239 | 235 | |
|
240 | 236 | def user_profile(username): |
|
241 | 237 | return _render('user_profile', username) |
|
242 | 238 | |
|
243 | 239 | repos_data = [] |
|
244 | 240 | for repo in repo_list: |
|
245 | 241 | cs_cache = repo.changeset_cache |
|
246 | 242 | row = { |
|
247 | 243 | "menu": quick_menu(repo.repo_name), |
|
248 | 244 | |
|
249 | 245 | "name": repo_lnk(repo.repo_name, repo.repo_type, |
|
250 | 246 | repo.repo_state, repo.private, repo.fork), |
|
251 | 247 | "name_raw": repo.repo_name.lower(), |
|
252 | 248 | |
|
253 | 249 | "last_change": last_change(repo.last_db_change), |
|
254 | 250 | "last_change_raw": datetime_to_time(repo.last_db_change), |
|
255 | 251 | |
|
256 | 252 | "last_changeset": last_rev(repo.repo_name, cs_cache), |
|
257 | 253 | "last_changeset_raw": cs_cache.get('revision'), |
|
258 | 254 | |
|
259 | 255 | "desc": desc(repo.description_safe), |
|
260 | 256 | "owner": user_profile(repo.user.username), |
|
261 | 257 | |
|
262 | 258 | "state": state(repo.repo_state), |
|
263 | 259 | "rss": rss_lnk(repo.repo_name), |
|
264 | 260 | |
|
265 | 261 | "atom": atom_lnk(repo.repo_name), |
|
266 | 262 | } |
|
267 | 263 | if admin: |
|
268 | 264 | row.update({ |
|
269 | 265 | "action": repo_actions(repo.repo_name), |
|
270 | 266 | }) |
|
271 | 267 | repos_data.append(row) |
|
272 | 268 | |
|
273 | 269 | return repos_data |
|
274 | 270 | |
|
275 | 271 | def _get_defaults(self, repo_name): |
|
276 | 272 | """ |
|
277 | 273 | Gets information about repository, and returns a dict for |
|
278 | 274 | usage in forms |
|
279 | 275 | |
|
280 | 276 | :param repo_name: |
|
281 | 277 | """ |
|
282 | 278 | |
|
283 | 279 | repo_info = Repository.get_by_repo_name(repo_name) |
|
284 | 280 | |
|
285 | 281 | if repo_info is None: |
|
286 | 282 | return None |
|
287 | 283 | |
|
288 | 284 | defaults = repo_info.get_dict() |
|
289 | 285 | defaults['repo_name'] = repo_info.just_name |
|
290 | 286 | |
|
291 | 287 | groups = repo_info.groups_with_parents |
|
292 | 288 | parent_group = groups[-1] if groups else None |
|
293 | 289 | |
|
294 | 290 | # we use -1 as this is how in HTML, we mark an empty group |
|
295 | 291 | defaults['repo_group'] = getattr(parent_group, 'group_id', -1) |
|
296 | 292 | |
|
297 | 293 | keys_to_process = ( |
|
298 | 294 | {'k': 'repo_type', 'strip': False}, |
|
299 | 295 | {'k': 'repo_enable_downloads', 'strip': True}, |
|
300 | 296 | {'k': 'repo_description', 'strip': True}, |
|
301 | 297 | {'k': 'repo_enable_locking', 'strip': True}, |
|
302 | 298 | {'k': 'repo_landing_rev', 'strip': True}, |
|
303 | 299 | {'k': 'clone_uri', 'strip': False}, |
|
304 | 300 | {'k': 'repo_private', 'strip': True}, |
|
305 | 301 | {'k': 'repo_enable_statistics', 'strip': True} |
|
306 | 302 | ) |
|
307 | 303 | |
|
308 | 304 | for item in keys_to_process: |
|
309 | 305 | attr = item['k'] |
|
310 | 306 | if item['strip']: |
|
311 | 307 | attr = remove_prefix(item['k'], 'repo_') |
|
312 | 308 | |
|
313 | 309 | val = defaults[attr] |
|
314 | 310 | if item['k'] == 'repo_landing_rev': |
|
315 | 311 | val = ':'.join(defaults[attr]) |
|
316 | 312 | defaults[item['k']] = val |
|
317 | 313 | if item['k'] == 'clone_uri': |
|
318 | 314 | defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden |
|
319 | 315 | |
|
320 | 316 | # fill owner |
|
321 | 317 | if repo_info.user: |
|
322 | 318 | defaults.update({'user': repo_info.user.username}) |
|
323 | 319 | else: |
|
324 | 320 | replacement_user = User.get_first_super_admin().username |
|
325 | 321 | defaults.update({'user': replacement_user}) |
|
326 | 322 | |
|
327 | 323 | return defaults |
|
328 | 324 | |
|
329 | 325 | def update(self, repo, **kwargs): |
|
330 | 326 | try: |
|
331 | 327 | cur_repo = self._get_repo(repo) |
|
332 | 328 | source_repo_name = cur_repo.repo_name |
|
333 | 329 | if 'user' in kwargs: |
|
334 | 330 | cur_repo.user = User.get_by_username(kwargs['user']) |
|
335 | 331 | |
|
336 | 332 | if 'repo_group' in kwargs: |
|
337 | 333 | cur_repo.group = RepoGroup.get(kwargs['repo_group']) |
|
338 | 334 | log.debug('Updating repo %s with params:%s', cur_repo, kwargs) |
|
339 | 335 | |
|
340 | 336 | update_keys = [ |
|
341 | 337 | (1, 'repo_description'), |
|
342 | 338 | (1, 'repo_landing_rev'), |
|
343 | 339 | (1, 'repo_private'), |
|
344 | 340 | (1, 'repo_enable_downloads'), |
|
345 | 341 | (1, 'repo_enable_locking'), |
|
346 | 342 | (1, 'repo_enable_statistics'), |
|
347 | 343 | (0, 'clone_uri'), |
|
348 | 344 | (0, 'fork_id') |
|
349 | 345 | ] |
|
350 | 346 | for strip, k in update_keys: |
|
351 | 347 | if k in kwargs: |
|
352 | 348 | val = kwargs[k] |
|
353 | 349 | if strip: |
|
354 | 350 | k = remove_prefix(k, 'repo_') |
|
355 | 351 | |
|
356 | 352 | setattr(cur_repo, k, val) |
|
357 | 353 | |
|
358 | 354 | new_name = cur_repo.get_new_name(kwargs['repo_name']) |
|
359 | 355 | cur_repo.repo_name = new_name |
|
360 | 356 | |
|
361 | 357 | # if private flag is set, reset default permission to NONE |
|
362 | 358 | if kwargs.get('repo_private'): |
|
363 | 359 | EMPTY_PERM = 'repository.none' |
|
364 | 360 | RepoModel().grant_user_permission( |
|
365 | 361 | repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM |
|
366 | 362 | ) |
|
367 | 363 | |
|
368 | 364 | # handle extra fields |
|
369 | 365 | for field in filter(lambda k: k.startswith(RepositoryField.PREFIX), |
|
370 | 366 | kwargs): |
|
371 | 367 | k = RepositoryField.un_prefix_key(field) |
|
372 | 368 | ex_field = RepositoryField.get_by_key_name( |
|
373 | 369 | key=k, repo=cur_repo) |
|
374 | 370 | if ex_field: |
|
375 | 371 | ex_field.field_value = kwargs[field] |
|
376 | 372 | self.sa.add(ex_field) |
|
377 | 373 | cur_repo.updated_on = datetime.datetime.now() |
|
378 | 374 | self.sa.add(cur_repo) |
|
379 | 375 | |
|
380 | 376 | if source_repo_name != new_name: |
|
381 | 377 | # rename repository |
|
382 | 378 | self._rename_filesystem_repo( |
|
383 | 379 | old=source_repo_name, new=new_name) |
|
384 | 380 | |
|
385 | 381 | return cur_repo |
|
386 | 382 | except Exception: |
|
387 | 383 | log.error(traceback.format_exc()) |
|
388 | 384 | raise |
|
389 | 385 | |
|
390 | 386 | def _create_repo(self, repo_name, repo_type, description, owner, |
|
391 | 387 | private=False, clone_uri=None, repo_group=None, |
|
392 | 388 | landing_rev='rev:tip', fork_of=None, |
|
393 | 389 | copy_fork_permissions=False, enable_statistics=False, |
|
394 | 390 | enable_locking=False, enable_downloads=False, |
|
395 | 391 | copy_group_permissions=False, |
|
396 | 392 | state=Repository.STATE_PENDING): |
|
397 | 393 | """ |
|
398 | 394 | Create repository inside database with PENDING state, this should be |
|
399 | 395 | only executed by create() repo. With exception of importing existing |
|
400 | 396 | repos |
|
401 | 397 | """ |
|
402 | 398 | from rhodecode.model.scm import ScmModel |
|
403 | 399 | |
|
404 | 400 | owner = self._get_user(owner) |
|
405 | 401 | fork_of = self._get_repo(fork_of) |
|
406 | 402 | repo_group = self._get_repo_group(safe_int(repo_group)) |
|
407 | 403 | |
|
408 | 404 | try: |
|
409 | 405 | repo_name = safe_unicode(repo_name) |
|
410 | 406 | description = safe_unicode(description) |
|
411 | 407 | # repo name is just a name of repository |
|
412 | 408 | # while repo_name_full is a full qualified name that is combined |
|
413 | 409 | # with name and path of group |
|
414 | 410 | repo_name_full = repo_name |
|
415 | 411 | repo_name = repo_name.split(Repository.NAME_SEP)[-1] |
|
416 | 412 | |
|
417 | 413 | new_repo = Repository() |
|
418 | 414 | new_repo.repo_state = state |
|
419 | 415 | new_repo.enable_statistics = False |
|
420 | 416 | new_repo.repo_name = repo_name_full |
|
421 | 417 | new_repo.repo_type = repo_type |
|
422 | 418 | new_repo.user = owner |
|
423 | 419 | new_repo.group = repo_group |
|
424 | 420 | new_repo.description = description or repo_name |
|
425 | 421 | new_repo.private = private |
|
426 | 422 | new_repo.clone_uri = clone_uri |
|
427 | 423 | new_repo.landing_rev = landing_rev |
|
428 | 424 | |
|
429 | 425 | new_repo.enable_statistics = enable_statistics |
|
430 | 426 | new_repo.enable_locking = enable_locking |
|
431 | 427 | new_repo.enable_downloads = enable_downloads |
|
432 | 428 | |
|
433 | 429 | if repo_group: |
|
434 | 430 | new_repo.enable_locking = repo_group.enable_locking |
|
435 | 431 | |
|
436 | 432 | if fork_of: |
|
437 | 433 | parent_repo = fork_of |
|
438 | 434 | new_repo.fork = parent_repo |
|
439 | 435 | |
|
440 | 436 | events.trigger(events.RepoPreCreateEvent(new_repo)) |
|
441 | 437 | |
|
442 | 438 | self.sa.add(new_repo) |
|
443 | 439 | |
|
444 | 440 | EMPTY_PERM = 'repository.none' |
|
445 | 441 | if fork_of and copy_fork_permissions: |
|
446 | 442 | repo = fork_of |
|
447 | 443 | user_perms = UserRepoToPerm.query() \ |
|
448 | 444 | .filter(UserRepoToPerm.repository == repo).all() |
|
449 | 445 | group_perms = UserGroupRepoToPerm.query() \ |
|
450 | 446 | .filter(UserGroupRepoToPerm.repository == repo).all() |
|
451 | 447 | |
|
452 | 448 | for perm in user_perms: |
|
453 | 449 | UserRepoToPerm.create( |
|
454 | 450 | perm.user, new_repo, perm.permission) |
|
455 | 451 | |
|
456 | 452 | for perm in group_perms: |
|
457 | 453 | UserGroupRepoToPerm.create( |
|
458 | 454 | perm.users_group, new_repo, perm.permission) |
|
459 | 455 | # in case we copy permissions and also set this repo to private |
|
460 | 456 | # override the default user permission to make it a private |
|
461 | 457 | # repo |
|
462 | 458 | if private: |
|
463 | 459 | RepoModel(self.sa).grant_user_permission( |
|
464 | 460 | repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM) |
|
465 | 461 | |
|
466 | 462 | elif repo_group and copy_group_permissions: |
|
467 | 463 | user_perms = UserRepoGroupToPerm.query() \ |
|
468 | 464 | .filter(UserRepoGroupToPerm.group == repo_group).all() |
|
469 | 465 | |
|
470 | 466 | group_perms = UserGroupRepoGroupToPerm.query() \ |
|
471 | 467 | .filter(UserGroupRepoGroupToPerm.group == repo_group).all() |
|
472 | 468 | |
|
473 | 469 | for perm in user_perms: |
|
474 | 470 | perm_name = perm.permission.permission_name.replace( |
|
475 | 471 | 'group.', 'repository.') |
|
476 | 472 | perm_obj = Permission.get_by_key(perm_name) |
|
477 | 473 | UserRepoToPerm.create(perm.user, new_repo, perm_obj) |
|
478 | 474 | |
|
479 | 475 | for perm in group_perms: |
|
480 | 476 | perm_name = perm.permission.permission_name.replace( |
|
481 | 477 | 'group.', 'repository.') |
|
482 | 478 | perm_obj = Permission.get_by_key(perm_name) |
|
483 | 479 | UserGroupRepoToPerm.create( |
|
484 | 480 | perm.users_group, new_repo, perm_obj) |
|
485 | 481 | |
|
486 | 482 | if private: |
|
487 | 483 | RepoModel(self.sa).grant_user_permission( |
|
488 | 484 | repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM) |
|
489 | 485 | |
|
490 | 486 | else: |
|
491 | 487 | perm_obj = self._create_default_perms(new_repo, private) |
|
492 | 488 | self.sa.add(perm_obj) |
|
493 | 489 | |
|
494 | 490 | # now automatically start following this repository as owner |
|
495 | 491 | ScmModel(self.sa).toggle_following_repo(new_repo.repo_id, |
|
496 | 492 | owner.user_id) |
|
497 | 493 | |
|
498 | 494 | # we need to flush here, in order to check if database won't |
|
499 | 495 | # throw any exceptions, create filesystem dirs at the very end |
|
500 | 496 | self.sa.flush() |
|
501 | 497 | events.trigger(events.RepoCreateEvent(new_repo)) |
|
502 | 498 | return new_repo |
|
503 | 499 | |
|
504 | 500 | except Exception: |
|
505 | 501 | log.error(traceback.format_exc()) |
|
506 | 502 | raise |
|
507 | 503 | |
|
508 | 504 | def create(self, form_data, cur_user): |
|
509 | 505 | """ |
|
510 | 506 | Create repository using celery tasks |
|
511 | 507 | |
|
512 | 508 | :param form_data: |
|
513 | 509 | :param cur_user: |
|
514 | 510 | """ |
|
515 | 511 | from rhodecode.lib.celerylib import tasks, run_task |
|
516 | 512 | return run_task(tasks.create_repo, form_data, cur_user) |
|
517 | 513 | |
|
518 | 514 | def update_permissions(self, repo, perm_additions=None, perm_updates=None, |
|
519 | 515 | perm_deletions=None, check_perms=True, |
|
520 | 516 | cur_user=None): |
|
521 | 517 | if not perm_additions: |
|
522 | 518 | perm_additions = [] |
|
523 | 519 | if not perm_updates: |
|
524 | 520 | perm_updates = [] |
|
525 | 521 | if not perm_deletions: |
|
526 | 522 | perm_deletions = [] |
|
527 | 523 | |
|
528 | 524 | req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin') |
|
529 | 525 | |
|
530 | 526 | changes = { |
|
531 | 527 | 'added': [], |
|
532 | 528 | 'updated': [], |
|
533 | 529 | 'deleted': [] |
|
534 | 530 | } |
|
535 | 531 | # update permissions |
|
536 | 532 | for member_id, perm, member_type in perm_updates: |
|
537 | 533 | member_id = int(member_id) |
|
538 | 534 | if member_type == 'user': |
|
539 | 535 | member_name = User.get(member_id).username |
|
540 | 536 | # this updates also current one if found |
|
541 | 537 | self.grant_user_permission( |
|
542 | 538 | repo=repo, user=member_id, perm=perm) |
|
543 | 539 | else: # set for user group |
|
544 | 540 | # check if we have permissions to alter this usergroup |
|
545 | 541 | member_name = UserGroup.get(member_id).users_group_name |
|
546 | 542 | if not check_perms or HasUserGroupPermissionAny( |
|
547 | 543 | *req_perms)(member_name, user=cur_user): |
|
548 | 544 | self.grant_user_group_permission( |
|
549 | 545 | repo=repo, group_name=member_id, perm=perm) |
|
550 | 546 | |
|
551 | 547 | changes['updated'].append({'type': member_type, 'id': member_id, |
|
552 | 548 | 'name': member_name, 'new_perm': perm}) |
|
553 | 549 | |
|
554 | 550 | # set new permissions |
|
555 | 551 | for member_id, perm, member_type in perm_additions: |
|
556 | 552 | member_id = int(member_id) |
|
557 | 553 | if member_type == 'user': |
|
558 | 554 | member_name = User.get(member_id).username |
|
559 | 555 | self.grant_user_permission( |
|
560 | 556 | repo=repo, user=member_id, perm=perm) |
|
561 | 557 | else: # set for user group |
|
562 | 558 | # check if we have permissions to alter this usergroup |
|
563 | 559 | member_name = UserGroup.get(member_id).users_group_name |
|
564 | 560 | if not check_perms or HasUserGroupPermissionAny( |
|
565 | 561 | *req_perms)(member_name, user=cur_user): |
|
566 | 562 | self.grant_user_group_permission( |
|
567 | 563 | repo=repo, group_name=member_id, perm=perm) |
|
568 | 564 | changes['added'].append({'type': member_type, 'id': member_id, |
|
569 | 565 | 'name': member_name, 'new_perm': perm}) |
|
570 | 566 | # delete permissions |
|
571 | 567 | for member_id, perm, member_type in perm_deletions: |
|
572 | 568 | member_id = int(member_id) |
|
573 | 569 | if member_type == 'user': |
|
574 | 570 | member_name = User.get(member_id).username |
|
575 | 571 | self.revoke_user_permission(repo=repo, user=member_id) |
|
576 | 572 | else: # set for user group |
|
577 | 573 | # check if we have permissions to alter this usergroup |
|
578 | 574 | member_name = UserGroup.get(member_id).users_group_name |
|
579 | 575 | if not check_perms or HasUserGroupPermissionAny( |
|
580 | 576 | *req_perms)(member_name, user=cur_user): |
|
581 | 577 | self.revoke_user_group_permission( |
|
582 | 578 | repo=repo, group_name=member_id) |
|
583 | 579 | |
|
584 | 580 | changes['deleted'].append({'type': member_type, 'id': member_id, |
|
585 | 581 | 'name': member_name, 'new_perm': perm}) |
|
586 | 582 | return changes |
|
587 | 583 | |
|
588 | 584 | def create_fork(self, form_data, cur_user): |
|
589 | 585 | """ |
|
590 | 586 | Simple wrapper into executing celery task for fork creation |
|
591 | 587 | |
|
592 | 588 | :param form_data: |
|
593 | 589 | :param cur_user: |
|
594 | 590 | """ |
|
595 | 591 | from rhodecode.lib.celerylib import tasks, run_task |
|
596 | 592 | return run_task(tasks.create_repo_fork, form_data, cur_user) |
|
597 | 593 | |
|
598 | 594 | def delete(self, repo, forks=None, fs_remove=True, cur_user=None): |
|
599 | 595 | """ |
|
600 | 596 | Delete given repository, forks parameter defines what do do with |
|
601 | 597 | attached forks. Throws AttachedForksError if deleted repo has attached |
|
602 | 598 | forks |
|
603 | 599 | |
|
604 | 600 | :param repo: |
|
605 | 601 | :param forks: str 'delete' or 'detach' |
|
606 | 602 | :param fs_remove: remove(archive) repo from filesystem |
|
607 | 603 | """ |
|
608 | 604 | if not cur_user: |
|
609 | 605 | cur_user = getattr(get_current_rhodecode_user(), 'username', None) |
|
610 | 606 | repo = self._get_repo(repo) |
|
611 | 607 | if repo: |
|
612 | 608 | if forks == 'detach': |
|
613 | 609 | for r in repo.forks: |
|
614 | 610 | r.fork = None |
|
615 | 611 | self.sa.add(r) |
|
616 | 612 | elif forks == 'delete': |
|
617 | 613 | for r in repo.forks: |
|
618 | 614 | self.delete(r, forks='delete') |
|
619 | 615 | elif [f for f in repo.forks]: |
|
620 | 616 | raise AttachedForksError() |
|
621 | 617 | |
|
622 | 618 | old_repo_dict = repo.get_dict() |
|
623 | 619 | events.trigger(events.RepoPreDeleteEvent(repo)) |
|
624 | 620 | try: |
|
625 | 621 | self.sa.delete(repo) |
|
626 | 622 | if fs_remove: |
|
627 | 623 | self._delete_filesystem_repo(repo) |
|
628 | 624 | else: |
|
629 | 625 | log.debug('skipping removal from filesystem') |
|
630 | 626 | old_repo_dict.update({ |
|
631 | 627 | 'deleted_by': cur_user, |
|
632 | 628 | 'deleted_on': time.time(), |
|
633 | 629 | }) |
|
634 | 630 | log_delete_repository(**old_repo_dict) |
|
635 | 631 | events.trigger(events.RepoDeleteEvent(repo)) |
|
636 | 632 | except Exception: |
|
637 | 633 | log.error(traceback.format_exc()) |
|
638 | 634 | raise |
|
639 | 635 | |
|
640 | 636 | def grant_user_permission(self, repo, user, perm): |
|
641 | 637 | """ |
|
642 | 638 | Grant permission for user on given repository, or update existing one |
|
643 | 639 | if found |
|
644 | 640 | |
|
645 | 641 | :param repo: Instance of Repository, repository_id, or repository name |
|
646 | 642 | :param user: Instance of User, user_id or username |
|
647 | 643 | :param perm: Instance of Permission, or permission_name |
|
648 | 644 | """ |
|
649 | 645 | user = self._get_user(user) |
|
650 | 646 | repo = self._get_repo(repo) |
|
651 | 647 | permission = self._get_perm(perm) |
|
652 | 648 | |
|
653 | 649 | # check if we have that permission already |
|
654 | 650 | obj = self.sa.query(UserRepoToPerm) \ |
|
655 | 651 | .filter(UserRepoToPerm.user == user) \ |
|
656 | 652 | .filter(UserRepoToPerm.repository == repo) \ |
|
657 | 653 | .scalar() |
|
658 | 654 | if obj is None: |
|
659 | 655 | # create new ! |
|
660 | 656 | obj = UserRepoToPerm() |
|
661 | 657 | obj.repository = repo |
|
662 | 658 | obj.user = user |
|
663 | 659 | obj.permission = permission |
|
664 | 660 | self.sa.add(obj) |
|
665 | 661 | log.debug('Granted perm %s to %s on %s', perm, user, repo) |
|
666 | 662 | action_logger_generic( |
|
667 | 663 | 'granted permission: {} to user: {} on repo: {}'.format( |
|
668 | 664 | perm, user, repo), namespace='security.repo') |
|
669 | 665 | return obj |
|
670 | 666 | |
|
671 | 667 | def revoke_user_permission(self, repo, user): |
|
672 | 668 | """ |
|
673 | 669 | Revoke permission for user on given repository |
|
674 | 670 | |
|
675 | 671 | :param repo: Instance of Repository, repository_id, or repository name |
|
676 | 672 | :param user: Instance of User, user_id or username |
|
677 | 673 | """ |
|
678 | 674 | |
|
679 | 675 | user = self._get_user(user) |
|
680 | 676 | repo = self._get_repo(repo) |
|
681 | 677 | |
|
682 | 678 | obj = self.sa.query(UserRepoToPerm) \ |
|
683 | 679 | .filter(UserRepoToPerm.repository == repo) \ |
|
684 | 680 | .filter(UserRepoToPerm.user == user) \ |
|
685 | 681 | .scalar() |
|
686 | 682 | if obj: |
|
687 | 683 | self.sa.delete(obj) |
|
688 | 684 | log.debug('Revoked perm on %s on %s', repo, user) |
|
689 | 685 | action_logger_generic( |
|
690 | 686 | 'revoked permission from user: {} on repo: {}'.format( |
|
691 | 687 | user, repo), namespace='security.repo') |
|
692 | 688 | |
|
693 | 689 | def grant_user_group_permission(self, repo, group_name, perm): |
|
694 | 690 | """ |
|
695 | 691 | Grant permission for user group on given repository, or update |
|
696 | 692 | existing one if found |
|
697 | 693 | |
|
698 | 694 | :param repo: Instance of Repository, repository_id, or repository name |
|
699 | 695 | :param group_name: Instance of UserGroup, users_group_id, |
|
700 | 696 | or user group name |
|
701 | 697 | :param perm: Instance of Permission, or permission_name |
|
702 | 698 | """ |
|
703 | 699 | repo = self._get_repo(repo) |
|
704 | 700 | group_name = self._get_user_group(group_name) |
|
705 | 701 | permission = self._get_perm(perm) |
|
706 | 702 | |
|
707 | 703 | # check if we have that permission already |
|
708 | 704 | obj = self.sa.query(UserGroupRepoToPerm) \ |
|
709 | 705 | .filter(UserGroupRepoToPerm.users_group == group_name) \ |
|
710 | 706 | .filter(UserGroupRepoToPerm.repository == repo) \ |
|
711 | 707 | .scalar() |
|
712 | 708 | |
|
713 | 709 | if obj is None: |
|
714 | 710 | # create new |
|
715 | 711 | obj = UserGroupRepoToPerm() |
|
716 | 712 | |
|
717 | 713 | obj.repository = repo |
|
718 | 714 | obj.users_group = group_name |
|
719 | 715 | obj.permission = permission |
|
720 | 716 | self.sa.add(obj) |
|
721 | 717 | log.debug('Granted perm %s to %s on %s', perm, group_name, repo) |
|
722 | 718 | action_logger_generic( |
|
723 | 719 | 'granted permission: {} to usergroup: {} on repo: {}'.format( |
|
724 | 720 | perm, group_name, repo), namespace='security.repo') |
|
725 | 721 | |
|
726 | 722 | return obj |
|
727 | 723 | |
|
728 | 724 | def revoke_user_group_permission(self, repo, group_name): |
|
729 | 725 | """ |
|
730 | 726 | Revoke permission for user group on given repository |
|
731 | 727 | |
|
732 | 728 | :param repo: Instance of Repository, repository_id, or repository name |
|
733 | 729 | :param group_name: Instance of UserGroup, users_group_id, |
|
734 | 730 | or user group name |
|
735 | 731 | """ |
|
736 | 732 | repo = self._get_repo(repo) |
|
737 | 733 | group_name = self._get_user_group(group_name) |
|
738 | 734 | |
|
739 | 735 | obj = self.sa.query(UserGroupRepoToPerm) \ |
|
740 | 736 | .filter(UserGroupRepoToPerm.repository == repo) \ |
|
741 | 737 | .filter(UserGroupRepoToPerm.users_group == group_name) \ |
|
742 | 738 | .scalar() |
|
743 | 739 | if obj: |
|
744 | 740 | self.sa.delete(obj) |
|
745 | 741 | log.debug('Revoked perm to %s on %s', repo, group_name) |
|
746 | 742 | action_logger_generic( |
|
747 | 743 | 'revoked permission from usergroup: {} on repo: {}'.format( |
|
748 | 744 | group_name, repo), namespace='security.repo') |
|
749 | 745 | |
|
750 | 746 | def delete_stats(self, repo_name): |
|
751 | 747 | """ |
|
752 | 748 | removes stats for given repo |
|
753 | 749 | |
|
754 | 750 | :param repo_name: |
|
755 | 751 | """ |
|
756 | 752 | repo = self._get_repo(repo_name) |
|
757 | 753 | try: |
|
758 | 754 | obj = self.sa.query(Statistics) \ |
|
759 | 755 | .filter(Statistics.repository == repo).scalar() |
|
760 | 756 | if obj: |
|
761 | 757 | self.sa.delete(obj) |
|
762 | 758 | except Exception: |
|
763 | 759 | log.error(traceback.format_exc()) |
|
764 | 760 | raise |
|
765 | 761 | |
|
766 | 762 | def add_repo_field(self, repo_name, field_key, field_label, field_value='', |
|
767 | 763 | field_type='str', field_desc=''): |
|
768 | 764 | |
|
769 | 765 | repo = self._get_repo(repo_name) |
|
770 | 766 | |
|
771 | 767 | new_field = RepositoryField() |
|
772 | 768 | new_field.repository = repo |
|
773 | 769 | new_field.field_key = field_key |
|
774 | 770 | new_field.field_type = field_type # python type |
|
775 | 771 | new_field.field_value = field_value |
|
776 | 772 | new_field.field_desc = field_desc |
|
777 | 773 | new_field.field_label = field_label |
|
778 | 774 | self.sa.add(new_field) |
|
779 | 775 | return new_field |
|
780 | 776 | |
|
781 | 777 | def delete_repo_field(self, repo_name, field_key): |
|
782 | 778 | repo = self._get_repo(repo_name) |
|
783 | 779 | field = RepositoryField.get_by_key_name(field_key, repo) |
|
784 | 780 | if field: |
|
785 | 781 | self.sa.delete(field) |
|
786 | 782 | |
|
787 | 783 | def _create_filesystem_repo(self, repo_name, repo_type, repo_group, |
|
788 | 784 | clone_uri=None, repo_store_location=None, |
|
789 | 785 | use_global_config=False): |
|
790 | 786 | """ |
|
791 | 787 | makes repository on filesystem. It's group aware means it'll create |
|
792 | 788 | a repository within a group, and alter the paths accordingly of |
|
793 | 789 | group location |
|
794 | 790 | |
|
795 | 791 | :param repo_name: |
|
796 | 792 | :param alias: |
|
797 | 793 | :param parent: |
|
798 | 794 | :param clone_uri: |
|
799 | 795 | :param repo_store_location: |
|
800 | 796 | """ |
|
801 | 797 | from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group |
|
802 | 798 | from rhodecode.model.scm import ScmModel |
|
803 | 799 | |
|
804 | 800 | if Repository.NAME_SEP in repo_name: |
|
805 | 801 | raise ValueError( |
|
806 | 802 | 'repo_name must not contain groups got `%s`' % repo_name) |
|
807 | 803 | |
|
808 | 804 | if isinstance(repo_group, RepoGroup): |
|
809 | 805 | new_parent_path = os.sep.join(repo_group.full_path_splitted) |
|
810 | 806 | else: |
|
811 | 807 | new_parent_path = repo_group or '' |
|
812 | 808 | |
|
813 | 809 | if repo_store_location: |
|
814 | 810 | _paths = [repo_store_location] |
|
815 | 811 | else: |
|
816 | 812 | _paths = [self.repos_path, new_parent_path, repo_name] |
|
817 | 813 | # we need to make it str for mercurial |
|
818 | 814 | repo_path = os.path.join(*map(lambda x: safe_str(x), _paths)) |
|
819 | 815 | |
|
820 | 816 | # check if this path is not a repository |
|
821 | 817 | if is_valid_repo(repo_path, self.repos_path): |
|
822 | 818 | raise Exception('This path %s is a valid repository' % repo_path) |
|
823 | 819 | |
|
824 | 820 | # check if this path is a group |
|
825 | 821 | if is_valid_repo_group(repo_path, self.repos_path): |
|
826 | 822 | raise Exception('This path %s is a valid group' % repo_path) |
|
827 | 823 | |
|
828 | 824 | log.info('creating repo %s in %s from url: `%s`', |
|
829 | 825 | repo_name, safe_unicode(repo_path), |
|
830 | 826 | obfuscate_url_pw(clone_uri)) |
|
831 | 827 | |
|
832 | 828 | backend = get_backend(repo_type) |
|
833 | 829 | |
|
834 | 830 | config_repo = None if use_global_config else repo_name |
|
835 | 831 | if config_repo and new_parent_path: |
|
836 | 832 | config_repo = Repository.NAME_SEP.join( |
|
837 | 833 | (new_parent_path, config_repo)) |
|
838 | 834 | config = make_db_config(clear_session=False, repo=config_repo) |
|
839 | 835 | config.set('extensions', 'largefiles', '') |
|
840 | 836 | |
|
841 | 837 | # patch and reset hooks section of UI config to not run any |
|
842 | 838 | # hooks on creating remote repo |
|
843 | 839 | config.clear_section('hooks') |
|
844 | 840 | |
|
845 | 841 | # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice |
|
846 | 842 | if repo_type == 'git': |
|
847 | 843 | repo = backend( |
|
848 | 844 | repo_path, config=config, create=True, src_url=clone_uri, |
|
849 | 845 | bare=True) |
|
850 | 846 | else: |
|
851 | 847 | repo = backend( |
|
852 | 848 | repo_path, config=config, create=True, src_url=clone_uri) |
|
853 | 849 | |
|
854 | 850 | ScmModel().install_hooks(repo, repo_type=repo_type) |
|
855 | 851 | |
|
856 | 852 | log.debug('Created repo %s with %s backend', |
|
857 | 853 | safe_unicode(repo_name), safe_unicode(repo_type)) |
|
858 | 854 | return repo |
|
859 | 855 | |
|
860 | 856 | def _rename_filesystem_repo(self, old, new): |
|
861 | 857 | """ |
|
862 | 858 | renames repository on filesystem |
|
863 | 859 | |
|
864 | 860 | :param old: old name |
|
865 | 861 | :param new: new name |
|
866 | 862 | """ |
|
867 | 863 | log.info('renaming repo from %s to %s', old, new) |
|
868 | 864 | |
|
869 | 865 | old_path = os.path.join(self.repos_path, old) |
|
870 | 866 | new_path = os.path.join(self.repos_path, new) |
|
871 | 867 | if os.path.isdir(new_path): |
|
872 | 868 | raise Exception( |
|
873 | 869 | 'Was trying to rename to already existing dir %s' % new_path |
|
874 | 870 | ) |
|
875 | 871 | shutil.move(old_path, new_path) |
|
876 | 872 | |
|
877 | 873 | def _delete_filesystem_repo(self, repo): |
|
878 | 874 | """ |
|
879 | 875 | removes repo from filesystem, the removal is acctually made by |
|
880 | 876 | added rm__ prefix into dir, and rename internat .hg/.git dirs so this |
|
881 | 877 | repository is no longer valid for rhodecode, can be undeleted later on |
|
882 | 878 | by reverting the renames on this repository |
|
883 | 879 | |
|
884 | 880 | :param repo: repo object |
|
885 | 881 | """ |
|
886 | 882 | rm_path = os.path.join(self.repos_path, repo.repo_name) |
|
887 | 883 | repo_group = repo.group |
|
888 | 884 | log.info("Removing repository %s", rm_path) |
|
889 | 885 | # disable hg/git internal that it doesn't get detected as repo |
|
890 | 886 | alias = repo.repo_type |
|
891 | 887 | |
|
892 | 888 | config = make_db_config(clear_session=False) |
|
893 | 889 | config.set('extensions', 'largefiles', '') |
|
894 | 890 | bare = getattr(repo.scm_instance(config=config), 'bare', False) |
|
895 | 891 | |
|
896 | 892 | # skip this for bare git repos |
|
897 | 893 | if not bare: |
|
898 | 894 | # disable VCS repo |
|
899 | 895 | vcs_path = os.path.join(rm_path, '.%s' % alias) |
|
900 | 896 | if os.path.exists(vcs_path): |
|
901 | 897 | shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias)) |
|
902 | 898 | |
|
903 | 899 | _now = datetime.datetime.now() |
|
904 | 900 | _ms = str(_now.microsecond).rjust(6, '0') |
|
905 | 901 | _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms), |
|
906 | 902 | repo.just_name) |
|
907 | 903 | if repo_group: |
|
908 | 904 | # if repository is in group, prefix the removal path with the group |
|
909 | 905 | args = repo_group.full_path_splitted + [_d] |
|
910 | 906 | _d = os.path.join(*args) |
|
911 | 907 | |
|
912 | 908 | if os.path.isdir(rm_path): |
|
913 | 909 | shutil.move(rm_path, os.path.join(self.repos_path, _d)) |
|
914 | 910 | |
|
915 | 911 | |
|
916 | 912 | class ReadmeFinder: |
|
917 | 913 | """ |
|
918 | 914 | Utility which knows how to find a readme for a specific commit. |
|
919 | 915 | |
|
920 | 916 | The main idea is that this is a configurable algorithm. When creating an |
|
921 | 917 | instance you can define parameters, currently only the `default_renderer`. |
|
922 | 918 | Based on this configuration the method :meth:`search` behaves slightly |
|
923 | 919 | different. |
|
924 | 920 | """ |
|
925 | 921 | |
|
926 | 922 | readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE) |
|
927 | 923 | path_re = re.compile(r'^docs?', re.IGNORECASE) |
|
928 | 924 | |
|
929 | 925 | default_priorities = { |
|
930 | 926 | None: 0, |
|
931 | 927 | '.text': 2, |
|
932 | 928 | '.txt': 3, |
|
933 | 929 | '.rst': 1, |
|
934 | 930 | '.rest': 2, |
|
935 | 931 | '.md': 1, |
|
936 | 932 | '.mkdn': 2, |
|
937 | 933 | '.mdown': 3, |
|
938 | 934 | '.markdown': 4, |
|
939 | 935 | } |
|
940 | 936 | |
|
941 | 937 | path_priority = { |
|
942 | 938 | 'doc': 0, |
|
943 | 939 | 'docs': 1, |
|
944 | 940 | } |
|
945 | 941 | |
|
946 | 942 | FALLBACK_PRIORITY = 99 |
|
947 | 943 | |
|
948 | 944 | RENDERER_TO_EXTENSION = { |
|
949 | 945 | 'rst': ['.rst', '.rest'], |
|
950 | 946 | 'markdown': ['.md', 'mkdn', '.mdown', '.markdown'], |
|
951 | 947 | } |
|
952 | 948 | |
|
953 | 949 | def __init__(self, default_renderer=None): |
|
954 | 950 | self._default_renderer = default_renderer |
|
955 | 951 | self._renderer_extensions = self.RENDERER_TO_EXTENSION.get( |
|
956 | 952 | default_renderer, []) |
|
957 | 953 | |
|
958 | 954 | def search(self, commit, path='/'): |
|
959 | 955 | """ |
|
960 | 956 | Find a readme in the given `commit`. |
|
961 | 957 | """ |
|
962 | 958 | nodes = commit.get_nodes(path) |
|
963 | 959 | matches = self._match_readmes(nodes) |
|
964 | 960 | matches = self._sort_according_to_priority(matches) |
|
965 | 961 | if matches: |
|
966 | 962 | return matches[0].node |
|
967 | 963 | |
|
968 | 964 | paths = self._match_paths(nodes) |
|
969 | 965 | paths = self._sort_paths_according_to_priority(paths) |
|
970 | 966 | for path in paths: |
|
971 | 967 | match = self.search(commit, path=path) |
|
972 | 968 | if match: |
|
973 | 969 | return match |
|
974 | 970 | |
|
975 | 971 | return None |
|
976 | 972 | |
|
977 | 973 | def _match_readmes(self, nodes): |
|
978 | 974 | for node in nodes: |
|
979 | 975 | if not node.is_file(): |
|
980 | 976 | continue |
|
981 | 977 | path = node.path.rsplit('/', 1)[-1] |
|
982 | 978 | match = self.readme_re.match(path) |
|
983 | 979 | if match: |
|
984 | 980 | extension = match.group(1) |
|
985 | 981 | yield ReadmeMatch(node, match, self._priority(extension)) |
|
986 | 982 | |
|
987 | 983 | def _match_paths(self, nodes): |
|
988 | 984 | for node in nodes: |
|
989 | 985 | if not node.is_dir(): |
|
990 | 986 | continue |
|
991 | 987 | match = self.path_re.match(node.path) |
|
992 | 988 | if match: |
|
993 | 989 | yield node.path |
|
994 | 990 | |
|
995 | 991 | def _priority(self, extension): |
|
996 | 992 | renderer_priority = ( |
|
997 | 993 | 0 if extension in self._renderer_extensions else 1) |
|
998 | 994 | extension_priority = self.default_priorities.get( |
|
999 | 995 | extension, self.FALLBACK_PRIORITY) |
|
1000 | 996 | return (renderer_priority, extension_priority) |
|
1001 | 997 | |
|
1002 | 998 | def _sort_according_to_priority(self, matches): |
|
1003 | 999 | |
|
1004 | 1000 | def priority_and_path(match): |
|
1005 | 1001 | return (match.priority, match.path) |
|
1006 | 1002 | |
|
1007 | 1003 | return sorted(matches, key=priority_and_path) |
|
1008 | 1004 | |
|
1009 | 1005 | def _sort_paths_according_to_priority(self, paths): |
|
1010 | 1006 | |
|
1011 | 1007 | def priority_and_path(path): |
|
1012 | 1008 | return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path) |
|
1013 | 1009 | |
|
1014 | 1010 | return sorted(paths, key=priority_and_path) |
|
1015 | 1011 | |
|
1016 | 1012 | |
|
1017 | 1013 | class ReadmeMatch: |
|
1018 | 1014 | |
|
1019 | 1015 | def __init__(self, node, match, priority): |
|
1020 | 1016 | self.node = node |
|
1021 | 1017 | self._match = match |
|
1022 | 1018 | self.priority = priority |
|
1023 | 1019 | |
|
1024 | 1020 | @property |
|
1025 | 1021 | def path(self): |
|
1026 | 1022 | return self.node.path |
|
1027 | 1023 | |
|
1028 | 1024 | def __repr__(self): |
|
1029 | 1025 | return '<ReadmeMatch {} priority={}'.format(self.path, self.priority) |
@@ -1,101 +1,101 b'' | |||
|
1 | 1 | // Global keyboard bindings |
|
2 | 2 | |
|
3 | 3 | function setRCMouseBindings(repoName, repoLandingRev) { |
|
4 | 4 | |
|
5 | 5 | /** custom callback for supressing mousetrap from firing */ |
|
6 | 6 | Mousetrap.stopCallback = function(e, element) { |
|
7 | 7 | // if the element has the class "mousetrap" then no need to stop |
|
8 | 8 | if ((' ' + element.className + ' ').indexOf(' mousetrap ') > -1) { |
|
9 | 9 | return false; |
|
10 | 10 | } |
|
11 | 11 | |
|
12 | 12 | // stop for input, select, and textarea |
|
13 | 13 | return element.tagName == 'INPUT' || element.tagName == 'SELECT' || element.tagName == 'TEXTAREA' || element.isContentEditable; |
|
14 | 14 | }; |
|
15 | 15 | |
|
16 | 16 | // general help "?" |
|
17 | 17 | Mousetrap.bind(['?'], function(e) { |
|
18 | 18 | $('#help_kb').modal({}); |
|
19 | 19 | }); |
|
20 | 20 | |
|
21 | 21 | // / open the quick filter |
|
22 | 22 | Mousetrap.bind(['/'], function(e) { |
|
23 | 23 | $('#repo_switcher').select2('open'); |
|
24 | 24 | |
|
25 | 25 | // return false to prevent default browser behavior |
|
26 | 26 | // and stop event from bubbling |
|
27 | 27 | return false; |
|
28 | 28 | }); |
|
29 | 29 | |
|
30 | 30 | // ctrl/command+b, show the the main bar |
|
31 | 31 | Mousetrap.bind(['command+b', 'ctrl+b'], function(e) { |
|
32 | 32 | var $headerInner = $('#header-inner'), |
|
33 | 33 | $content = $('#content'); |
|
34 | 34 | if ($headerInner.hasClass('hover') && $content.hasClass('hover')) { |
|
35 | 35 | $headerInner.removeClass('hover'); |
|
36 | 36 | $content.removeClass('hover'); |
|
37 | 37 | } else { |
|
38 | 38 | $headerInner.addClass('hover'); |
|
39 | 39 | $content.addClass('hover'); |
|
40 | 40 | } |
|
41 | 41 | return false; |
|
42 | 42 | }); |
|
43 | 43 | |
|
44 | 44 | // general nav g + action |
|
45 | 45 | Mousetrap.bind(['g h'], function(e) { |
|
46 | 46 | window.location = pyroutes.url('home'); |
|
47 | 47 | }); |
|
48 | 48 | Mousetrap.bind(['g g'], function(e) { |
|
49 | 49 | window.location = pyroutes.url('gists_show', {'private': 1}); |
|
50 | 50 | }); |
|
51 | 51 | Mousetrap.bind(['g G'], function(e) { |
|
52 | 52 | window.location = pyroutes.url('gists_show', {'public': 1}); |
|
53 | 53 | }); |
|
54 | 54 | Mousetrap.bind(['n g'], function(e) { |
|
55 | 55 | window.location = pyroutes.url('gists_new'); |
|
56 | 56 | }); |
|
57 | 57 | Mousetrap.bind(['n r'], function(e) { |
|
58 |
window.location = pyroutes.url(' |
|
|
58 | window.location = pyroutes.url('repo_new'); | |
|
59 | 59 | }); |
|
60 | 60 | |
|
61 | 61 | if (repoName && repoName != '') { |
|
62 | 62 | // nav in repo context |
|
63 | 63 | Mousetrap.bind(['g s'], function(e) { |
|
64 | 64 | window.location = pyroutes.url( |
|
65 | 65 | 'repo_summary', {'repo_name': repoName}); |
|
66 | 66 | }); |
|
67 | 67 | Mousetrap.bind(['g c'], function(e) { |
|
68 | 68 | window.location = pyroutes.url( |
|
69 | 69 | 'repo_changelog', {'repo_name': repoName}); |
|
70 | 70 | }); |
|
71 | 71 | Mousetrap.bind(['g F'], function(e) { |
|
72 | 72 | window.location = pyroutes.url( |
|
73 | 73 | 'repo_files', |
|
74 | 74 | { |
|
75 | 75 | 'repo_name': repoName, |
|
76 | 76 | 'commit_id': repoLandingRev, |
|
77 | 77 | 'f_path': '', |
|
78 | 78 | 'search': '1' |
|
79 | 79 | }); |
|
80 | 80 | }); |
|
81 | 81 | Mousetrap.bind(['g f'], function(e) { |
|
82 | 82 | window.location = pyroutes.url( |
|
83 | 83 | 'repo_files', |
|
84 | 84 | { |
|
85 | 85 | 'repo_name': repoName, |
|
86 | 86 | 'commit_id': repoLandingRev, |
|
87 | 87 | 'f_path': '' |
|
88 | 88 | }); |
|
89 | 89 | }); |
|
90 | 90 | Mousetrap.bind(['g o'], function(e) { |
|
91 | 91 | window.location = pyroutes.url( |
|
92 | 92 | 'edit_repo', {'repo_name': repoName}); |
|
93 | 93 | }); |
|
94 | 94 | Mousetrap.bind(['g O'], function(e) { |
|
95 | 95 | window.location = pyroutes.url( |
|
96 | 96 | 'edit_repo_perms', {'repo_name': repoName}); |
|
97 | 97 | }); |
|
98 | 98 | } |
|
99 | 99 | } |
|
100 | 100 | |
|
101 | 101 | setRCMouseBindings(templateContext.repo_name, templateContext.repo_landing_commit); |
@@ -1,234 +1,251 b'' | |||
|
1 | 1 | |
|
2 | 2 | /****************************************************************************** |
|
3 | 3 | * * |
|
4 | 4 | * DO NOT CHANGE THIS FILE MANUALLY * |
|
5 | 5 | * * |
|
6 | 6 | * * |
|
7 | 7 | * This file is automatically generated when the app starts up with * |
|
8 | 8 | * generate_js_files = true * |
|
9 | 9 | * * |
|
10 | 10 | * To add a route here pass jsroute=True to the route definition in the app * |
|
11 | 11 | * * |
|
12 | 12 | ******************************************************************************/ |
|
13 | 13 | function registerRCRoutes() { |
|
14 | 14 | // routes registration |
|
15 | pyroutes.register('new_repo', '/_admin/create_repository', []); | |
|
16 | 15 | pyroutes.register('edit_user', '/_admin/users/%(user_id)s/edit', ['user_id']); |
|
17 | 16 | pyroutes.register('favicon', '/favicon.ico', []); |
|
18 | 17 | pyroutes.register('robots', '/robots.txt', []); |
|
19 | 18 | pyroutes.register('auth_home', '/_admin/auth*traverse', []); |
|
20 | 19 | pyroutes.register('global_integrations_new', '/_admin/integrations/new', []); |
|
21 | 20 | pyroutes.register('global_integrations_home', '/_admin/integrations', []); |
|
22 | 21 | pyroutes.register('global_integrations_list', '/_admin/integrations/%(integration)s', ['integration']); |
|
23 | 22 | pyroutes.register('global_integrations_create', '/_admin/integrations/%(integration)s/new', ['integration']); |
|
24 | 23 | pyroutes.register('global_integrations_edit', '/_admin/integrations/%(integration)s/%(integration_id)s', ['integration', 'integration_id']); |
|
25 | 24 | pyroutes.register('repo_group_integrations_home', '/%(repo_group_name)s/settings/integrations', ['repo_group_name']); |
|
26 | 25 | pyroutes.register('repo_group_integrations_new', '/%(repo_group_name)s/settings/integrations/new', ['repo_group_name']); |
|
27 | 26 | pyroutes.register('repo_group_integrations_list', '/%(repo_group_name)s/settings/integrations/%(integration)s', ['repo_group_name', 'integration']); |
|
28 | 27 | pyroutes.register('repo_group_integrations_create', '/%(repo_group_name)s/settings/integrations/%(integration)s/new', ['repo_group_name', 'integration']); |
|
29 | 28 | pyroutes.register('repo_group_integrations_edit', '/%(repo_group_name)s/settings/integrations/%(integration)s/%(integration_id)s', ['repo_group_name', 'integration', 'integration_id']); |
|
30 | 29 | pyroutes.register('repo_integrations_home', '/%(repo_name)s/settings/integrations', ['repo_name']); |
|
31 | 30 | pyroutes.register('repo_integrations_new', '/%(repo_name)s/settings/integrations/new', ['repo_name']); |
|
32 | 31 | pyroutes.register('repo_integrations_list', '/%(repo_name)s/settings/integrations/%(integration)s', ['repo_name', 'integration']); |
|
33 | 32 | pyroutes.register('repo_integrations_create', '/%(repo_name)s/settings/integrations/%(integration)s/new', ['repo_name', 'integration']); |
|
34 | 33 | pyroutes.register('repo_integrations_edit', '/%(repo_name)s/settings/integrations/%(integration)s/%(integration_id)s', ['repo_name', 'integration', 'integration_id']); |
|
35 | 34 | pyroutes.register('ops_ping', '/_admin/ops/ping', []); |
|
36 | 35 | pyroutes.register('ops_error_test', '/_admin/ops/error', []); |
|
37 | 36 | pyroutes.register('ops_redirect_test', '/_admin/ops/redirect', []); |
|
38 | 37 | pyroutes.register('admin_home', '/_admin', []); |
|
39 | 38 | pyroutes.register('admin_audit_logs', '/_admin/audit_logs', []); |
|
40 | 39 | pyroutes.register('pull_requests_global_0', '/_admin/pull_requests/%(pull_request_id)s', ['pull_request_id']); |
|
41 | 40 | pyroutes.register('pull_requests_global_1', '/_admin/pull-requests/%(pull_request_id)s', ['pull_request_id']); |
|
42 | 41 | pyroutes.register('pull_requests_global', '/_admin/pull-request/%(pull_request_id)s', ['pull_request_id']); |
|
43 | 42 | pyroutes.register('admin_settings_open_source', '/_admin/settings/open_source', []); |
|
44 | 43 | pyroutes.register('admin_settings_vcs_svn_generate_cfg', '/_admin/settings/vcs/svn_generate_cfg', []); |
|
45 | 44 | pyroutes.register('admin_settings_system', '/_admin/settings/system', []); |
|
46 | 45 | pyroutes.register('admin_settings_system_update', '/_admin/settings/system/updates', []); |
|
47 | 46 | pyroutes.register('admin_settings_sessions', '/_admin/settings/sessions', []); |
|
48 | 47 | pyroutes.register('admin_settings_sessions_cleanup', '/_admin/settings/sessions/cleanup', []); |
|
49 | 48 | pyroutes.register('admin_settings_process_management', '/_admin/settings/process_management', []); |
|
50 | 49 | pyroutes.register('admin_settings_process_management_signal', '/_admin/settings/process_management/signal', []); |
|
51 | 50 | pyroutes.register('admin_permissions_application', '/_admin/permissions/application', []); |
|
52 | 51 | pyroutes.register('admin_permissions_application_update', '/_admin/permissions/application/update', []); |
|
53 | 52 | pyroutes.register('admin_permissions_global', '/_admin/permissions/global', []); |
|
54 | 53 | pyroutes.register('admin_permissions_global_update', '/_admin/permissions/global/update', []); |
|
55 | 54 | pyroutes.register('admin_permissions_object', '/_admin/permissions/object', []); |
|
56 | 55 | pyroutes.register('admin_permissions_object_update', '/_admin/permissions/object/update', []); |
|
57 | 56 | pyroutes.register('admin_permissions_ips', '/_admin/permissions/ips', []); |
|
58 | 57 | pyroutes.register('admin_permissions_overview', '/_admin/permissions/overview', []); |
|
59 | 58 | pyroutes.register('admin_permissions_auth_token_access', '/_admin/permissions/auth_token_access', []); |
|
60 | 59 | pyroutes.register('users', '/_admin/users', []); |
|
61 | 60 | pyroutes.register('users_data', '/_admin/users_data', []); |
|
62 | 61 | pyroutes.register('edit_user_auth_tokens', '/_admin/users/%(user_id)s/edit/auth_tokens', ['user_id']); |
|
63 | 62 | pyroutes.register('edit_user_auth_tokens_add', '/_admin/users/%(user_id)s/edit/auth_tokens/new', ['user_id']); |
|
64 | 63 | pyroutes.register('edit_user_auth_tokens_delete', '/_admin/users/%(user_id)s/edit/auth_tokens/delete', ['user_id']); |
|
65 | 64 | pyroutes.register('edit_user_ssh_keys', '/_admin/users/%(user_id)s/edit/ssh_keys', ['user_id']); |
|
66 | 65 | pyroutes.register('edit_user_ssh_keys_generate_keypair', '/_admin/users/%(user_id)s/edit/ssh_keys/generate', ['user_id']); |
|
67 | 66 | pyroutes.register('edit_user_ssh_keys_add', '/_admin/users/%(user_id)s/edit/ssh_keys/new', ['user_id']); |
|
68 | 67 | pyroutes.register('edit_user_ssh_keys_delete', '/_admin/users/%(user_id)s/edit/ssh_keys/delete', ['user_id']); |
|
69 | 68 | pyroutes.register('edit_user_emails', '/_admin/users/%(user_id)s/edit/emails', ['user_id']); |
|
70 | 69 | pyroutes.register('edit_user_emails_add', '/_admin/users/%(user_id)s/edit/emails/new', ['user_id']); |
|
71 | 70 | pyroutes.register('edit_user_emails_delete', '/_admin/users/%(user_id)s/edit/emails/delete', ['user_id']); |
|
72 | 71 | pyroutes.register('edit_user_ips', '/_admin/users/%(user_id)s/edit/ips', ['user_id']); |
|
73 | 72 | pyroutes.register('edit_user_ips_add', '/_admin/users/%(user_id)s/edit/ips/new', ['user_id']); |
|
74 | 73 | pyroutes.register('edit_user_ips_delete', '/_admin/users/%(user_id)s/edit/ips/delete', ['user_id']); |
|
75 | 74 | pyroutes.register('edit_user_perms_summary', '/_admin/users/%(user_id)s/edit/permissions_summary', ['user_id']); |
|
76 | 75 | pyroutes.register('edit_user_perms_summary_json', '/_admin/users/%(user_id)s/edit/permissions_summary/json', ['user_id']); |
|
77 | 76 | pyroutes.register('edit_user_groups_management', '/_admin/users/%(user_id)s/edit/groups_management', ['user_id']); |
|
78 | 77 | pyroutes.register('edit_user_groups_management_updates', '/_admin/users/%(user_id)s/edit/edit_user_groups_management/updates', ['user_id']); |
|
79 | 78 | pyroutes.register('edit_user_audit_logs', '/_admin/users/%(user_id)s/edit/audit', ['user_id']); |
|
80 | 79 | pyroutes.register('user_groups', '/_admin/user_groups', []); |
|
81 | 80 | pyroutes.register('user_groups_data', '/_admin/user_groups_data', []); |
|
82 | 81 | pyroutes.register('user_group_members_data', '/_admin/user_groups/%(user_group_id)s/members', ['user_group_id']); |
|
83 | 82 | pyroutes.register('edit_user_group_perms_summary', '/_admin/user_groups/%(user_group_id)s/edit/permissions_summary', ['user_group_id']); |
|
84 | 83 | pyroutes.register('edit_user_group_perms_summary_json', '/_admin/user_groups/%(user_group_id)s/edit/permissions_summary/json', ['user_group_id']); |
|
84 | pyroutes.register('repos', '/_admin/repos', []); | |
|
85 | pyroutes.register('repo_new', '/_admin/repos/new', []); | |
|
86 | pyroutes.register('repo_create', '/_admin/repos/create', []); | |
|
85 | 87 | pyroutes.register('channelstream_connect', '/_admin/channelstream/connect', []); |
|
86 | 88 | pyroutes.register('channelstream_subscribe', '/_admin/channelstream/subscribe', []); |
|
87 | 89 | pyroutes.register('channelstream_proxy', '/_channelstream', []); |
|
88 | 90 | pyroutes.register('login', '/_admin/login', []); |
|
89 | 91 | pyroutes.register('logout', '/_admin/logout', []); |
|
90 | 92 | pyroutes.register('register', '/_admin/register', []); |
|
91 | 93 | pyroutes.register('reset_password', '/_admin/password_reset', []); |
|
92 | 94 | pyroutes.register('reset_password_confirmation', '/_admin/password_reset_confirmation', []); |
|
93 | 95 | pyroutes.register('home', '/', []); |
|
94 | 96 | pyroutes.register('user_autocomplete_data', '/_users', []); |
|
95 | 97 | pyroutes.register('user_group_autocomplete_data', '/_user_groups', []); |
|
96 | 98 | pyroutes.register('repo_list_data', '/_repos', []); |
|
97 | 99 | pyroutes.register('goto_switcher_data', '/_goto_data', []); |
|
98 | 100 | pyroutes.register('journal', '/_admin/journal', []); |
|
99 | 101 | pyroutes.register('journal_rss', '/_admin/journal/rss', []); |
|
100 | 102 | pyroutes.register('journal_atom', '/_admin/journal/atom', []); |
|
101 | 103 | pyroutes.register('journal_public', '/_admin/public_journal', []); |
|
102 | 104 | pyroutes.register('journal_public_atom', '/_admin/public_journal/atom', []); |
|
103 | 105 | pyroutes.register('journal_public_atom_old', '/_admin/public_journal_atom', []); |
|
104 | 106 | pyroutes.register('journal_public_rss', '/_admin/public_journal/rss', []); |
|
105 | 107 | pyroutes.register('journal_public_rss_old', '/_admin/public_journal_rss', []); |
|
106 | 108 | pyroutes.register('toggle_following', '/_admin/toggle_following', []); |
|
107 | 109 | pyroutes.register('repo_creating', '/%(repo_name)s/repo_creating', ['repo_name']); |
|
108 | 110 | pyroutes.register('repo_creating_check', '/%(repo_name)s/repo_creating_check', ['repo_name']); |
|
109 | 111 | pyroutes.register('repo_summary_explicit', '/%(repo_name)s/summary', ['repo_name']); |
|
110 | 112 | pyroutes.register('repo_summary_commits', '/%(repo_name)s/summary-commits', ['repo_name']); |
|
111 | 113 | pyroutes.register('repo_commit', '/%(repo_name)s/changeset/%(commit_id)s', ['repo_name', 'commit_id']); |
|
112 | 114 | pyroutes.register('repo_commit_children', '/%(repo_name)s/changeset_children/%(commit_id)s', ['repo_name', 'commit_id']); |
|
113 | 115 | pyroutes.register('repo_commit_parents', '/%(repo_name)s/changeset_parents/%(commit_id)s', ['repo_name', 'commit_id']); |
|
114 | 116 | pyroutes.register('repo_commit_raw', '/%(repo_name)s/changeset-diff/%(commit_id)s', ['repo_name', 'commit_id']); |
|
115 | 117 | pyroutes.register('repo_commit_patch', '/%(repo_name)s/changeset-patch/%(commit_id)s', ['repo_name', 'commit_id']); |
|
116 | 118 | pyroutes.register('repo_commit_download', '/%(repo_name)s/changeset-download/%(commit_id)s', ['repo_name', 'commit_id']); |
|
117 | 119 | pyroutes.register('repo_commit_data', '/%(repo_name)s/changeset-data/%(commit_id)s', ['repo_name', 'commit_id']); |
|
118 | 120 | pyroutes.register('repo_commit_comment_create', '/%(repo_name)s/changeset/%(commit_id)s/comment/create', ['repo_name', 'commit_id']); |
|
119 | 121 | pyroutes.register('repo_commit_comment_preview', '/%(repo_name)s/changeset/%(commit_id)s/comment/preview', ['repo_name', 'commit_id']); |
|
120 | 122 | pyroutes.register('repo_commit_comment_delete', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_id)s/delete', ['repo_name', 'commit_id', 'comment_id']); |
|
121 | 123 | pyroutes.register('repo_commit_raw_deprecated', '/%(repo_name)s/raw-changeset/%(commit_id)s', ['repo_name', 'commit_id']); |
|
122 | 124 | pyroutes.register('repo_archivefile', '/%(repo_name)s/archive/%(fname)s', ['repo_name', 'fname']); |
|
123 | 125 | pyroutes.register('repo_files_diff', '/%(repo_name)s/diff/%(f_path)s', ['repo_name', 'f_path']); |
|
124 | 126 | pyroutes.register('repo_files_diff_2way_redirect', '/%(repo_name)s/diff-2way/%(f_path)s', ['repo_name', 'f_path']); |
|
125 | 127 | pyroutes.register('repo_files', '/%(repo_name)s/files/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
126 | 128 | pyroutes.register('repo_files:default_path', '/%(repo_name)s/files/%(commit_id)s/', ['repo_name', 'commit_id']); |
|
127 | 129 | pyroutes.register('repo_files:default_commit', '/%(repo_name)s/files', ['repo_name']); |
|
128 | 130 | pyroutes.register('repo_files:rendered', '/%(repo_name)s/render/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
129 | 131 | pyroutes.register('repo_files:annotated', '/%(repo_name)s/annotate/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
130 | 132 | pyroutes.register('repo_files:annotated_previous', '/%(repo_name)s/annotate-previous/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
131 | 133 | pyroutes.register('repo_nodetree_full', '/%(repo_name)s/nodetree_full/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
132 | 134 | pyroutes.register('repo_nodetree_full:default_path', '/%(repo_name)s/nodetree_full/%(commit_id)s/', ['repo_name', 'commit_id']); |
|
133 | 135 | pyroutes.register('repo_files_nodelist', '/%(repo_name)s/nodelist/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
134 | 136 | pyroutes.register('repo_file_raw', '/%(repo_name)s/raw/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
135 | 137 | pyroutes.register('repo_file_download', '/%(repo_name)s/download/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
136 | 138 | pyroutes.register('repo_file_download:legacy', '/%(repo_name)s/rawfile/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
137 | 139 | pyroutes.register('repo_file_history', '/%(repo_name)s/history/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
138 | 140 | pyroutes.register('repo_file_authors', '/%(repo_name)s/authors/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
139 | 141 | pyroutes.register('repo_files_remove_file', '/%(repo_name)s/remove_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
140 | 142 | pyroutes.register('repo_files_delete_file', '/%(repo_name)s/delete_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
141 | 143 | pyroutes.register('repo_files_edit_file', '/%(repo_name)s/edit_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
142 | 144 | pyroutes.register('repo_files_update_file', '/%(repo_name)s/update_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
143 | 145 | pyroutes.register('repo_files_add_file', '/%(repo_name)s/add_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
144 | 146 | pyroutes.register('repo_files_create_file', '/%(repo_name)s/create_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
145 | 147 | pyroutes.register('repo_refs_data', '/%(repo_name)s/refs-data', ['repo_name']); |
|
146 | 148 | pyroutes.register('repo_refs_changelog_data', '/%(repo_name)s/refs-data-changelog', ['repo_name']); |
|
147 | 149 | pyroutes.register('repo_stats', '/%(repo_name)s/repo_stats/%(commit_id)s', ['repo_name', 'commit_id']); |
|
148 | 150 | pyroutes.register('repo_changelog', '/%(repo_name)s/changelog', ['repo_name']); |
|
149 | 151 | pyroutes.register('repo_changelog_file', '/%(repo_name)s/changelog/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
150 | 152 | pyroutes.register('repo_changelog_elements', '/%(repo_name)s/changelog_elements', ['repo_name']); |
|
151 | 153 | pyroutes.register('repo_compare_select', '/%(repo_name)s/compare', ['repo_name']); |
|
152 | 154 | pyroutes.register('repo_compare', '/%(repo_name)s/compare/%(source_ref_type)s@%(source_ref)s...%(target_ref_type)s@%(target_ref)s', ['repo_name', 'source_ref_type', 'source_ref', 'target_ref_type', 'target_ref']); |
|
153 | 155 | pyroutes.register('tags_home', '/%(repo_name)s/tags', ['repo_name']); |
|
154 | 156 | pyroutes.register('branches_home', '/%(repo_name)s/branches', ['repo_name']); |
|
155 | 157 | pyroutes.register('bookmarks_home', '/%(repo_name)s/bookmarks', ['repo_name']); |
|
156 | 158 | pyroutes.register('repo_fork_new', '/%(repo_name)s/fork', ['repo_name']); |
|
157 | 159 | pyroutes.register('repo_fork_create', '/%(repo_name)s/fork/create', ['repo_name']); |
|
158 | 160 | pyroutes.register('repo_forks_show_all', '/%(repo_name)s/forks', ['repo_name']); |
|
159 | 161 | pyroutes.register('repo_forks_data', '/%(repo_name)s/forks/data', ['repo_name']); |
|
160 | 162 | pyroutes.register('pullrequest_show', '/%(repo_name)s/pull-request/%(pull_request_id)s', ['repo_name', 'pull_request_id']); |
|
161 | 163 | pyroutes.register('pullrequest_show_all', '/%(repo_name)s/pull-request', ['repo_name']); |
|
162 | 164 | pyroutes.register('pullrequest_show_all_data', '/%(repo_name)s/pull-request-data', ['repo_name']); |
|
163 | 165 | pyroutes.register('pullrequest_repo_refs', '/%(repo_name)s/pull-request/refs/%(target_repo_name)s', ['repo_name', 'target_repo_name']); |
|
164 | 166 | pyroutes.register('pullrequest_repo_destinations', '/%(repo_name)s/pull-request/repo-destinations', ['repo_name']); |
|
165 | 167 | pyroutes.register('pullrequest_new', '/%(repo_name)s/pull-request/new', ['repo_name']); |
|
166 | 168 | pyroutes.register('pullrequest_create', '/%(repo_name)s/pull-request/create', ['repo_name']); |
|
167 | 169 | pyroutes.register('pullrequest_update', '/%(repo_name)s/pull-request/%(pull_request_id)s/update', ['repo_name', 'pull_request_id']); |
|
168 | 170 | pyroutes.register('pullrequest_merge', '/%(repo_name)s/pull-request/%(pull_request_id)s/merge', ['repo_name', 'pull_request_id']); |
|
169 | 171 | pyroutes.register('pullrequest_delete', '/%(repo_name)s/pull-request/%(pull_request_id)s/delete', ['repo_name', 'pull_request_id']); |
|
170 | 172 | pyroutes.register('pullrequest_comment_create', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment', ['repo_name', 'pull_request_id']); |
|
171 | 173 | pyroutes.register('pullrequest_comment_delete', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment/%(comment_id)s/delete', ['repo_name', 'pull_request_id', 'comment_id']); |
|
172 | 174 | pyroutes.register('edit_repo', '/%(repo_name)s/settings', ['repo_name']); |
|
173 | 175 | pyroutes.register('edit_repo_advanced', '/%(repo_name)s/settings/advanced', ['repo_name']); |
|
174 | 176 | pyroutes.register('edit_repo_advanced_delete', '/%(repo_name)s/settings/advanced/delete', ['repo_name']); |
|
175 | 177 | pyroutes.register('edit_repo_advanced_locking', '/%(repo_name)s/settings/advanced/locking', ['repo_name']); |
|
176 | 178 | pyroutes.register('edit_repo_advanced_journal', '/%(repo_name)s/settings/advanced/journal', ['repo_name']); |
|
177 | 179 | pyroutes.register('edit_repo_advanced_fork', '/%(repo_name)s/settings/advanced/fork', ['repo_name']); |
|
178 | 180 | pyroutes.register('edit_repo_caches', '/%(repo_name)s/settings/caches', ['repo_name']); |
|
179 | 181 | pyroutes.register('edit_repo_perms', '/%(repo_name)s/settings/permissions', ['repo_name']); |
|
182 | pyroutes.register('edit_repo_maintenance', '/%(repo_name)s/settings/maintenance', ['repo_name']); | |
|
183 | pyroutes.register('edit_repo_maintenance_execute', '/%(repo_name)s/settings/maintenance/execute', ['repo_name']); | |
|
184 | pyroutes.register('edit_repo_fields', '/%(repo_name)s/settings/fields', ['repo_name']); | |
|
185 | pyroutes.register('edit_repo_fields_create', '/%(repo_name)s/settings/fields/create', ['repo_name']); | |
|
186 | pyroutes.register('edit_repo_fields_delete', '/%(repo_name)s/settings/fields/%(field_id)s/delete', ['repo_name', 'field_id']); | |
|
187 | pyroutes.register('repo_edit_toggle_locking', '/%(repo_name)s/settings/toggle_locking', ['repo_name']); | |
|
188 | pyroutes.register('edit_repo_remote', '/%(repo_name)s/settings/remote', ['repo_name']); | |
|
189 | pyroutes.register('edit_repo_remote_pull', '/%(repo_name)s/settings/remote/pull', ['repo_name']); | |
|
190 | pyroutes.register('edit_repo_statistics', '/%(repo_name)s/settings/statistics', ['repo_name']); | |
|
191 | pyroutes.register('edit_repo_statistics_reset', '/%(repo_name)s/settings/statistics/update', ['repo_name']); | |
|
192 | pyroutes.register('edit_repo_issuetracker', '/%(repo_name)s/settings/issue_trackers', ['repo_name']); | |
|
193 | pyroutes.register('edit_repo_issuetracker_test', '/%(repo_name)s/settings/issue_trackers/test', ['repo_name']); | |
|
194 | pyroutes.register('edit_repo_issuetracker_delete', '/%(repo_name)s/settings/issue_trackers/delete', ['repo_name']); | |
|
195 | pyroutes.register('edit_repo_issuetracker_update', '/%(repo_name)s/settings/issue_trackers/update', ['repo_name']); | |
|
196 | pyroutes.register('edit_repo_vcs', '/%(repo_name)s/settings/vcs', ['repo_name']); | |
|
197 | pyroutes.register('edit_repo_vcs_update', '/%(repo_name)s/settings/vcs/update', ['repo_name']); | |
|
198 | pyroutes.register('edit_repo_vcs_svn_pattern_delete', '/%(repo_name)s/settings/vcs/svn_pattern/delete', ['repo_name']); | |
|
180 | 199 | pyroutes.register('repo_reviewers', '/%(repo_name)s/settings/review/rules', ['repo_name']); |
|
181 | 200 | pyroutes.register('repo_default_reviewers_data', '/%(repo_name)s/settings/review/default-reviewers', ['repo_name']); |
|
182 |
pyroutes.register(' |
|
|
183 | pyroutes.register('repo_maintenance_execute', '/%(repo_name)s/settings/maintenance/execute', ['repo_name']); | |
|
184 | pyroutes.register('strip', '/%(repo_name)s/settings/strip', ['repo_name']); | |
|
201 | pyroutes.register('edit_repo_strip', '/%(repo_name)s/settings/strip', ['repo_name']); | |
|
185 | 202 | pyroutes.register('strip_check', '/%(repo_name)s/settings/strip_check', ['repo_name']); |
|
186 | 203 | pyroutes.register('strip_execute', '/%(repo_name)s/settings/strip_execute', ['repo_name']); |
|
187 | 204 | pyroutes.register('rss_feed_home', '/%(repo_name)s/feed/rss', ['repo_name']); |
|
188 | 205 | pyroutes.register('atom_feed_home', '/%(repo_name)s/feed/atom', ['repo_name']); |
|
189 | 206 | pyroutes.register('repo_summary', '/%(repo_name)s', ['repo_name']); |
|
190 | 207 | pyroutes.register('repo_summary_slash', '/%(repo_name)s/', ['repo_name']); |
|
191 | 208 | pyroutes.register('repo_group_home', '/%(repo_group_name)s', ['repo_group_name']); |
|
192 | 209 | pyroutes.register('repo_group_home_slash', '/%(repo_group_name)s/', ['repo_group_name']); |
|
193 | 210 | pyroutes.register('search', '/_admin/search', []); |
|
194 | 211 | pyroutes.register('search_repo', '/%(repo_name)s/search', ['repo_name']); |
|
195 | 212 | pyroutes.register('user_profile', '/_profiles/%(username)s', ['username']); |
|
196 | 213 | pyroutes.register('my_account_profile', '/_admin/my_account/profile', []); |
|
197 | 214 | pyroutes.register('my_account_edit', '/_admin/my_account/edit', []); |
|
198 | 215 | pyroutes.register('my_account_update', '/_admin/my_account/update', []); |
|
199 | 216 | pyroutes.register('my_account_password', '/_admin/my_account/password', []); |
|
200 | 217 | pyroutes.register('my_account_password_update', '/_admin/my_account/password/update', []); |
|
201 | 218 | pyroutes.register('my_account_auth_tokens', '/_admin/my_account/auth_tokens', []); |
|
202 | 219 | pyroutes.register('my_account_auth_tokens_add', '/_admin/my_account/auth_tokens/new', []); |
|
203 | 220 | pyroutes.register('my_account_auth_tokens_delete', '/_admin/my_account/auth_tokens/delete', []); |
|
204 | 221 | pyroutes.register('my_account_emails', '/_admin/my_account/emails', []); |
|
205 | 222 | pyroutes.register('my_account_emails_add', '/_admin/my_account/emails/new', []); |
|
206 | 223 | pyroutes.register('my_account_emails_delete', '/_admin/my_account/emails/delete', []); |
|
207 | 224 | pyroutes.register('my_account_repos', '/_admin/my_account/repos', []); |
|
208 | 225 | pyroutes.register('my_account_watched', '/_admin/my_account/watched', []); |
|
209 | 226 | pyroutes.register('my_account_perms', '/_admin/my_account/perms', []); |
|
210 | 227 | pyroutes.register('my_account_notifications', '/_admin/my_account/notifications', []); |
|
211 | 228 | pyroutes.register('my_account_notifications_toggle_visibility', '/_admin/my_account/toggle_visibility', []); |
|
212 | 229 | pyroutes.register('my_account_pullrequests', '/_admin/my_account/pull_requests', []); |
|
213 | 230 | pyroutes.register('my_account_pullrequests_data', '/_admin/my_account/pull_requests/data', []); |
|
214 | 231 | pyroutes.register('notifications_show_all', '/_admin/notifications', []); |
|
215 | 232 | pyroutes.register('notifications_mark_all_read', '/_admin/notifications/mark_all_read', []); |
|
216 | 233 | pyroutes.register('notifications_show', '/_admin/notifications/%(notification_id)s', ['notification_id']); |
|
217 | 234 | pyroutes.register('notifications_update', '/_admin/notifications/%(notification_id)s/update', ['notification_id']); |
|
218 | 235 | pyroutes.register('notifications_delete', '/_admin/notifications/%(notification_id)s/delete', ['notification_id']); |
|
219 | 236 | pyroutes.register('my_account_notifications_test_channelstream', '/_admin/my_account/test_channelstream', []); |
|
220 | 237 | pyroutes.register('gists_show', '/_admin/gists', []); |
|
221 | 238 | pyroutes.register('gists_new', '/_admin/gists/new', []); |
|
222 | 239 | pyroutes.register('gists_create', '/_admin/gists/create', []); |
|
223 | 240 | pyroutes.register('gist_show', '/_admin/gists/%(gist_id)s', ['gist_id']); |
|
224 | 241 | pyroutes.register('gist_delete', '/_admin/gists/%(gist_id)s/delete', ['gist_id']); |
|
225 | 242 | pyroutes.register('gist_edit', '/_admin/gists/%(gist_id)s/edit', ['gist_id']); |
|
226 | 243 | pyroutes.register('gist_edit_check_revision', '/_admin/gists/%(gist_id)s/edit/check_revision', ['gist_id']); |
|
227 | 244 | pyroutes.register('gist_update', '/_admin/gists/%(gist_id)s/update', ['gist_id']); |
|
228 | 245 | pyroutes.register('gist_show_rev', '/_admin/gists/%(gist_id)s/%(revision)s', ['gist_id', 'revision']); |
|
229 | 246 | pyroutes.register('gist_show_formatted', '/_admin/gists/%(gist_id)s/%(revision)s/%(format)s', ['gist_id', 'revision', 'format']); |
|
230 | 247 | pyroutes.register('gist_show_formatted_path', '/_admin/gists/%(gist_id)s/%(revision)s/%(format)s/%(f_path)s', ['gist_id', 'revision', 'format', 'f_path']); |
|
231 | 248 | pyroutes.register('debug_style_home', '/_admin/debug_style', []); |
|
232 | 249 | pyroutes.register('debug_style_template', '/_admin/debug_style/t/%(t_path)s', ['t_path']); |
|
233 | 250 | pyroutes.register('apiv2', '/_admin/api', []); |
|
234 | 251 | } |
@@ -1,37 +1,37 b'' | |||
|
1 | 1 | ## -*- coding: utf-8 -*- |
|
2 | 2 | <%inherit file="/base/base.mako"/> |
|
3 | 3 | |
|
4 | 4 | <%def name="title()"> |
|
5 | 5 | ${_('Add repository')} |
|
6 | 6 | %if c.rhodecode_name: |
|
7 | 7 | · ${h.branding(c.rhodecode_name)} |
|
8 | 8 | %endif |
|
9 | 9 | </%def> |
|
10 | 10 | |
|
11 | 11 | <%def name="breadcrumbs_links()"> |
|
12 | 12 | %if c.rhodecode_user.is_admin: |
|
13 | 13 | ${h.link_to(_('Admin'),h.route_path('admin_home'))} |
|
14 | 14 | » |
|
15 |
${h.link_to(_('Repositories'),h. |
|
|
15 | ${h.link_to(_('Repositories'), h.route_path('repos'))} | |
|
16 | 16 | %else: |
|
17 | 17 | ${_('Admin')} |
|
18 | 18 | » |
|
19 | 19 | ${_('Repositories')} |
|
20 | 20 | %endif |
|
21 | 21 | » |
|
22 | 22 | ${_('Add Repository')} |
|
23 | 23 | </%def> |
|
24 | 24 | |
|
25 | 25 | <%def name="menu_bar_nav()"> |
|
26 | 26 | ${self.menu_items(active='admin')} |
|
27 | 27 | </%def> |
|
28 | 28 | |
|
29 | 29 | <%def name="main()"> |
|
30 | 30 | <div class="box"> |
|
31 | 31 | <!-- box / title --> |
|
32 | 32 | <div class="title"> |
|
33 | 33 | ${self.breadcrumbs()} |
|
34 | 34 | </div> |
|
35 | 35 | <%include file="repo_add_base.mako"/> |
|
36 | 36 | </div> |
|
37 | 37 | </%def> |
@@ -1,159 +1,159 b'' | |||
|
1 | 1 | ## -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | ${h.secure_form(h.url('repos'))} | |
|
3 | ${h.secure_form(h.route_path('repo_create'), method='POST', request=request)} | |
|
4 | 4 | <div class="form"> |
|
5 | 5 | <!-- fields --> |
|
6 | 6 | <div class="fields"> |
|
7 | 7 | <div class="field"> |
|
8 | 8 | <div class="label"> |
|
9 | 9 | <label for="repo_name">${_('Name')}:</label> |
|
10 | 10 | </div> |
|
11 | 11 | <div class="input"> |
|
12 | 12 | ${h.text('repo_name', class_="medium")} |
|
13 | 13 | <div class="info-block"> |
|
14 | 14 | <a id="remote_clone_toggle" href="#"><i class="icon-download-alt"></i> ${_('Import Existing Repository ?')}</a> |
|
15 | 15 | </div> |
|
16 | 16 | %if not c.rhodecode_user.is_admin: |
|
17 | 17 | ${h.hidden('user_created',True)} |
|
18 | 18 | %endif |
|
19 | 19 | </div> |
|
20 | 20 | </div> |
|
21 | 21 | <div id="remote_clone" class="field" style="display: none;"> |
|
22 | 22 | <div class="label"> |
|
23 | 23 | <label for="clone_uri">${_('Clone from')}:</label> |
|
24 | 24 | </div> |
|
25 | 25 | <div class="input"> |
|
26 | 26 | ${h.text('clone_uri', class_="medium")} |
|
27 | 27 | <span class="help-block"> |
|
28 | 28 | <pre> |
|
29 | 29 | - The repository must be accessible over http:// or https:// |
|
30 | 30 | - For Git projects it's recommended appending .git to the end of clone url. |
|
31 | 31 | - Make sure to select proper repository type from the below selector before importing it. |
|
32 | 32 | - If your HTTP[S] repository is not publicly accessible, |
|
33 | 33 | add authentication information to the URL: https://username:password@server.company.com/repo-name. |
|
34 | 34 | - The Git LFS/Mercurial Largefiles objects will not be imported. |
|
35 | 35 | - For very large repositories, it's recommended to manually copy them into the |
|
36 | 36 | RhodeCode <a href="${h.url('admin_settings_vcs', anchor='vcs-storage-options')}">storage location</a> and run <a href="${h.url('admin_settings_mapping')}">Remap and Rescan</a>. |
|
37 | 37 | </pre> |
|
38 | 38 | </span> |
|
39 | 39 | </div> |
|
40 | 40 | </div> |
|
41 | 41 | <div class="field"> |
|
42 | 42 | <div class="label"> |
|
43 | 43 | <label for="repo_description">${_('Description')}:</label> |
|
44 | 44 | </div> |
|
45 | 45 | <div class="textarea editor"> |
|
46 | 46 | ${h.textarea('repo_description')} |
|
47 | 47 | <span class="help-block">${_('Keep it short and to the point. Use a README file for longer descriptions.')}</span> |
|
48 | 48 | </div> |
|
49 | 49 | </div> |
|
50 | 50 | <div class="field"> |
|
51 | 51 | <div class="label"> |
|
52 | 52 | <label for="repo_group">${_('Repository Group')}:</label> |
|
53 | 53 | </div> |
|
54 | 54 | <div class="select"> |
|
55 | 55 | ${h.select('repo_group',request.GET.get('parent_group'),c.repo_groups,class_="medium")} |
|
56 | 56 | % if c.personal_repo_group: |
|
57 | 57 | <a class="btn" href="#" id="select_my_group" data-personal-group-id="${c.personal_repo_group.group_id}"> |
|
58 | 58 | ${_('Select my personal group (%(repo_group_name)s)') % {'repo_group_name': c.personal_repo_group.group_name}} |
|
59 | 59 | </a> |
|
60 | 60 | % endif |
|
61 | 61 | <span class="help-block">${_('Optionally select a group to put this repository into.')}</span> |
|
62 | 62 | </div> |
|
63 | 63 | </div> |
|
64 | 64 | <div id="copy_perms" class="field"> |
|
65 | 65 | <div class="label label-checkbox"> |
|
66 | 66 | <label for="repo_copy_permissions">${_('Copy Parent Group Permissions')}:</label> |
|
67 | 67 | </div> |
|
68 | 68 | <div class="checkboxes"> |
|
69 | 69 | ${h.checkbox('repo_copy_permissions', value="True", checked="checked")} |
|
70 | 70 | <span class="help-block">${_('Copy permission set from the parent repository group.')}</span> |
|
71 | 71 | </div> |
|
72 | 72 | </div> |
|
73 | 73 | <div class="field"> |
|
74 | 74 | <div class="label"> |
|
75 | 75 | <label for="repo_type">${_('Type')}:</label> |
|
76 | 76 | </div> |
|
77 | 77 | <div class="select"> |
|
78 | 78 | ${h.select('repo_type','hg',c.backends)} |
|
79 | 79 | <span class="help-block">${_('Set the type of repository to create.')}</span> |
|
80 | 80 | </div> |
|
81 | 81 | </div> |
|
82 | 82 | <div class="field"> |
|
83 | 83 | <div class="label"> |
|
84 | 84 | <label for="repo_landing_rev">${_('Landing commit')}:</label> |
|
85 | 85 | </div> |
|
86 | 86 | <div class="select"> |
|
87 | 87 | ${h.select('repo_landing_rev','',c.landing_revs,class_="medium")} |
|
88 | 88 | <span class="help-block">${_('The default commit for file pages, downloads, full text search index, and README generation.')}</span> |
|
89 | 89 | </div> |
|
90 | 90 | </div> |
|
91 | 91 | <div class="field"> |
|
92 | 92 | <div class="label label-checkbox"> |
|
93 | 93 | <label for="repo_private">${_('Private Repository')}:</label> |
|
94 | 94 | </div> |
|
95 | 95 | <div class="checkboxes"> |
|
96 | 96 | ${h.checkbox('repo_private',value="True")} |
|
97 | 97 | <span class="help-block">${_('Private repositories are only visible to people explicitly added as collaborators.')}</span> |
|
98 | 98 | </div> |
|
99 | 99 | </div> |
|
100 | 100 | <div class="buttons"> |
|
101 | 101 | ${h.submit('save',_('Save'),class_="btn")} |
|
102 | 102 | </div> |
|
103 | 103 | </div> |
|
104 | 104 | </div> |
|
105 | 105 | <script> |
|
106 | 106 | $(document).ready(function(){ |
|
107 | 107 | var setCopyPermsOption = function(group_val){ |
|
108 | 108 | if(group_val != "-1"){ |
|
109 | 109 | $('#copy_perms').show() |
|
110 | 110 | } |
|
111 | 111 | else{ |
|
112 | 112 | $('#copy_perms').hide(); |
|
113 | 113 | } |
|
114 | 114 | }; |
|
115 | 115 | |
|
116 | 116 | $('#remote_clone_toggle').on('click', function(e){ |
|
117 | 117 | $('#remote_clone').show(); |
|
118 | 118 | e.preventDefault(); |
|
119 | 119 | }); |
|
120 | 120 | |
|
121 | 121 | if($('#remote_clone input').hasClass('error')){ |
|
122 | 122 | $('#remote_clone').show(); |
|
123 | 123 | } |
|
124 | 124 | if($('#remote_clone input').val()){ |
|
125 | 125 | $('#remote_clone').show(); |
|
126 | 126 | } |
|
127 | 127 | |
|
128 | 128 | $("#repo_group").select2({ |
|
129 | 129 | 'containerCssClass': "drop-menu", |
|
130 | 130 | 'dropdownCssClass': "drop-menu-dropdown", |
|
131 | 131 | 'dropdownAutoWidth': true, |
|
132 | 132 | 'width': "resolve" |
|
133 | 133 | }); |
|
134 | 134 | |
|
135 | 135 | setCopyPermsOption($('#repo_group').val()); |
|
136 | 136 | $("#repo_group").on("change", function(e) { |
|
137 | 137 | setCopyPermsOption(e.val) |
|
138 | 138 | }); |
|
139 | 139 | |
|
140 | 140 | $("#repo_type").select2({ |
|
141 | 141 | 'containerCssClass': "drop-menu", |
|
142 | 142 | 'dropdownCssClass': "drop-menu-dropdown", |
|
143 | 143 | 'minimumResultsForSearch': -1, |
|
144 | 144 | }); |
|
145 | 145 | $("#repo_landing_rev").select2({ |
|
146 | 146 | 'containerCssClass': "drop-menu", |
|
147 | 147 | 'dropdownCssClass': "drop-menu-dropdown", |
|
148 | 148 | 'minimumResultsForSearch': -1, |
|
149 | 149 | }); |
|
150 | 150 | $('#repo_name').focus(); |
|
151 | 151 | |
|
152 | 152 | $('#select_my_group').on('click', function(e){ |
|
153 | 153 | e.preventDefault(); |
|
154 | 154 | $("#repo_group").val($(this).data('personalGroupId')).trigger("change"); |
|
155 | 155 | }) |
|
156 | 156 | |
|
157 | 157 | }) |
|
158 | 158 | </script> |
|
159 | 159 | ${h.end_form()} |
@@ -1,99 +1,99 b'' | |||
|
1 | 1 | ## -*- coding: utf-8 -*- |
|
2 | 2 | ## |
|
3 | 3 | ## See also repo_settings.html |
|
4 | 4 | ## |
|
5 | 5 | <%inherit file="/base/base.mako"/> |
|
6 | 6 | |
|
7 | 7 | <%def name="title()"> |
|
8 | 8 | ${_('%s repository settings') % c.repo_info.repo_name} |
|
9 | 9 | %if c.rhodecode_name: |
|
10 | 10 | · ${h.branding(c.rhodecode_name)} |
|
11 | 11 | %endif |
|
12 | 12 | </%def> |
|
13 | 13 | |
|
14 | 14 | <%def name="breadcrumbs_links()"> |
|
15 | 15 | ${_('Settings')} |
|
16 | 16 | </%def> |
|
17 | 17 | |
|
18 | 18 | <%def name="menu_bar_nav()"> |
|
19 | 19 | ${self.menu_items(active='repositories')} |
|
20 | 20 | </%def> |
|
21 | 21 | |
|
22 | 22 | <%def name="menu_bar_subnav()"> |
|
23 | 23 | ${self.repo_menu(active='options')} |
|
24 | 24 | </%def> |
|
25 | 25 | |
|
26 | 26 | <%def name="main_content()"> |
|
27 | 27 | % if hasattr(c, 'repo_edit_template'): |
|
28 | 28 | <%include file="${c.repo_edit_template}"/> |
|
29 | 29 | % else: |
|
30 | 30 | <%include file="/admin/repos/repo_edit_${c.active}.mako"/> |
|
31 | 31 | % endif |
|
32 | 32 | </%def> |
|
33 | 33 | |
|
34 | 34 | |
|
35 | 35 | <%def name="main()"> |
|
36 | 36 | <div class="box"> |
|
37 | 37 | <div class="title"> |
|
38 | 38 | ${self.repo_page_title(c.rhodecode_db_repo)} |
|
39 | 39 | ${self.breadcrumbs()} |
|
40 | 40 | </div> |
|
41 | 41 | |
|
42 | 42 | <div class="sidebar-col-wrapper scw-small"> |
|
43 | 43 | <div class="sidebar"> |
|
44 | 44 | <ul class="nav nav-pills nav-stacked"> |
|
45 | 45 | <li class="${'active' if c.active=='settings' else ''}"> |
|
46 | 46 | <a href="${h.route_path('edit_repo', repo_name=c.repo_name)}">${_('Settings')}</a> |
|
47 | 47 | </li> |
|
48 | 48 | <li class="${'active' if c.active=='permissions' else ''}"> |
|
49 | 49 | <a href="${h.route_path('edit_repo_perms', repo_name=c.repo_name)}">${_('Permissions')}</a> |
|
50 | 50 | </li> |
|
51 | 51 | <li class="${'active' if c.active=='advanced' else ''}"> |
|
52 | 52 | <a href="${h.route_path('edit_repo_advanced', repo_name=c.repo_name)}">${_('Advanced')}</a> |
|
53 | 53 | </li> |
|
54 | 54 | <li class="${'active' if c.active=='vcs' else ''}"> |
|
55 |
<a href="${h. |
|
|
55 | <a href="${h.route_path('edit_repo_vcs', repo_name=c.repo_name)}">${_('VCS')}</a> | |
|
56 | 56 | </li> |
|
57 | 57 | <li class="${'active' if c.active=='fields' else ''}"> |
|
58 |
<a href="${h. |
|
|
58 | <a href="${h.route_path('edit_repo_fields', repo_name=c.repo_name)}">${_('Extra Fields')}</a> | |
|
59 | 59 | </li> |
|
60 | 60 | <li class="${'active' if c.active=='issuetracker' else ''}"> |
|
61 |
<a href="${h. |
|
|
61 | <a href="${h.route_path('edit_repo_issuetracker', repo_name=c.repo_name)}">${_('Issue Tracker')}</a> | |
|
62 | 62 | </li> |
|
63 | 63 | <li class="${'active' if c.active=='caches' else ''}"> |
|
64 | 64 | <a href="${h.route_path('edit_repo_caches', repo_name=c.repo_name)}">${_('Caches')}</a> |
|
65 | 65 | </li> |
|
66 | 66 | %if c.repo_info.repo_type != 'svn': |
|
67 | 67 | <li class="${'active' if c.active=='remote' else ''}"> |
|
68 |
<a href="${h. |
|
|
68 | <a href="${h.route_path('edit_repo_remote', repo_name=c.repo_name)}">${_('Remote')}</a> | |
|
69 | 69 | </li> |
|
70 | 70 | %endif |
|
71 | 71 | <li class="${'active' if c.active=='statistics' else ''}"> |
|
72 |
<a href="${h. |
|
|
72 | <a href="${h.route_path('edit_repo_statistics', repo_name=c.repo_name)}">${_('Statistics')}</a> | |
|
73 | 73 | </li> |
|
74 | 74 | <li class="${'active' if c.active=='integrations' else ''}"> |
|
75 | 75 | <a href="${h.route_path('repo_integrations_home', repo_name=c.repo_name)}">${_('Integrations')}</a> |
|
76 | 76 | </li> |
|
77 | 77 | %if c.repo_info.repo_type != 'svn': |
|
78 | 78 | <li class="${'active' if c.active=='reviewers' else ''}"> |
|
79 | 79 | <a href="${h.route_path('repo_reviewers', repo_name=c.repo_name)}">${_('Reviewer Rules')}</a> |
|
80 | 80 | </li> |
|
81 | 81 | %endif |
|
82 | 82 | <li class="${'active' if c.active=='maintenance' else ''}"> |
|
83 | <a href="${h.route_path('repo_maintenance', repo_name=c.repo_name)}">${_('Maintenance')}</a> | |
|
83 | <a href="${h.route_path('edit_repo_maintenance', repo_name=c.repo_name)}">${_('Maintenance')}</a> | |
|
84 | 84 | </li> |
|
85 | 85 | <li class="${'active' if c.active=='strip' else ''}"> |
|
86 | <a href="${h.route_path('strip', repo_name=c.repo_name)}">${_('Strip')}</a> | |
|
86 | <a href="${h.route_path('edit_repo_strip', repo_name=c.repo_name)}">${_('Strip')}</a> | |
|
87 | 87 | </li> |
|
88 | 88 | |
|
89 | 89 | </ul> |
|
90 | 90 | </div> |
|
91 | 91 | |
|
92 | 92 | <div class="main-content-full-width"> |
|
93 | 93 | ${self.main_content()} |
|
94 | 94 | </div> |
|
95 | 95 | |
|
96 | 96 | </div> |
|
97 | 97 | </div> |
|
98 | 98 | |
|
99 | 99 | </%def> No newline at end of file |
@@ -1,79 +1,79 b'' | |||
|
1 | 1 | <div class="panel panel-default"> |
|
2 | 2 | <div class="panel-heading"> |
|
3 | 3 | <h3 class="panel-title">${_('Custom extra fields for this repository')}</h3> |
|
4 | 4 | </div> |
|
5 | 5 | <div class="panel-body"> |
|
6 | 6 | %if c.visual.repository_fields: |
|
7 | 7 | %if c.repo_fields: |
|
8 | 8 | <div class="emails_wrap"> |
|
9 | 9 | <table class="rctable edit_fields"> |
|
10 | 10 | <th>${_('Label')}</th> |
|
11 | 11 | <th>${_('Key')}</th> |
|
12 | 12 | <th>${_('Type')}</th> |
|
13 | 13 | <th>${_('Action')}</th> |
|
14 | 14 | |
|
15 | 15 | %for field in c.repo_fields: |
|
16 | 16 | <tr> |
|
17 | 17 | <td class="td-tags">${field.field_label}</td> |
|
18 | 18 | <td class="td-hash">${field.field_key}</td> |
|
19 | 19 | <td class="td-type">${field.field_type}</td> |
|
20 | 20 | <td class="td-action"> |
|
21 |
${h.secure_form(h. |
|
|
21 | ${h.secure_form(h.route_path('edit_repo_fields_delete', repo_name=c.repo_info.repo_name, field_id=field.repo_field_id), method='POST', request=request)} | |
|
22 | 22 | ${h.hidden('del_repo_field',field.repo_field_id)} |
|
23 | 23 | <button class="btn btn-link btn-danger" type="submit" |
|
24 | 24 | onclick="return confirm('${_('Confirm to delete this field: %s') % field.field_key}');"> |
|
25 | 25 | ${_('Delete')} |
|
26 | 26 | </button> |
|
27 | 27 | ${h.end_form()} |
|
28 | 28 | </td> |
|
29 | 29 | </tr> |
|
30 | 30 | %endfor |
|
31 | 31 | </table> |
|
32 | 32 | </div> |
|
33 | 33 | %endif |
|
34 |
${h.secure_form(h. |
|
|
34 | ${h.secure_form(h.route_path('edit_repo_fields_create', repo_name=c.repo_name), method='POST', request=request)} | |
|
35 | 35 | <div class="form"> |
|
36 | 36 | <!-- fields --> |
|
37 | 37 | <div class="fields"> |
|
38 | 38 | <div class="field"> |
|
39 | 39 | <div class="label"> |
|
40 | 40 | <label for="new_field_key">${_('New Field Key')}:</label> |
|
41 | 41 | </div> |
|
42 | 42 | <div class="input"> |
|
43 | 43 | ${h.text('new_field_key', class_='medium')} |
|
44 | 44 | </div> |
|
45 | 45 | </div> |
|
46 | 46 | <div class="field"> |
|
47 | 47 | <div class="label"> |
|
48 | 48 | <label for="new_field_label">${_('New Field Label')}:</label> |
|
49 | 49 | </div> |
|
50 | 50 | <div class="input"> |
|
51 | 51 | ${h.text('new_field_label', class_='medium', placeholder=_('Enter short label'))} |
|
52 | 52 | </div> |
|
53 | 53 | </div> |
|
54 | 54 | |
|
55 | 55 | <div class="field"> |
|
56 | 56 | <div class="label"> |
|
57 | 57 | <label for="new_field_desc">${_('New Field Description')}:</label> |
|
58 | 58 | </div> |
|
59 | 59 | <div class="input"> |
|
60 | 60 | ${h.text('new_field_desc', class_='medium', placeholder=_('Enter a full description for the field'))} |
|
61 | 61 | </div> |
|
62 | 62 | </div> |
|
63 | 63 | |
|
64 | 64 | <div class="buttons"> |
|
65 | 65 | ${h.submit('save',_('Add'),class_="btn")} |
|
66 | 66 | ${h.reset('reset',_('Reset'),class_="btn")} |
|
67 | 67 | </div> |
|
68 | 68 | </div> |
|
69 | 69 | </div> |
|
70 | 70 | ${h.end_form()} |
|
71 | 71 | %else: |
|
72 | 72 | <h2> |
|
73 | 73 | ${_('Extra fields are disabled. You can enable them from the Admin/Settings/Visual page.')} |
|
74 | 74 | </h2> |
|
75 | 75 | %endif |
|
76 | 76 | </div> |
|
77 | 77 | </div> |
|
78 | 78 | |
|
79 | 79 |
@@ -1,109 +1,109 b'' | |||
|
1 | 1 | <%namespace name="its" file="/base/issue_tracker_settings.mako"/> |
|
2 | 2 | |
|
3 | 3 | <div id="repo_issue_tracker" class="${'inherited' if c.settings_model.inherit_global_settings else ''}"> |
|
4 |
${h.secure_form(h. |
|
|
4 | ${h.secure_form(h.route_path('edit_repo_issuetracker_update', repo_name=c.repo_name), id="inherit-form", method='POST', request=request)} | |
|
5 | 5 | <div class="panel panel-default panel-body"> |
|
6 | 6 | <div class="fields"> |
|
7 | 7 | <div class="field"> |
|
8 | 8 | <div class="label label-checkbox"> |
|
9 | 9 | <label for="inherit_default_permissions">${_('Inherit from global settings')}:</label> |
|
10 | 10 | </div> |
|
11 | 11 | <div class="checkboxes"> |
|
12 | 12 | ${h.checkbox('inherit_global_issuetracker', value='inherited', checked=c.settings_model.inherit_global_settings)} |
|
13 | 13 | <span class="help-block"> |
|
14 | 14 | ${h.literal(_('Select to inherit global patterns for issue tracker.'))} |
|
15 | 15 | </span> |
|
16 | 16 | </div> |
|
17 | 17 | </div> |
|
18 | 18 | </div> |
|
19 | 19 | </div> |
|
20 | 20 | |
|
21 | 21 | <div id="inherit_overlay"> |
|
22 | 22 | <div class="panel panel-default"> |
|
23 | 23 | <div class="panel-heading"> |
|
24 | 24 | <h3 class="panel-title">${_('Inherited Issue Tracker Patterns')}</h3> |
|
25 | 25 | </div> |
|
26 | 26 | <div class="panel-body"> |
|
27 | 27 | <table class="rctable issuetracker readonly"> |
|
28 | 28 | <tr> |
|
29 | 29 | <th>${_('Description')}</th> |
|
30 | 30 | <th>${_('Pattern')}</th> |
|
31 | 31 | <th>${_('Url')}</th> |
|
32 | 32 | <th>${_('Prefix')}</th> |
|
33 | 33 | <th ></th> |
|
34 | 34 | </tr> |
|
35 | 35 | %for uid, entry in c.global_patterns.items(): |
|
36 | 36 | <tr id="${uid}"> |
|
37 | 37 | <td class="td-description issuetracker_desc"> |
|
38 | 38 | <span class="entry"> |
|
39 | 39 | ${entry.desc} |
|
40 | 40 | </span> |
|
41 | 41 | </td> |
|
42 | 42 | <td class="td-regex issuetracker_pat"> |
|
43 | 43 | <span class="entry"> |
|
44 | 44 | ${entry.pat} |
|
45 | 45 | </span> |
|
46 | 46 | </td> |
|
47 | 47 | <td class="td-url issuetracker_url"> |
|
48 | 48 | <span class="entry"> |
|
49 | 49 | ${entry.url} |
|
50 | 50 | </span> |
|
51 | 51 | </td> |
|
52 | 52 | <td class="td-prefix issuetracker_pref"> |
|
53 | 53 | <span class="entry"> |
|
54 | 54 | ${entry.pref} |
|
55 | 55 | </span> |
|
56 | 56 | </td> |
|
57 | 57 | <td class="td-action"> |
|
58 | 58 | </td> |
|
59 | 59 | </tr> |
|
60 | 60 | %endfor |
|
61 | 61 | |
|
62 | 62 | </table> |
|
63 | 63 | </div> |
|
64 | 64 | </div> |
|
65 | 65 | </div> |
|
66 | 66 | |
|
67 | 67 | <div id="custom_overlay"> |
|
68 | 68 | <div class="panel panel-default"> |
|
69 | 69 | <div class="panel-heading"> |
|
70 | 70 | <h3 class="panel-title">${_('Issue Tracker / Wiki Patterns')}</h3> |
|
71 | 71 | </div> |
|
72 | 72 | <div class="panel-body"> |
|
73 | 73 | ${its.issue_tracker_settings_table( |
|
74 | 74 | patterns=c.repo_patterns.items(), |
|
75 |
form_url=h. |
|
|
76 |
delete_url=h. |
|
|
75 | form_url=h.route_path('edit_repo_issuetracker', repo_name=c.repo_info.repo_name), | |
|
76 | delete_url=h.route_path('edit_repo_issuetracker_delete', repo_name=c.repo_info.repo_name) | |
|
77 | 77 | )} |
|
78 | 78 | <div class="buttons"> |
|
79 | 79 | <button type="submit" class="btn btn-primary save-inheritance" id="save">${_('Save')}</button> |
|
80 | 80 | <button type="reset" class="btn reset-inheritance">${_('Reset')}</button> |
|
81 | 81 | </div> |
|
82 | 82 | </div> |
|
83 | 83 | </div> |
|
84 | 84 | </div> |
|
85 | 85 | |
|
86 | 86 | |
|
87 | 87 | ${h.end_form()} |
|
88 | 88 | |
|
89 | 89 | <div class="panel panel-default"> |
|
90 | 90 | <div class="panel-heading"> |
|
91 | 91 | <h3 class="panel-title">${_('Test Patterns')}</h3> |
|
92 | 92 | </div> |
|
93 | 93 | <div class="panel-body"> |
|
94 | 94 | ${its.issue_tracker_new_row()} |
|
95 |
${its.issue_tracker_settings_test(test_url=h. |
|
|
95 | ${its.issue_tracker_settings_test(test_url=h.route_path('edit_repo_issuetracker_test', repo_name=c.repo_info.repo_name))} | |
|
96 | 96 | </div> |
|
97 | 97 | </div> |
|
98 | 98 | |
|
99 | 99 | </div> |
|
100 | 100 | |
|
101 | 101 | <script> |
|
102 | 102 | $('#inherit_global_issuetracker').on('change', function(e){ |
|
103 | 103 | $('#repo_issue_tracker').toggleClass('inherited',this.checked); |
|
104 | 104 | }); |
|
105 | 105 | |
|
106 | 106 | $('.reset-inheritance').on('click', function(e){ |
|
107 | 107 | $('#inherit_global_issuetracker').prop('checked', false).change(); |
|
108 | 108 | }); |
|
109 | 109 | </script> |
@@ -1,40 +1,40 b'' | |||
|
1 | 1 | <div class="panel panel-default"> |
|
2 | 2 | <div class="panel-heading"> |
|
3 | 3 | <h3 class="panel-title">${_('Remote url')}</h3> |
|
4 | 4 | </div> |
|
5 | 5 | <div class="panel-body"> |
|
6 | 6 | |
|
7 | 7 | <h4>${_('Manually pull changes from external repository.')}</h4> |
|
8 | 8 | |
|
9 | 9 | %if c.repo_info.clone_uri: |
|
10 | 10 | |
|
11 | 11 | ${_('Remote mirror url')}: |
|
12 | 12 | <a href="${c.repo_info.clone_uri}">${c.repo_info.clone_uri_hidden}</a> |
|
13 | 13 | |
|
14 | 14 | <p> |
|
15 | 15 | ${_('Pull can be automated by such api call. Can be called periodically in crontab etc.')} |
|
16 | 16 | <br/> |
|
17 | 17 | <code> |
|
18 | 18 | ${h.api_call_example(method='pull', args={"repoid": c.repo_info.repo_name})} |
|
19 | 19 | </code> |
|
20 | 20 | </p> |
|
21 | 21 | |
|
22 |
${h.secure_form(h. |
|
|
22 | ${h.secure_form(h.route_path('edit_repo_remote_pull', repo_name=c.repo_name), method='POST', request=request)} | |
|
23 | 23 | <div class="form"> |
|
24 | 24 | <div class="fields"> |
|
25 | 25 | ${h.submit('remote_pull_%s' % c.repo_info.repo_name,_('Pull changes from remote location'),class_="btn btn-small",onclick="return confirm('"+_('Confirm to pull changes from remote side')+"');")} |
|
26 | 26 | </div> |
|
27 | 27 | </div> |
|
28 | 28 | ${h.end_form()} |
|
29 | 29 | %else: |
|
30 | 30 | |
|
31 | 31 | ${_('This repository does not have any remote mirror url set.')} |
|
32 | 32 | <a href="${h.route_path('edit_repo', repo_name=c.repo_info.repo_name)}">${_('Set remote url.')}</a> |
|
33 | 33 | <br/> |
|
34 | 34 | <br/> |
|
35 | 35 | <button class="btn disabled" type="submit" disabled="disabled"> |
|
36 | 36 | ${_('Pull changes from remote location')} |
|
37 | 37 | </button> |
|
38 | 38 | %endif |
|
39 | 39 | </div> |
|
40 | 40 | </div> |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
General Comments 0
You need to be logged in to leave comments.
Login now