Show More
The requested changes are too big and content was truncated. Show full diff
@@ -0,0 +1,180 b'' | |||||
|
1 | # -*- coding: utf-8 -*- | |||
|
2 | ||||
|
3 | # Copyright (C) 2016-2017 RhodeCode GmbH | |||
|
4 | # | |||
|
5 | # This program is free software: you can redistribute it and/or modify | |||
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |||
|
7 | # (only), as published by the Free Software Foundation. | |||
|
8 | # | |||
|
9 | # This program is distributed in the hope that it will be useful, | |||
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
|
12 | # GNU General Public License for more details. | |||
|
13 | # | |||
|
14 | # You should have received a copy of the GNU Affero General Public License | |||
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |||
|
16 | # | |||
|
17 | # This program is dual-licensed. If you wish to learn more about the | |||
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |||
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |||
|
20 | ||||
|
21 | import logging | |||
|
22 | import formencode | |||
|
23 | ||||
|
24 | from pyramid.httpexceptions import HTTPFound, HTTPForbidden | |||
|
25 | from pyramid.view import view_config | |||
|
26 | from pyramid.renderers import render | |||
|
27 | from pyramid.response import Response | |||
|
28 | ||||
|
29 | from rhodecode.apps._base import BaseAppView, DataGridAppView | |||
|
30 | ||||
|
31 | from rhodecode.lib.ext_json import json | |||
|
32 | from rhodecode.lib.auth import ( | |||
|
33 | LoginRequired, CSRFRequired, NotAnonymous, | |||
|
34 | HasPermissionAny, HasRepoGroupPermissionAny) | |||
|
35 | from rhodecode.lib import helpers as h | |||
|
36 | from rhodecode.lib.utils import repo_name_slug | |||
|
37 | from rhodecode.lib.utils2 import safe_int, safe_unicode | |||
|
38 | from rhodecode.model.forms import RepoForm | |||
|
39 | from rhodecode.model.repo import RepoModel | |||
|
40 | from rhodecode.model.scm import RepoList, RepoGroupList, ScmModel | |||
|
41 | from rhodecode.model.settings import SettingsModel | |||
|
42 | from rhodecode.model.db import Repository, RepoGroup | |||
|
43 | ||||
|
# Module-level logger (standard RhodeCode convention; not referenced in the
# code visible here — presumably used by decorators/mixins or future code).
log = logging.getLogger(__name__)
|
45 | ||||
|
46 | ||||
|
class AdminReposView(BaseAppView, DataGridAppView):
    """Admin views for listing repositories and creating new ones."""

    def load_default_context(self):
        """Return the per-request template context with globals registered."""
        c = self._get_local_tmpl_context()
        self._register_global_c(c)
        return c

    def _load_form_data(self, c):
        """Populate ``c`` with the choices needed by the repo-create form.

        Only repo groups the current user can write to or administer are
        offered as parent-group choices.
        """
        acl_groups = RepoGroupList(RepoGroup.query().all(),
                                   perm_set=['group.write', 'group.admin'])
        c.repo_groups = RepoGroup.groups_choices(groups=acl_groups)
        # Materialize as a list (not a lazy ``map``) so the choices can be
        # iterated more than once (form validation AND rendering). Under
        # Python 3 ``map`` returns a one-shot iterator; a list is identical
        # in behavior on Python 2 and safe on both.
        c.repo_groups_choices = [safe_unicode(k[0]) for k in c.repo_groups]
        c.landing_revs_choices, c.landing_revs = \
            ScmModel().get_repo_landing_revs()
        c.personal_repo_group = self._rhodecode_user.personal_repo_group

    @LoginRequired()
    @NotAnonymous()
    @view_config(
        route_name='repos', request_method='GET',
        renderer='rhodecode:templates/admin/repos/repos.mako')
    def repository_list(self):
        """Render the admin repositories data grid."""
        c = self.load_default_context()

        repo_list = Repository.get_all_repos()
        c.repo_list = RepoList(repo_list, perm_set=['repository.admin'])
        repos_data = RepoModel().get_repos_as_dict(
            repo_list=c.repo_list, admin=True, super_user_actions=True)
        # json used to render the grid
        c.data = json.dumps(repos_data)

        return self._get_template_context(c)

    @LoginRequired()
    @NotAnonymous()
    # perms check inside
    @view_config(
        route_name='repo_new', request_method='GET',
        renderer='rhodecode:templates/admin/repos/repo_add.mako')
    def repository_new(self):
        """Render the "create new repository" form.

        Raises ``HTTPForbidden`` unless the user is a super admin, has the
        global repo-create permission, or has sufficient permission on the
        requested parent group.
        """
        c = self.load_default_context()

        new_repo = self.request.GET.get('repo', '')
        parent_group = safe_int(self.request.GET.get('parent_group'))
        _gr = RepoGroup.get(parent_group)

        if not HasPermissionAny('hg.admin', 'hg.create.repository')():
            # you're not super admin nor have global create permissions,
            # but maybe you have at least write permission to a parent group ?

            gr_name = _gr.group_name if _gr else None
            # create repositories with write permission on group is set to true
            create_on_write = HasPermissionAny('hg.create.write_on_repogroup.true')()
            group_admin = HasRepoGroupPermissionAny('group.admin')(group_name=gr_name)
            group_write = HasRepoGroupPermissionAny('group.write')(group_name=gr_name)
            if not (group_admin or (group_write and create_on_write)):
                raise HTTPForbidden()

        self._load_form_data(c)
        c.new_repo = repo_name_slug(new_repo)

        # apply the defaults from defaults page
        defaults = SettingsModel().get_default_repo_settings(strip_prefix=True)
        # set checkbox to autochecked
        defaults['repo_copy_permissions'] = True

        # Pre-select the user's personal group for non-admins, unless an
        # explicit (and visible-to-the-user) parent group was requested.
        parent_group_choice = '-1'
        if not self._rhodecode_user.is_admin and self._rhodecode_user.personal_repo_group:
            parent_group_choice = self._rhodecode_user.personal_repo_group

        if parent_group and _gr:
            if parent_group in [x[0] for x in c.repo_groups]:
                parent_group_choice = safe_unicode(parent_group)

        defaults.update({'repo_group': parent_group_choice})

        data = render('rhodecode:templates/admin/repos/repo_add.mako',
                      self._get_template_context(c), self.request)
        html = formencode.htmlfill.render(
            data,
            defaults=defaults,
            encoding="UTF-8",
            force_defaults=False
        )
        return Response(html)

    @LoginRequired()
    @NotAnonymous()
    @CSRFRequired()
    # perms check inside
    @view_config(
        route_name='repo_create', request_method='POST',
        renderer='rhodecode:templates/admin/repos/repos.mako')
    def repository_create(self):
        """Validate the posted form and create the repository.

        On success redirects to the "creating" progress page; on validation
        errors re-renders the form with error markup; on any other failure
        flashes the error and redirects home.
        """
        c = self.load_default_context()

        form_result = {}
        task_id = None
        self._load_form_data(c)

        try:
            # CanWriteToGroup validators checks permissions of this POST
            form_result = RepoForm(repo_groups=c.repo_groups_choices,
                                   landing_revs=c.landing_revs_choices)()\
                .to_python(dict(self.request.POST))

            # create is done sometimes async on celery, db transaction
            # management is handled there.
            task = RepoModel().create(form_result, self._rhodecode_user.user_id)
            from celery.result import BaseAsyncResult
            if isinstance(task, BaseAsyncResult):
                task_id = task.task_id
        except formencode.Invalid as errors:
            data = render('rhodecode:templates/admin/repos/repo_add.mako',
                          self._get_template_context(c), self.request)
            html = formencode.htmlfill.render(
                data,
                defaults=errors.value,
                errors=errors.error_dict or {},
                prefix_error=False,
                encoding="UTF-8",
                force_defaults=False
            )
            return Response(html)

        except Exception as e:
            msg = self._log_creation_exception(e, form_result.get('repo_name'))
            h.flash(msg, category='error')
            raise HTTPFound(h.route_path('home'))

        raise HTTPFound(
            h.route_path('repo_creating',
                         repo_name=form_result['repo_name_full'],
                         _query=dict(task_id=task_id)))
1 | NO CONTENT: new file 100644 |
|
NO CONTENT: new file 100644 |
1 | NO CONTENT: new file 100644 |
|
NO CONTENT: new file 100644 |
This diff has been collapsed as it changes many lines, (685 lines changed) Show them Hide them | |||||
@@ -0,0 +1,685 b'' | |||||
|
1 | # -*- coding: utf-8 -*- | |||
|
2 | ||||
|
3 | # Copyright (C) 2010-2017 RhodeCode GmbH | |||
|
4 | # | |||
|
5 | # This program is free software: you can redistribute it and/or modify | |||
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |||
|
7 | # (only), as published by the Free Software Foundation. | |||
|
8 | # | |||
|
9 | # This program is distributed in the hope that it will be useful, | |||
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
|
12 | # GNU General Public License for more details. | |||
|
13 | # | |||
|
14 | # You should have received a copy of the GNU Affero General Public License | |||
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |||
|
16 | # | |||
|
17 | # This program is dual-licensed. If you wish to learn more about the | |||
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |||
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |||
|
20 | ||||
|
21 | import mock | |||
|
22 | import pytest | |||
|
23 | ||||
|
24 | from rhodecode.lib import auth | |||
|
25 | from rhodecode.lib.utils2 import str2bool | |||
|
26 | from rhodecode.model.db import ( | |||
|
27 | Repository, UserRepoToPerm, User) | |||
|
28 | from rhodecode.model.meta import Session | |||
|
29 | from rhodecode.model.settings import SettingsModel, VcsSettingsModel | |||
|
30 | from rhodecode.model.user import UserModel | |||
|
31 | from rhodecode.tests import ( | |||
|
32 | login_user_session, logout_user_session, | |||
|
33 | TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS) | |||
|
34 | from rhodecode.tests.fixture import Fixture | |||
|
35 | from rhodecode.tests.utils import AssertResponse | |||
|
36 | ||||
|
# Shared fixture factory used by the tests below to build throwaway objects.
fixture = Fixture()
|
38 | ||||
|
39 | ||||
|
def route_path(name, params=None, **kwargs):
    """Build the url path for route *name*.

    ``**kwargs`` fill the path template; *params*, when given, is
    url-encoded and appended as a query string.
    """
    import urllib

    url_templates = {
        'repo_summary': '/{repo_name}',
        'repo_creating_check': '/{repo_name}/repo_creating_check',
        'edit_repo': '/{repo_name}/settings',
        'edit_repo_vcs': '/{repo_name}/settings/vcs',
        'edit_repo_vcs_update': '/{repo_name}/settings/vcs/update',
        'edit_repo_vcs_svn_pattern_delete': '/{repo_name}/settings/vcs/svn_pattern/delete'
    }
    url = url_templates[name].format(**kwargs)

    if params:
        url = '{}?{}'.format(url, urllib.urlencode(params))
    return url
|
55 | ||||
|
56 | ||||
|
@pytest.mark.usefixtures("app")
class TestVcsSettings(object):
    # Baseline POST payload for the vcs-settings form: every toggle off and
    # both svn pattern inputs empty. Individual tests copy and extend it
    # (adding at least a csrf_token) before posting.
    FORM_DATA = {
        'inherit_global_settings': False,
        'hooks_changegroup_repo_size': False,
        'hooks_changegroup_push_logger': False,
        'hooks_outgoing_pull_logger': False,
        'extensions_largefiles': False,
        'extensions_evolve': False,
        'phases_publish': 'False',
        'rhodecode_pr_merge_enabled': False,
        'rhodecode_use_outdated_comments': False,
        'new_svn_branch': '',
        'new_svn_tag': ''
    }
|
72 | ||||
|
73 | @pytest.mark.skip_backends('svn') | |||
|
74 | def test_global_settings_initial_values(self, autologin_user, backend): | |||
|
75 | repo_name = backend.repo_name | |||
|
76 | response = self.app.get(route_path('edit_repo_vcs', repo_name=repo_name)) | |||
|
77 | ||||
|
78 | expected_settings = ( | |||
|
79 | 'rhodecode_use_outdated_comments', 'rhodecode_pr_merge_enabled', | |||
|
80 | 'hooks_changegroup_repo_size', 'hooks_changegroup_push_logger', | |||
|
81 | 'hooks_outgoing_pull_logger' | |||
|
82 | ) | |||
|
83 | for setting in expected_settings: | |||
|
84 | self.assert_repo_value_equals_global_value(response, setting) | |||
|
85 | ||||
|
86 | def test_show_settings_requires_repo_admin_permission( | |||
|
87 | self, backend, user_util, settings_util): | |||
|
88 | repo = backend.create_repo() | |||
|
89 | repo_name = repo.repo_name | |||
|
90 | user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN) | |||
|
91 | user_util.grant_user_permission_to_repo(repo, user, 'repository.admin') | |||
|
92 | login_user_session( | |||
|
93 | self.app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS) | |||
|
94 | self.app.get(route_path('edit_repo_vcs', repo_name=repo_name), status=200) | |||
|
95 | ||||
|
96 | def test_inherit_global_settings_flag_is_true_by_default( | |||
|
97 | self, autologin_user, backend): | |||
|
98 | repo_name = backend.repo_name | |||
|
99 | response = self.app.get(route_path('edit_repo_vcs', repo_name=repo_name)) | |||
|
100 | ||||
|
101 | assert_response = AssertResponse(response) | |||
|
102 | element = assert_response.get_element('#inherit_global_settings') | |||
|
103 | assert element.checked | |||
|
104 | ||||
|
105 | @pytest.mark.parametrize('checked_value', [True, False]) | |||
|
106 | def test_inherit_global_settings_value( | |||
|
107 | self, autologin_user, backend, checked_value, settings_util): | |||
|
108 | repo = backend.create_repo() | |||
|
109 | repo_name = repo.repo_name | |||
|
110 | settings_util.create_repo_rhodecode_setting( | |||
|
111 | repo, 'inherit_vcs_settings', checked_value, 'bool') | |||
|
112 | response = self.app.get(route_path('edit_repo_vcs', repo_name=repo_name)) | |||
|
113 | ||||
|
114 | assert_response = AssertResponse(response) | |||
|
115 | element = assert_response.get_element('#inherit_global_settings') | |||
|
116 | assert element.checked == checked_value | |||
|
117 | ||||
|
118 | @pytest.mark.skip_backends('svn') | |||
|
119 | def test_hooks_settings_are_created( | |||
|
120 | self, autologin_user, backend, csrf_token): | |||
|
121 | repo_name = backend.repo_name | |||
|
122 | data = self.FORM_DATA.copy() | |||
|
123 | data['csrf_token'] = csrf_token | |||
|
124 | self.app.post( | |||
|
125 | route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302) | |||
|
126 | settings = SettingsModel(repo=repo_name) | |||
|
127 | try: | |||
|
128 | for section, key in VcsSettingsModel.HOOKS_SETTINGS: | |||
|
129 | ui = settings.get_ui_by_section_and_key(section, key) | |||
|
130 | assert ui.ui_active is False | |||
|
131 | finally: | |||
|
132 | self._cleanup_repo_settings(settings) | |||
|
133 | ||||
|
134 | def test_hooks_settings_are_not_created_for_svn( | |||
|
135 | self, autologin_user, backend_svn, csrf_token): | |||
|
136 | repo_name = backend_svn.repo_name | |||
|
137 | data = self.FORM_DATA.copy() | |||
|
138 | data['csrf_token'] = csrf_token | |||
|
139 | self.app.post( | |||
|
140 | route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302) | |||
|
141 | settings = SettingsModel(repo=repo_name) | |||
|
142 | try: | |||
|
143 | for section, key in VcsSettingsModel.HOOKS_SETTINGS: | |||
|
144 | ui = settings.get_ui_by_section_and_key(section, key) | |||
|
145 | assert ui is None | |||
|
146 | finally: | |||
|
147 | self._cleanup_repo_settings(settings) | |||
|
148 | ||||
|
149 | @pytest.mark.skip_backends('svn') | |||
|
150 | def test_hooks_settings_are_updated( | |||
|
151 | self, autologin_user, backend, csrf_token): | |||
|
152 | repo_name = backend.repo_name | |||
|
153 | settings = SettingsModel(repo=repo_name) | |||
|
154 | for section, key in VcsSettingsModel.HOOKS_SETTINGS: | |||
|
155 | settings.create_ui_section_value(section, '', key=key, active=True) | |||
|
156 | ||||
|
157 | data = self.FORM_DATA.copy() | |||
|
158 | data['csrf_token'] = csrf_token | |||
|
159 | self.app.post( | |||
|
160 | route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302) | |||
|
161 | try: | |||
|
162 | for section, key in VcsSettingsModel.HOOKS_SETTINGS: | |||
|
163 | ui = settings.get_ui_by_section_and_key(section, key) | |||
|
164 | assert ui.ui_active is False | |||
|
165 | finally: | |||
|
166 | self._cleanup_repo_settings(settings) | |||
|
167 | ||||
|
168 | def test_hooks_settings_are_not_updated_for_svn( | |||
|
169 | self, autologin_user, backend_svn, csrf_token): | |||
|
170 | repo_name = backend_svn.repo_name | |||
|
171 | settings = SettingsModel(repo=repo_name) | |||
|
172 | for section, key in VcsSettingsModel.HOOKS_SETTINGS: | |||
|
173 | settings.create_ui_section_value(section, '', key=key, active=True) | |||
|
174 | ||||
|
175 | data = self.FORM_DATA.copy() | |||
|
176 | data['csrf_token'] = csrf_token | |||
|
177 | self.app.post( | |||
|
178 | route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302) | |||
|
179 | try: | |||
|
180 | for section, key in VcsSettingsModel.HOOKS_SETTINGS: | |||
|
181 | ui = settings.get_ui_by_section_and_key(section, key) | |||
|
182 | assert ui.ui_active is True | |||
|
183 | finally: | |||
|
184 | self._cleanup_repo_settings(settings) | |||
|
185 | ||||
|
186 | @pytest.mark.skip_backends('svn') | |||
|
187 | def test_pr_settings_are_created( | |||
|
188 | self, autologin_user, backend, csrf_token): | |||
|
189 | repo_name = backend.repo_name | |||
|
190 | data = self.FORM_DATA.copy() | |||
|
191 | data['csrf_token'] = csrf_token | |||
|
192 | self.app.post( | |||
|
193 | route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302) | |||
|
194 | settings = SettingsModel(repo=repo_name) | |||
|
195 | try: | |||
|
196 | for name in VcsSettingsModel.GENERAL_SETTINGS: | |||
|
197 | setting = settings.get_setting_by_name(name) | |||
|
198 | assert setting.app_settings_value is False | |||
|
199 | finally: | |||
|
200 | self._cleanup_repo_settings(settings) | |||
|
201 | ||||
|
202 | def test_pr_settings_are_not_created_for_svn( | |||
|
203 | self, autologin_user, backend_svn, csrf_token): | |||
|
204 | repo_name = backend_svn.repo_name | |||
|
205 | data = self.FORM_DATA.copy() | |||
|
206 | data['csrf_token'] = csrf_token | |||
|
207 | self.app.post( | |||
|
208 | route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302) | |||
|
209 | settings = SettingsModel(repo=repo_name) | |||
|
210 | try: | |||
|
211 | for name in VcsSettingsModel.GENERAL_SETTINGS: | |||
|
212 | setting = settings.get_setting_by_name(name) | |||
|
213 | assert setting is None | |||
|
214 | finally: | |||
|
215 | self._cleanup_repo_settings(settings) | |||
|
216 | ||||
|
217 | def test_pr_settings_creation_requires_repo_admin_permission( | |||
|
218 | self, backend, user_util, settings_util, csrf_token): | |||
|
219 | repo = backend.create_repo() | |||
|
220 | repo_name = repo.repo_name | |||
|
221 | ||||
|
222 | logout_user_session(self.app, csrf_token) | |||
|
223 | session = login_user_session( | |||
|
224 | self.app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS) | |||
|
225 | new_csrf_token = auth.get_csrf_token(session) | |||
|
226 | ||||
|
227 | user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN) | |||
|
228 | repo = Repository.get_by_repo_name(repo_name) | |||
|
229 | user_util.grant_user_permission_to_repo(repo, user, 'repository.admin') | |||
|
230 | data = self.FORM_DATA.copy() | |||
|
231 | data['csrf_token'] = new_csrf_token | |||
|
232 | settings = SettingsModel(repo=repo_name) | |||
|
233 | ||||
|
234 | try: | |||
|
235 | self.app.post( | |||
|
236 | route_path('edit_repo_vcs_update', repo_name=repo_name), data, | |||
|
237 | status=302) | |||
|
238 | finally: | |||
|
239 | self._cleanup_repo_settings(settings) | |||
|
240 | ||||
|
241 | @pytest.mark.skip_backends('svn') | |||
|
242 | def test_pr_settings_are_updated( | |||
|
243 | self, autologin_user, backend, csrf_token): | |||
|
244 | repo_name = backend.repo_name | |||
|
245 | settings = SettingsModel(repo=repo_name) | |||
|
246 | for name in VcsSettingsModel.GENERAL_SETTINGS: | |||
|
247 | settings.create_or_update_setting(name, True, 'bool') | |||
|
248 | ||||
|
249 | data = self.FORM_DATA.copy() | |||
|
250 | data['csrf_token'] = csrf_token | |||
|
251 | self.app.post( | |||
|
252 | route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302) | |||
|
253 | try: | |||
|
254 | for name in VcsSettingsModel.GENERAL_SETTINGS: | |||
|
255 | setting = settings.get_setting_by_name(name) | |||
|
256 | assert setting.app_settings_value is False | |||
|
257 | finally: | |||
|
258 | self._cleanup_repo_settings(settings) | |||
|
259 | ||||
|
260 | def test_pr_settings_are_not_updated_for_svn( | |||
|
261 | self, autologin_user, backend_svn, csrf_token): | |||
|
262 | repo_name = backend_svn.repo_name | |||
|
263 | settings = SettingsModel(repo=repo_name) | |||
|
264 | for name in VcsSettingsModel.GENERAL_SETTINGS: | |||
|
265 | settings.create_or_update_setting(name, True, 'bool') | |||
|
266 | ||||
|
267 | data = self.FORM_DATA.copy() | |||
|
268 | data['csrf_token'] = csrf_token | |||
|
269 | self.app.post( | |||
|
270 | route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302) | |||
|
271 | try: | |||
|
272 | for name in VcsSettingsModel.GENERAL_SETTINGS: | |||
|
273 | setting = settings.get_setting_by_name(name) | |||
|
274 | assert setting.app_settings_value is True | |||
|
275 | finally: | |||
|
276 | self._cleanup_repo_settings(settings) | |||
|
277 | ||||
|
278 | def test_svn_settings_are_created( | |||
|
279 | self, autologin_user, backend_svn, csrf_token, settings_util): | |||
|
280 | repo_name = backend_svn.repo_name | |||
|
281 | data = self.FORM_DATA.copy() | |||
|
282 | data['new_svn_tag'] = 'svn-tag' | |||
|
283 | data['new_svn_branch'] = 'svn-branch' | |||
|
284 | data['csrf_token'] = csrf_token | |||
|
285 | ||||
|
286 | # Create few global settings to make sure that uniqueness validators | |||
|
287 | # are not triggered | |||
|
288 | settings_util.create_rhodecode_ui( | |||
|
289 | VcsSettingsModel.SVN_BRANCH_SECTION, 'svn-branch') | |||
|
290 | settings_util.create_rhodecode_ui( | |||
|
291 | VcsSettingsModel.SVN_TAG_SECTION, 'svn-tag') | |||
|
292 | ||||
|
293 | self.app.post( | |||
|
294 | route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302) | |||
|
295 | settings = SettingsModel(repo=repo_name) | |||
|
296 | try: | |||
|
297 | svn_branches = settings.get_ui_by_section( | |||
|
298 | VcsSettingsModel.SVN_BRANCH_SECTION) | |||
|
299 | svn_branch_names = [b.ui_value for b in svn_branches] | |||
|
300 | svn_tags = settings.get_ui_by_section( | |||
|
301 | VcsSettingsModel.SVN_TAG_SECTION) | |||
|
302 | svn_tag_names = [b.ui_value for b in svn_tags] | |||
|
303 | assert 'svn-branch' in svn_branch_names | |||
|
304 | assert 'svn-tag' in svn_tag_names | |||
|
305 | finally: | |||
|
306 | self._cleanup_repo_settings(settings) | |||
|
307 | ||||
|
308 | def test_svn_settings_are_unique( | |||
|
309 | self, autologin_user, backend_svn, csrf_token, settings_util): | |||
|
310 | repo = backend_svn.repo | |||
|
311 | repo_name = repo.repo_name | |||
|
312 | data = self.FORM_DATA.copy() | |||
|
313 | data['new_svn_tag'] = 'test_tag' | |||
|
314 | data['new_svn_branch'] = 'test_branch' | |||
|
315 | data['csrf_token'] = csrf_token | |||
|
316 | settings_util.create_repo_rhodecode_ui( | |||
|
317 | repo, VcsSettingsModel.SVN_BRANCH_SECTION, 'test_branch') | |||
|
318 | settings_util.create_repo_rhodecode_ui( | |||
|
319 | repo, VcsSettingsModel.SVN_TAG_SECTION, 'test_tag') | |||
|
320 | ||||
|
321 | response = self.app.post( | |||
|
322 | route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=200) | |||
|
323 | response.mustcontain('Pattern already exists') | |||
|
324 | ||||
|
325 | def test_svn_settings_with_empty_values_are_not_created( | |||
|
326 | self, autologin_user, backend_svn, csrf_token): | |||
|
327 | repo_name = backend_svn.repo_name | |||
|
328 | data = self.FORM_DATA.copy() | |||
|
329 | data['csrf_token'] = csrf_token | |||
|
330 | self.app.post( | |||
|
331 | route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302) | |||
|
332 | settings = SettingsModel(repo=repo_name) | |||
|
333 | try: | |||
|
334 | svn_branches = settings.get_ui_by_section( | |||
|
335 | VcsSettingsModel.SVN_BRANCH_SECTION) | |||
|
336 | svn_tags = settings.get_ui_by_section( | |||
|
337 | VcsSettingsModel.SVN_TAG_SECTION) | |||
|
338 | assert len(svn_branches) == 0 | |||
|
339 | assert len(svn_tags) == 0 | |||
|
340 | finally: | |||
|
341 | self._cleanup_repo_settings(settings) | |||
|
342 | ||||
|
343 | def test_svn_settings_are_shown_for_svn_repository( | |||
|
344 | self, autologin_user, backend_svn, csrf_token): | |||
|
345 | repo_name = backend_svn.repo_name | |||
|
346 | response = self.app.get( | |||
|
347 | route_path('edit_repo_vcs', repo_name=repo_name), status=200) | |||
|
348 | response.mustcontain('Subversion Settings') | |||
|
349 | ||||
|
350 | @pytest.mark.skip_backends('svn') | |||
|
351 | def test_svn_settings_are_not_created_for_not_svn_repository( | |||
|
352 | self, autologin_user, backend, csrf_token): | |||
|
353 | repo_name = backend.repo_name | |||
|
354 | data = self.FORM_DATA.copy() | |||
|
355 | data['csrf_token'] = csrf_token | |||
|
356 | self.app.post( | |||
|
357 | route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302) | |||
|
358 | settings = SettingsModel(repo=repo_name) | |||
|
359 | try: | |||
|
360 | svn_branches = settings.get_ui_by_section( | |||
|
361 | VcsSettingsModel.SVN_BRANCH_SECTION) | |||
|
362 | svn_tags = settings.get_ui_by_section( | |||
|
363 | VcsSettingsModel.SVN_TAG_SECTION) | |||
|
364 | assert len(svn_branches) == 0 | |||
|
365 | assert len(svn_tags) == 0 | |||
|
366 | finally: | |||
|
367 | self._cleanup_repo_settings(settings) | |||
|
368 | ||||
|
369 | @pytest.mark.skip_backends('svn') | |||
|
370 | def test_svn_settings_are_shown_only_for_svn_repository( | |||
|
371 | self, autologin_user, backend, csrf_token): | |||
|
372 | repo_name = backend.repo_name | |||
|
373 | response = self.app.get( | |||
|
374 | route_path('edit_repo_vcs', repo_name=repo_name), status=200) | |||
|
375 | response.mustcontain(no='Subversion Settings') | |||
|
376 | ||||
|
377 | def test_hg_settings_are_created( | |||
|
378 | self, autologin_user, backend_hg, csrf_token): | |||
|
379 | repo_name = backend_hg.repo_name | |||
|
380 | data = self.FORM_DATA.copy() | |||
|
381 | data['new_svn_tag'] = 'svn-tag' | |||
|
382 | data['new_svn_branch'] = 'svn-branch' | |||
|
383 | data['csrf_token'] = csrf_token | |||
|
384 | self.app.post( | |||
|
385 | route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302) | |||
|
386 | settings = SettingsModel(repo=repo_name) | |||
|
387 | try: | |||
|
388 | largefiles_ui = settings.get_ui_by_section_and_key( | |||
|
389 | 'extensions', 'largefiles') | |||
|
390 | assert largefiles_ui.ui_active is False | |||
|
391 | phases_ui = settings.get_ui_by_section_and_key( | |||
|
392 | 'phases', 'publish') | |||
|
393 | assert str2bool(phases_ui.ui_value) is False | |||
|
394 | finally: | |||
|
395 | self._cleanup_repo_settings(settings) | |||
|
396 | ||||
|
397 | def test_hg_settings_are_updated( | |||
|
398 | self, autologin_user, backend_hg, csrf_token): | |||
|
399 | repo_name = backend_hg.repo_name | |||
|
400 | settings = SettingsModel(repo=repo_name) | |||
|
401 | settings.create_ui_section_value( | |||
|
402 | 'extensions', '', key='largefiles', active=True) | |||
|
403 | settings.create_ui_section_value( | |||
|
404 | 'phases', '1', key='publish', active=True) | |||
|
405 | ||||
|
406 | data = self.FORM_DATA.copy() | |||
|
407 | data['csrf_token'] = csrf_token | |||
|
408 | self.app.post( | |||
|
409 | route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302) | |||
|
410 | try: | |||
|
411 | largefiles_ui = settings.get_ui_by_section_and_key( | |||
|
412 | 'extensions', 'largefiles') | |||
|
413 | assert largefiles_ui.ui_active is False | |||
|
414 | phases_ui = settings.get_ui_by_section_and_key( | |||
|
415 | 'phases', 'publish') | |||
|
416 | assert str2bool(phases_ui.ui_value) is False | |||
|
417 | finally: | |||
|
418 | self._cleanup_repo_settings(settings) | |||
|
419 | ||||
|
420 | def test_hg_settings_are_shown_for_hg_repository( | |||
|
421 | self, autologin_user, backend_hg, csrf_token): | |||
|
422 | repo_name = backend_hg.repo_name | |||
|
423 | response = self.app.get( | |||
|
424 | route_path('edit_repo_vcs', repo_name=repo_name), status=200) | |||
|
425 | response.mustcontain('Mercurial Settings') | |||
|
426 | ||||
|
427 | @pytest.mark.skip_backends('hg') | |||
|
428 | def test_hg_settings_are_created_only_for_hg_repository( | |||
|
429 | self, autologin_user, backend, csrf_token): | |||
|
430 | repo_name = backend.repo_name | |||
|
431 | data = self.FORM_DATA.copy() | |||
|
432 | data['csrf_token'] = csrf_token | |||
|
433 | self.app.post( | |||
|
434 | route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302) | |||
|
435 | settings = SettingsModel(repo=repo_name) | |||
|
436 | try: | |||
|
437 | largefiles_ui = settings.get_ui_by_section_and_key( | |||
|
438 | 'extensions', 'largefiles') | |||
|
439 | assert largefiles_ui is None | |||
|
440 | phases_ui = settings.get_ui_by_section_and_key( | |||
|
441 | 'phases', 'publish') | |||
|
442 | assert phases_ui is None | |||
|
443 | finally: | |||
|
444 | self._cleanup_repo_settings(settings) | |||
|
445 | ||||
|
446 | @pytest.mark.skip_backends('hg') | |||
|
447 | def test_hg_settings_are_shown_only_for_hg_repository( | |||
|
448 | self, autologin_user, backend, csrf_token): | |||
|
449 | repo_name = backend.repo_name | |||
|
450 | response = self.app.get( | |||
|
451 | route_path('edit_repo_vcs', repo_name=repo_name), status=200) | |||
|
452 | response.mustcontain(no='Mercurial Settings') | |||
|
453 | ||||
|
454 | @pytest.mark.skip_backends('hg') | |||
|
455 | def test_hg_settings_are_updated_only_for_hg_repository( | |||
|
456 | self, autologin_user, backend, csrf_token): | |||
|
457 | repo_name = backend.repo_name | |||
|
458 | settings = SettingsModel(repo=repo_name) | |||
|
459 | settings.create_ui_section_value( | |||
|
460 | 'extensions', '', key='largefiles', active=True) | |||
|
461 | settings.create_ui_section_value( | |||
|
462 | 'phases', '1', key='publish', active=True) | |||
|
463 | ||||
|
464 | data = self.FORM_DATA.copy() | |||
|
465 | data['csrf_token'] = csrf_token | |||
|
466 | self.app.post( | |||
|
467 | route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302) | |||
|
468 | try: | |||
|
469 | largefiles_ui = settings.get_ui_by_section_and_key( | |||
|
470 | 'extensions', 'largefiles') | |||
|
471 | assert largefiles_ui.ui_active is True | |||
|
472 | phases_ui = settings.get_ui_by_section_and_key( | |||
|
473 | 'phases', 'publish') | |||
|
474 | assert phases_ui.ui_value == '1' | |||
|
475 | finally: | |||
|
476 | self._cleanup_repo_settings(settings) | |||
|
477 | ||||
|
478 | def test_per_repo_svn_settings_are_displayed( | |||
|
479 | self, autologin_user, backend_svn, settings_util): | |||
|
480 | repo = backend_svn.create_repo() | |||
|
481 | repo_name = repo.repo_name | |||
|
482 | branches = [ | |||
|
483 | settings_util.create_repo_rhodecode_ui( | |||
|
484 | repo, VcsSettingsModel.SVN_BRANCH_SECTION, | |||
|
485 | 'branch_{}'.format(i)) | |||
|
486 | for i in range(10)] | |||
|
487 | tags = [ | |||
|
488 | settings_util.create_repo_rhodecode_ui( | |||
|
489 | repo, VcsSettingsModel.SVN_TAG_SECTION, 'tag_{}'.format(i)) | |||
|
490 | for i in range(10)] | |||
|
491 | ||||
|
492 | response = self.app.get( | |||
|
493 | route_path('edit_repo_vcs', repo_name=repo_name), status=200) | |||
|
494 | assert_response = AssertResponse(response) | |||
|
495 | for branch in branches: | |||
|
496 | css_selector = '[name=branch_value_{}]'.format(branch.ui_id) | |||
|
497 | element = assert_response.get_element(css_selector) | |||
|
498 | assert element.value == branch.ui_value | |||
|
499 | for tag in tags: | |||
|
500 | css_selector = '[name=tag_ui_value_new_{}]'.format(tag.ui_id) | |||
|
501 | element = assert_response.get_element(css_selector) | |||
|
502 | assert element.value == tag.ui_value | |||
|
503 | ||||
|
504 | def test_per_repo_hg_and_pr_settings_are_not_displayed_for_svn( | |||
|
505 | self, autologin_user, backend_svn, settings_util): | |||
|
506 | repo = backend_svn.create_repo() | |||
|
507 | repo_name = repo.repo_name | |||
|
508 | response = self.app.get( | |||
|
509 | route_path('edit_repo_vcs', repo_name=repo_name), status=200) | |||
|
510 | response.mustcontain(no='<label>Hooks:</label>') | |||
|
511 | response.mustcontain(no='<label>Pull Request Settings:</label>') | |||
|
512 | ||||
|
513 | def test_inherit_global_settings_value_is_saved( | |||
|
514 | self, autologin_user, backend, csrf_token): | |||
|
515 | repo_name = backend.repo_name | |||
|
516 | data = self.FORM_DATA.copy() | |||
|
517 | data['csrf_token'] = csrf_token | |||
|
518 | data['inherit_global_settings'] = True | |||
|
519 | self.app.post( | |||
|
520 | route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302) | |||
|
521 | ||||
|
522 | settings = SettingsModel(repo=repo_name) | |||
|
523 | vcs_settings = VcsSettingsModel(repo=repo_name) | |||
|
524 | try: | |||
|
525 | assert vcs_settings.inherit_global_settings is True | |||
|
526 | finally: | |||
|
527 | self._cleanup_repo_settings(settings) | |||
|
528 | ||||
|
529 | def test_repo_cache_is_invalidated_when_settings_are_updated( | |||
|
530 | self, autologin_user, backend, csrf_token): | |||
|
531 | repo_name = backend.repo_name | |||
|
532 | data = self.FORM_DATA.copy() | |||
|
533 | data['csrf_token'] = csrf_token | |||
|
534 | data['inherit_global_settings'] = True | |||
|
535 | settings = SettingsModel(repo=repo_name) | |||
|
536 | ||||
|
537 | invalidation_patcher = mock.patch( | |||
|
538 | 'rhodecode.model.scm.ScmModel.mark_for_invalidation') | |||
|
539 | with invalidation_patcher as invalidation_mock: | |||
|
540 | self.app.post( | |||
|
541 | route_path('edit_repo_vcs_update', repo_name=repo_name), data, | |||
|
542 | status=302) | |||
|
543 | try: | |||
|
544 | invalidation_mock.assert_called_once_with(repo_name, delete=True) | |||
|
545 | finally: | |||
|
546 | self._cleanup_repo_settings(settings) | |||
|
547 | ||||
|
548 | def test_other_settings_not_saved_inherit_global_settings_is_true( | |||
|
549 | self, autologin_user, backend, csrf_token): | |||
|
550 | repo_name = backend.repo_name | |||
|
551 | data = self.FORM_DATA.copy() | |||
|
552 | data['csrf_token'] = csrf_token | |||
|
553 | data['inherit_global_settings'] = True | |||
|
554 | self.app.post( | |||
|
555 | route_path('edit_repo_vcs_update', repo_name=repo_name), data, status=302) | |||
|
556 | ||||
|
557 | settings = SettingsModel(repo=repo_name) | |||
|
558 | ui_settings = ( | |||
|
559 | VcsSettingsModel.HOOKS_SETTINGS + VcsSettingsModel.HG_SETTINGS) | |||
|
560 | ||||
|
561 | vcs_settings = [] | |||
|
562 | try: | |||
|
563 | for section, key in ui_settings: | |||
|
564 | ui = settings.get_ui_by_section_and_key(section, key) | |||
|
565 | if ui: | |||
|
566 | vcs_settings.append(ui) | |||
|
567 | vcs_settings.extend(settings.get_ui_by_section( | |||
|
568 | VcsSettingsModel.SVN_BRANCH_SECTION)) | |||
|
569 | vcs_settings.extend(settings.get_ui_by_section( | |||
|
570 | VcsSettingsModel.SVN_TAG_SECTION)) | |||
|
571 | for name in VcsSettingsModel.GENERAL_SETTINGS: | |||
|
572 | setting = settings.get_setting_by_name(name) | |||
|
573 | if setting: | |||
|
574 | vcs_settings.append(setting) | |||
|
575 | assert vcs_settings == [] | |||
|
576 | finally: | |||
|
577 | self._cleanup_repo_settings(settings) | |||
|
578 | ||||
|
579 | def test_delete_svn_branch_and_tag_patterns( | |||
|
580 | self, autologin_user, backend_svn, settings_util, csrf_token, xhr_header): | |||
|
581 | repo = backend_svn.create_repo() | |||
|
582 | repo_name = repo.repo_name | |||
|
583 | branch = settings_util.create_repo_rhodecode_ui( | |||
|
584 | repo, VcsSettingsModel.SVN_BRANCH_SECTION, 'test_branch', | |||
|
585 | cleanup=False) | |||
|
586 | tag = settings_util.create_repo_rhodecode_ui( | |||
|
587 | repo, VcsSettingsModel.SVN_TAG_SECTION, 'test_tag', cleanup=False) | |||
|
588 | data = { | |||
|
589 | 'csrf_token': csrf_token | |||
|
590 | } | |||
|
591 | for id_ in (branch.ui_id, tag.ui_id): | |||
|
592 | data['delete_svn_pattern'] = id_, | |||
|
593 | self.app.post( | |||
|
594 | route_path('edit_repo_vcs_svn_pattern_delete', repo_name=repo_name), | |||
|
595 | data, extra_environ=xhr_header, status=200) | |||
|
596 | settings = VcsSettingsModel(repo=repo_name) | |||
|
597 | assert settings.get_repo_svn_branch_patterns() == [] | |||
|
598 | ||||
|
599 | def test_delete_svn_branch_requires_repo_admin_permission( | |||
|
600 | self, backend_svn, user_util, settings_util, csrf_token, xhr_header): | |||
|
601 | repo = backend_svn.create_repo() | |||
|
602 | repo_name = repo.repo_name | |||
|
603 | ||||
|
604 | logout_user_session(self.app, csrf_token) | |||
|
605 | session = login_user_session( | |||
|
606 | self.app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS) | |||
|
607 | csrf_token = auth.get_csrf_token(session) | |||
|
608 | ||||
|
609 | repo = Repository.get_by_repo_name(repo_name) | |||
|
610 | user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN) | |||
|
611 | user_util.grant_user_permission_to_repo(repo, user, 'repository.admin') | |||
|
612 | branch = settings_util.create_repo_rhodecode_ui( | |||
|
613 | repo, VcsSettingsModel.SVN_BRANCH_SECTION, 'test_branch', | |||
|
614 | cleanup=False) | |||
|
615 | data = { | |||
|
616 | 'csrf_token': csrf_token, | |||
|
617 | 'delete_svn_pattern': branch.ui_id | |||
|
618 | } | |||
|
619 | self.app.post( | |||
|
620 | route_path('edit_repo_vcs_svn_pattern_delete', repo_name=repo_name), | |||
|
621 | data, extra_environ=xhr_header, status=200) | |||
|
622 | ||||
|
623 | def test_delete_svn_branch_raises_400_when_not_found( | |||
|
624 | self, autologin_user, backend_svn, settings_util, csrf_token, xhr_header): | |||
|
625 | repo_name = backend_svn.repo_name | |||
|
626 | data = { | |||
|
627 | 'delete_svn_pattern': 123, | |||
|
628 | 'csrf_token': csrf_token | |||
|
629 | } | |||
|
630 | self.app.post( | |||
|
631 | route_path('edit_repo_vcs_svn_pattern_delete', repo_name=repo_name), | |||
|
632 | data, extra_environ=xhr_header, status=400) | |||
|
633 | ||||
|
634 | def test_delete_svn_branch_raises_400_when_no_id_specified( | |||
|
635 | self, autologin_user, backend_svn, settings_util, csrf_token, xhr_header): | |||
|
636 | repo_name = backend_svn.repo_name | |||
|
637 | data = { | |||
|
638 | 'csrf_token': csrf_token | |||
|
639 | } | |||
|
640 | self.app.post( | |||
|
641 | route_path('edit_repo_vcs_svn_pattern_delete', repo_name=repo_name), | |||
|
642 | data, extra_environ=xhr_header, status=400) | |||
|
643 | ||||
|
644 | def _cleanup_repo_settings(self, settings_model): | |||
|
645 | cleanup = [] | |||
|
646 | ui_settings = ( | |||
|
647 | VcsSettingsModel.HOOKS_SETTINGS + VcsSettingsModel.HG_SETTINGS) | |||
|
648 | ||||
|
649 | for section, key in ui_settings: | |||
|
650 | ui = settings_model.get_ui_by_section_and_key(section, key) | |||
|
651 | if ui: | |||
|
652 | cleanup.append(ui) | |||
|
653 | ||||
|
654 | cleanup.extend(settings_model.get_ui_by_section( | |||
|
655 | VcsSettingsModel.INHERIT_SETTINGS)) | |||
|
656 | cleanup.extend(settings_model.get_ui_by_section( | |||
|
657 | VcsSettingsModel.SVN_BRANCH_SECTION)) | |||
|
658 | cleanup.extend(settings_model.get_ui_by_section( | |||
|
659 | VcsSettingsModel.SVN_TAG_SECTION)) | |||
|
660 | ||||
|
661 | for name in VcsSettingsModel.GENERAL_SETTINGS: | |||
|
662 | setting = settings_model.get_setting_by_name(name) | |||
|
663 | if setting: | |||
|
664 | cleanup.append(setting) | |||
|
665 | ||||
|
666 | for object_ in cleanup: | |||
|
667 | Session().delete(object_) | |||
|
668 | Session().commit() | |||
|
669 | ||||
|
670 | def assert_repo_value_equals_global_value(self, response, setting): | |||
|
671 | assert_response = AssertResponse(response) | |||
|
672 | global_css_selector = '[name={}_inherited]'.format(setting) | |||
|
673 | repo_css_selector = '[name={}]'.format(setting) | |||
|
674 | repo_element = assert_response.get_element(repo_css_selector) | |||
|
675 | global_element = assert_response.get_element(global_css_selector) | |||
|
676 | assert repo_element.value == global_element.value | |||
|
677 | ||||
|
678 | ||||
|
679 | def _get_permission_for_user(user, repo): | |||
|
680 | perm = UserRepoToPerm.query()\ | |||
|
681 | .filter(UserRepoToPerm.repository == | |||
|
682 | Repository.get_by_repo_name(repo))\ | |||
|
683 | .filter(UserRepoToPerm.user == User.get_by_username(user))\ | |||
|
684 | .all() | |||
|
685 | return perm |
@@ -0,0 +1,113 b'' | |||||
|
1 | # -*- coding: utf-8 -*- | |||
|
2 | ||||
|
3 | # Copyright (C) 2017-2017 RhodeCode GmbH | |||
|
4 | # | |||
|
5 | # This program is free software: you can redistribute it and/or modify | |||
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |||
|
7 | # (only), as published by the Free Software Foundation. | |||
|
8 | # | |||
|
9 | # This program is distributed in the hope that it will be useful, | |||
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
|
12 | # GNU General Public License for more details. | |||
|
13 | # | |||
|
14 | # You should have received a copy of the GNU Affero General Public License | |||
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |||
|
16 | # | |||
|
17 | # This program is dual-licensed. If you wish to learn more about the | |||
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |||
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |||
|
20 | ||||
|
21 | import logging | |||
|
22 | ||||
|
23 | import formencode | |||
|
24 | ||||
|
25 | from pyramid.httpexceptions import HTTPFound | |||
|
26 | from pyramid.view import view_config | |||
|
27 | ||||
|
28 | from rhodecode.apps._base import RepoAppView | |||
|
29 | from rhodecode.lib import audit_logger | |||
|
30 | from rhodecode.lib import helpers as h | |||
|
31 | from rhodecode.lib.auth import ( | |||
|
32 | LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired) | |||
|
33 | from rhodecode.model.db import RepositoryField | |||
|
34 | from rhodecode.model.forms import RepoFieldForm | |||
|
35 | from rhodecode.model.meta import Session | |||
|
36 | from rhodecode.model.repo import RepoModel | |||
|
37 | ||||
|
38 | log = logging.getLogger(__name__) | |||
|
39 | ||||
|
40 | ||||
|
41 | class RepoSettingsFieldsView(RepoAppView): | |||
|
42 | def load_default_context(self): | |||
|
43 | c = self._get_local_tmpl_context() | |||
|
44 | ||||
|
45 | # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead | |||
|
46 | c.repo_info = self.db_repo | |||
|
47 | ||||
|
48 | self._register_global_c(c) | |||
|
49 | return c | |||
|
50 | ||||
|
51 | @LoginRequired() | |||
|
52 | @HasRepoPermissionAnyDecorator('repository.admin') | |||
|
53 | @view_config( | |||
|
54 | route_name='edit_repo_fields', request_method='GET', | |||
|
55 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') | |||
|
56 | def repo_field_edit(self): | |||
|
57 | c = self.load_default_context() | |||
|
58 | ||||
|
59 | c.active = 'fields' | |||
|
60 | c.repo_fields = RepositoryField.query() \ | |||
|
61 | .filter(RepositoryField.repository == self.db_repo).all() | |||
|
62 | ||||
|
63 | return self._get_template_context(c) | |||
|
64 | ||||
|
65 | @LoginRequired() | |||
|
66 | @HasRepoPermissionAnyDecorator('repository.admin') | |||
|
67 | @CSRFRequired() | |||
|
68 | @view_config( | |||
|
69 | route_name='edit_repo_fields_create', request_method='POST', | |||
|
70 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') | |||
|
71 | def repo_field_create(self): | |||
|
72 | _ = self.request.translate | |||
|
73 | ||||
|
74 | try: | |||
|
75 | form_result = RepoFieldForm()().to_python(dict(self.request.POST)) | |||
|
76 | RepoModel().add_repo_field( | |||
|
77 | self.db_repo_name, | |||
|
78 | form_result['new_field_key'], | |||
|
79 | field_type=form_result['new_field_type'], | |||
|
80 | field_value=form_result['new_field_value'], | |||
|
81 | field_label=form_result['new_field_label'], | |||
|
82 | field_desc=form_result['new_field_desc']) | |||
|
83 | ||||
|
84 | Session().commit() | |||
|
85 | except Exception as e: | |||
|
86 | log.exception("Exception creating field") | |||
|
87 | msg = _('An error occurred during creation of field') | |||
|
88 | if isinstance(e, formencode.Invalid): | |||
|
89 | msg += ". " + e.msg | |||
|
90 | h.flash(msg, category='error') | |||
|
91 | ||||
|
92 | raise HTTPFound( | |||
|
93 | h.route_path('edit_repo_fields', repo_name=self.db_repo_name)) | |||
|
94 | ||||
|
95 | @LoginRequired() | |||
|
96 | @HasRepoPermissionAnyDecorator('repository.admin') | |||
|
97 | @CSRFRequired() | |||
|
98 | @view_config( | |||
|
99 | route_name='edit_repo_fields_delete', request_method='POST', | |||
|
100 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') | |||
|
101 | def repo_field_delete(self): | |||
|
102 | _ = self.request.translate | |||
|
103 | field = RepositoryField.get_or_404(self.request.matchdict['field_id']) | |||
|
104 | try: | |||
|
105 | RepoModel().delete_repo_field(self.db_repo_name, field.field_key) | |||
|
106 | Session().commit() | |||
|
107 | except Exception: | |||
|
108 | log.exception('Exception during removal of field') | |||
|
109 | msg = _('An error occurred during removal of field') | |||
|
110 | h.flash(msg, category='error') | |||
|
111 | ||||
|
112 | raise HTTPFound( | |||
|
113 | h.route_path('edit_repo_fields', repo_name=self.db_repo_name)) |
@@ -0,0 +1,129 b'' | |||||
|
1 | # -*- coding: utf-8 -*- | |||
|
2 | ||||
|
3 | # Copyright (C) 2017-2017 RhodeCode GmbH | |||
|
4 | # | |||
|
5 | # This program is free software: you can redistribute it and/or modify | |||
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |||
|
7 | # (only), as published by the Free Software Foundation. | |||
|
8 | # | |||
|
9 | # This program is distributed in the hope that it will be useful, | |||
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
|
12 | # GNU General Public License for more details. | |||
|
13 | # | |||
|
14 | # You should have received a copy of the GNU Affero General Public License | |||
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |||
|
16 | # | |||
|
17 | # This program is dual-licensed. If you wish to learn more about the | |||
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |||
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |||
|
20 | ||||
|
21 | import logging | |||
|
22 | ||||
|
23 | from pyramid.httpexceptions import HTTPFound | |||
|
24 | from pyramid.view import view_config | |||
|
25 | ||||
|
26 | from rhodecode.apps._base import RepoAppView | |||
|
27 | from rhodecode.lib import audit_logger | |||
|
28 | from rhodecode.lib import helpers as h | |||
|
29 | from rhodecode.lib.auth import ( | |||
|
30 | LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired) | |||
|
31 | from rhodecode.model.forms import IssueTrackerPatternsForm | |||
|
32 | from rhodecode.model.meta import Session | |||
|
33 | from rhodecode.model.settings import IssueTrackerSettingsModel | |||
|
34 | ||||
|
35 | log = logging.getLogger(__name__) | |||
|
36 | ||||
|
37 | ||||
|
38 | class RepoSettingsIssueTrackersView(RepoAppView): | |||
|
39 | def load_default_context(self): | |||
|
40 | c = self._get_local_tmpl_context() | |||
|
41 | ||||
|
42 | # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead | |||
|
43 | c.repo_info = self.db_repo | |||
|
44 | ||||
|
45 | self._register_global_c(c) | |||
|
46 | return c | |||
|
47 | ||||
|
48 | @LoginRequired() | |||
|
49 | @HasRepoPermissionAnyDecorator('repository.admin') | |||
|
50 | @view_config( | |||
|
51 | route_name='edit_repo_issuetracker', request_method='GET', | |||
|
52 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') | |||
|
53 | def repo_issuetracker(self): | |||
|
54 | c = self.load_default_context() | |||
|
55 | c.active = 'issuetracker' | |||
|
56 | c.data = 'data' | |||
|
57 | ||||
|
58 | c.settings_model = IssueTrackerSettingsModel(repo=self.db_repo) | |||
|
59 | c.global_patterns = c.settings_model.get_global_settings() | |||
|
60 | c.repo_patterns = c.settings_model.get_repo_settings() | |||
|
61 | ||||
|
62 | return self._get_template_context(c) | |||
|
63 | ||||
|
64 | @LoginRequired() | |||
|
65 | @HasRepoPermissionAnyDecorator('repository.admin') | |||
|
66 | @CSRFRequired() | |||
|
67 | @view_config( | |||
|
68 | route_name='edit_repo_issuetracker_test', request_method='POST', | |||
|
69 | xhr=True, renderer='string') | |||
|
70 | def repo_issuetracker_test(self): | |||
|
71 | return h.urlify_commit_message( | |||
|
72 | self.request.POST.get('test_text', ''), | |||
|
73 | self.db_repo_name) | |||
|
74 | ||||
|
75 | @LoginRequired() | |||
|
76 | @HasRepoPermissionAnyDecorator('repository.admin') | |||
|
77 | @CSRFRequired() | |||
|
78 | @view_config( | |||
|
79 | route_name='edit_repo_issuetracker_delete', request_method='POST', | |||
|
80 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') | |||
|
81 | def repo_issuetracker_delete(self): | |||
|
82 | _ = self.request.translate | |||
|
83 | uid = self.request.POST.get('uid') | |||
|
84 | repo_settings = IssueTrackerSettingsModel(repo=self.db_repo_name) | |||
|
85 | try: | |||
|
86 | repo_settings.delete_entries(uid) | |||
|
87 | except Exception: | |||
|
88 | h.flash(_('Error occurred during deleting issue tracker entry'), | |||
|
89 | category='error') | |||
|
90 | else: | |||
|
91 | h.flash(_('Removed issue tracker entry'), category='success') | |||
|
92 | raise HTTPFound( | |||
|
93 | h.route_path('edit_repo_issuetracker', repo_name=self.db_repo_name)) | |||
|
94 | ||||
|
95 | def _update_patterns(self, form, repo_settings): | |||
|
96 | for uid in form['delete_patterns']: | |||
|
97 | repo_settings.delete_entries(uid) | |||
|
98 | ||||
|
99 | for pattern_data in form['patterns']: | |||
|
100 | for setting_key, pattern, type_ in pattern_data: | |||
|
101 | sett = repo_settings.create_or_update_setting( | |||
|
102 | setting_key, pattern.strip(), type_) | |||
|
103 | Session().add(sett) | |||
|
104 | ||||
|
105 | Session().commit() | |||
|
106 | ||||
|
107 | @LoginRequired() | |||
|
108 | @HasRepoPermissionAnyDecorator('repository.admin') | |||
|
109 | @CSRFRequired() | |||
|
110 | @view_config( | |||
|
111 | route_name='edit_repo_issuetracker_update', request_method='POST', | |||
|
112 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') | |||
|
113 | def repo_issuetracker_update(self): | |||
|
114 | _ = self.request.translate | |||
|
115 | # Save inheritance | |||
|
116 | repo_settings = IssueTrackerSettingsModel(repo=self.db_repo_name) | |||
|
117 | inherited = ( | |||
|
118 | self.request.POST.get('inherit_global_issuetracker') == "inherited") | |||
|
119 | repo_settings.inherit_global_settings = inherited | |||
|
120 | Session().commit() | |||
|
121 | ||||
|
122 | form = IssueTrackerPatternsForm()().to_python(self.request.POST) | |||
|
123 | if form: | |||
|
124 | self._update_patterns(form, repo_settings) | |||
|
125 | ||||
|
126 | h.flash(_('Updated issue tracker entries'), category='success') | |||
|
127 | raise HTTPFound( | |||
|
128 | h.route_path('edit_repo_issuetracker', repo_name=self.db_repo_name)) | |||
|
129 |
@@ -0,0 +1,75 b'' | |||||
|
1 | # -*- coding: utf-8 -*- | |||
|
2 | ||||
|
3 | # Copyright (C) 2017-2017 RhodeCode GmbH | |||
|
4 | # | |||
|
5 | # This program is free software: you can redistribute it and/or modify | |||
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |||
|
7 | # (only), as published by the Free Software Foundation. | |||
|
8 | # | |||
|
9 | # This program is distributed in the hope that it will be useful, | |||
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
|
12 | # GNU General Public License for more details. | |||
|
13 | # | |||
|
14 | # You should have received a copy of the GNU Affero General Public License | |||
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |||
|
16 | # | |||
|
17 | # This program is dual-licensed. If you wish to learn more about the | |||
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |||
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |||
|
20 | ||||
|
21 | import logging | |||
|
22 | ||||
|
23 | from pyramid.httpexceptions import HTTPFound | |||
|
24 | from pyramid.view import view_config | |||
|
25 | ||||
|
26 | from rhodecode.apps._base import RepoAppView | |||
|
27 | from rhodecode.lib import helpers as h | |||
|
28 | from rhodecode.lib.auth import ( | |||
|
29 | LoginRequired, CSRFRequired, HasRepoPermissionAnyDecorator) | |||
|
30 | from rhodecode.model.scm import ScmModel | |||
|
31 | ||||
|
32 | log = logging.getLogger(__name__) | |||
|
33 | ||||
|
34 | ||||
|
35 | class RepoSettingsRemoteView(RepoAppView): | |||
|
36 | def load_default_context(self): | |||
|
37 | c = self._get_local_tmpl_context() | |||
|
38 | ||||
|
39 | # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead | |||
|
40 | c.repo_info = self.db_repo | |||
|
41 | ||||
|
42 | self._register_global_c(c) | |||
|
43 | return c | |||
|
44 | ||||
|
45 | @LoginRequired() | |||
|
46 | @HasRepoPermissionAnyDecorator('repository.admin') | |||
|
47 | @view_config( | |||
|
48 | route_name='edit_repo_remote', request_method='GET', | |||
|
49 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') | |||
|
50 | def repo_remote_edit_form(self): | |||
|
51 | c = self.load_default_context() | |||
|
52 | c.active = 'remote' | |||
|
53 | ||||
|
54 | return self._get_template_context(c) | |||
|
55 | ||||
|
56 | @LoginRequired() | |||
|
57 | @HasRepoPermissionAnyDecorator('repository.admin') | |||
|
58 | @CSRFRequired() | |||
|
59 | @view_config( | |||
|
60 | route_name='edit_repo_remote_pull', request_method='POST', | |||
|
61 | renderer=None) | |||
|
62 | def repo_remote_pull_changes(self): | |||
|
63 | _ = self.request.translate | |||
|
64 | self.load_default_context() | |||
|
65 | ||||
|
66 | try: | |||
|
67 | ScmModel().pull_changes( | |||
|
68 | self.db_repo_name, self._rhodecode_user.username) | |||
|
69 | h.flash(_('Pulled from remote location'), category='success') | |||
|
70 | except Exception: | |||
|
71 | log.exception("Exception during pull from remote") | |||
|
72 | h.flash(_('An error occurred during pull from remote location'), | |||
|
73 | category='error') | |||
|
74 | raise HTTPFound( | |||
|
75 | h.route_path('edit_repo_remote', repo_name=self.db_repo_name)) |
@@ -0,0 +1,172 b'' | |||||
|
1 | # -*- coding: utf-8 -*- | |||
|
2 | ||||
|
3 | # Copyright (C) 2017-2017 RhodeCode GmbH | |||
|
4 | # | |||
|
5 | # This program is free software: you can redistribute it and/or modify | |||
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |||
|
7 | # (only), as published by the Free Software Foundation. | |||
|
8 | # | |||
|
9 | # This program is distributed in the hope that it will be useful, | |||
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
|
12 | # GNU General Public License for more details. | |||
|
13 | # | |||
|
14 | # You should have received a copy of the GNU Affero General Public License | |||
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |||
|
16 | # | |||
|
17 | # This program is dual-licensed. If you wish to learn more about the | |||
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |||
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |||
|
20 | ||||
|
21 | import logging | |||
|
22 | ||||
|
23 | import formencode | |||
|
24 | from pyramid.httpexceptions import HTTPFound, HTTPBadRequest | |||
|
25 | from pyramid.response import Response | |||
|
26 | from pyramid.renderers import render | |||
|
27 | from pyramid.view import view_config | |||
|
28 | ||||
|
29 | from rhodecode.apps._base import RepoAppView | |||
|
30 | from rhodecode.lib import audit_logger | |||
|
31 | from rhodecode.lib import helpers as h | |||
|
32 | from rhodecode.lib.auth import ( | |||
|
33 | LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired) | |||
|
34 | from rhodecode.model.forms import RepoVcsSettingsForm | |||
|
35 | from rhodecode.model.meta import Session | |||
|
36 | from rhodecode.model.settings import VcsSettingsModel, SettingNotFound | |||
|
37 | ||||
|
38 | log = logging.getLogger(__name__) | |||
|
39 | ||||
|
40 | ||||
|
41 | class RepoSettingsVcsView(RepoAppView): | |||
|
42 | def load_default_context(self): | |||
|
43 | c = self._get_local_tmpl_context() | |||
|
44 | ||||
|
45 | # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead | |||
|
46 | c.repo_info = self.db_repo | |||
|
47 | ||||
|
48 | self._register_global_c(c) | |||
|
49 | return c | |||
|
50 | ||||
|
51 | def _vcs_form_defaults(self, repo_name): | |||
|
52 | model = VcsSettingsModel(repo=repo_name) | |||
|
53 | global_defaults = model.get_global_settings() | |||
|
54 | ||||
|
55 | repo_defaults = {} | |||
|
56 | repo_defaults.update(global_defaults) | |||
|
57 | repo_defaults.update(model.get_repo_settings()) | |||
|
58 | ||||
|
59 | global_defaults = { | |||
|
60 | '{}_inherited'.format(k): global_defaults[k] | |||
|
61 | for k in global_defaults} | |||
|
62 | ||||
|
63 | defaults = { | |||
|
64 | 'inherit_global_settings': model.inherit_global_settings | |||
|
65 | } | |||
|
66 | defaults.update(global_defaults) | |||
|
67 | defaults.update(repo_defaults) | |||
|
68 | defaults.update({ | |||
|
69 | 'new_svn_branch': '', | |||
|
70 | 'new_svn_tag': '', | |||
|
71 | }) | |||
|
72 | return defaults | |||
|
73 | ||||
|
74 | @LoginRequired() | |||
|
75 | @HasRepoPermissionAnyDecorator('repository.admin') | |||
|
76 | @view_config( | |||
|
77 | route_name='edit_repo_vcs', request_method='GET', | |||
|
78 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') | |||
|
79 | def repo_vcs_settings(self): | |||
|
80 | c = self.load_default_context() | |||
|
81 | model = VcsSettingsModel(repo=self.db_repo_name) | |||
|
82 | ||||
|
83 | c.active = 'vcs' | |||
|
84 | c.global_svn_branch_patterns = model.get_global_svn_branch_patterns() | |||
|
85 | c.global_svn_tag_patterns = model.get_global_svn_tag_patterns() | |||
|
86 | c.svn_branch_patterns = model.get_repo_svn_branch_patterns() | |||
|
87 | c.svn_tag_patterns = model.get_repo_svn_tag_patterns() | |||
|
88 | ||||
|
89 | defaults = self._vcs_form_defaults(self.db_repo_name) | |||
|
90 | c.inherit_global_settings = defaults['inherit_global_settings'] | |||
|
91 | ||||
|
92 | data = render('rhodecode:templates/admin/repos/repo_edit.mako', | |||
|
93 | self._get_template_context(c), self.request) | |||
|
94 | html = formencode.htmlfill.render( | |||
|
95 | data, | |||
|
96 | defaults=defaults, | |||
|
97 | encoding="UTF-8", | |||
|
98 | force_defaults=False | |||
|
99 | ) | |||
|
100 | return Response(html) | |||
|
101 | ||||
|
102 | @LoginRequired() | |||
|
103 | @HasRepoPermissionAnyDecorator('repository.admin') | |||
|
104 | @CSRFRequired() | |||
|
105 | @view_config( | |||
|
106 | route_name='edit_repo_vcs_update', request_method='POST', | |||
|
107 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') | |||
|
108 | def repo_settings_vcs_update(self): | |||
|
109 | _ = self.request.translate | |||
|
110 | c = self.load_default_context() | |||
|
111 | c.active = 'vcs' | |||
|
112 | ||||
|
113 | model = VcsSettingsModel(repo=self.db_repo_name) | |||
|
114 | c.global_svn_branch_patterns = model.get_global_svn_branch_patterns() | |||
|
115 | c.global_svn_tag_patterns = model.get_global_svn_tag_patterns() | |||
|
116 | c.svn_branch_patterns = model.get_repo_svn_branch_patterns() | |||
|
117 | c.svn_tag_patterns = model.get_repo_svn_tag_patterns() | |||
|
118 | ||||
|
119 | defaults = self._vcs_form_defaults(self.db_repo_name) | |||
|
120 | c.inherit_global_settings = defaults['inherit_global_settings'] | |||
|
121 | ||||
|
122 | application_form = RepoVcsSettingsForm(self.db_repo_name)() | |||
|
123 | try: | |||
|
124 | form_result = application_form.to_python(dict(self.request.POST)) | |||
|
125 | except formencode.Invalid as errors: | |||
|
126 | h.flash(_("Some form inputs contain invalid data."), | |||
|
127 | category='error') | |||
|
128 | ||||
|
129 | data = render('rhodecode:templates/admin/repos/repo_edit.mako', | |||
|
130 | self._get_template_context(c), self.request) | |||
|
131 | html = formencode.htmlfill.render( | |||
|
132 | data, | |||
|
133 | defaults=errors.value, | |||
|
134 | errors=errors.error_dict or {}, | |||
|
135 | encoding="UTF-8", | |||
|
136 | force_defaults=False | |||
|
137 | ) | |||
|
138 | return Response(html) | |||
|
139 | ||||
|
140 | try: | |||
|
141 | inherit_global_settings = form_result['inherit_global_settings'] | |||
|
142 | model.create_or_update_repo_settings( | |||
|
143 | form_result, inherit_global_settings=inherit_global_settings) | |||
|
144 | Session().commit() | |||
|
145 | h.flash(_('Updated VCS settings'), category='success') | |||
|
146 | except Exception: | |||
|
147 | log.exception("Exception while updating settings") | |||
|
148 | h.flash( | |||
|
149 | _('Error occurred during updating repository VCS settings'), | |||
|
150 | category='error') | |||
|
151 | ||||
|
152 | raise HTTPFound( | |||
|
153 | h.route_path('edit_repo_vcs', repo_name=self.db_repo_name)) | |||
|
154 | ||||
|
155 | @LoginRequired() | |||
|
156 | @HasRepoPermissionAnyDecorator('repository.admin') | |||
|
157 | @CSRFRequired() | |||
|
158 | @view_config( | |||
|
159 | route_name='edit_repo_vcs_svn_pattern_delete', request_method='POST', | |||
|
160 | renderer='json_ext', xhr=True) | |||
|
161 | def repo_settings_delete_svn_pattern(self): | |||
|
162 | self.load_default_context() | |||
|
163 | delete_pattern_id = self.request.POST.get('delete_svn_pattern') | |||
|
164 | model = VcsSettingsModel(repo=self.db_repo_name) | |||
|
165 | try: | |||
|
166 | model.delete_repo_svn_pattern(delete_pattern_id) | |||
|
167 | except SettingNotFound: | |||
|
168 | log.exception('Failed to delete SVN pattern') | |||
|
169 | raise HTTPBadRequest() | |||
|
170 | ||||
|
171 | Session().commit() | |||
|
172 | return True |
1 | NO CONTENT: new file 100644 |
|
NO CONTENT: new file 100644 |
@@ -1,222 +1,235 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2016-2017 RhodeCode GmbH |
|
3 | # Copyright (C) 2016-2017 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 |
|
21 | |||
22 | from rhodecode.apps.admin.navigation import NavigationRegistry |
|
22 | from rhodecode.apps.admin.navigation import NavigationRegistry | |
23 | from rhodecode.config.routing import ADMIN_PREFIX |
|
23 | from rhodecode.config.routing import ADMIN_PREFIX | |
24 | from rhodecode.lib.utils2 import str2bool |
|
24 | from rhodecode.lib.utils2 import str2bool | |
25 |
|
25 | |||
26 |
|
26 | |||
27 | def admin_routes(config): |
|
27 | def admin_routes(config): | |
28 | """ |
|
28 | """ | |
29 | Admin prefixed routes |
|
29 | Admin prefixed routes | |
30 | """ |
|
30 | """ | |
31 |
|
31 | |||
32 | config.add_route( |
|
32 | config.add_route( | |
33 | name='admin_audit_logs', |
|
33 | name='admin_audit_logs', | |
34 | pattern='/audit_logs') |
|
34 | pattern='/audit_logs') | |
35 |
|
35 | |||
36 | config.add_route( |
|
36 | config.add_route( | |
37 | name='pull_requests_global_0', # backward compat |
|
37 | name='pull_requests_global_0', # backward compat | |
38 | pattern='/pull_requests/{pull_request_id:\d+}') |
|
38 | pattern='/pull_requests/{pull_request_id:\d+}') | |
39 | config.add_route( |
|
39 | config.add_route( | |
40 | name='pull_requests_global_1', # backward compat |
|
40 | name='pull_requests_global_1', # backward compat | |
41 | pattern='/pull-requests/{pull_request_id:\d+}') |
|
41 | pattern='/pull-requests/{pull_request_id:\d+}') | |
42 | config.add_route( |
|
42 | config.add_route( | |
43 | name='pull_requests_global', |
|
43 | name='pull_requests_global', | |
44 | pattern='/pull-request/{pull_request_id:\d+}') |
|
44 | pattern='/pull-request/{pull_request_id:\d+}') | |
45 |
|
45 | |||
46 | config.add_route( |
|
46 | config.add_route( | |
47 | name='admin_settings_open_source', |
|
47 | name='admin_settings_open_source', | |
48 | pattern='/settings/open_source') |
|
48 | pattern='/settings/open_source') | |
49 | config.add_route( |
|
49 | config.add_route( | |
50 | name='admin_settings_vcs_svn_generate_cfg', |
|
50 | name='admin_settings_vcs_svn_generate_cfg', | |
51 | pattern='/settings/vcs/svn_generate_cfg') |
|
51 | pattern='/settings/vcs/svn_generate_cfg') | |
52 |
|
52 | |||
53 | config.add_route( |
|
53 | config.add_route( | |
54 | name='admin_settings_system', |
|
54 | name='admin_settings_system', | |
55 | pattern='/settings/system') |
|
55 | pattern='/settings/system') | |
56 | config.add_route( |
|
56 | config.add_route( | |
57 | name='admin_settings_system_update', |
|
57 | name='admin_settings_system_update', | |
58 | pattern='/settings/system/updates') |
|
58 | pattern='/settings/system/updates') | |
59 |
|
59 | |||
60 | config.add_route( |
|
60 | config.add_route( | |
61 | name='admin_settings_sessions', |
|
61 | name='admin_settings_sessions', | |
62 | pattern='/settings/sessions') |
|
62 | pattern='/settings/sessions') | |
63 | config.add_route( |
|
63 | config.add_route( | |
64 | name='admin_settings_sessions_cleanup', |
|
64 | name='admin_settings_sessions_cleanup', | |
65 | pattern='/settings/sessions/cleanup') |
|
65 | pattern='/settings/sessions/cleanup') | |
66 |
|
66 | |||
67 | config.add_route( |
|
67 | config.add_route( | |
68 | name='admin_settings_process_management', |
|
68 | name='admin_settings_process_management', | |
69 | pattern='/settings/process_management') |
|
69 | pattern='/settings/process_management') | |
70 | config.add_route( |
|
70 | config.add_route( | |
71 | name='admin_settings_process_management_signal', |
|
71 | name='admin_settings_process_management_signal', | |
72 | pattern='/settings/process_management/signal') |
|
72 | pattern='/settings/process_management/signal') | |
73 |
|
73 | |||
74 | # global permissions |
|
74 | # global permissions | |
75 |
|
75 | |||
76 | config.add_route( |
|
76 | config.add_route( | |
77 | name='admin_permissions_application', |
|
77 | name='admin_permissions_application', | |
78 | pattern='/permissions/application') |
|
78 | pattern='/permissions/application') | |
79 | config.add_route( |
|
79 | config.add_route( | |
80 | name='admin_permissions_application_update', |
|
80 | name='admin_permissions_application_update', | |
81 | pattern='/permissions/application/update') |
|
81 | pattern='/permissions/application/update') | |
82 |
|
82 | |||
83 | config.add_route( |
|
83 | config.add_route( | |
84 | name='admin_permissions_global', |
|
84 | name='admin_permissions_global', | |
85 | pattern='/permissions/global') |
|
85 | pattern='/permissions/global') | |
86 | config.add_route( |
|
86 | config.add_route( | |
87 | name='admin_permissions_global_update', |
|
87 | name='admin_permissions_global_update', | |
88 | pattern='/permissions/global/update') |
|
88 | pattern='/permissions/global/update') | |
89 |
|
89 | |||
90 | config.add_route( |
|
90 | config.add_route( | |
91 | name='admin_permissions_object', |
|
91 | name='admin_permissions_object', | |
92 | pattern='/permissions/object') |
|
92 | pattern='/permissions/object') | |
93 | config.add_route( |
|
93 | config.add_route( | |
94 | name='admin_permissions_object_update', |
|
94 | name='admin_permissions_object_update', | |
95 | pattern='/permissions/object/update') |
|
95 | pattern='/permissions/object/update') | |
96 |
|
96 | |||
97 | config.add_route( |
|
97 | config.add_route( | |
98 | name='admin_permissions_ips', |
|
98 | name='admin_permissions_ips', | |
99 | pattern='/permissions/ips') |
|
99 | pattern='/permissions/ips') | |
100 |
|
100 | |||
101 | config.add_route( |
|
101 | config.add_route( | |
102 | name='admin_permissions_overview', |
|
102 | name='admin_permissions_overview', | |
103 | pattern='/permissions/overview') |
|
103 | pattern='/permissions/overview') | |
104 |
|
104 | |||
105 | config.add_route( |
|
105 | config.add_route( | |
106 | name='admin_permissions_auth_token_access', |
|
106 | name='admin_permissions_auth_token_access', | |
107 | pattern='/permissions/auth_token_access') |
|
107 | pattern='/permissions/auth_token_access') | |
108 |
|
108 | |||
109 | # users admin |
|
109 | # users admin | |
110 | config.add_route( |
|
110 | config.add_route( | |
111 | name='users', |
|
111 | name='users', | |
112 | pattern='/users') |
|
112 | pattern='/users') | |
113 |
|
113 | |||
114 | config.add_route( |
|
114 | config.add_route( | |
115 | name='users_data', |
|
115 | name='users_data', | |
116 | pattern='/users_data') |
|
116 | pattern='/users_data') | |
117 |
|
117 | |||
118 | # user auth tokens |
|
118 | # user auth tokens | |
119 | config.add_route( |
|
119 | config.add_route( | |
120 | name='edit_user_auth_tokens', |
|
120 | name='edit_user_auth_tokens', | |
121 | pattern='/users/{user_id:\d+}/edit/auth_tokens') |
|
121 | pattern='/users/{user_id:\d+}/edit/auth_tokens') | |
122 | config.add_route( |
|
122 | config.add_route( | |
123 | name='edit_user_auth_tokens_add', |
|
123 | name='edit_user_auth_tokens_add', | |
124 | pattern='/users/{user_id:\d+}/edit/auth_tokens/new') |
|
124 | pattern='/users/{user_id:\d+}/edit/auth_tokens/new') | |
125 | config.add_route( |
|
125 | config.add_route( | |
126 | name='edit_user_auth_tokens_delete', |
|
126 | name='edit_user_auth_tokens_delete', | |
127 | pattern='/users/{user_id:\d+}/edit/auth_tokens/delete') |
|
127 | pattern='/users/{user_id:\d+}/edit/auth_tokens/delete') | |
128 |
|
128 | |||
129 | # user ssh keys |
|
129 | # user ssh keys | |
130 | config.add_route( |
|
130 | config.add_route( | |
131 | name='edit_user_ssh_keys', |
|
131 | name='edit_user_ssh_keys', | |
132 | pattern='/users/{user_id:\d+}/edit/ssh_keys') |
|
132 | pattern='/users/{user_id:\d+}/edit/ssh_keys') | |
133 | config.add_route( |
|
133 | config.add_route( | |
134 | name='edit_user_ssh_keys_generate_keypair', |
|
134 | name='edit_user_ssh_keys_generate_keypair', | |
135 | pattern='/users/{user_id:\d+}/edit/ssh_keys/generate') |
|
135 | pattern='/users/{user_id:\d+}/edit/ssh_keys/generate') | |
136 | config.add_route( |
|
136 | config.add_route( | |
137 | name='edit_user_ssh_keys_add', |
|
137 | name='edit_user_ssh_keys_add', | |
138 | pattern='/users/{user_id:\d+}/edit/ssh_keys/new') |
|
138 | pattern='/users/{user_id:\d+}/edit/ssh_keys/new') | |
139 | config.add_route( |
|
139 | config.add_route( | |
140 | name='edit_user_ssh_keys_delete', |
|
140 | name='edit_user_ssh_keys_delete', | |
141 | pattern='/users/{user_id:\d+}/edit/ssh_keys/delete') |
|
141 | pattern='/users/{user_id:\d+}/edit/ssh_keys/delete') | |
142 |
|
142 | |||
143 | # user emails |
|
143 | # user emails | |
144 | config.add_route( |
|
144 | config.add_route( | |
145 | name='edit_user_emails', |
|
145 | name='edit_user_emails', | |
146 | pattern='/users/{user_id:\d+}/edit/emails') |
|
146 | pattern='/users/{user_id:\d+}/edit/emails') | |
147 | config.add_route( |
|
147 | config.add_route( | |
148 | name='edit_user_emails_add', |
|
148 | name='edit_user_emails_add', | |
149 | pattern='/users/{user_id:\d+}/edit/emails/new') |
|
149 | pattern='/users/{user_id:\d+}/edit/emails/new') | |
150 | config.add_route( |
|
150 | config.add_route( | |
151 | name='edit_user_emails_delete', |
|
151 | name='edit_user_emails_delete', | |
152 | pattern='/users/{user_id:\d+}/edit/emails/delete') |
|
152 | pattern='/users/{user_id:\d+}/edit/emails/delete') | |
153 |
|
153 | |||
154 | # user IPs |
|
154 | # user IPs | |
155 | config.add_route( |
|
155 | config.add_route( | |
156 | name='edit_user_ips', |
|
156 | name='edit_user_ips', | |
157 | pattern='/users/{user_id:\d+}/edit/ips') |
|
157 | pattern='/users/{user_id:\d+}/edit/ips') | |
158 | config.add_route( |
|
158 | config.add_route( | |
159 | name='edit_user_ips_add', |
|
159 | name='edit_user_ips_add', | |
160 | pattern='/users/{user_id:\d+}/edit/ips/new') |
|
160 | pattern='/users/{user_id:\d+}/edit/ips/new') | |
161 | config.add_route( |
|
161 | config.add_route( | |
162 | name='edit_user_ips_delete', |
|
162 | name='edit_user_ips_delete', | |
163 | pattern='/users/{user_id:\d+}/edit/ips/delete') |
|
163 | pattern='/users/{user_id:\d+}/edit/ips/delete') | |
164 |
|
164 | |||
165 | # user perms |
|
165 | # user perms | |
166 | config.add_route( |
|
166 | config.add_route( | |
167 | name='edit_user_perms_summary', |
|
167 | name='edit_user_perms_summary', | |
168 | pattern='/users/{user_id:\d+}/edit/permissions_summary') |
|
168 | pattern='/users/{user_id:\d+}/edit/permissions_summary') | |
169 | config.add_route( |
|
169 | config.add_route( | |
170 | name='edit_user_perms_summary_json', |
|
170 | name='edit_user_perms_summary_json', | |
171 | pattern='/users/{user_id:\d+}/edit/permissions_summary/json') |
|
171 | pattern='/users/{user_id:\d+}/edit/permissions_summary/json') | |
172 |
|
172 | |||
173 | # user groups management |
|
173 | # user groups management | |
174 | config.add_route( |
|
174 | config.add_route( | |
175 | name='edit_user_groups_management', |
|
175 | name='edit_user_groups_management', | |
176 | pattern='/users/{user_id:\d+}/edit/groups_management') |
|
176 | pattern='/users/{user_id:\d+}/edit/groups_management') | |
177 |
|
177 | |||
178 | config.add_route( |
|
178 | config.add_route( | |
179 | name='edit_user_groups_management_updates', |
|
179 | name='edit_user_groups_management_updates', | |
180 | pattern='/users/{user_id:\d+}/edit/edit_user_groups_management/updates') |
|
180 | pattern='/users/{user_id:\d+}/edit/edit_user_groups_management/updates') | |
181 |
|
181 | |||
182 | # user audit logs |
|
182 | # user audit logs | |
183 | config.add_route( |
|
183 | config.add_route( | |
184 | name='edit_user_audit_logs', |
|
184 | name='edit_user_audit_logs', | |
185 | pattern='/users/{user_id:\d+}/edit/audit') |
|
185 | pattern='/users/{user_id:\d+}/edit/audit') | |
186 |
|
186 | |||
187 | # user groups admin |
|
187 | # user groups admin | |
188 | config.add_route( |
|
188 | config.add_route( | |
189 | name='user_groups', |
|
189 | name='user_groups', | |
190 | pattern='/user_groups') |
|
190 | pattern='/user_groups') | |
191 |
|
191 | |||
192 | config.add_route( |
|
192 | config.add_route( | |
193 | name='user_groups_data', |
|
193 | name='user_groups_data', | |
194 | pattern='/user_groups_data') |
|
194 | pattern='/user_groups_data') | |
195 |
|
195 | |||
196 | config.add_route( |
|
196 | config.add_route( | |
197 | name='user_group_members_data', |
|
197 | name='user_group_members_data', | |
198 | pattern='/user_groups/{user_group_id:\d+}/members') |
|
198 | pattern='/user_groups/{user_group_id:\d+}/members') | |
199 |
|
199 | |||
200 | # user groups perms |
|
200 | # user groups perms | |
201 | config.add_route( |
|
201 | config.add_route( | |
202 | name='edit_user_group_perms_summary', |
|
202 | name='edit_user_group_perms_summary', | |
203 | pattern='/user_groups/{user_group_id:\d+}/edit/permissions_summary') |
|
203 | pattern='/user_groups/{user_group_id:\d+}/edit/permissions_summary') | |
204 | config.add_route( |
|
204 | config.add_route( | |
205 | name='edit_user_group_perms_summary_json', |
|
205 | name='edit_user_group_perms_summary_json', | |
206 | pattern='/user_groups/{user_group_id:\d+}/edit/permissions_summary/json') |
|
206 | pattern='/user_groups/{user_group_id:\d+}/edit/permissions_summary/json') | |
207 |
|
207 | |||
|
208 | # repos admin | |||
|
209 | config.add_route( | |||
|
210 | name='repos', | |||
|
211 | pattern='/repos') | |||
|
212 | ||||
|
213 | config.add_route( | |||
|
214 | name='repo_new', | |||
|
215 | pattern='/repos/new') | |||
|
216 | ||||
|
217 | config.add_route( | |||
|
218 | name='repo_create', | |||
|
219 | pattern='/repos/create') | |||
|
220 | ||||
208 |
|
221 | |||
209 | def includeme(config): |
|
222 | def includeme(config): | |
210 | settings = config.get_settings() |
|
223 | settings = config.get_settings() | |
211 |
|
224 | |||
212 | # Create admin navigation registry and add it to the pyramid registry. |
|
225 | # Create admin navigation registry and add it to the pyramid registry. | |
213 | labs_active = str2bool(settings.get('labs_settings_active', False)) |
|
226 | labs_active = str2bool(settings.get('labs_settings_active', False)) | |
214 | navigation_registry = NavigationRegistry(labs_active=labs_active) |
|
227 | navigation_registry = NavigationRegistry(labs_active=labs_active) | |
215 | config.registry.registerUtility(navigation_registry) |
|
228 | config.registry.registerUtility(navigation_registry) | |
216 |
|
229 | |||
217 | # main admin routes |
|
230 | # main admin routes | |
218 | config.add_route(name='admin_home', pattern=ADMIN_PREFIX) |
|
231 | config.add_route(name='admin_home', pattern=ADMIN_PREFIX) | |
219 | config.include(admin_routes, route_prefix=ADMIN_PREFIX) |
|
232 | config.include(admin_routes, route_prefix=ADMIN_PREFIX) | |
220 |
|
233 | |||
221 | # Scan module for configuration decorators. |
|
234 | # Scan module for configuration decorators. | |
222 | config.scan('.views', ignore='.tests') |
|
235 | config.scan('.views', ignore='.tests') |
This diff has been collapsed as it changes many lines, (712 lines changed) Show them Hide them | |||||
@@ -1,1131 +1,509 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2017 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import urllib |
|
21 | import urllib | |
22 |
|
22 | |||
23 | import mock |
|
23 | import mock | |
24 | import pytest |
|
24 | import pytest | |
25 |
|
25 | |||
|
26 | from rhodecode.apps._base import ADMIN_PREFIX | |||
26 | from rhodecode.lib import auth |
|
27 | from rhodecode.lib import auth | |
27 |
from rhodecode.lib.utils2 import safe_str |
|
28 | from rhodecode.lib.utils2 import safe_str | |
28 | from rhodecode.lib import helpers as h |
|
29 | from rhodecode.lib import helpers as h | |
29 | from rhodecode.model.db import ( |
|
30 | from rhodecode.model.db import ( | |
30 | Repository, RepoGroup, UserRepoToPerm, User, Permission) |
|
31 | Repository, RepoGroup, UserRepoToPerm, User, Permission) | |
31 | from rhodecode.model.meta import Session |
|
32 | from rhodecode.model.meta import Session | |
32 | from rhodecode.model.repo import RepoModel |
|
33 | from rhodecode.model.repo import RepoModel | |
33 | from rhodecode.model.repo_group import RepoGroupModel |
|
34 | from rhodecode.model.repo_group import RepoGroupModel | |
34 | from rhodecode.model.settings import SettingsModel, VcsSettingsModel |
|
|||
35 | from rhodecode.model.user import UserModel |
|
35 | from rhodecode.model.user import UserModel | |
36 | from rhodecode.tests import ( |
|
36 | from rhodecode.tests import ( | |
37 |
login_user_session |
|
37 | login_user_session, assert_session_flash, TEST_USER_ADMIN_LOGIN, | |
38 |
TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS |
|
38 | TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS) | |
39 | from rhodecode.tests.fixture import Fixture, error_function |
|
39 | from rhodecode.tests.fixture import Fixture, error_function | |
40 | from rhodecode.tests.utils import AssertResponse, repo_on_filesystem |
|
40 | from rhodecode.tests.utils import AssertResponse, repo_on_filesystem | |
41 |
|
41 | |||
42 | fixture = Fixture() |
|
42 | fixture = Fixture() | |
43 |
|
43 | |||
44 |
|
44 | |||
45 | def route_path(name, params=None, **kwargs): |
|
45 | def route_path(name, params=None, **kwargs): | |
46 | import urllib |
|
46 | import urllib | |
47 |
|
47 | |||
48 | base_url = { |
|
48 | base_url = { | |
49 |
'repo |
|
49 | 'repos': ADMIN_PREFIX + '/repos', | |
|
50 | 'repo_new': ADMIN_PREFIX + '/repos/new', | |||
|
51 | 'repo_create': ADMIN_PREFIX + '/repos/create', | |||
|
52 | ||||
50 | 'repo_creating_check': '/{repo_name}/repo_creating_check', |
|
53 | 'repo_creating_check': '/{repo_name}/repo_creating_check', | |
51 | }[name].format(**kwargs) |
|
54 | }[name].format(**kwargs) | |
52 |
|
55 | |||
53 | if params: |
|
56 | if params: | |
54 | base_url = '{}?{}'.format(base_url, urllib.urlencode(params)) |
|
57 | base_url = '{}?{}'.format(base_url, urllib.urlencode(params)) | |
55 | return base_url |
|
58 | return base_url | |
56 |
|
59 | |||
57 |
|
60 | |||
|
61 | def _get_permission_for_user(user, repo): | |||
|
62 | perm = UserRepoToPerm.query()\ | |||
|
63 | .filter(UserRepoToPerm.repository == | |||
|
64 | Repository.get_by_repo_name(repo))\ | |||
|
65 | .filter(UserRepoToPerm.user == User.get_by_username(user))\ | |||
|
66 | .all() | |||
|
67 | return perm | |||
|
68 | ||||
|
69 | ||||
58 | @pytest.mark.usefixtures("app") |
|
70 | @pytest.mark.usefixtures("app") | |
59 | class TestAdminRepos(object): |
|
71 | class TestAdminRepos(object): | |
60 |
|
72 | |||
61 | def test_index(self): |
|
73 | def test_repo_list(self, autologin_user, user_util): | |
62 | self.app.get(url('repos')) |
|
74 | repo = user_util.create_repo() | |
|
75 | response = self.app.get( | |||
|
76 | route_path('repos'), status=200) | |||
63 |
|
77 | |||
64 | def test_create_page_restricted(self, autologin_user, backend): |
|
78 | response.mustcontain(repo.repo_name) | |
|
79 | ||||
|
80 | def test_create_page_restricted_to_single_backend(self, autologin_user, backend): | |||
65 | with mock.patch('rhodecode.BACKENDS', {'git': 'git'}): |
|
81 | with mock.patch('rhodecode.BACKENDS', {'git': 'git'}): | |
66 |
response = self.app.get( |
|
82 | response = self.app.get(route_path('repo_new'), status=200) | |
67 | assert_response = AssertResponse(response) |
|
83 | assert_response = AssertResponse(response) | |
68 | element = assert_response.get_element('#repo_type') |
|
84 | element = assert_response.get_element('#repo_type') | |
69 | assert element.text_content() == '\ngit\n' |
|
85 | assert element.text_content() == '\ngit\n' | |
70 |
|
86 | |||
71 | def test_create_page_non_restricted(self, autologin_user, backend): |
|
87 | def test_create_page_non_restricted_backends(self, autologin_user, backend): | |
72 |
response = self.app.get( |
|
88 | response = self.app.get(route_path('repo_new'), status=200) | |
73 | assert_response = AssertResponse(response) |
|
89 | assert_response = AssertResponse(response) | |
74 | assert_response.element_contains('#repo_type', 'git') |
|
90 | assert_response.element_contains('#repo_type', 'git') | |
75 | assert_response.element_contains('#repo_type', 'svn') |
|
91 | assert_response.element_contains('#repo_type', 'svn') | |
76 | assert_response.element_contains('#repo_type', 'hg') |
|
92 | assert_response.element_contains('#repo_type', 'hg') | |
77 |
|
93 | |||
78 |
@pytest.mark.parametrize( |
|
94 | @pytest.mark.parametrize( | |
79 |
|
|
95 | "suffix", [u'', u'xxa'], ids=['', 'non-ascii']) | |
80 | def test_create(self, autologin_user, backend, suffix, csrf_token): |
|
96 | def test_create(self, autologin_user, backend, suffix, csrf_token): | |
81 | repo_name_unicode = backend.new_repo_name(suffix=suffix) |
|
97 | repo_name_unicode = backend.new_repo_name(suffix=suffix) | |
82 | repo_name = repo_name_unicode.encode('utf8') |
|
98 | repo_name = repo_name_unicode.encode('utf8') | |
83 | description_unicode = u'description for newly created repo' + suffix |
|
99 | description_unicode = u'description for newly created repo' + suffix | |
84 | description = description_unicode.encode('utf8') |
|
100 | description = description_unicode.encode('utf8') | |
85 | response = self.app.post( |
|
101 | response = self.app.post( | |
86 |
|
|
102 | route_path('repo_create'), | |
87 | fixture._get_repo_create_params( |
|
103 | fixture._get_repo_create_params( | |
88 | repo_private=False, |
|
104 | repo_private=False, | |
89 | repo_name=repo_name, |
|
105 | repo_name=repo_name, | |
90 | repo_type=backend.alias, |
|
106 | repo_type=backend.alias, | |
91 | repo_description=description, |
|
107 | repo_description=description, | |
92 | csrf_token=csrf_token), |
|
108 | csrf_token=csrf_token), | |
93 | status=302) |
|
109 | status=302) | |
94 |
|
110 | |||
95 | self.assert_repository_is_created_correctly( |
|
111 | self.assert_repository_is_created_correctly( | |
96 | repo_name, description, backend) |
|
112 | repo_name, description, backend) | |
97 |
|
113 | |||
98 | def test_create_numeric(self, autologin_user, backend, csrf_token): |
|
114 | def test_create_numeric_name(self, autologin_user, backend, csrf_token): | |
99 | numeric_repo = '1234' |
|
115 | numeric_repo = '1234' | |
100 | repo_name = numeric_repo |
|
116 | repo_name = numeric_repo | |
101 | description = 'description for newly created repo' + numeric_repo |
|
117 | description = 'description for newly created repo' + numeric_repo | |
102 | self.app.post( |
|
118 | self.app.post( | |
103 |
|
|
119 | route_path('repo_create'), | |
104 | fixture._get_repo_create_params( |
|
120 | fixture._get_repo_create_params( | |
105 | repo_private=False, |
|
121 | repo_private=False, | |
106 | repo_name=repo_name, |
|
122 | repo_name=repo_name, | |
107 | repo_type=backend.alias, |
|
123 | repo_type=backend.alias, | |
108 | repo_description=description, |
|
124 | repo_description=description, | |
109 | csrf_token=csrf_token)) |
|
125 | csrf_token=csrf_token)) | |
110 |
|
126 | |||
111 | self.assert_repository_is_created_correctly( |
|
127 | self.assert_repository_is_created_correctly( | |
112 | repo_name, description, backend) |
|
128 | repo_name, description, backend) | |
113 |
|
129 | |||
114 | @pytest.mark.parametrize("suffix", [u'', u'ąćę'], ids=['', 'non-ascii']) |
|
130 | @pytest.mark.parametrize("suffix", [u'', u'ąćę'], ids=['', 'non-ascii']) | |
115 | def test_create_in_group( |
|
131 | def test_create_in_group( | |
116 | self, autologin_user, backend, suffix, csrf_token): |
|
132 | self, autologin_user, backend, suffix, csrf_token): | |
117 | # create GROUP |
|
133 | # create GROUP | |
118 | group_name = 'sometest_%s' % backend.alias |
|
134 | group_name = 'sometest_%s' % backend.alias | |
119 | gr = RepoGroupModel().create(group_name=group_name, |
|
135 | gr = RepoGroupModel().create(group_name=group_name, | |
120 | group_description='test', |
|
136 | group_description='test', | |
121 | owner=TEST_USER_ADMIN_LOGIN) |
|
137 | owner=TEST_USER_ADMIN_LOGIN) | |
122 | Session().commit() |
|
138 | Session().commit() | |
123 |
|
139 | |||
124 | repo_name = u'ingroup' + suffix |
|
140 | repo_name = u'ingroup' + suffix | |
125 | repo_name_full = RepoGroup.url_sep().join( |
|
141 | repo_name_full = RepoGroup.url_sep().join( | |
126 | [group_name, repo_name]) |
|
142 | [group_name, repo_name]) | |
127 | description = u'description for newly created repo' |
|
143 | description = u'description for newly created repo' | |
128 | self.app.post( |
|
144 | self.app.post( | |
129 |
|
|
145 | route_path('repo_create'), | |
130 | fixture._get_repo_create_params( |
|
146 | fixture._get_repo_create_params( | |
131 | repo_private=False, |
|
147 | repo_private=False, | |
132 | repo_name=safe_str(repo_name), |
|
148 | repo_name=safe_str(repo_name), | |
133 | repo_type=backend.alias, |
|
149 | repo_type=backend.alias, | |
134 | repo_description=description, |
|
150 | repo_description=description, | |
135 | repo_group=gr.group_id, |
|
151 | repo_group=gr.group_id, | |
136 | csrf_token=csrf_token)) |
|
152 | csrf_token=csrf_token)) | |
137 |
|
153 | |||
138 | # TODO: johbo: Cleanup work to fixture |
|
154 | # TODO: johbo: Cleanup work to fixture | |
139 | try: |
|
155 | try: | |
140 | self.assert_repository_is_created_correctly( |
|
156 | self.assert_repository_is_created_correctly( | |
141 | repo_name_full, description, backend) |
|
157 | repo_name_full, description, backend) | |
142 |
|
158 | |||
143 | new_repo = RepoModel().get_by_repo_name(repo_name_full) |
|
159 | new_repo = RepoModel().get_by_repo_name(repo_name_full) | |
144 | inherited_perms = UserRepoToPerm.query().filter( |
|
160 | inherited_perms = UserRepoToPerm.query().filter( | |
145 | UserRepoToPerm.repository_id == new_repo.repo_id).all() |
|
161 | UserRepoToPerm.repository_id == new_repo.repo_id).all() | |
146 | assert len(inherited_perms) == 1 |
|
162 | assert len(inherited_perms) == 1 | |
147 | finally: |
|
163 | finally: | |
148 | RepoModel().delete(repo_name_full) |
|
164 | RepoModel().delete(repo_name_full) | |
149 | RepoGroupModel().delete(group_name) |
|
165 | RepoGroupModel().delete(group_name) | |
150 | Session().commit() |
|
166 | Session().commit() | |
151 |
|
167 | |||
152 | def test_create_in_group_numeric( |
|
168 | def test_create_in_group_numeric_name( | |
153 | self, autologin_user, backend, csrf_token): |
|
169 | self, autologin_user, backend, csrf_token): | |
154 | # create GROUP |
|
170 | # create GROUP | |
155 | group_name = 'sometest_%s' % backend.alias |
|
171 | group_name = 'sometest_%s' % backend.alias | |
156 | gr = RepoGroupModel().create(group_name=group_name, |
|
172 | gr = RepoGroupModel().create(group_name=group_name, | |
157 | group_description='test', |
|
173 | group_description='test', | |
158 | owner=TEST_USER_ADMIN_LOGIN) |
|
174 | owner=TEST_USER_ADMIN_LOGIN) | |
159 | Session().commit() |
|
175 | Session().commit() | |
160 |
|
176 | |||
161 | repo_name = '12345' |
|
177 | repo_name = '12345' | |
162 | repo_name_full = RepoGroup.url_sep().join([group_name, repo_name]) |
|
178 | repo_name_full = RepoGroup.url_sep().join([group_name, repo_name]) | |
163 | description = 'description for newly created repo' |
|
179 | description = 'description for newly created repo' | |
164 | self.app.post( |
|
180 | self.app.post( | |
165 |
|
|
181 | route_path('repo_create'), | |
166 | fixture._get_repo_create_params( |
|
182 | fixture._get_repo_create_params( | |
167 | repo_private=False, |
|
183 | repo_private=False, | |
168 | repo_name=repo_name, |
|
184 | repo_name=repo_name, | |
169 | repo_type=backend.alias, |
|
185 | repo_type=backend.alias, | |
170 | repo_description=description, |
|
186 | repo_description=description, | |
171 | repo_group=gr.group_id, |
|
187 | repo_group=gr.group_id, | |
172 | csrf_token=csrf_token)) |
|
188 | csrf_token=csrf_token)) | |
173 |
|
189 | |||
174 | # TODO: johbo: Cleanup work to fixture |
|
190 | # TODO: johbo: Cleanup work to fixture | |
175 | try: |
|
191 | try: | |
176 | self.assert_repository_is_created_correctly( |
|
192 | self.assert_repository_is_created_correctly( | |
177 | repo_name_full, description, backend) |
|
193 | repo_name_full, description, backend) | |
178 |
|
194 | |||
179 | new_repo = RepoModel().get_by_repo_name(repo_name_full) |
|
195 | new_repo = RepoModel().get_by_repo_name(repo_name_full) | |
180 | inherited_perms = UserRepoToPerm.query()\ |
|
196 | inherited_perms = UserRepoToPerm.query()\ | |
181 | .filter(UserRepoToPerm.repository_id == new_repo.repo_id).all() |
|
197 | .filter(UserRepoToPerm.repository_id == new_repo.repo_id).all() | |
182 | assert len(inherited_perms) == 1 |
|
198 | assert len(inherited_perms) == 1 | |
183 | finally: |
|
199 | finally: | |
184 | RepoModel().delete(repo_name_full) |
|
200 | RepoModel().delete(repo_name_full) | |
185 | RepoGroupModel().delete(group_name) |
|
201 | RepoGroupModel().delete(group_name) | |
186 | Session().commit() |
|
202 | Session().commit() | |
187 |
|
203 | |||
188 | def test_create_in_group_without_needed_permissions(self, backend): |
|
204 | def test_create_in_group_without_needed_permissions(self, backend): | |
189 | session = login_user_session( |
|
205 | session = login_user_session( | |
190 | self.app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS) |
|
206 | self.app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS) | |
191 | csrf_token = auth.get_csrf_token(session) |
|
207 | csrf_token = auth.get_csrf_token(session) | |
192 | # revoke |
|
208 | # revoke | |
193 | user_model = UserModel() |
|
209 | user_model = UserModel() | |
194 | # disable fork and create on default user |
|
210 | # disable fork and create on default user | |
195 | user_model.revoke_perm(User.DEFAULT_USER, 'hg.create.repository') |
|
211 | user_model.revoke_perm(User.DEFAULT_USER, 'hg.create.repository') | |
196 | user_model.grant_perm(User.DEFAULT_USER, 'hg.create.none') |
|
212 | user_model.grant_perm(User.DEFAULT_USER, 'hg.create.none') | |
197 | user_model.revoke_perm(User.DEFAULT_USER, 'hg.fork.repository') |
|
213 | user_model.revoke_perm(User.DEFAULT_USER, 'hg.fork.repository') | |
198 | user_model.grant_perm(User.DEFAULT_USER, 'hg.fork.none') |
|
214 | user_model.grant_perm(User.DEFAULT_USER, 'hg.fork.none') | |
199 |
|
215 | |||
200 | # disable on regular user |
|
216 | # disable on regular user | |
201 | user_model.revoke_perm(TEST_USER_REGULAR_LOGIN, 'hg.create.repository') |
|
217 | user_model.revoke_perm(TEST_USER_REGULAR_LOGIN, 'hg.create.repository') | |
202 | user_model.grant_perm(TEST_USER_REGULAR_LOGIN, 'hg.create.none') |
|
218 | user_model.grant_perm(TEST_USER_REGULAR_LOGIN, 'hg.create.none') | |
203 | user_model.revoke_perm(TEST_USER_REGULAR_LOGIN, 'hg.fork.repository') |
|
219 | user_model.revoke_perm(TEST_USER_REGULAR_LOGIN, 'hg.fork.repository') | |
204 | user_model.grant_perm(TEST_USER_REGULAR_LOGIN, 'hg.fork.none') |
|
220 | user_model.grant_perm(TEST_USER_REGULAR_LOGIN, 'hg.fork.none') | |
205 | Session().commit() |
|
221 | Session().commit() | |
206 |
|
222 | |||
207 | # create GROUP |
|
223 | # create GROUP | |
208 | group_name = 'reg_sometest_%s' % backend.alias |
|
224 | group_name = 'reg_sometest_%s' % backend.alias | |
209 | gr = RepoGroupModel().create(group_name=group_name, |
|
225 | gr = RepoGroupModel().create(group_name=group_name, | |
210 | group_description='test', |
|
226 | group_description='test', | |
211 | owner=TEST_USER_ADMIN_LOGIN) |
|
227 | owner=TEST_USER_ADMIN_LOGIN) | |
212 | Session().commit() |
|
228 | Session().commit() | |
213 |
|
229 | |||
214 | group_name_allowed = 'reg_sometest_allowed_%s' % backend.alias |
|
230 | group_name_allowed = 'reg_sometest_allowed_%s' % backend.alias | |
215 | gr_allowed = RepoGroupModel().create( |
|
231 | gr_allowed = RepoGroupModel().create( | |
216 | group_name=group_name_allowed, |
|
232 | group_name=group_name_allowed, | |
217 | group_description='test', |
|
233 | group_description='test', | |
218 | owner=TEST_USER_REGULAR_LOGIN) |
|
234 | owner=TEST_USER_REGULAR_LOGIN) | |
219 | Session().commit() |
|
235 | Session().commit() | |
220 |
|
236 | |||
221 | repo_name = 'ingroup' |
|
237 | repo_name = 'ingroup' | |
222 | description = 'description for newly created repo' |
|
238 | description = 'description for newly created repo' | |
223 | response = self.app.post( |
|
239 | response = self.app.post( | |
224 |
|
|
240 | route_path('repo_create'), | |
225 | fixture._get_repo_create_params( |
|
241 | fixture._get_repo_create_params( | |
226 | repo_private=False, |
|
242 | repo_private=False, | |
227 | repo_name=repo_name, |
|
243 | repo_name=repo_name, | |
228 | repo_type=backend.alias, |
|
244 | repo_type=backend.alias, | |
229 | repo_description=description, |
|
245 | repo_description=description, | |
230 | repo_group=gr.group_id, |
|
246 | repo_group=gr.group_id, | |
231 | csrf_token=csrf_token)) |
|
247 | csrf_token=csrf_token)) | |
232 |
|
248 | |||
233 | response.mustcontain('Invalid value') |
|
249 | response.mustcontain('Invalid value') | |
234 |
|
250 | |||
235 | # user is allowed to create in this group |
|
251 | # user is allowed to create in this group | |
236 | repo_name = 'ingroup' |
|
252 | repo_name = 'ingroup' | |
237 | repo_name_full = RepoGroup.url_sep().join( |
|
253 | repo_name_full = RepoGroup.url_sep().join( | |
238 | [group_name_allowed, repo_name]) |
|
254 | [group_name_allowed, repo_name]) | |
239 | description = 'description for newly created repo' |
|
255 | description = 'description for newly created repo' | |
240 | response = self.app.post( |
|
256 | response = self.app.post( | |
241 |
|
|
257 | route_path('repo_create'), | |
242 | fixture._get_repo_create_params( |
|
258 | fixture._get_repo_create_params( | |
243 | repo_private=False, |
|
259 | repo_private=False, | |
244 | repo_name=repo_name, |
|
260 | repo_name=repo_name, | |
245 | repo_type=backend.alias, |
|
261 | repo_type=backend.alias, | |
246 | repo_description=description, |
|
262 | repo_description=description, | |
247 | repo_group=gr_allowed.group_id, |
|
263 | repo_group=gr_allowed.group_id, | |
248 | csrf_token=csrf_token)) |
|
264 | csrf_token=csrf_token)) | |
249 |
|
265 | |||
250 | # TODO: johbo: Cleanup in pytest fixture |
|
266 | # TODO: johbo: Cleanup in pytest fixture | |
251 | try: |
|
267 | try: | |
252 | self.assert_repository_is_created_correctly( |
|
268 | self.assert_repository_is_created_correctly( | |
253 | repo_name_full, description, backend) |
|
269 | repo_name_full, description, backend) | |
254 |
|
270 | |||
255 | new_repo = RepoModel().get_by_repo_name(repo_name_full) |
|
271 | new_repo = RepoModel().get_by_repo_name(repo_name_full) | |
256 | inherited_perms = UserRepoToPerm.query().filter( |
|
272 | inherited_perms = UserRepoToPerm.query().filter( | |
257 | UserRepoToPerm.repository_id == new_repo.repo_id).all() |
|
273 | UserRepoToPerm.repository_id == new_repo.repo_id).all() | |
258 | assert len(inherited_perms) == 1 |
|
274 | assert len(inherited_perms) == 1 | |
259 |
|
275 | |||
260 | assert repo_on_filesystem(repo_name_full) |
|
276 | assert repo_on_filesystem(repo_name_full) | |
261 | finally: |
|
277 | finally: | |
262 | RepoModel().delete(repo_name_full) |
|
278 | RepoModel().delete(repo_name_full) | |
263 | RepoGroupModel().delete(group_name) |
|
279 | RepoGroupModel().delete(group_name) | |
264 | RepoGroupModel().delete(group_name_allowed) |
|
280 | RepoGroupModel().delete(group_name_allowed) | |
265 | Session().commit() |
|
281 | Session().commit() | |
266 |
|
282 | |||
267 | def test_create_in_group_inherit_permissions(self, autologin_user, backend, |
|
283 | def test_create_in_group_inherit_permissions(self, autologin_user, backend, | |
268 | csrf_token): |
|
284 | csrf_token): | |
269 | # create GROUP |
|
285 | # create GROUP | |
270 | group_name = 'sometest_%s' % backend.alias |
|
286 | group_name = 'sometest_%s' % backend.alias | |
271 | gr = RepoGroupModel().create(group_name=group_name, |
|
287 | gr = RepoGroupModel().create(group_name=group_name, | |
272 | group_description='test', |
|
288 | group_description='test', | |
273 | owner=TEST_USER_ADMIN_LOGIN) |
|
289 | owner=TEST_USER_ADMIN_LOGIN) | |
274 | perm = Permission.get_by_key('repository.write') |
|
290 | perm = Permission.get_by_key('repository.write') | |
275 | RepoGroupModel().grant_user_permission( |
|
291 | RepoGroupModel().grant_user_permission( | |
276 | gr, TEST_USER_REGULAR_LOGIN, perm) |
|
292 | gr, TEST_USER_REGULAR_LOGIN, perm) | |
277 |
|
293 | |||
278 | # add repo permissions |
|
294 | # add repo permissions | |
279 | Session().commit() |
|
295 | Session().commit() | |
280 |
|
296 | |||
281 | repo_name = 'ingroup_inherited_%s' % backend.alias |
|
297 | repo_name = 'ingroup_inherited_%s' % backend.alias | |
282 | repo_name_full = RepoGroup.url_sep().join([group_name, repo_name]) |
|
298 | repo_name_full = RepoGroup.url_sep().join([group_name, repo_name]) | |
283 | description = 'description for newly created repo' |
|
299 | description = 'description for newly created repo' | |
284 | self.app.post( |
|
300 | self.app.post( | |
285 |
|
|
301 | route_path('repo_create'), | |
286 | fixture._get_repo_create_params( |
|
302 | fixture._get_repo_create_params( | |
287 | repo_private=False, |
|
303 | repo_private=False, | |
288 | repo_name=repo_name, |
|
304 | repo_name=repo_name, | |
289 | repo_type=backend.alias, |
|
305 | repo_type=backend.alias, | |
290 | repo_description=description, |
|
306 | repo_description=description, | |
291 | repo_group=gr.group_id, |
|
307 | repo_group=gr.group_id, | |
292 | repo_copy_permissions=True, |
|
308 | repo_copy_permissions=True, | |
293 | csrf_token=csrf_token)) |
|
309 | csrf_token=csrf_token)) | |
294 |
|
310 | |||
295 | # TODO: johbo: Cleanup to pytest fixture |
|
311 | # TODO: johbo: Cleanup to pytest fixture | |
296 | try: |
|
312 | try: | |
297 | self.assert_repository_is_created_correctly( |
|
313 | self.assert_repository_is_created_correctly( | |
298 | repo_name_full, description, backend) |
|
314 | repo_name_full, description, backend) | |
299 | except Exception: |
|
315 | except Exception: | |
300 | RepoGroupModel().delete(group_name) |
|
316 | RepoGroupModel().delete(group_name) | |
301 | Session().commit() |
|
317 | Session().commit() | |
302 | raise |
|
318 | raise | |
303 |
|
319 | |||
304 | # check if inherited permissions are applied |
|
320 | # check if inherited permissions are applied | |
305 | new_repo = RepoModel().get_by_repo_name(repo_name_full) |
|
321 | new_repo = RepoModel().get_by_repo_name(repo_name_full) | |
306 | inherited_perms = UserRepoToPerm.query().filter( |
|
322 | inherited_perms = UserRepoToPerm.query().filter( | |
307 | UserRepoToPerm.repository_id == new_repo.repo_id).all() |
|
323 | UserRepoToPerm.repository_id == new_repo.repo_id).all() | |
308 | assert len(inherited_perms) == 2 |
|
324 | assert len(inherited_perms) == 2 | |
309 |
|
325 | |||
310 | assert TEST_USER_REGULAR_LOGIN in [ |
|
326 | assert TEST_USER_REGULAR_LOGIN in [ | |
311 | x.user.username for x in inherited_perms] |
|
327 | x.user.username for x in inherited_perms] | |
312 | assert 'repository.write' in [ |
|
328 | assert 'repository.write' in [ | |
313 | x.permission.permission_name for x in inherited_perms] |
|
329 | x.permission.permission_name for x in inherited_perms] | |
314 |
|
330 | |||
315 | RepoModel().delete(repo_name_full) |
|
331 | RepoModel().delete(repo_name_full) | |
316 | RepoGroupModel().delete(group_name) |
|
332 | RepoGroupModel().delete(group_name) | |
317 | Session().commit() |
|
333 | Session().commit() | |
318 |
|
334 | |||
319 | @pytest.mark.xfail_backends( |
|
335 | @pytest.mark.xfail_backends( | |
320 | "git", "hg", reason="Missing reposerver support") |
|
336 | "git", "hg", reason="Missing reposerver support") | |
321 | def test_create_with_clone_uri(self, autologin_user, backend, reposerver, |
|
337 | def test_create_with_clone_uri(self, autologin_user, backend, reposerver, | |
322 | csrf_token): |
|
338 | csrf_token): | |
323 | source_repo = backend.create_repo(number_of_commits=2) |
|
339 | source_repo = backend.create_repo(number_of_commits=2) | |
324 | source_repo_name = source_repo.repo_name |
|
340 | source_repo_name = source_repo.repo_name | |
325 | reposerver.serve(source_repo.scm_instance()) |
|
341 | reposerver.serve(source_repo.scm_instance()) | |
326 |
|
342 | |||
327 | repo_name = backend.new_repo_name() |
|
343 | repo_name = backend.new_repo_name() | |
328 | response = self.app.post( |
|
344 | response = self.app.post( | |
329 |
|
|
345 | route_path('repo_create'), | |
330 | fixture._get_repo_create_params( |
|
346 | fixture._get_repo_create_params( | |
331 | repo_private=False, |
|
347 | repo_private=False, | |
332 | repo_name=repo_name, |
|
348 | repo_name=repo_name, | |
333 | repo_type=backend.alias, |
|
349 | repo_type=backend.alias, | |
334 | repo_description='', |
|
350 | repo_description='', | |
335 | clone_uri=reposerver.url, |
|
351 | clone_uri=reposerver.url, | |
336 | csrf_token=csrf_token), |
|
352 | csrf_token=csrf_token), | |
337 | status=302) |
|
353 | status=302) | |
338 |
|
354 | |||
339 | # Should be redirected to the creating page |
|
355 | # Should be redirected to the creating page | |
340 | response.mustcontain('repo_creating') |
|
356 | response.mustcontain('repo_creating') | |
341 |
|
357 | |||
342 | # Expecting that both repositories have same history |
|
358 | # Expecting that both repositories have same history | |
343 | source_repo = RepoModel().get_by_repo_name(source_repo_name) |
|
359 | source_repo = RepoModel().get_by_repo_name(source_repo_name) | |
344 | source_vcs = source_repo.scm_instance() |
|
360 | source_vcs = source_repo.scm_instance() | |
345 | repo = RepoModel().get_by_repo_name(repo_name) |
|
361 | repo = RepoModel().get_by_repo_name(repo_name) | |
346 | repo_vcs = repo.scm_instance() |
|
362 | repo_vcs = repo.scm_instance() | |
347 | assert source_vcs[0].message == repo_vcs[0].message |
|
363 | assert source_vcs[0].message == repo_vcs[0].message | |
348 | assert source_vcs.count() == repo_vcs.count() |
|
364 | assert source_vcs.count() == repo_vcs.count() | |
349 | assert source_vcs.commit_ids == repo_vcs.commit_ids |
|
365 | assert source_vcs.commit_ids == repo_vcs.commit_ids | |
350 |
|
366 | |||
351 | @pytest.mark.xfail_backends("svn", reason="Depends on import support") |
|
367 | @pytest.mark.xfail_backends("svn", reason="Depends on import support") | |
352 | def test_create_remote_repo_wrong_clone_uri(self, autologin_user, backend, |
|
368 | def test_create_remote_repo_wrong_clone_uri(self, autologin_user, backend, | |
353 | csrf_token): |
|
369 | csrf_token): | |
354 | repo_name = backend.new_repo_name() |
|
370 | repo_name = backend.new_repo_name() | |
355 | description = 'description for newly created repo' |
|
371 | description = 'description for newly created repo' | |
356 | response = self.app.post( |
|
372 | response = self.app.post( | |
357 |
|
|
373 | route_path('repo_create'), | |
358 | fixture._get_repo_create_params( |
|
374 | fixture._get_repo_create_params( | |
359 | repo_private=False, |
|
375 | repo_private=False, | |
360 | repo_name=repo_name, |
|
376 | repo_name=repo_name, | |
361 | repo_type=backend.alias, |
|
377 | repo_type=backend.alias, | |
362 | repo_description=description, |
|
378 | repo_description=description, | |
363 | clone_uri='http://repo.invalid/repo', |
|
379 | clone_uri='http://repo.invalid/repo', | |
364 | csrf_token=csrf_token)) |
|
380 | csrf_token=csrf_token)) | |
365 | response.mustcontain('invalid clone url') |
|
381 | response.mustcontain('invalid clone url') | |
366 |
|
382 | |||
367 | @pytest.mark.xfail_backends("svn", reason="Depends on import support") |
|
383 | @pytest.mark.xfail_backends("svn", reason="Depends on import support") | |
368 | def test_create_remote_repo_wrong_clone_uri_hg_svn( |
|
384 | def test_create_remote_repo_wrong_clone_uri_hg_svn( | |
369 | self, autologin_user, backend, csrf_token): |
|
385 | self, autologin_user, backend, csrf_token): | |
370 | repo_name = backend.new_repo_name() |
|
386 | repo_name = backend.new_repo_name() | |
371 | description = 'description for newly created repo' |
|
387 | description = 'description for newly created repo' | |
372 | response = self.app.post( |
|
388 | response = self.app.post( | |
373 |
|
|
389 | route_path('repo_create'), | |
374 | fixture._get_repo_create_params( |
|
390 | fixture._get_repo_create_params( | |
375 | repo_private=False, |
|
391 | repo_private=False, | |
376 | repo_name=repo_name, |
|
392 | repo_name=repo_name, | |
377 | repo_type=backend.alias, |
|
393 | repo_type=backend.alias, | |
378 | repo_description=description, |
|
394 | repo_description=description, | |
379 | clone_uri='svn+http://svn.invalid/repo', |
|
395 | clone_uri='svn+http://svn.invalid/repo', | |
380 | csrf_token=csrf_token)) |
|
396 | csrf_token=csrf_token)) | |
381 | response.mustcontain('invalid clone url') |
|
397 | response.mustcontain('invalid clone url') | |
382 |
|
398 | |||
383 | def test_create_with_git_suffix( |
|
399 | def test_create_with_git_suffix( | |
384 | self, autologin_user, backend, csrf_token): |
|
400 | self, autologin_user, backend, csrf_token): | |
385 | repo_name = backend.new_repo_name() + ".git" |
|
401 | repo_name = backend.new_repo_name() + ".git" | |
386 | description = 'description for newly created repo' |
|
402 | description = 'description for newly created repo' | |
387 | response = self.app.post( |
|
403 | response = self.app.post( | |
388 |
|
|
404 | route_path('repo_create'), | |
389 | fixture._get_repo_create_params( |
|
405 | fixture._get_repo_create_params( | |
390 | repo_private=False, |
|
406 | repo_private=False, | |
391 | repo_name=repo_name, |
|
407 | repo_name=repo_name, | |
392 | repo_type=backend.alias, |
|
408 | repo_type=backend.alias, | |
393 | repo_description=description, |
|
409 | repo_description=description, | |
394 | csrf_token=csrf_token)) |
|
410 | csrf_token=csrf_token)) | |
395 | response.mustcontain('Repository name cannot end with .git') |
|
411 | response.mustcontain('Repository name cannot end with .git') | |
396 |
|
412 | |||
397 | def test_show(self, autologin_user, backend): |
|
|||
398 | self.app.get(url('repo', repo_name=backend.repo_name)) |
|
|||
399 |
|
||||
400 | def test_default_user_cannot_access_private_repo_in_a_group( |
|
413 | def test_default_user_cannot_access_private_repo_in_a_group( | |
401 |
self, autologin_user, user_util, backend |
|
414 | self, autologin_user, user_util, backend): | |
402 |
|
415 | |||
403 | group = user_util.create_repo_group() |
|
416 | group = user_util.create_repo_group() | |
404 |
|
417 | |||
405 | repo = backend.create_repo( |
|
418 | repo = backend.create_repo( | |
406 | repo_private=True, repo_group=group, repo_copy_permissions=True) |
|
419 | repo_private=True, repo_group=group, repo_copy_permissions=True) | |
407 |
|
420 | |||
408 | permissions = _get_permission_for_user( |
|
421 | permissions = _get_permission_for_user( | |
409 | user='default', repo=repo.repo_name) |
|
422 | user='default', repo=repo.repo_name) | |
410 | assert len(permissions) == 1 |
|
423 | assert len(permissions) == 1 | |
411 | assert permissions[0].permission.permission_name == 'repository.none' |
|
424 | assert permissions[0].permission.permission_name == 'repository.none' | |
412 | assert permissions[0].repository.private is True |
|
425 | assert permissions[0].repository.private is True | |
413 |
|
426 | |||
414 | def test_create_on_top_level_without_permissions(self, backend): |
|
427 | def test_create_on_top_level_without_permissions(self, backend): | |
415 | session = login_user_session( |
|
428 | session = login_user_session( | |
416 | self.app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS) |
|
429 | self.app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS) | |
417 | csrf_token = auth.get_csrf_token(session) |
|
430 | csrf_token = auth.get_csrf_token(session) | |
418 |
|
431 | |||
419 | # revoke |
|
432 | # revoke | |
420 | user_model = UserModel() |
|
433 | user_model = UserModel() | |
421 | # disable fork and create on default user |
|
434 | # disable fork and create on default user | |
422 | user_model.revoke_perm(User.DEFAULT_USER, 'hg.create.repository') |
|
435 | user_model.revoke_perm(User.DEFAULT_USER, 'hg.create.repository') | |
423 | user_model.grant_perm(User.DEFAULT_USER, 'hg.create.none') |
|
436 | user_model.grant_perm(User.DEFAULT_USER, 'hg.create.none') | |
424 | user_model.revoke_perm(User.DEFAULT_USER, 'hg.fork.repository') |
|
437 | user_model.revoke_perm(User.DEFAULT_USER, 'hg.fork.repository') | |
425 | user_model.grant_perm(User.DEFAULT_USER, 'hg.fork.none') |
|
438 | user_model.grant_perm(User.DEFAULT_USER, 'hg.fork.none') | |
426 |
|
439 | |||
427 | # disable on regular user |
|
440 | # disable on regular user | |
428 | user_model.revoke_perm(TEST_USER_REGULAR_LOGIN, 'hg.create.repository') |
|
441 | user_model.revoke_perm(TEST_USER_REGULAR_LOGIN, 'hg.create.repository') | |
429 | user_model.grant_perm(TEST_USER_REGULAR_LOGIN, 'hg.create.none') |
|
442 | user_model.grant_perm(TEST_USER_REGULAR_LOGIN, 'hg.create.none') | |
430 | user_model.revoke_perm(TEST_USER_REGULAR_LOGIN, 'hg.fork.repository') |
|
443 | user_model.revoke_perm(TEST_USER_REGULAR_LOGIN, 'hg.fork.repository') | |
431 | user_model.grant_perm(TEST_USER_REGULAR_LOGIN, 'hg.fork.none') |
|
444 | user_model.grant_perm(TEST_USER_REGULAR_LOGIN, 'hg.fork.none') | |
432 | Session().commit() |
|
445 | Session().commit() | |
433 |
|
446 | |||
434 | repo_name = backend.new_repo_name() |
|
447 | repo_name = backend.new_repo_name() | |
435 | description = 'description for newly created repo' |
|
448 | description = 'description for newly created repo' | |
436 | response = self.app.post( |
|
449 | response = self.app.post( | |
437 |
|
|
450 | route_path('repo_create'), | |
438 | fixture._get_repo_create_params( |
|
451 | fixture._get_repo_create_params( | |
439 | repo_private=False, |
|
452 | repo_private=False, | |
440 | repo_name=repo_name, |
|
453 | repo_name=repo_name, | |
441 | repo_type=backend.alias, |
|
454 | repo_type=backend.alias, | |
442 | repo_description=description, |
|
455 | repo_description=description, | |
443 | csrf_token=csrf_token)) |
|
456 | csrf_token=csrf_token)) | |
444 |
|
457 | |||
445 | response.mustcontain( |
|
458 | response.mustcontain( | |
446 | u"You do not have the permission to store repositories in " |
|
459 | u"You do not have the permission to store repositories in " | |
447 | u"the root location.") |
|
460 | u"the root location.") | |
448 |
|
461 | |||
449 | @mock.patch.object(RepoModel, '_create_filesystem_repo', error_function) |
|
462 | @mock.patch.object(RepoModel, '_create_filesystem_repo', error_function) | |
450 | def test_create_repo_when_filesystem_op_fails( |
|
463 | def test_create_repo_when_filesystem_op_fails( | |
451 | self, autologin_user, backend, csrf_token): |
|
464 | self, autologin_user, backend, csrf_token): | |
452 | repo_name = backend.new_repo_name() |
|
465 | repo_name = backend.new_repo_name() | |
453 | description = 'description for newly created repo' |
|
466 | description = 'description for newly created repo' | |
454 |
|
467 | |||
455 | response = self.app.post( |
|
468 | response = self.app.post( | |
456 |
|
|
469 | route_path('repo_create'), | |
457 | fixture._get_repo_create_params( |
|
470 | fixture._get_repo_create_params( | |
458 | repo_private=False, |
|
471 | repo_private=False, | |
459 | repo_name=repo_name, |
|
472 | repo_name=repo_name, | |
460 | repo_type=backend.alias, |
|
473 | repo_type=backend.alias, | |
461 | repo_description=description, |
|
474 | repo_description=description, | |
462 | csrf_token=csrf_token)) |
|
475 | csrf_token=csrf_token)) | |
463 |
|
476 | |||
464 | assert_session_flash( |
|
477 | assert_session_flash( | |
465 | response, 'Error creating repository %s' % repo_name) |
|
478 | response, 'Error creating repository %s' % repo_name) | |
466 | # repo must not be in db |
|
479 | # repo must not be in db | |
467 | assert backend.repo is None |
|
480 | assert backend.repo is None | |
468 | # repo must not be in filesystem ! |
|
481 | # repo must not be in filesystem ! | |
469 | assert not repo_on_filesystem(repo_name) |
|
482 | assert not repo_on_filesystem(repo_name) | |
470 |
|
483 | |||
471 | def assert_repository_is_created_correctly( |
|
484 | def assert_repository_is_created_correctly( | |
472 | self, repo_name, description, backend): |
|
485 | self, repo_name, description, backend): | |
473 | repo_name_utf8 = safe_str(repo_name) |
|
486 | repo_name_utf8 = safe_str(repo_name) | |
474 |
|
487 | |||
475 | # run the check page that triggers the flash message |
|
488 | # run the check page that triggers the flash message | |
476 | response = self.app.get( |
|
489 | response = self.app.get( | |
477 | route_path('repo_creating_check', repo_name=safe_str(repo_name))) |
|
490 | route_path('repo_creating_check', repo_name=safe_str(repo_name))) | |
478 | assert response.json == {u'result': True} |
|
491 | assert response.json == {u'result': True} | |
479 |
|
492 | |||
480 | flash_msg = u'Created repository <a href="/{}">{}</a>'.format( |
|
493 | flash_msg = u'Created repository <a href="/{}">{}</a>'.format( | |
481 | urllib.quote(repo_name_utf8), repo_name) |
|
494 | urllib.quote(repo_name_utf8), repo_name) | |
482 | assert_session_flash(response, flash_msg) |
|
495 | assert_session_flash(response, flash_msg) | |
483 |
|
496 | |||
484 | # test if the repo was created in the database |
|
497 | # test if the repo was created in the database | |
485 | new_repo = RepoModel().get_by_repo_name(repo_name) |
|
498 | new_repo = RepoModel().get_by_repo_name(repo_name) | |
486 |
|
499 | |||
487 | assert new_repo.repo_name == repo_name |
|
500 | assert new_repo.repo_name == repo_name | |
488 | assert new_repo.description == description |
|
501 | assert new_repo.description == description | |
489 |
|
502 | |||
490 | # test if the repository is visible in the list ? |
|
503 | # test if the repository is visible in the list ? | |
491 | response = self.app.get( |
|
504 | response = self.app.get( | |
492 | h.route_path('repo_summary', repo_name=safe_str(repo_name))) |
|
505 | h.route_path('repo_summary', repo_name=safe_str(repo_name))) | |
493 | response.mustcontain(repo_name) |
|
506 | response.mustcontain(repo_name) | |
494 | response.mustcontain(backend.alias) |
|
507 | response.mustcontain(backend.alias) | |
495 |
|
508 | |||
496 | assert repo_on_filesystem(repo_name) |
|
509 | assert repo_on_filesystem(repo_name) | |
497 |
|
||||
498 |
|
||||
499 | @pytest.mark.usefixtures("app") |
|
|||
500 | class TestVcsSettings(object): |
|
|||
501 | FORM_DATA = { |
|
|||
502 | 'inherit_global_settings': False, |
|
|||
503 | 'hooks_changegroup_repo_size': False, |
|
|||
504 | 'hooks_changegroup_push_logger': False, |
|
|||
505 | 'hooks_outgoing_pull_logger': False, |
|
|||
506 | 'extensions_largefiles': False, |
|
|||
507 | 'extensions_evolve': False, |
|
|||
508 | 'phases_publish': 'False', |
|
|||
509 | 'rhodecode_pr_merge_enabled': False, |
|
|||
510 | 'rhodecode_use_outdated_comments': False, |
|
|||
511 | 'new_svn_branch': '', |
|
|||
512 | 'new_svn_tag': '' |
|
|||
513 | } |
|
|||
514 |
|
||||
515 | @pytest.mark.skip_backends('svn') |
|
|||
516 | def test_global_settings_initial_values(self, autologin_user, backend): |
|
|||
517 | repo_name = backend.repo_name |
|
|||
518 | response = self.app.get(url('repo_vcs_settings', repo_name=repo_name)) |
|
|||
519 |
|
||||
520 | expected_settings = ( |
|
|||
521 | 'rhodecode_use_outdated_comments', 'rhodecode_pr_merge_enabled', |
|
|||
522 | 'hooks_changegroup_repo_size', 'hooks_changegroup_push_logger', |
|
|||
523 | 'hooks_outgoing_pull_logger' |
|
|||
524 | ) |
|
|||
525 | for setting in expected_settings: |
|
|||
526 | self.assert_repo_value_equals_global_value(response, setting) |
|
|||
527 |
|
||||
528 | def test_show_settings_requires_repo_admin_permission( |
|
|||
529 | self, backend, user_util, settings_util): |
|
|||
530 | repo = backend.create_repo() |
|
|||
531 | repo_name = repo.repo_name |
|
|||
532 | user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN) |
|
|||
533 | user_util.grant_user_permission_to_repo(repo, user, 'repository.admin') |
|
|||
534 | login_user_session( |
|
|||
535 | self.app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS) |
|
|||
536 | self.app.get(url('repo_vcs_settings', repo_name=repo_name), status=200) |
|
|||
537 |
|
||||
538 | def test_inherit_global_settings_flag_is_true_by_default( |
|
|||
539 | self, autologin_user, backend): |
|
|||
540 | repo_name = backend.repo_name |
|
|||
541 | response = self.app.get(url('repo_vcs_settings', repo_name=repo_name)) |
|
|||
542 |
|
||||
543 | assert_response = AssertResponse(response) |
|
|||
544 | element = assert_response.get_element('#inherit_global_settings') |
|
|||
545 | assert element.checked |
|
|||
546 |
|
||||
547 | @pytest.mark.parametrize('checked_value', [True, False]) |
|
|||
548 | def test_inherit_global_settings_value( |
|
|||
549 | self, autologin_user, backend, checked_value, settings_util): |
|
|||
550 | repo = backend.create_repo() |
|
|||
551 | repo_name = repo.repo_name |
|
|||
552 | settings_util.create_repo_rhodecode_setting( |
|
|||
553 | repo, 'inherit_vcs_settings', checked_value, 'bool') |
|
|||
554 | response = self.app.get(url('repo_vcs_settings', repo_name=repo_name)) |
|
|||
555 |
|
||||
556 | assert_response = AssertResponse(response) |
|
|||
557 | element = assert_response.get_element('#inherit_global_settings') |
|
|||
558 | assert element.checked == checked_value |
|
|||
559 |
|
||||
560 | @pytest.mark.skip_backends('svn') |
|
|||
561 | def test_hooks_settings_are_created( |
|
|||
562 | self, autologin_user, backend, csrf_token): |
|
|||
563 | repo_name = backend.repo_name |
|
|||
564 | data = self.FORM_DATA.copy() |
|
|||
565 | data['csrf_token'] = csrf_token |
|
|||
566 | self.app.post( |
|
|||
567 | url('repo_vcs_settings', repo_name=repo_name), data, status=302) |
|
|||
568 | settings = SettingsModel(repo=repo_name) |
|
|||
569 | try: |
|
|||
570 | for section, key in VcsSettingsModel.HOOKS_SETTINGS: |
|
|||
571 | ui = settings.get_ui_by_section_and_key(section, key) |
|
|||
572 | assert ui.ui_active is False |
|
|||
573 | finally: |
|
|||
574 | self._cleanup_repo_settings(settings) |
|
|||
575 |
|
||||
576 | def test_hooks_settings_are_not_created_for_svn( |
|
|||
577 | self, autologin_user, backend_svn, csrf_token): |
|
|||
578 | repo_name = backend_svn.repo_name |
|
|||
579 | data = self.FORM_DATA.copy() |
|
|||
580 | data['csrf_token'] = csrf_token |
|
|||
581 | self.app.post( |
|
|||
582 | url('repo_vcs_settings', repo_name=repo_name), data, status=302) |
|
|||
583 | settings = SettingsModel(repo=repo_name) |
|
|||
584 | try: |
|
|||
585 | for section, key in VcsSettingsModel.HOOKS_SETTINGS: |
|
|||
586 | ui = settings.get_ui_by_section_and_key(section, key) |
|
|||
587 | assert ui is None |
|
|||
588 | finally: |
|
|||
589 | self._cleanup_repo_settings(settings) |
|
|||
590 |
|
||||
591 | @pytest.mark.skip_backends('svn') |
|
|||
592 | def test_hooks_settings_are_updated( |
|
|||
593 | self, autologin_user, backend, csrf_token): |
|
|||
594 | repo_name = backend.repo_name |
|
|||
595 | settings = SettingsModel(repo=repo_name) |
|
|||
596 | for section, key in VcsSettingsModel.HOOKS_SETTINGS: |
|
|||
597 | settings.create_ui_section_value(section, '', key=key, active=True) |
|
|||
598 |
|
||||
599 | data = self.FORM_DATA.copy() |
|
|||
600 | data['csrf_token'] = csrf_token |
|
|||
601 | self.app.post( |
|
|||
602 | url('repo_vcs_settings', repo_name=repo_name), data, status=302) |
|
|||
603 | try: |
|
|||
604 | for section, key in VcsSettingsModel.HOOKS_SETTINGS: |
|
|||
605 | ui = settings.get_ui_by_section_and_key(section, key) |
|
|||
606 | assert ui.ui_active is False |
|
|||
607 | finally: |
|
|||
608 | self._cleanup_repo_settings(settings) |
|
|||
609 |
|
||||
610 | def test_hooks_settings_are_not_updated_for_svn( |
|
|||
611 | self, autologin_user, backend_svn, csrf_token): |
|
|||
612 | repo_name = backend_svn.repo_name |
|
|||
613 | settings = SettingsModel(repo=repo_name) |
|
|||
614 | for section, key in VcsSettingsModel.HOOKS_SETTINGS: |
|
|||
615 | settings.create_ui_section_value(section, '', key=key, active=True) |
|
|||
616 |
|
||||
617 | data = self.FORM_DATA.copy() |
|
|||
618 | data['csrf_token'] = csrf_token |
|
|||
619 | self.app.post( |
|
|||
620 | url('repo_vcs_settings', repo_name=repo_name), data, status=302) |
|
|||
621 | try: |
|
|||
622 | for section, key in VcsSettingsModel.HOOKS_SETTINGS: |
|
|||
623 | ui = settings.get_ui_by_section_and_key(section, key) |
|
|||
624 | assert ui.ui_active is True |
|
|||
625 | finally: |
|
|||
626 | self._cleanup_repo_settings(settings) |
|
|||
627 |
|
||||
628 | @pytest.mark.skip_backends('svn') |
|
|||
629 | def test_pr_settings_are_created( |
|
|||
630 | self, autologin_user, backend, csrf_token): |
|
|||
631 | repo_name = backend.repo_name |
|
|||
632 | data = self.FORM_DATA.copy() |
|
|||
633 | data['csrf_token'] = csrf_token |
|
|||
634 | self.app.post( |
|
|||
635 | url('repo_vcs_settings', repo_name=repo_name), data, status=302) |
|
|||
636 | settings = SettingsModel(repo=repo_name) |
|
|||
637 | try: |
|
|||
638 | for name in VcsSettingsModel.GENERAL_SETTINGS: |
|
|||
639 | setting = settings.get_setting_by_name(name) |
|
|||
640 | assert setting.app_settings_value is False |
|
|||
641 | finally: |
|
|||
642 | self._cleanup_repo_settings(settings) |
|
|||
643 |
|
||||
644 | def test_pr_settings_are_not_created_for_svn( |
|
|||
645 | self, autologin_user, backend_svn, csrf_token): |
|
|||
646 | repo_name = backend_svn.repo_name |
|
|||
647 | data = self.FORM_DATA.copy() |
|
|||
648 | data['csrf_token'] = csrf_token |
|
|||
649 | self.app.post( |
|
|||
650 | url('repo_vcs_settings', repo_name=repo_name), data, status=302) |
|
|||
651 | settings = SettingsModel(repo=repo_name) |
|
|||
652 | try: |
|
|||
653 | for name in VcsSettingsModel.GENERAL_SETTINGS: |
|
|||
654 | setting = settings.get_setting_by_name(name) |
|
|||
655 | assert setting is None |
|
|||
656 | finally: |
|
|||
657 | self._cleanup_repo_settings(settings) |
|
|||
658 |
|
||||
659 | def test_pr_settings_creation_requires_repo_admin_permission( |
|
|||
660 | self, backend, user_util, settings_util, csrf_token): |
|
|||
661 | repo = backend.create_repo() |
|
|||
662 | repo_name = repo.repo_name |
|
|||
663 |
|
||||
664 | logout_user_session(self.app, csrf_token) |
|
|||
665 | session = login_user_session( |
|
|||
666 | self.app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS) |
|
|||
667 | new_csrf_token = auth.get_csrf_token(session) |
|
|||
668 |
|
||||
669 | user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN) |
|
|||
670 | repo = Repository.get_by_repo_name(repo_name) |
|
|||
671 | user_util.grant_user_permission_to_repo(repo, user, 'repository.admin') |
|
|||
672 | data = self.FORM_DATA.copy() |
|
|||
673 | data['csrf_token'] = new_csrf_token |
|
|||
674 | settings = SettingsModel(repo=repo_name) |
|
|||
675 |
|
||||
676 | try: |
|
|||
677 | self.app.post( |
|
|||
678 | url('repo_vcs_settings', repo_name=repo_name), data, |
|
|||
679 | status=302) |
|
|||
680 | finally: |
|
|||
681 | self._cleanup_repo_settings(settings) |
|
|||
682 |
|
||||
683 | @pytest.mark.skip_backends('svn') |
|
|||
684 | def test_pr_settings_are_updated( |
|
|||
685 | self, autologin_user, backend, csrf_token): |
|
|||
686 | repo_name = backend.repo_name |
|
|||
687 | settings = SettingsModel(repo=repo_name) |
|
|||
688 | for name in VcsSettingsModel.GENERAL_SETTINGS: |
|
|||
689 | settings.create_or_update_setting(name, True, 'bool') |
|
|||
690 |
|
||||
691 | data = self.FORM_DATA.copy() |
|
|||
692 | data['csrf_token'] = csrf_token |
|
|||
693 | self.app.post( |
|
|||
694 | url('repo_vcs_settings', repo_name=repo_name), data, status=302) |
|
|||
695 | try: |
|
|||
696 | for name in VcsSettingsModel.GENERAL_SETTINGS: |
|
|||
697 | setting = settings.get_setting_by_name(name) |
|
|||
698 | assert setting.app_settings_value is False |
|
|||
699 | finally: |
|
|||
700 | self._cleanup_repo_settings(settings) |
|
|||
701 |
|
||||
702 | def test_pr_settings_are_not_updated_for_svn( |
|
|||
703 | self, autologin_user, backend_svn, csrf_token): |
|
|||
704 | repo_name = backend_svn.repo_name |
|
|||
705 | settings = SettingsModel(repo=repo_name) |
|
|||
706 | for name in VcsSettingsModel.GENERAL_SETTINGS: |
|
|||
707 | settings.create_or_update_setting(name, True, 'bool') |
|
|||
708 |
|
||||
709 | data = self.FORM_DATA.copy() |
|
|||
710 | data['csrf_token'] = csrf_token |
|
|||
711 | self.app.post( |
|
|||
712 | url('repo_vcs_settings', repo_name=repo_name), data, status=302) |
|
|||
713 | try: |
|
|||
714 | for name in VcsSettingsModel.GENERAL_SETTINGS: |
|
|||
715 | setting = settings.get_setting_by_name(name) |
|
|||
716 | assert setting.app_settings_value is True |
|
|||
717 | finally: |
|
|||
718 | self._cleanup_repo_settings(settings) |
|
|||
719 |
|
||||
720 | def test_svn_settings_are_created( |
|
|||
721 | self, autologin_user, backend_svn, csrf_token, settings_util): |
|
|||
722 | repo_name = backend_svn.repo_name |
|
|||
723 | data = self.FORM_DATA.copy() |
|
|||
724 | data['new_svn_tag'] = 'svn-tag' |
|
|||
725 | data['new_svn_branch'] = 'svn-branch' |
|
|||
726 | data['csrf_token'] = csrf_token |
|
|||
727 |
|
||||
728 | # Create few global settings to make sure that uniqueness validators |
|
|||
729 | # are not triggered |
|
|||
730 | settings_util.create_rhodecode_ui( |
|
|||
731 | VcsSettingsModel.SVN_BRANCH_SECTION, 'svn-branch') |
|
|||
732 | settings_util.create_rhodecode_ui( |
|
|||
733 | VcsSettingsModel.SVN_TAG_SECTION, 'svn-tag') |
|
|||
734 |
|
||||
735 | self.app.post( |
|
|||
736 | url('repo_vcs_settings', repo_name=repo_name), data, status=302) |
|
|||
737 | settings = SettingsModel(repo=repo_name) |
|
|||
738 | try: |
|
|||
739 | svn_branches = settings.get_ui_by_section( |
|
|||
740 | VcsSettingsModel.SVN_BRANCH_SECTION) |
|
|||
741 | svn_branch_names = [b.ui_value for b in svn_branches] |
|
|||
742 | svn_tags = settings.get_ui_by_section( |
|
|||
743 | VcsSettingsModel.SVN_TAG_SECTION) |
|
|||
744 | svn_tag_names = [b.ui_value for b in svn_tags] |
|
|||
745 | assert 'svn-branch' in svn_branch_names |
|
|||
746 | assert 'svn-tag' in svn_tag_names |
|
|||
747 | finally: |
|
|||
748 | self._cleanup_repo_settings(settings) |
|
|||
749 |
|
||||
750 | def test_svn_settings_are_unique( |
|
|||
751 | self, autologin_user, backend_svn, csrf_token, settings_util): |
|
|||
752 | repo = backend_svn.repo |
|
|||
753 | repo_name = repo.repo_name |
|
|||
754 | data = self.FORM_DATA.copy() |
|
|||
755 | data['new_svn_tag'] = 'test_tag' |
|
|||
756 | data['new_svn_branch'] = 'test_branch' |
|
|||
757 | data['csrf_token'] = csrf_token |
|
|||
758 | settings_util.create_repo_rhodecode_ui( |
|
|||
759 | repo, VcsSettingsModel.SVN_BRANCH_SECTION, 'test_branch') |
|
|||
760 | settings_util.create_repo_rhodecode_ui( |
|
|||
761 | repo, VcsSettingsModel.SVN_TAG_SECTION, 'test_tag') |
|
|||
762 |
|
||||
763 | response = self.app.post( |
|
|||
764 | url('repo_vcs_settings', repo_name=repo_name), data, status=200) |
|
|||
765 | response.mustcontain('Pattern already exists') |
|
|||
766 |
|
||||
767 | def test_svn_settings_with_empty_values_are_not_created( |
|
|||
768 | self, autologin_user, backend_svn, csrf_token): |
|
|||
769 | repo_name = backend_svn.repo_name |
|
|||
770 | data = self.FORM_DATA.copy() |
|
|||
771 | data['csrf_token'] = csrf_token |
|
|||
772 | self.app.post( |
|
|||
773 | url('repo_vcs_settings', repo_name=repo_name), data, status=302) |
|
|||
774 | settings = SettingsModel(repo=repo_name) |
|
|||
775 | try: |
|
|||
776 | svn_branches = settings.get_ui_by_section( |
|
|||
777 | VcsSettingsModel.SVN_BRANCH_SECTION) |
|
|||
778 | svn_tags = settings.get_ui_by_section( |
|
|||
779 | VcsSettingsModel.SVN_TAG_SECTION) |
|
|||
780 | assert len(svn_branches) == 0 |
|
|||
781 | assert len(svn_tags) == 0 |
|
|||
782 | finally: |
|
|||
783 | self._cleanup_repo_settings(settings) |
|
|||
784 |
|
||||
785 | def test_svn_settings_are_shown_for_svn_repository( |
|
|||
786 | self, autologin_user, backend_svn, csrf_token): |
|
|||
787 | repo_name = backend_svn.repo_name |
|
|||
788 | response = self.app.get( |
|
|||
789 | url('repo_vcs_settings', repo_name=repo_name), status=200) |
|
|||
790 | response.mustcontain('Subversion Settings') |
|
|||
791 |
|
||||
792 | @pytest.mark.skip_backends('svn') |
|
|||
793 | def test_svn_settings_are_not_created_for_not_svn_repository( |
|
|||
794 | self, autologin_user, backend, csrf_token): |
|
|||
795 | repo_name = backend.repo_name |
|
|||
796 | data = self.FORM_DATA.copy() |
|
|||
797 | data['csrf_token'] = csrf_token |
|
|||
798 | self.app.post( |
|
|||
799 | url('repo_vcs_settings', repo_name=repo_name), data, status=302) |
|
|||
800 | settings = SettingsModel(repo=repo_name) |
|
|||
801 | try: |
|
|||
802 | svn_branches = settings.get_ui_by_section( |
|
|||
803 | VcsSettingsModel.SVN_BRANCH_SECTION) |
|
|||
804 | svn_tags = settings.get_ui_by_section( |
|
|||
805 | VcsSettingsModel.SVN_TAG_SECTION) |
|
|||
806 | assert len(svn_branches) == 0 |
|
|||
807 | assert len(svn_tags) == 0 |
|
|||
808 | finally: |
|
|||
809 | self._cleanup_repo_settings(settings) |
|
|||
810 |
|
||||
811 | @pytest.mark.skip_backends('svn') |
|
|||
812 | def test_svn_settings_are_shown_only_for_svn_repository( |
|
|||
813 | self, autologin_user, backend, csrf_token): |
|
|||
814 | repo_name = backend.repo_name |
|
|||
815 | response = self.app.get( |
|
|||
816 | url('repo_vcs_settings', repo_name=repo_name), status=200) |
|
|||
817 | response.mustcontain(no='Subversion Settings') |
|
|||
818 |
|
||||
819 | def test_hg_settings_are_created( |
|
|||
820 | self, autologin_user, backend_hg, csrf_token): |
|
|||
821 | repo_name = backend_hg.repo_name |
|
|||
822 | data = self.FORM_DATA.copy() |
|
|||
823 | data['new_svn_tag'] = 'svn-tag' |
|
|||
824 | data['new_svn_branch'] = 'svn-branch' |
|
|||
825 | data['csrf_token'] = csrf_token |
|
|||
826 | self.app.post( |
|
|||
827 | url('repo_vcs_settings', repo_name=repo_name), data, status=302) |
|
|||
828 | settings = SettingsModel(repo=repo_name) |
|
|||
829 | try: |
|
|||
830 | largefiles_ui = settings.get_ui_by_section_and_key( |
|
|||
831 | 'extensions', 'largefiles') |
|
|||
832 | assert largefiles_ui.ui_active is False |
|
|||
833 | phases_ui = settings.get_ui_by_section_and_key( |
|
|||
834 | 'phases', 'publish') |
|
|||
835 | assert str2bool(phases_ui.ui_value) is False |
|
|||
836 | finally: |
|
|||
837 | self._cleanup_repo_settings(settings) |
|
|||
838 |
|
||||
839 | def test_hg_settings_are_updated( |
|
|||
840 | self, autologin_user, backend_hg, csrf_token): |
|
|||
841 | repo_name = backend_hg.repo_name |
|
|||
842 | settings = SettingsModel(repo=repo_name) |
|
|||
843 | settings.create_ui_section_value( |
|
|||
844 | 'extensions', '', key='largefiles', active=True) |
|
|||
845 | settings.create_ui_section_value( |
|
|||
846 | 'phases', '1', key='publish', active=True) |
|
|||
847 |
|
||||
848 | data = self.FORM_DATA.copy() |
|
|||
849 | data['csrf_token'] = csrf_token |
|
|||
850 | self.app.post( |
|
|||
851 | url('repo_vcs_settings', repo_name=repo_name), data, status=302) |
|
|||
852 | try: |
|
|||
853 | largefiles_ui = settings.get_ui_by_section_and_key( |
|
|||
854 | 'extensions', 'largefiles') |
|
|||
855 | assert largefiles_ui.ui_active is False |
|
|||
856 | phases_ui = settings.get_ui_by_section_and_key( |
|
|||
857 | 'phases', 'publish') |
|
|||
858 | assert str2bool(phases_ui.ui_value) is False |
|
|||
859 | finally: |
|
|||
860 | self._cleanup_repo_settings(settings) |
|
|||
861 |
|
||||
862 | def test_hg_settings_are_shown_for_hg_repository( |
|
|||
863 | self, autologin_user, backend_hg, csrf_token): |
|
|||
864 | repo_name = backend_hg.repo_name |
|
|||
865 | response = self.app.get( |
|
|||
866 | url('repo_vcs_settings', repo_name=repo_name), status=200) |
|
|||
867 | response.mustcontain('Mercurial Settings') |
|
|||
868 |
|
||||
869 | @pytest.mark.skip_backends('hg') |
|
|||
870 | def test_hg_settings_are_created_only_for_hg_repository( |
|
|||
871 | self, autologin_user, backend, csrf_token): |
|
|||
872 | repo_name = backend.repo_name |
|
|||
873 | data = self.FORM_DATA.copy() |
|
|||
874 | data['csrf_token'] = csrf_token |
|
|||
875 | self.app.post( |
|
|||
876 | url('repo_vcs_settings', repo_name=repo_name), data, status=302) |
|
|||
877 | settings = SettingsModel(repo=repo_name) |
|
|||
878 | try: |
|
|||
879 | largefiles_ui = settings.get_ui_by_section_and_key( |
|
|||
880 | 'extensions', 'largefiles') |
|
|||
881 | assert largefiles_ui is None |
|
|||
882 | phases_ui = settings.get_ui_by_section_and_key( |
|
|||
883 | 'phases', 'publish') |
|
|||
884 | assert phases_ui is None |
|
|||
885 | finally: |
|
|||
886 | self._cleanup_repo_settings(settings) |
|
|||
887 |
|
||||
888 | @pytest.mark.skip_backends('hg') |
|
|||
889 | def test_hg_settings_are_shown_only_for_hg_repository( |
|
|||
890 | self, autologin_user, backend, csrf_token): |
|
|||
891 | repo_name = backend.repo_name |
|
|||
892 | response = self.app.get( |
|
|||
893 | url('repo_vcs_settings', repo_name=repo_name), status=200) |
|
|||
894 | response.mustcontain(no='Mercurial Settings') |
|
|||
895 |
|
||||
896 | @pytest.mark.skip_backends('hg') |
|
|||
897 | def test_hg_settings_are_updated_only_for_hg_repository( |
|
|||
898 | self, autologin_user, backend, csrf_token): |
|
|||
899 | repo_name = backend.repo_name |
|
|||
900 | settings = SettingsModel(repo=repo_name) |
|
|||
901 | settings.create_ui_section_value( |
|
|||
902 | 'extensions', '', key='largefiles', active=True) |
|
|||
903 | settings.create_ui_section_value( |
|
|||
904 | 'phases', '1', key='publish', active=True) |
|
|||
905 |
|
||||
906 | data = self.FORM_DATA.copy() |
|
|||
907 | data['csrf_token'] = csrf_token |
|
|||
908 | self.app.post( |
|
|||
909 | url('repo_vcs_settings', repo_name=repo_name), data, status=302) |
|
|||
910 | try: |
|
|||
911 | largefiles_ui = settings.get_ui_by_section_and_key( |
|
|||
912 | 'extensions', 'largefiles') |
|
|||
913 | assert largefiles_ui.ui_active is True |
|
|||
914 | phases_ui = settings.get_ui_by_section_and_key( |
|
|||
915 | 'phases', 'publish') |
|
|||
916 | assert phases_ui.ui_value == '1' |
|
|||
917 | finally: |
|
|||
918 | self._cleanup_repo_settings(settings) |
|
|||
919 |
|
||||
920 | def test_per_repo_svn_settings_are_displayed( |
|
|||
921 | self, autologin_user, backend_svn, settings_util): |
|
|||
922 | repo = backend_svn.create_repo() |
|
|||
923 | repo_name = repo.repo_name |
|
|||
924 | branches = [ |
|
|||
925 | settings_util.create_repo_rhodecode_ui( |
|
|||
926 | repo, VcsSettingsModel.SVN_BRANCH_SECTION, |
|
|||
927 | 'branch_{}'.format(i)) |
|
|||
928 | for i in range(10)] |
|
|||
929 | tags = [ |
|
|||
930 | settings_util.create_repo_rhodecode_ui( |
|
|||
931 | repo, VcsSettingsModel.SVN_TAG_SECTION, 'tag_{}'.format(i)) |
|
|||
932 | for i in range(10)] |
|
|||
933 |
|
||||
934 | response = self.app.get( |
|
|||
935 | url('repo_vcs_settings', repo_name=repo_name), status=200) |
|
|||
936 | assert_response = AssertResponse(response) |
|
|||
937 | for branch in branches: |
|
|||
938 | css_selector = '[name=branch_value_{}]'.format(branch.ui_id) |
|
|||
939 | element = assert_response.get_element(css_selector) |
|
|||
940 | assert element.value == branch.ui_value |
|
|||
941 | for tag in tags: |
|
|||
942 | css_selector = '[name=tag_ui_value_new_{}]'.format(tag.ui_id) |
|
|||
943 | element = assert_response.get_element(css_selector) |
|
|||
944 | assert element.value == tag.ui_value |
|
|||
945 |
|
||||
946 | def test_per_repo_hg_and_pr_settings_are_not_displayed_for_svn( |
|
|||
947 | self, autologin_user, backend_svn, settings_util): |
|
|||
948 | repo = backend_svn.create_repo() |
|
|||
949 | repo_name = repo.repo_name |
|
|||
950 | response = self.app.get( |
|
|||
951 | url('repo_vcs_settings', repo_name=repo_name), status=200) |
|
|||
952 | response.mustcontain(no='<label>Hooks:</label>') |
|
|||
953 | response.mustcontain(no='<label>Pull Request Settings:</label>') |
|
|||
954 |
|
||||
955 | def test_inherit_global_settings_value_is_saved( |
|
|||
956 | self, autologin_user, backend, csrf_token): |
|
|||
957 | repo_name = backend.repo_name |
|
|||
958 | data = self.FORM_DATA.copy() |
|
|||
959 | data['csrf_token'] = csrf_token |
|
|||
960 | data['inherit_global_settings'] = True |
|
|||
961 | self.app.post( |
|
|||
962 | url('repo_vcs_settings', repo_name=repo_name), data, status=302) |
|
|||
963 |
|
||||
964 | settings = SettingsModel(repo=repo_name) |
|
|||
965 | vcs_settings = VcsSettingsModel(repo=repo_name) |
|
|||
966 | try: |
|
|||
967 | assert vcs_settings.inherit_global_settings is True |
|
|||
968 | finally: |
|
|||
969 | self._cleanup_repo_settings(settings) |
|
|||
970 |
|
||||
971 | def test_repo_cache_is_invalidated_when_settings_are_updated( |
|
|||
972 | self, autologin_user, backend, csrf_token): |
|
|||
973 | repo_name = backend.repo_name |
|
|||
974 | data = self.FORM_DATA.copy() |
|
|||
975 | data['csrf_token'] = csrf_token |
|
|||
976 | data['inherit_global_settings'] = True |
|
|||
977 | settings = SettingsModel(repo=repo_name) |
|
|||
978 |
|
||||
979 | invalidation_patcher = mock.patch( |
|
|||
980 | 'rhodecode.controllers.admin.repos.ScmModel.mark_for_invalidation') |
|
|||
981 | with invalidation_patcher as invalidation_mock: |
|
|||
982 | self.app.post( |
|
|||
983 | url('repo_vcs_settings', repo_name=repo_name), data, |
|
|||
984 | status=302) |
|
|||
985 | try: |
|
|||
986 | invalidation_mock.assert_called_once_with(repo_name, delete=True) |
|
|||
987 | finally: |
|
|||
988 | self._cleanup_repo_settings(settings) |
|
|||
989 |
|
||||
990 | def test_other_settings_not_saved_inherit_global_settings_is_true( |
|
|||
991 | self, autologin_user, backend, csrf_token): |
|
|||
992 | repo_name = backend.repo_name |
|
|||
993 | data = self.FORM_DATA.copy() |
|
|||
994 | data['csrf_token'] = csrf_token |
|
|||
995 | data['inherit_global_settings'] = True |
|
|||
996 | self.app.post( |
|
|||
997 | url('repo_vcs_settings', repo_name=repo_name), data, status=302) |
|
|||
998 |
|
||||
999 | settings = SettingsModel(repo=repo_name) |
|
|||
1000 | ui_settings = ( |
|
|||
1001 | VcsSettingsModel.HOOKS_SETTINGS + VcsSettingsModel.HG_SETTINGS) |
|
|||
1002 |
|
||||
1003 | vcs_settings = [] |
|
|||
1004 | try: |
|
|||
1005 | for section, key in ui_settings: |
|
|||
1006 | ui = settings.get_ui_by_section_and_key(section, key) |
|
|||
1007 | if ui: |
|
|||
1008 | vcs_settings.append(ui) |
|
|||
1009 | vcs_settings.extend(settings.get_ui_by_section( |
|
|||
1010 | VcsSettingsModel.SVN_BRANCH_SECTION)) |
|
|||
1011 | vcs_settings.extend(settings.get_ui_by_section( |
|
|||
1012 | VcsSettingsModel.SVN_TAG_SECTION)) |
|
|||
1013 | for name in VcsSettingsModel.GENERAL_SETTINGS: |
|
|||
1014 | setting = settings.get_setting_by_name(name) |
|
|||
1015 | if setting: |
|
|||
1016 | vcs_settings.append(setting) |
|
|||
1017 | assert vcs_settings == [] |
|
|||
1018 | finally: |
|
|||
1019 | self._cleanup_repo_settings(settings) |
|
|||
1020 |
|
||||
1021 | def test_delete_svn_branch_and_tag_patterns( |
|
|||
1022 | self, autologin_user, backend_svn, settings_util, csrf_token): |
|
|||
1023 | repo = backend_svn.create_repo() |
|
|||
1024 | repo_name = repo.repo_name |
|
|||
1025 | branch = settings_util.create_repo_rhodecode_ui( |
|
|||
1026 | repo, VcsSettingsModel.SVN_BRANCH_SECTION, 'test_branch', |
|
|||
1027 | cleanup=False) |
|
|||
1028 | tag = settings_util.create_repo_rhodecode_ui( |
|
|||
1029 | repo, VcsSettingsModel.SVN_TAG_SECTION, 'test_tag', cleanup=False) |
|
|||
1030 | data = { |
|
|||
1031 | '_method': 'delete', |
|
|||
1032 | 'csrf_token': csrf_token |
|
|||
1033 | } |
|
|||
1034 | for id_ in (branch.ui_id, tag.ui_id): |
|
|||
1035 | data['delete_svn_pattern'] = id_, |
|
|||
1036 | self.app.post( |
|
|||
1037 | url('repo_vcs_settings', repo_name=repo_name), data, |
|
|||
1038 | headers={'X-REQUESTED-WITH': 'XMLHttpRequest', }, status=200) |
|
|||
1039 | settings = VcsSettingsModel(repo=repo_name) |
|
|||
1040 | assert settings.get_repo_svn_branch_patterns() == [] |
|
|||
1041 |
|
||||
1042 | def test_delete_svn_branch_requires_repo_admin_permission( |
|
|||
1043 | self, backend_svn, user_util, settings_util, csrf_token): |
|
|||
1044 | repo = backend_svn.create_repo() |
|
|||
1045 | repo_name = repo.repo_name |
|
|||
1046 |
|
||||
1047 | logout_user_session(self.app, csrf_token) |
|
|||
1048 | session = login_user_session( |
|
|||
1049 | self.app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS) |
|
|||
1050 | csrf_token = auth.get_csrf_token(session) |
|
|||
1051 |
|
||||
1052 | repo = Repository.get_by_repo_name(repo_name) |
|
|||
1053 | user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN) |
|
|||
1054 | user_util.grant_user_permission_to_repo(repo, user, 'repository.admin') |
|
|||
1055 | branch = settings_util.create_repo_rhodecode_ui( |
|
|||
1056 | repo, VcsSettingsModel.SVN_BRANCH_SECTION, 'test_branch', |
|
|||
1057 | cleanup=False) |
|
|||
1058 | data = { |
|
|||
1059 | '_method': 'delete', |
|
|||
1060 | 'csrf_token': csrf_token, |
|
|||
1061 | 'delete_svn_pattern': branch.ui_id |
|
|||
1062 | } |
|
|||
1063 | self.app.post( |
|
|||
1064 | url('repo_vcs_settings', repo_name=repo_name), data, |
|
|||
1065 | headers={'X-REQUESTED-WITH': 'XMLHttpRequest', }, status=200) |
|
|||
1066 |
|
||||
1067 | def test_delete_svn_branch_raises_400_when_not_found( |
|
|||
1068 | self, autologin_user, backend_svn, settings_util, csrf_token): |
|
|||
1069 | repo_name = backend_svn.repo_name |
|
|||
1070 | data = { |
|
|||
1071 | '_method': 'delete', |
|
|||
1072 | 'delete_svn_pattern': 123, |
|
|||
1073 | 'csrf_token': csrf_token |
|
|||
1074 | } |
|
|||
1075 | self.app.post( |
|
|||
1076 | url('repo_vcs_settings', repo_name=repo_name), data, |
|
|||
1077 | headers={'X-REQUESTED-WITH': 'XMLHttpRequest', }, status=400) |
|
|||
1078 |
|
||||
1079 | def test_delete_svn_branch_raises_400_when_no_id_specified( |
|
|||
1080 | self, autologin_user, backend_svn, settings_util, csrf_token): |
|
|||
1081 | repo_name = backend_svn.repo_name |
|
|||
1082 | data = { |
|
|||
1083 | '_method': 'delete', |
|
|||
1084 | 'csrf_token': csrf_token |
|
|||
1085 | } |
|
|||
1086 | self.app.post( |
|
|||
1087 | url('repo_vcs_settings', repo_name=repo_name), data, |
|
|||
1088 | headers={'X-REQUESTED-WITH': 'XMLHttpRequest', }, status=400) |
|
|||
1089 |
|
||||
1090 | def _cleanup_repo_settings(self, settings_model): |
|
|||
1091 | cleanup = [] |
|
|||
1092 | ui_settings = ( |
|
|||
1093 | VcsSettingsModel.HOOKS_SETTINGS + VcsSettingsModel.HG_SETTINGS) |
|
|||
1094 |
|
||||
1095 | for section, key in ui_settings: |
|
|||
1096 | ui = settings_model.get_ui_by_section_and_key(section, key) |
|
|||
1097 | if ui: |
|
|||
1098 | cleanup.append(ui) |
|
|||
1099 |
|
||||
1100 | cleanup.extend(settings_model.get_ui_by_section( |
|
|||
1101 | VcsSettingsModel.INHERIT_SETTINGS)) |
|
|||
1102 | cleanup.extend(settings_model.get_ui_by_section( |
|
|||
1103 | VcsSettingsModel.SVN_BRANCH_SECTION)) |
|
|||
1104 | cleanup.extend(settings_model.get_ui_by_section( |
|
|||
1105 | VcsSettingsModel.SVN_TAG_SECTION)) |
|
|||
1106 |
|
||||
1107 | for name in VcsSettingsModel.GENERAL_SETTINGS: |
|
|||
1108 | setting = settings_model.get_setting_by_name(name) |
|
|||
1109 | if setting: |
|
|||
1110 | cleanup.append(setting) |
|
|||
1111 |
|
||||
1112 | for object_ in cleanup: |
|
|||
1113 | Session().delete(object_) |
|
|||
1114 | Session().commit() |
|
|||
1115 |
|
||||
1116 | def assert_repo_value_equals_global_value(self, response, setting): |
|
|||
1117 | assert_response = AssertResponse(response) |
|
|||
1118 | global_css_selector = '[name={}_inherited]'.format(setting) |
|
|||
1119 | repo_css_selector = '[name={}]'.format(setting) |
|
|||
1120 | repo_element = assert_response.get_element(repo_css_selector) |
|
|||
1121 | global_element = assert_response.get_element(global_css_selector) |
|
|||
1122 | assert repo_element.value == global_element.value |
|
|||
1123 |
|
||||
1124 |
|
||||
1125 | def _get_permission_for_user(user, repo): |
|
|||
1126 | perm = UserRepoToPerm.query()\ |
|
|||
1127 | .filter(UserRepoToPerm.repository == |
|
|||
1128 | Repository.get_by_repo_name(repo))\ |
|
|||
1129 | .filter(UserRepoToPerm.user == User.get_by_username(user))\ |
|
|||
1130 | .all() |
|
|||
1131 | return perm |
|
@@ -1,54 +1,54 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2016-2017 RhodeCode GmbH |
|
3 | # Copyright (C) 2016-2017 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import collections |
|
21 | import collections | |
22 | import logging |
|
22 | import logging | |
23 |
|
23 | |||
24 |
|
24 | |||
25 | from pyramid.view import view_config |
|
25 | from pyramid.view import view_config | |
26 |
|
26 | |||
27 | from rhodecode.apps._base import BaseAppView |
|
27 | from rhodecode.apps._base import BaseAppView | |
28 | from rhodecode.apps.admin.navigation import navigation_list |
|
28 | from rhodecode.apps.admin.navigation import navigation_list | |
29 | from rhodecode.lib.auth import (LoginRequired, HasPermissionAllDecorator) |
|
29 | from rhodecode.lib.auth import (LoginRequired, HasPermissionAllDecorator) | |
30 | from rhodecode.lib.utils import read_opensource_licenses |
|
30 | from rhodecode.lib.utils import read_opensource_licenses | |
31 |
|
31 | |||
32 | log = logging.getLogger(__name__) |
|
32 | log = logging.getLogger(__name__) | |
33 |
|
33 | |||
34 |
|
34 | |||
35 | class OpenSourceLicensesAdminSettingsView(BaseAppView): |
|
35 | class OpenSourceLicensesAdminSettingsView(BaseAppView): | |
36 |
|
36 | |||
37 | def load_default_context(self): |
|
37 | def load_default_context(self): | |
38 | c = self._get_local_tmpl_context() |
|
38 | c = self._get_local_tmpl_context() | |
39 | self._register_global_c(c) |
|
39 | self._register_global_c(c) | |
40 | return c |
|
40 | return c | |
41 |
|
41 | |||
42 | @LoginRequired() |
|
42 | @LoginRequired() | |
43 | @HasPermissionAllDecorator('hg.admin') |
|
43 | @HasPermissionAllDecorator('hg.admin') | |
44 | @view_config( |
|
44 | @view_config( | |
45 | route_name='admin_settings_open_source', request_method='GET', |
|
45 | route_name='admin_settings_open_source', request_method='GET', | |
46 | renderer='rhodecode:templates/admin/settings/settings.mako') |
|
46 | renderer='rhodecode:templates/admin/settings/settings.mako') | |
47 | def open_source_licenses(self): |
|
47 | def open_source_licenses(self): | |
48 | c = self.load_default_context() |
|
48 | c = self.load_default_context() | |
49 | c.active = 'open_source' |
|
49 | c.active = 'open_source' | |
50 | c.navlist = navigation_list(self.request) |
|
50 | c.navlist = navigation_list(self.request) | |
51 | c.opensource_licenses = collections.OrderedDict( |
|
51 | items = sorted(read_opensource_licenses().items(), key=lambda t: t[0]) | |
52 | sorted(read_opensource_licenses().items(), key=lambda t: t[0])) |
|
52 | c.opensource_licenses = collections.OrderedDict(items) | |
53 |
|
53 | |||
54 | return self._get_template_context(c) |
|
54 | return self._get_template_context(c) |
@@ -1,258 +1,256 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2016-2017 RhodeCode GmbH |
|
3 | # Copyright (C) 2016-2017 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import logging |
|
21 | import logging | |
22 | import datetime |
|
|||
23 |
|
22 | |||
24 | from pyramid.httpexceptions import HTTPFound |
|
23 | from pyramid.httpexceptions import HTTPFound | |
25 | from pyramid.view import view_config |
|
24 | from pyramid.view import view_config | |
26 |
|
25 | |||
27 | from rhodecode.model.scm import UserGroupList |
|
26 | from rhodecode.model.scm import UserGroupList | |
28 |
|
27 | |||
29 | from rhodecode.apps._base import BaseAppView, DataGridAppView |
|
28 | from rhodecode.apps._base import BaseAppView, DataGridAppView | |
30 | from rhodecode.lib.auth import ( |
|
29 | from rhodecode.lib.auth import ( | |
31 |
LoginRequired, |
|
30 | LoginRequired, NotAnonymous, | |
32 | HasUserGroupPermissionAnyDecorator) |
|
31 | HasUserGroupPermissionAnyDecorator) | |
33 | from rhodecode.lib import helpers as h |
|
32 | from rhodecode.lib import helpers as h | |
34 | from rhodecode.lib.utils import PartialRenderer |
|
33 | from rhodecode.lib.utils import PartialRenderer | |
35 |
from rhodecode.lib.utils2 import |
|
34 | from rhodecode.lib.utils2 import safe_unicode | |
36 | from rhodecode.model.user_group import UserGroupModel |
|
|||
37 | from rhodecode.model.db import ( |
|
35 | from rhodecode.model.db import ( | |
38 | joinedload, or_, count, User, UserGroup, UserGroupMember, |
|
36 | joinedload, or_, count, User, UserGroup, UserGroupMember, | |
39 | UserGroupRepoToPerm, UserGroupRepoGroupToPerm) |
|
37 | UserGroupRepoToPerm, UserGroupRepoGroupToPerm) | |
40 | from rhodecode.model.meta import Session |
|
38 | from rhodecode.model.meta import Session | |
41 |
|
39 | |||
42 | log = logging.getLogger(__name__) |
|
40 | log = logging.getLogger(__name__) | |
43 |
|
41 | |||
44 |
|
42 | |||
45 | class AdminUserGroupsView(BaseAppView, DataGridAppView): |
|
43 | class AdminUserGroupsView(BaseAppView, DataGridAppView): | |
46 |
|
44 | |||
47 | def load_default_context(self): |
|
45 | def load_default_context(self): | |
48 | c = self._get_local_tmpl_context() |
|
46 | c = self._get_local_tmpl_context() | |
49 | self._register_global_c(c) |
|
47 | self._register_global_c(c) | |
50 | return c |
|
48 | return c | |
51 |
|
49 | |||
52 | # permission check in data loading of |
|
50 | # permission check in data loading of | |
53 | # `user_groups_list_data` via UserGroupList |
|
51 | # `user_groups_list_data` via UserGroupList | |
54 | @LoginRequired() |
|
52 | @LoginRequired() | |
55 | @NotAnonymous() |
|
53 | @NotAnonymous() | |
56 | @view_config( |
|
54 | @view_config( | |
57 | route_name='user_groups', request_method='GET', |
|
55 | route_name='user_groups', request_method='GET', | |
58 | renderer='rhodecode:templates/admin/user_groups/user_groups.mako') |
|
56 | renderer='rhodecode:templates/admin/user_groups/user_groups.mako') | |
59 | def user_groups_list(self): |
|
57 | def user_groups_list(self): | |
60 | c = self.load_default_context() |
|
58 | c = self.load_default_context() | |
61 | return self._get_template_context(c) |
|
59 | return self._get_template_context(c) | |
62 |
|
60 | |||
63 | # permission check inside |
|
61 | # permission check inside | |
64 | @LoginRequired() |
|
62 | @LoginRequired() | |
65 | @NotAnonymous() |
|
63 | @NotAnonymous() | |
66 | @view_config( |
|
64 | @view_config( | |
67 | route_name='user_groups_data', request_method='GET', |
|
65 | route_name='user_groups_data', request_method='GET', | |
68 | renderer='json_ext', xhr=True) |
|
66 | renderer='json_ext', xhr=True) | |
69 | def user_groups_list_data(self): |
|
67 | def user_groups_list_data(self): | |
70 | column_map = { |
|
68 | column_map = { | |
71 | 'active': 'users_group_active', |
|
69 | 'active': 'users_group_active', | |
72 | 'description': 'user_group_description', |
|
70 | 'description': 'user_group_description', | |
73 | 'members': 'members_total', |
|
71 | 'members': 'members_total', | |
74 | 'owner': 'user_username', |
|
72 | 'owner': 'user_username', | |
75 | 'sync': 'group_data' |
|
73 | 'sync': 'group_data' | |
76 | } |
|
74 | } | |
77 | draw, start, limit = self._extract_chunk(self.request) |
|
75 | draw, start, limit = self._extract_chunk(self.request) | |
78 | search_q, order_by, order_dir = self._extract_ordering( |
|
76 | search_q, order_by, order_dir = self._extract_ordering( | |
79 | self.request, column_map=column_map) |
|
77 | self.request, column_map=column_map) | |
80 |
|
78 | |||
81 | _render = PartialRenderer('data_table/_dt_elements.mako') |
|
79 | _render = PartialRenderer('data_table/_dt_elements.mako') | |
82 |
|
80 | |||
83 | def user_group_name(user_group_id, user_group_name): |
|
81 | def user_group_name(user_group_id, user_group_name): | |
84 | return _render("user_group_name", user_group_id, user_group_name) |
|
82 | return _render("user_group_name", user_group_id, user_group_name) | |
85 |
|
83 | |||
86 | def user_group_actions(user_group_id, user_group_name): |
|
84 | def user_group_actions(user_group_id, user_group_name): | |
87 | return _render("user_group_actions", user_group_id, user_group_name) |
|
85 | return _render("user_group_actions", user_group_id, user_group_name) | |
88 |
|
86 | |||
89 | def user_profile(username): |
|
87 | def user_profile(username): | |
90 | return _render('user_profile', username) |
|
88 | return _render('user_profile', username) | |
91 |
|
89 | |||
92 | auth_user_group_list = UserGroupList( |
|
90 | auth_user_group_list = UserGroupList( | |
93 | UserGroup.query().all(), perm_set=['usergroup.admin']) |
|
91 | UserGroup.query().all(), perm_set=['usergroup.admin']) | |
94 |
|
92 | |||
95 | allowed_ids = [] |
|
93 | allowed_ids = [] | |
96 | for user_group in auth_user_group_list: |
|
94 | for user_group in auth_user_group_list: | |
97 | allowed_ids.append(user_group.users_group_id) |
|
95 | allowed_ids.append(user_group.users_group_id) | |
98 |
|
96 | |||
99 | user_groups_data_total_count = UserGroup.query()\ |
|
97 | user_groups_data_total_count = UserGroup.query()\ | |
100 | .filter(UserGroup.users_group_id.in_(allowed_ids))\ |
|
98 | .filter(UserGroup.users_group_id.in_(allowed_ids))\ | |
101 | .count() |
|
99 | .count() | |
102 |
|
100 | |||
103 | member_count = count(UserGroupMember.user_id) |
|
101 | member_count = count(UserGroupMember.user_id) | |
104 | base_q = Session.query( |
|
102 | base_q = Session.query( | |
105 | UserGroup.users_group_name, |
|
103 | UserGroup.users_group_name, | |
106 | UserGroup.user_group_description, |
|
104 | UserGroup.user_group_description, | |
107 | UserGroup.users_group_active, |
|
105 | UserGroup.users_group_active, | |
108 | UserGroup.users_group_id, |
|
106 | UserGroup.users_group_id, | |
109 | UserGroup.group_data, |
|
107 | UserGroup.group_data, | |
110 | User, |
|
108 | User, | |
111 | member_count.label('member_count') |
|
109 | member_count.label('member_count') | |
112 | ) \ |
|
110 | ) \ | |
113 | .filter(UserGroup.users_group_id.in_(allowed_ids)) \ |
|
111 | .filter(UserGroup.users_group_id.in_(allowed_ids)) \ | |
114 | .outerjoin(UserGroupMember) \ |
|
112 | .outerjoin(UserGroupMember) \ | |
115 | .join(User, User.user_id == UserGroup.user_id) \ |
|
113 | .join(User, User.user_id == UserGroup.user_id) \ | |
116 | .group_by(UserGroup, User) |
|
114 | .group_by(UserGroup, User) | |
117 |
|
115 | |||
118 | if search_q: |
|
116 | if search_q: | |
119 | like_expression = u'%{}%'.format(safe_unicode(search_q)) |
|
117 | like_expression = u'%{}%'.format(safe_unicode(search_q)) | |
120 | base_q = base_q.filter(or_( |
|
118 | base_q = base_q.filter(or_( | |
121 | UserGroup.users_group_name.ilike(like_expression), |
|
119 | UserGroup.users_group_name.ilike(like_expression), | |
122 | )) |
|
120 | )) | |
123 |
|
121 | |||
124 | user_groups_data_total_filtered_count = base_q.count() |
|
122 | user_groups_data_total_filtered_count = base_q.count() | |
125 |
|
123 | |||
126 | if order_by == 'members_total': |
|
124 | if order_by == 'members_total': | |
127 | sort_col = member_count |
|
125 | sort_col = member_count | |
128 | elif order_by == 'user_username': |
|
126 | elif order_by == 'user_username': | |
129 | sort_col = User.username |
|
127 | sort_col = User.username | |
130 | else: |
|
128 | else: | |
131 | sort_col = getattr(UserGroup, order_by, None) |
|
129 | sort_col = getattr(UserGroup, order_by, None) | |
132 |
|
130 | |||
133 | if isinstance(sort_col, count) or sort_col: |
|
131 | if isinstance(sort_col, count) or sort_col: | |
134 | if order_dir == 'asc': |
|
132 | if order_dir == 'asc': | |
135 | sort_col = sort_col.asc() |
|
133 | sort_col = sort_col.asc() | |
136 | else: |
|
134 | else: | |
137 | sort_col = sort_col.desc() |
|
135 | sort_col = sort_col.desc() | |
138 |
|
136 | |||
139 | base_q = base_q.order_by(sort_col) |
|
137 | base_q = base_q.order_by(sort_col) | |
140 | base_q = base_q.offset(start).limit(limit) |
|
138 | base_q = base_q.offset(start).limit(limit) | |
141 |
|
139 | |||
142 | # authenticated access to user groups |
|
140 | # authenticated access to user groups | |
143 | auth_user_group_list = base_q.all() |
|
141 | auth_user_group_list = base_q.all() | |
144 |
|
142 | |||
145 | user_groups_data = [] |
|
143 | user_groups_data = [] | |
146 | for user_gr in auth_user_group_list: |
|
144 | for user_gr in auth_user_group_list: | |
147 | user_groups_data.append({ |
|
145 | user_groups_data.append({ | |
148 | "users_group_name": user_group_name( |
|
146 | "users_group_name": user_group_name( | |
149 | user_gr.users_group_id, h.escape(user_gr.users_group_name)), |
|
147 | user_gr.users_group_id, h.escape(user_gr.users_group_name)), | |
150 | "name_raw": h.escape(user_gr.users_group_name), |
|
148 | "name_raw": h.escape(user_gr.users_group_name), | |
151 | "description": h.escape(user_gr.user_group_description), |
|
149 | "description": h.escape(user_gr.user_group_description), | |
152 | "members": user_gr.member_count, |
|
150 | "members": user_gr.member_count, | |
153 | # NOTE(marcink): because of advanced query we |
|
151 | # NOTE(marcink): because of advanced query we | |
154 | # need to load it like that |
|
152 | # need to load it like that | |
155 | "sync": UserGroup._load_group_data( |
|
153 | "sync": UserGroup._load_group_data( | |
156 | user_gr.group_data).get('extern_type'), |
|
154 | user_gr.group_data).get('extern_type'), | |
157 | "active": h.bool2icon(user_gr.users_group_active), |
|
155 | "active": h.bool2icon(user_gr.users_group_active), | |
158 | "owner": user_profile(user_gr.User.username), |
|
156 | "owner": user_profile(user_gr.User.username), | |
159 | "action": user_group_actions( |
|
157 | "action": user_group_actions( | |
160 | user_gr.users_group_id, user_gr.users_group_name) |
|
158 | user_gr.users_group_id, user_gr.users_group_name) | |
161 | }) |
|
159 | }) | |
162 |
|
160 | |||
163 | data = ({ |
|
161 | data = ({ | |
164 | 'draw': draw, |
|
162 | 'draw': draw, | |
165 | 'data': user_groups_data, |
|
163 | 'data': user_groups_data, | |
166 | 'recordsTotal': user_groups_data_total_count, |
|
164 | 'recordsTotal': user_groups_data_total_count, | |
167 | 'recordsFiltered': user_groups_data_total_filtered_count, |
|
165 | 'recordsFiltered': user_groups_data_total_filtered_count, | |
168 | }) |
|
166 | }) | |
169 |
|
167 | |||
170 | return data |
|
168 | return data | |
171 |
|
169 | |||
172 | @LoginRequired() |
|
170 | @LoginRequired() | |
173 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') |
|
171 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') | |
174 | @view_config( |
|
172 | @view_config( | |
175 | route_name='user_group_members_data', request_method='GET', |
|
173 | route_name='user_group_members_data', request_method='GET', | |
176 | renderer='json_ext', xhr=True) |
|
174 | renderer='json_ext', xhr=True) | |
177 | def user_group_members(self): |
|
175 | def user_group_members(self): | |
178 | """ |
|
176 | """ | |
179 | Return members of given user group |
|
177 | Return members of given user group | |
180 | """ |
|
178 | """ | |
181 | user_group_id = self.request.matchdict['user_group_id'] |
|
179 | user_group_id = self.request.matchdict['user_group_id'] | |
182 | user_group = UserGroup.get_or_404(user_group_id) |
|
180 | user_group = UserGroup.get_or_404(user_group_id) | |
183 | group_members_obj = sorted((x.user for x in user_group.members), |
|
181 | group_members_obj = sorted((x.user for x in user_group.members), | |
184 | key=lambda u: u.username.lower()) |
|
182 | key=lambda u: u.username.lower()) | |
185 |
|
183 | |||
186 | group_members = [ |
|
184 | group_members = [ | |
187 | { |
|
185 | { | |
188 | 'id': user.user_id, |
|
186 | 'id': user.user_id, | |
189 | 'first_name': user.first_name, |
|
187 | 'first_name': user.first_name, | |
190 | 'last_name': user.last_name, |
|
188 | 'last_name': user.last_name, | |
191 | 'username': user.username, |
|
189 | 'username': user.username, | |
192 | 'icon_link': h.gravatar_url(user.email, 30), |
|
190 | 'icon_link': h.gravatar_url(user.email, 30), | |
193 | 'value_display': h.person(user.email), |
|
191 | 'value_display': h.person(user.email), | |
194 | 'value': user.username, |
|
192 | 'value': user.username, | |
195 | 'value_type': 'user', |
|
193 | 'value_type': 'user', | |
196 | 'active': user.active, |
|
194 | 'active': user.active, | |
197 | } |
|
195 | } | |
198 | for user in group_members_obj |
|
196 | for user in group_members_obj | |
199 | ] |
|
197 | ] | |
200 |
|
198 | |||
201 | return { |
|
199 | return { | |
202 | 'members': group_members |
|
200 | 'members': group_members | |
203 | } |
|
201 | } | |
204 |
|
202 | |||
205 | def _get_perms_summary(self, user_group_id): |
|
203 | def _get_perms_summary(self, user_group_id): | |
206 | permissions = { |
|
204 | permissions = { | |
207 | 'repositories': {}, |
|
205 | 'repositories': {}, | |
208 | 'repositories_groups': {}, |
|
206 | 'repositories_groups': {}, | |
209 | } |
|
207 | } | |
210 | ugroup_repo_perms = UserGroupRepoToPerm.query()\ |
|
208 | ugroup_repo_perms = UserGroupRepoToPerm.query()\ | |
211 | .options(joinedload(UserGroupRepoToPerm.permission))\ |
|
209 | .options(joinedload(UserGroupRepoToPerm.permission))\ | |
212 | .options(joinedload(UserGroupRepoToPerm.repository))\ |
|
210 | .options(joinedload(UserGroupRepoToPerm.repository))\ | |
213 | .filter(UserGroupRepoToPerm.users_group_id == user_group_id)\ |
|
211 | .filter(UserGroupRepoToPerm.users_group_id == user_group_id)\ | |
214 | .all() |
|
212 | .all() | |
215 |
|
213 | |||
216 | for gr in ugroup_repo_perms: |
|
214 | for gr in ugroup_repo_perms: | |
217 | permissions['repositories'][gr.repository.repo_name] \ |
|
215 | permissions['repositories'][gr.repository.repo_name] \ | |
218 | = gr.permission.permission_name |
|
216 | = gr.permission.permission_name | |
219 |
|
217 | |||
220 | ugroup_group_perms = UserGroupRepoGroupToPerm.query()\ |
|
218 | ugroup_group_perms = UserGroupRepoGroupToPerm.query()\ | |
221 | .options(joinedload(UserGroupRepoGroupToPerm.permission))\ |
|
219 | .options(joinedload(UserGroupRepoGroupToPerm.permission))\ | |
222 | .options(joinedload(UserGroupRepoGroupToPerm.group))\ |
|
220 | .options(joinedload(UserGroupRepoGroupToPerm.group))\ | |
223 | .filter(UserGroupRepoGroupToPerm.users_group_id == user_group_id)\ |
|
221 | .filter(UserGroupRepoGroupToPerm.users_group_id == user_group_id)\ | |
224 | .all() |
|
222 | .all() | |
225 |
|
223 | |||
226 | for gr in ugroup_group_perms: |
|
224 | for gr in ugroup_group_perms: | |
227 | permissions['repositories_groups'][gr.group.group_name] \ |
|
225 | permissions['repositories_groups'][gr.group.group_name] \ | |
228 | = gr.permission.permission_name |
|
226 | = gr.permission.permission_name | |
229 | return permissions |
|
227 | return permissions | |
230 |
|
228 | |||
231 | @LoginRequired() |
|
229 | @LoginRequired() | |
232 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') |
|
230 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') | |
233 | @view_config( |
|
231 | @view_config( | |
234 | route_name='edit_user_group_perms_summary', request_method='GET', |
|
232 | route_name='edit_user_group_perms_summary', request_method='GET', | |
235 | renderer='rhodecode:templates/admin/user_groups/user_group_edit.mako') |
|
233 | renderer='rhodecode:templates/admin/user_groups/user_group_edit.mako') | |
236 | def user_group_perms_summary(self): |
|
234 | def user_group_perms_summary(self): | |
237 | c = self.load_default_context() |
|
235 | c = self.load_default_context() | |
238 |
|
236 | |||
239 | user_group_id = self.request.matchdict.get('user_group_id') |
|
237 | user_group_id = self.request.matchdict.get('user_group_id') | |
240 | c.user_group = UserGroup.get_or_404(user_group_id) |
|
238 | c.user_group = UserGroup.get_or_404(user_group_id) | |
241 |
|
239 | |||
242 | c.active = 'perms_summary' |
|
240 | c.active = 'perms_summary' | |
243 |
|
241 | |||
244 | c.permissions = self._get_perms_summary(c.user_group.users_group_id) |
|
242 | c.permissions = self._get_perms_summary(c.user_group.users_group_id) | |
245 | return self._get_template_context(c) |
|
243 | return self._get_template_context(c) | |
246 |
|
244 | |||
247 | @LoginRequired() |
|
245 | @LoginRequired() | |
248 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') |
|
246 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') | |
249 | @view_config( |
|
247 | @view_config( | |
250 | route_name='edit_user_group_perms_summary_json', request_method='GET', |
|
248 | route_name='edit_user_group_perms_summary_json', request_method='GET', | |
251 | renderer='json_ext') |
|
249 | renderer='json_ext') | |
252 | def user_group_perms_summary(self): |
|
250 | def user_group_perms_summary(self): | |
253 | self.load_default_context() |
|
251 | self.load_default_context() | |
254 |
|
252 | |||
255 | user_group_id = self.request.matchdict.get('user_group_id') |
|
253 | user_group_id = self.request.matchdict.get('user_group_id') | |
256 | user_group = UserGroup.get_or_404(user_group_id) |
|
254 | user_group = UserGroup.get_or_404(user_group_id) | |
257 |
|
255 | |||
258 | return self._get_perms_summary(user_group.users_group_id) |
|
256 | return self._get_perms_summary(user_group.users_group_id) |
@@ -1,387 +1,447 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2016-2017 RhodeCode GmbH |
|
3 | # Copyright (C) 2016-2017 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 | from rhodecode.apps._base import add_route_with_slash |
|
20 | from rhodecode.apps._base import add_route_with_slash | |
21 |
|
21 | |||
22 |
|
22 | |||
23 | def includeme(config): |
|
23 | def includeme(config): | |
24 |
|
24 | |||
25 | # repo creating checks, special cases that aren't repo routes |
|
25 | # repo creating checks, special cases that aren't repo routes | |
26 | config.add_route( |
|
26 | config.add_route( | |
27 | name='repo_creating', |
|
27 | name='repo_creating', | |
28 | pattern='/{repo_name:.*?[^/]}/repo_creating') |
|
28 | pattern='/{repo_name:.*?[^/]}/repo_creating') | |
29 |
|
29 | |||
30 | config.add_route( |
|
30 | config.add_route( | |
31 | name='repo_creating_check', |
|
31 | name='repo_creating_check', | |
32 | pattern='/{repo_name:.*?[^/]}/repo_creating_check') |
|
32 | pattern='/{repo_name:.*?[^/]}/repo_creating_check') | |
33 |
|
33 | |||
34 | # Summary |
|
34 | # Summary | |
35 | # NOTE(marcink): one additional route is defined in very bottom, catch |
|
35 | # NOTE(marcink): one additional route is defined in very bottom, catch | |
36 | # all pattern |
|
36 | # all pattern | |
37 | config.add_route( |
|
37 | config.add_route( | |
38 | name='repo_summary_explicit', |
|
38 | name='repo_summary_explicit', | |
39 | pattern='/{repo_name:.*?[^/]}/summary', repo_route=True) |
|
39 | pattern='/{repo_name:.*?[^/]}/summary', repo_route=True) | |
40 | config.add_route( |
|
40 | config.add_route( | |
41 | name='repo_summary_commits', |
|
41 | name='repo_summary_commits', | |
42 | pattern='/{repo_name:.*?[^/]}/summary-commits', repo_route=True) |
|
42 | pattern='/{repo_name:.*?[^/]}/summary-commits', repo_route=True) | |
43 |
|
43 | |||
44 | # Commits |
|
44 | # Commits | |
45 | config.add_route( |
|
45 | config.add_route( | |
46 | name='repo_commit', |
|
46 | name='repo_commit', | |
47 | pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}', repo_route=True) |
|
47 | pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}', repo_route=True) | |
48 |
|
48 | |||
49 | config.add_route( |
|
49 | config.add_route( | |
50 | name='repo_commit_children', |
|
50 | name='repo_commit_children', | |
51 | pattern='/{repo_name:.*?[^/]}/changeset_children/{commit_id}', repo_route=True) |
|
51 | pattern='/{repo_name:.*?[^/]}/changeset_children/{commit_id}', repo_route=True) | |
52 |
|
52 | |||
53 | config.add_route( |
|
53 | config.add_route( | |
54 | name='repo_commit_parents', |
|
54 | name='repo_commit_parents', | |
55 | pattern='/{repo_name:.*?[^/]}/changeset_parents/{commit_id}', repo_route=True) |
|
55 | pattern='/{repo_name:.*?[^/]}/changeset_parents/{commit_id}', repo_route=True) | |
56 |
|
56 | |||
57 | config.add_route( |
|
57 | config.add_route( | |
58 | name='repo_commit_raw', |
|
58 | name='repo_commit_raw', | |
59 | pattern='/{repo_name:.*?[^/]}/changeset-diff/{commit_id}', repo_route=True) |
|
59 | pattern='/{repo_name:.*?[^/]}/changeset-diff/{commit_id}', repo_route=True) | |
60 |
|
60 | |||
61 | config.add_route( |
|
61 | config.add_route( | |
62 | name='repo_commit_patch', |
|
62 | name='repo_commit_patch', | |
63 | pattern='/{repo_name:.*?[^/]}/changeset-patch/{commit_id}', repo_route=True) |
|
63 | pattern='/{repo_name:.*?[^/]}/changeset-patch/{commit_id}', repo_route=True) | |
64 |
|
64 | |||
65 | config.add_route( |
|
65 | config.add_route( | |
66 | name='repo_commit_download', |
|
66 | name='repo_commit_download', | |
67 | pattern='/{repo_name:.*?[^/]}/changeset-download/{commit_id}', repo_route=True) |
|
67 | pattern='/{repo_name:.*?[^/]}/changeset-download/{commit_id}', repo_route=True) | |
68 |
|
68 | |||
69 | config.add_route( |
|
69 | config.add_route( | |
70 | name='repo_commit_data', |
|
70 | name='repo_commit_data', | |
71 | pattern='/{repo_name:.*?[^/]}/changeset-data/{commit_id}', repo_route=True) |
|
71 | pattern='/{repo_name:.*?[^/]}/changeset-data/{commit_id}', repo_route=True) | |
72 |
|
72 | |||
73 | config.add_route( |
|
73 | config.add_route( | |
74 | name='repo_commit_comment_create', |
|
74 | name='repo_commit_comment_create', | |
75 | pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/create', repo_route=True) |
|
75 | pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/create', repo_route=True) | |
76 |
|
76 | |||
77 | config.add_route( |
|
77 | config.add_route( | |
78 | name='repo_commit_comment_preview', |
|
78 | name='repo_commit_comment_preview', | |
79 | pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/preview', repo_route=True) |
|
79 | pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/preview', repo_route=True) | |
80 |
|
80 | |||
81 | config.add_route( |
|
81 | config.add_route( | |
82 | name='repo_commit_comment_delete', |
|
82 | name='repo_commit_comment_delete', | |
83 | pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/{comment_id}/delete', repo_route=True) |
|
83 | pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/{comment_id}/delete', repo_route=True) | |
84 |
|
84 | |||
85 | # still working url for backward compat. |
|
85 | # still working url for backward compat. | |
86 | config.add_route( |
|
86 | config.add_route( | |
87 | name='repo_commit_raw_deprecated', |
|
87 | name='repo_commit_raw_deprecated', | |
88 | pattern='/{repo_name:.*?[^/]}/raw-changeset/{commit_id}', repo_route=True) |
|
88 | pattern='/{repo_name:.*?[^/]}/raw-changeset/{commit_id}', repo_route=True) | |
89 |
|
89 | |||
90 | # Files |
|
90 | # Files | |
91 | config.add_route( |
|
91 | config.add_route( | |
92 | name='repo_archivefile', |
|
92 | name='repo_archivefile', | |
93 | pattern='/{repo_name:.*?[^/]}/archive/{fname}', repo_route=True) |
|
93 | pattern='/{repo_name:.*?[^/]}/archive/{fname}', repo_route=True) | |
94 |
|
94 | |||
95 | config.add_route( |
|
95 | config.add_route( | |
96 | name='repo_files_diff', |
|
96 | name='repo_files_diff', | |
97 | pattern='/{repo_name:.*?[^/]}/diff/{f_path:.*}', repo_route=True) |
|
97 | pattern='/{repo_name:.*?[^/]}/diff/{f_path:.*}', repo_route=True) | |
98 | config.add_route( # legacy route to make old links work |
|
98 | config.add_route( # legacy route to make old links work | |
99 | name='repo_files_diff_2way_redirect', |
|
99 | name='repo_files_diff_2way_redirect', | |
100 | pattern='/{repo_name:.*?[^/]}/diff-2way/{f_path:.*}', repo_route=True) |
|
100 | pattern='/{repo_name:.*?[^/]}/diff-2way/{f_path:.*}', repo_route=True) | |
101 |
|
101 | |||
102 | config.add_route( |
|
102 | config.add_route( | |
103 | name='repo_files', |
|
103 | name='repo_files', | |
104 | pattern='/{repo_name:.*?[^/]}/files/{commit_id}/{f_path:.*}', repo_route=True) |
|
104 | pattern='/{repo_name:.*?[^/]}/files/{commit_id}/{f_path:.*}', repo_route=True) | |
105 | config.add_route( |
|
105 | config.add_route( | |
106 | name='repo_files:default_path', |
|
106 | name='repo_files:default_path', | |
107 | pattern='/{repo_name:.*?[^/]}/files/{commit_id}/', repo_route=True) |
|
107 | pattern='/{repo_name:.*?[^/]}/files/{commit_id}/', repo_route=True) | |
108 | config.add_route( |
|
108 | config.add_route( | |
109 | name='repo_files:default_commit', |
|
109 | name='repo_files:default_commit', | |
110 | pattern='/{repo_name:.*?[^/]}/files', repo_route=True) |
|
110 | pattern='/{repo_name:.*?[^/]}/files', repo_route=True) | |
111 |
|
111 | |||
112 | config.add_route( |
|
112 | config.add_route( | |
113 | name='repo_files:rendered', |
|
113 | name='repo_files:rendered', | |
114 | pattern='/{repo_name:.*?[^/]}/render/{commit_id}/{f_path:.*}', repo_route=True) |
|
114 | pattern='/{repo_name:.*?[^/]}/render/{commit_id}/{f_path:.*}', repo_route=True) | |
115 |
|
115 | |||
116 | config.add_route( |
|
116 | config.add_route( | |
117 | name='repo_files:annotated', |
|
117 | name='repo_files:annotated', | |
118 | pattern='/{repo_name:.*?[^/]}/annotate/{commit_id}/{f_path:.*}', repo_route=True) |
|
118 | pattern='/{repo_name:.*?[^/]}/annotate/{commit_id}/{f_path:.*}', repo_route=True) | |
119 | config.add_route( |
|
119 | config.add_route( | |
120 | name='repo_files:annotated_previous', |
|
120 | name='repo_files:annotated_previous', | |
121 | pattern='/{repo_name:.*?[^/]}/annotate-previous/{commit_id}/{f_path:.*}', repo_route=True) |
|
121 | pattern='/{repo_name:.*?[^/]}/annotate-previous/{commit_id}/{f_path:.*}', repo_route=True) | |
122 |
|
122 | |||
123 | config.add_route( |
|
123 | config.add_route( | |
124 | name='repo_nodetree_full', |
|
124 | name='repo_nodetree_full', | |
125 | pattern='/{repo_name:.*?[^/]}/nodetree_full/{commit_id}/{f_path:.*}', repo_route=True) |
|
125 | pattern='/{repo_name:.*?[^/]}/nodetree_full/{commit_id}/{f_path:.*}', repo_route=True) | |
126 | config.add_route( |
|
126 | config.add_route( | |
127 | name='repo_nodetree_full:default_path', |
|
127 | name='repo_nodetree_full:default_path', | |
128 | pattern='/{repo_name:.*?[^/]}/nodetree_full/{commit_id}/', repo_route=True) |
|
128 | pattern='/{repo_name:.*?[^/]}/nodetree_full/{commit_id}/', repo_route=True) | |
129 |
|
129 | |||
130 | config.add_route( |
|
130 | config.add_route( | |
131 | name='repo_files_nodelist', |
|
131 | name='repo_files_nodelist', | |
132 | pattern='/{repo_name:.*?[^/]}/nodelist/{commit_id}/{f_path:.*}', repo_route=True) |
|
132 | pattern='/{repo_name:.*?[^/]}/nodelist/{commit_id}/{f_path:.*}', repo_route=True) | |
133 |
|
133 | |||
134 | config.add_route( |
|
134 | config.add_route( | |
135 | name='repo_file_raw', |
|
135 | name='repo_file_raw', | |
136 | pattern='/{repo_name:.*?[^/]}/raw/{commit_id}/{f_path:.*}', repo_route=True) |
|
136 | pattern='/{repo_name:.*?[^/]}/raw/{commit_id}/{f_path:.*}', repo_route=True) | |
137 |
|
137 | |||
138 | config.add_route( |
|
138 | config.add_route( | |
139 | name='repo_file_download', |
|
139 | name='repo_file_download', | |
140 | pattern='/{repo_name:.*?[^/]}/download/{commit_id}/{f_path:.*}', repo_route=True) |
|
140 | pattern='/{repo_name:.*?[^/]}/download/{commit_id}/{f_path:.*}', repo_route=True) | |
141 | config.add_route( # backward compat to keep old links working |
|
141 | config.add_route( # backward compat to keep old links working | |
142 | name='repo_file_download:legacy', |
|
142 | name='repo_file_download:legacy', | |
143 | pattern='/{repo_name:.*?[^/]}/rawfile/{commit_id}/{f_path:.*}', |
|
143 | pattern='/{repo_name:.*?[^/]}/rawfile/{commit_id}/{f_path:.*}', | |
144 | repo_route=True) |
|
144 | repo_route=True) | |
145 |
|
145 | |||
146 | config.add_route( |
|
146 | config.add_route( | |
147 | name='repo_file_history', |
|
147 | name='repo_file_history', | |
148 | pattern='/{repo_name:.*?[^/]}/history/{commit_id}/{f_path:.*}', repo_route=True) |
|
148 | pattern='/{repo_name:.*?[^/]}/history/{commit_id}/{f_path:.*}', repo_route=True) | |
149 |
|
149 | |||
150 | config.add_route( |
|
150 | config.add_route( | |
151 | name='repo_file_authors', |
|
151 | name='repo_file_authors', | |
152 | pattern='/{repo_name:.*?[^/]}/authors/{commit_id}/{f_path:.*}', repo_route=True) |
|
152 | pattern='/{repo_name:.*?[^/]}/authors/{commit_id}/{f_path:.*}', repo_route=True) | |
153 |
|
153 | |||
154 | config.add_route( |
|
154 | config.add_route( | |
155 | name='repo_files_remove_file', |
|
155 | name='repo_files_remove_file', | |
156 | pattern='/{repo_name:.*?[^/]}/remove_file/{commit_id}/{f_path:.*}', |
|
156 | pattern='/{repo_name:.*?[^/]}/remove_file/{commit_id}/{f_path:.*}', | |
157 | repo_route=True) |
|
157 | repo_route=True) | |
158 | config.add_route( |
|
158 | config.add_route( | |
159 | name='repo_files_delete_file', |
|
159 | name='repo_files_delete_file', | |
160 | pattern='/{repo_name:.*?[^/]}/delete_file/{commit_id}/{f_path:.*}', |
|
160 | pattern='/{repo_name:.*?[^/]}/delete_file/{commit_id}/{f_path:.*}', | |
161 | repo_route=True) |
|
161 | repo_route=True) | |
162 | config.add_route( |
|
162 | config.add_route( | |
163 | name='repo_files_edit_file', |
|
163 | name='repo_files_edit_file', | |
164 | pattern='/{repo_name:.*?[^/]}/edit_file/{commit_id}/{f_path:.*}', |
|
164 | pattern='/{repo_name:.*?[^/]}/edit_file/{commit_id}/{f_path:.*}', | |
165 | repo_route=True) |
|
165 | repo_route=True) | |
166 | config.add_route( |
|
166 | config.add_route( | |
167 | name='repo_files_update_file', |
|
167 | name='repo_files_update_file', | |
168 | pattern='/{repo_name:.*?[^/]}/update_file/{commit_id}/{f_path:.*}', |
|
168 | pattern='/{repo_name:.*?[^/]}/update_file/{commit_id}/{f_path:.*}', | |
169 | repo_route=True) |
|
169 | repo_route=True) | |
170 | config.add_route( |
|
170 | config.add_route( | |
171 | name='repo_files_add_file', |
|
171 | name='repo_files_add_file', | |
172 | pattern='/{repo_name:.*?[^/]}/add_file/{commit_id}/{f_path:.*}', |
|
172 | pattern='/{repo_name:.*?[^/]}/add_file/{commit_id}/{f_path:.*}', | |
173 | repo_route=True) |
|
173 | repo_route=True) | |
174 | config.add_route( |
|
174 | config.add_route( | |
175 | name='repo_files_create_file', |
|
175 | name='repo_files_create_file', | |
176 | pattern='/{repo_name:.*?[^/]}/create_file/{commit_id}/{f_path:.*}', |
|
176 | pattern='/{repo_name:.*?[^/]}/create_file/{commit_id}/{f_path:.*}', | |
177 | repo_route=True) |
|
177 | repo_route=True) | |
178 |
|
178 | |||
179 | # Refs data |
|
179 | # Refs data | |
180 | config.add_route( |
|
180 | config.add_route( | |
181 | name='repo_refs_data', |
|
181 | name='repo_refs_data', | |
182 | pattern='/{repo_name:.*?[^/]}/refs-data', repo_route=True) |
|
182 | pattern='/{repo_name:.*?[^/]}/refs-data', repo_route=True) | |
183 |
|
183 | |||
184 | config.add_route( |
|
184 | config.add_route( | |
185 | name='repo_refs_changelog_data', |
|
185 | name='repo_refs_changelog_data', | |
186 | pattern='/{repo_name:.*?[^/]}/refs-data-changelog', repo_route=True) |
|
186 | pattern='/{repo_name:.*?[^/]}/refs-data-changelog', repo_route=True) | |
187 |
|
187 | |||
188 | config.add_route( |
|
188 | config.add_route( | |
189 | name='repo_stats', |
|
189 | name='repo_stats', | |
190 | pattern='/{repo_name:.*?[^/]}/repo_stats/{commit_id}', repo_route=True) |
|
190 | pattern='/{repo_name:.*?[^/]}/repo_stats/{commit_id}', repo_route=True) | |
191 |
|
191 | |||
192 | # Changelog |
|
192 | # Changelog | |
193 | config.add_route( |
|
193 | config.add_route( | |
194 | name='repo_changelog', |
|
194 | name='repo_changelog', | |
195 | pattern='/{repo_name:.*?[^/]}/changelog', repo_route=True) |
|
195 | pattern='/{repo_name:.*?[^/]}/changelog', repo_route=True) | |
196 | config.add_route( |
|
196 | config.add_route( | |
197 | name='repo_changelog_file', |
|
197 | name='repo_changelog_file', | |
198 | pattern='/{repo_name:.*?[^/]}/changelog/{commit_id}/{f_path:.*}', repo_route=True) |
|
198 | pattern='/{repo_name:.*?[^/]}/changelog/{commit_id}/{f_path:.*}', repo_route=True) | |
199 | config.add_route( |
|
199 | config.add_route( | |
200 | name='repo_changelog_elements', |
|
200 | name='repo_changelog_elements', | |
201 | pattern='/{repo_name:.*?[^/]}/changelog_elements', repo_route=True) |
|
201 | pattern='/{repo_name:.*?[^/]}/changelog_elements', repo_route=True) | |
202 |
|
202 | |||
203 | # Compare |
|
203 | # Compare | |
204 | config.add_route( |
|
204 | config.add_route( | |
205 | name='repo_compare_select', |
|
205 | name='repo_compare_select', | |
206 | pattern='/{repo_name:.*?[^/]}/compare', repo_route=True) |
|
206 | pattern='/{repo_name:.*?[^/]}/compare', repo_route=True) | |
207 |
|
207 | |||
208 | config.add_route( |
|
208 | config.add_route( | |
209 | name='repo_compare', |
|
209 | name='repo_compare', | |
210 | pattern='/{repo_name:.*?[^/]}/compare/{source_ref_type}@{source_ref:.*?}...{target_ref_type}@{target_ref:.*?}', repo_route=True) |
|
210 | pattern='/{repo_name:.*?[^/]}/compare/{source_ref_type}@{source_ref:.*?}...{target_ref_type}@{target_ref:.*?}', repo_route=True) | |
211 |
|
211 | |||
212 | # Tags |
|
212 | # Tags | |
213 | config.add_route( |
|
213 | config.add_route( | |
214 | name='tags_home', |
|
214 | name='tags_home', | |
215 | pattern='/{repo_name:.*?[^/]}/tags', repo_route=True) |
|
215 | pattern='/{repo_name:.*?[^/]}/tags', repo_route=True) | |
216 |
|
216 | |||
217 | # Branches |
|
217 | # Branches | |
218 | config.add_route( |
|
218 | config.add_route( | |
219 | name='branches_home', |
|
219 | name='branches_home', | |
220 | pattern='/{repo_name:.*?[^/]}/branches', repo_route=True) |
|
220 | pattern='/{repo_name:.*?[^/]}/branches', repo_route=True) | |
221 |
|
221 | |||
222 | # Bookmarks |
|
222 | # Bookmarks | |
223 | config.add_route( |
|
223 | config.add_route( | |
224 | name='bookmarks_home', |
|
224 | name='bookmarks_home', | |
225 | pattern='/{repo_name:.*?[^/]}/bookmarks', repo_route=True) |
|
225 | pattern='/{repo_name:.*?[^/]}/bookmarks', repo_route=True) | |
226 |
|
226 | |||
227 | # Forks |
|
227 | # Forks | |
228 | config.add_route( |
|
228 | config.add_route( | |
229 | name='repo_fork_new', |
|
229 | name='repo_fork_new', | |
230 | pattern='/{repo_name:.*?[^/]}/fork', repo_route=True, |
|
230 | pattern='/{repo_name:.*?[^/]}/fork', repo_route=True, | |
231 | repo_accepted_types=['hg', 'git']) |
|
231 | repo_accepted_types=['hg', 'git']) | |
232 |
|
232 | |||
233 | config.add_route( |
|
233 | config.add_route( | |
234 | name='repo_fork_create', |
|
234 | name='repo_fork_create', | |
235 | pattern='/{repo_name:.*?[^/]}/fork/create', repo_route=True, |
|
235 | pattern='/{repo_name:.*?[^/]}/fork/create', repo_route=True, | |
236 | repo_accepted_types=['hg', 'git']) |
|
236 | repo_accepted_types=['hg', 'git']) | |
237 |
|
237 | |||
238 | config.add_route( |
|
238 | config.add_route( | |
239 | name='repo_forks_show_all', |
|
239 | name='repo_forks_show_all', | |
240 | pattern='/{repo_name:.*?[^/]}/forks', repo_route=True, |
|
240 | pattern='/{repo_name:.*?[^/]}/forks', repo_route=True, | |
241 | repo_accepted_types=['hg', 'git']) |
|
241 | repo_accepted_types=['hg', 'git']) | |
242 | config.add_route( |
|
242 | config.add_route( | |
243 | name='repo_forks_data', |
|
243 | name='repo_forks_data', | |
244 | pattern='/{repo_name:.*?[^/]}/forks/data', repo_route=True, |
|
244 | pattern='/{repo_name:.*?[^/]}/forks/data', repo_route=True, | |
245 | repo_accepted_types=['hg', 'git']) |
|
245 | repo_accepted_types=['hg', 'git']) | |
246 |
|
246 | |||
247 | # Pull Requests |
|
247 | # Pull Requests | |
248 | config.add_route( |
|
248 | config.add_route( | |
249 | name='pullrequest_show', |
|
249 | name='pullrequest_show', | |
250 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}', |
|
250 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}', | |
251 | repo_route=True) |
|
251 | repo_route=True) | |
252 |
|
252 | |||
253 | config.add_route( |
|
253 | config.add_route( | |
254 | name='pullrequest_show_all', |
|
254 | name='pullrequest_show_all', | |
255 | pattern='/{repo_name:.*?[^/]}/pull-request', |
|
255 | pattern='/{repo_name:.*?[^/]}/pull-request', | |
256 | repo_route=True, repo_accepted_types=['hg', 'git']) |
|
256 | repo_route=True, repo_accepted_types=['hg', 'git']) | |
257 |
|
257 | |||
258 | config.add_route( |
|
258 | config.add_route( | |
259 | name='pullrequest_show_all_data', |
|
259 | name='pullrequest_show_all_data', | |
260 | pattern='/{repo_name:.*?[^/]}/pull-request-data', |
|
260 | pattern='/{repo_name:.*?[^/]}/pull-request-data', | |
261 | repo_route=True, repo_accepted_types=['hg', 'git']) |
|
261 | repo_route=True, repo_accepted_types=['hg', 'git']) | |
262 |
|
262 | |||
263 | config.add_route( |
|
263 | config.add_route( | |
264 | name='pullrequest_repo_refs', |
|
264 | name='pullrequest_repo_refs', | |
265 | pattern='/{repo_name:.*?[^/]}/pull-request/refs/{target_repo_name:.*?[^/]}', |
|
265 | pattern='/{repo_name:.*?[^/]}/pull-request/refs/{target_repo_name:.*?[^/]}', | |
266 | repo_route=True) |
|
266 | repo_route=True) | |
267 |
|
267 | |||
268 | config.add_route( |
|
268 | config.add_route( | |
269 | name='pullrequest_repo_destinations', |
|
269 | name='pullrequest_repo_destinations', | |
270 | pattern='/{repo_name:.*?[^/]}/pull-request/repo-destinations', |
|
270 | pattern='/{repo_name:.*?[^/]}/pull-request/repo-destinations', | |
271 | repo_route=True) |
|
271 | repo_route=True) | |
272 |
|
272 | |||
273 | config.add_route( |
|
273 | config.add_route( | |
274 | name='pullrequest_new', |
|
274 | name='pullrequest_new', | |
275 | pattern='/{repo_name:.*?[^/]}/pull-request/new', |
|
275 | pattern='/{repo_name:.*?[^/]}/pull-request/new', | |
276 | repo_route=True, repo_accepted_types=['hg', 'git']) |
|
276 | repo_route=True, repo_accepted_types=['hg', 'git']) | |
277 |
|
277 | |||
278 | config.add_route( |
|
278 | config.add_route( | |
279 | name='pullrequest_create', |
|
279 | name='pullrequest_create', | |
280 | pattern='/{repo_name:.*?[^/]}/pull-request/create', |
|
280 | pattern='/{repo_name:.*?[^/]}/pull-request/create', | |
281 | repo_route=True, repo_accepted_types=['hg', 'git']) |
|
281 | repo_route=True, repo_accepted_types=['hg', 'git']) | |
282 |
|
282 | |||
283 | config.add_route( |
|
283 | config.add_route( | |
284 | name='pullrequest_update', |
|
284 | name='pullrequest_update', | |
285 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/update', |
|
285 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/update', | |
286 | repo_route=True) |
|
286 | repo_route=True) | |
287 |
|
287 | |||
288 | config.add_route( |
|
288 | config.add_route( | |
289 | name='pullrequest_merge', |
|
289 | name='pullrequest_merge', | |
290 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/merge', |
|
290 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/merge', | |
291 | repo_route=True) |
|
291 | repo_route=True) | |
292 |
|
292 | |||
293 | config.add_route( |
|
293 | config.add_route( | |
294 | name='pullrequest_delete', |
|
294 | name='pullrequest_delete', | |
295 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/delete', |
|
295 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/delete', | |
296 | repo_route=True) |
|
296 | repo_route=True) | |
297 |
|
297 | |||
298 | config.add_route( |
|
298 | config.add_route( | |
299 | name='pullrequest_comment_create', |
|
299 | name='pullrequest_comment_create', | |
300 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment', |
|
300 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment', | |
301 | repo_route=True) |
|
301 | repo_route=True) | |
302 |
|
302 | |||
303 | config.add_route( |
|
303 | config.add_route( | |
304 | name='pullrequest_comment_delete', |
|
304 | name='pullrequest_comment_delete', | |
305 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment/{comment_id}/delete', |
|
305 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment/{comment_id}/delete', | |
306 | repo_route=True, repo_accepted_types=['hg', 'git']) |
|
306 | repo_route=True, repo_accepted_types=['hg', 'git']) | |
307 |
|
307 | |||
308 | # Settings |
|
308 | # Settings | |
309 | config.add_route( |
|
309 | config.add_route( | |
310 | name='edit_repo', |
|
310 | name='edit_repo', | |
311 | pattern='/{repo_name:.*?[^/]}/settings', repo_route=True) |
|
311 | pattern='/{repo_name:.*?[^/]}/settings', repo_route=True) | |
312 |
|
312 | |||
313 | # Settings advanced |
|
313 | # Settings advanced | |
314 | config.add_route( |
|
314 | config.add_route( | |
315 | name='edit_repo_advanced', |
|
315 | name='edit_repo_advanced', | |
316 | pattern='/{repo_name:.*?[^/]}/settings/advanced', repo_route=True) |
|
316 | pattern='/{repo_name:.*?[^/]}/settings/advanced', repo_route=True) | |
317 | config.add_route( |
|
317 | config.add_route( | |
318 | name='edit_repo_advanced_delete', |
|
318 | name='edit_repo_advanced_delete', | |
319 | pattern='/{repo_name:.*?[^/]}/settings/advanced/delete', repo_route=True) |
|
319 | pattern='/{repo_name:.*?[^/]}/settings/advanced/delete', repo_route=True) | |
320 | config.add_route( |
|
320 | config.add_route( | |
321 | name='edit_repo_advanced_locking', |
|
321 | name='edit_repo_advanced_locking', | |
322 | pattern='/{repo_name:.*?[^/]}/settings/advanced/locking', repo_route=True) |
|
322 | pattern='/{repo_name:.*?[^/]}/settings/advanced/locking', repo_route=True) | |
323 | config.add_route( |
|
323 | config.add_route( | |
324 | name='edit_repo_advanced_journal', |
|
324 | name='edit_repo_advanced_journal', | |
325 | pattern='/{repo_name:.*?[^/]}/settings/advanced/journal', repo_route=True) |
|
325 | pattern='/{repo_name:.*?[^/]}/settings/advanced/journal', repo_route=True) | |
326 | config.add_route( |
|
326 | config.add_route( | |
327 | name='edit_repo_advanced_fork', |
|
327 | name='edit_repo_advanced_fork', | |
328 | pattern='/{repo_name:.*?[^/]}/settings/advanced/fork', repo_route=True) |
|
328 | pattern='/{repo_name:.*?[^/]}/settings/advanced/fork', repo_route=True) | |
329 |
|
329 | |||
330 | # Caches |
|
330 | # Caches | |
331 | config.add_route( |
|
331 | config.add_route( | |
332 | name='edit_repo_caches', |
|
332 | name='edit_repo_caches', | |
333 | pattern='/{repo_name:.*?[^/]}/settings/caches', repo_route=True) |
|
333 | pattern='/{repo_name:.*?[^/]}/settings/caches', repo_route=True) | |
334 |
|
334 | |||
335 | # Permissions |
|
335 | # Permissions | |
336 | config.add_route( |
|
336 | config.add_route( | |
337 | name='edit_repo_perms', |
|
337 | name='edit_repo_perms', | |
338 | pattern='/{repo_name:.*?[^/]}/settings/permissions', repo_route=True) |
|
338 | pattern='/{repo_name:.*?[^/]}/settings/permissions', repo_route=True) | |
339 |
|
339 | |||
340 | # Repo Review Rules |
|
340 | # Maintenance | |
|
341 | config.add_route( | |||
|
342 | name='edit_repo_maintenance', | |||
|
343 | pattern='/{repo_name:.*?[^/]}/settings/maintenance', repo_route=True) | |||
|
344 | ||||
|
345 | config.add_route( | |||
|
346 | name='edit_repo_maintenance_execute', | |||
|
347 | pattern='/{repo_name:.*?[^/]}/settings/maintenance/execute', repo_route=True) | |||
|
348 | ||||
|
349 | # Fields | |||
|
350 | config.add_route( | |||
|
351 | name='edit_repo_fields', | |||
|
352 | pattern='/{repo_name:.*?[^/]}/settings/fields', repo_route=True) | |||
|
353 | config.add_route( | |||
|
354 | name='edit_repo_fields_create', | |||
|
355 | pattern='/{repo_name:.*?[^/]}/settings/fields/create', repo_route=True) | |||
|
356 | config.add_route( | |||
|
357 | name='edit_repo_fields_delete', | |||
|
358 | pattern='/{repo_name:.*?[^/]}/settings/fields/{field_id}/delete', repo_route=True) | |||
|
359 | ||||
|
360 | # Locking | |||
|
361 | config.add_route( | |||
|
362 | name='repo_edit_toggle_locking', | |||
|
363 | pattern='/{repo_name:.*?[^/]}/settings/toggle_locking', repo_route=True) | |||
|
364 | ||||
|
365 | # Remote | |||
|
366 | config.add_route( | |||
|
367 | name='edit_repo_remote', | |||
|
368 | pattern='/{repo_name:.*?[^/]}/settings/remote', repo_route=True) | |||
|
369 | config.add_route( | |||
|
370 | name='edit_repo_remote_pull', | |||
|
371 | pattern='/{repo_name:.*?[^/]}/settings/remote/pull', repo_route=True) | |||
|
372 | ||||
|
373 | ||||
|
374 | # Statistics | |||
|
375 | config.add_route( | |||
|
376 | name='edit_repo_statistics', | |||
|
377 | pattern='/{repo_name:.*?[^/]}/settings/statistics', repo_route=True) | |||
|
378 | config.add_route( | |||
|
379 | name='edit_repo_statistics_reset', | |||
|
380 | pattern='/{repo_name:.*?[^/]}/settings/statistics/update', repo_route=True) | |||
|
381 | ||||
|
382 | # Issue trackers | |||
|
383 | config.add_route( | |||
|
384 | name='edit_repo_issuetracker', | |||
|
385 | pattern='/{repo_name:.*?[^/]}/settings/issue_trackers', repo_route=True) | |||
|
386 | config.add_route( | |||
|
387 | name='edit_repo_issuetracker_test', | |||
|
388 | pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/test', repo_route=True) | |||
|
389 | config.add_route( | |||
|
390 | name='edit_repo_issuetracker_delete', | |||
|
391 | pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/delete', repo_route=True) | |||
|
392 | config.add_route( | |||
|
393 | name='edit_repo_issuetracker_update', | |||
|
394 | pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/update', repo_route=True) | |||
|
395 | ||||
|
396 | # VCS Settings | |||
|
397 | config.add_route( | |||
|
398 | name='edit_repo_vcs', | |||
|
399 | pattern='/{repo_name:.*?[^/]}/settings/vcs', repo_route=True) | |||
|
400 | config.add_route( | |||
|
401 | name='edit_repo_vcs_update', | |||
|
402 | pattern='/{repo_name:.*?[^/]}/settings/vcs/update', repo_route=True) | |||
|
403 | ||||
|
404 | # svn pattern | |||
|
405 | config.add_route( | |||
|
406 | name='edit_repo_vcs_svn_pattern_delete', | |||
|
407 | pattern='/{repo_name:.*?[^/]}/settings/vcs/svn_pattern/delete', repo_route=True) | |||
|
408 | ||||
|
409 | # Repo Review Rules (EE feature) | |||
341 | config.add_route( |
|
410 | config.add_route( | |
342 | name='repo_reviewers', |
|
411 | name='repo_reviewers', | |
343 | pattern='/{repo_name:.*?[^/]}/settings/review/rules', repo_route=True) |
|
412 | pattern='/{repo_name:.*?[^/]}/settings/review/rules', repo_route=True) | |
344 |
|
413 | |||
345 | config.add_route( |
|
414 | config.add_route( | |
346 | name='repo_default_reviewers_data', |
|
415 | name='repo_default_reviewers_data', | |
347 | pattern='/{repo_name:.*?[^/]}/settings/review/default-reviewers', repo_route=True) |
|
416 | pattern='/{repo_name:.*?[^/]}/settings/review/default-reviewers', repo_route=True) | |
348 |
|
417 | |||
349 | # Maintenance |
|
|||
350 | config.add_route( |
|
|||
351 | name='repo_maintenance', |
|
|||
352 | pattern='/{repo_name:.*?[^/]}/settings/maintenance', repo_route=True) |
|
|||
353 |
|
||||
354 | config.add_route( |
|
|||
355 | name='repo_maintenance_execute', |
|
|||
356 | pattern='/{repo_name:.*?[^/]}/settings/maintenance/execute', repo_route=True) |
|
|||
357 |
|
||||
358 | # Strip |
|
418 | # Strip | |
359 | config.add_route( |
|
419 | config.add_route( | |
360 | name='strip', |
|
420 | name='edit_repo_strip', | |
361 | pattern='/{repo_name:.*?[^/]}/settings/strip', repo_route=True) |
|
421 | pattern='/{repo_name:.*?[^/]}/settings/strip', repo_route=True) | |
362 |
|
422 | |||
363 | config.add_route( |
|
423 | config.add_route( | |
364 | name='strip_check', |
|
424 | name='strip_check', | |
365 | pattern='/{repo_name:.*?[^/]}/settings/strip_check', repo_route=True) |
|
425 | pattern='/{repo_name:.*?[^/]}/settings/strip_check', repo_route=True) | |
366 |
|
426 | |||
367 | config.add_route( |
|
427 | config.add_route( | |
368 | name='strip_execute', |
|
428 | name='strip_execute', | |
369 | pattern='/{repo_name:.*?[^/]}/settings/strip_execute', repo_route=True) |
|
429 | pattern='/{repo_name:.*?[^/]}/settings/strip_execute', repo_route=True) | |
370 |
|
430 | |||
371 | # ATOM/RSS Feed |
|
431 | # ATOM/RSS Feed | |
372 | config.add_route( |
|
432 | config.add_route( | |
373 | name='rss_feed_home', |
|
433 | name='rss_feed_home', | |
374 | pattern='/{repo_name:.*?[^/]}/feed/rss', repo_route=True) |
|
434 | pattern='/{repo_name:.*?[^/]}/feed/rss', repo_route=True) | |
375 |
|
435 | |||
376 | config.add_route( |
|
436 | config.add_route( | |
377 | name='atom_feed_home', |
|
437 | name='atom_feed_home', | |
378 | pattern='/{repo_name:.*?[^/]}/feed/atom', repo_route=True) |
|
438 | pattern='/{repo_name:.*?[^/]}/feed/atom', repo_route=True) | |
379 |
|
439 | |||
380 | # NOTE(marcink): needs to be at the end for catch-all |
|
440 | # NOTE(marcink): needs to be at the end for catch-all | |
381 | add_route_with_slash( |
|
441 | add_route_with_slash( | |
382 | config, |
|
442 | config, | |
383 | name='repo_summary', |
|
443 | name='repo_summary', | |
384 | pattern='/{repo_name:.*?[^/]}', repo_route=True) |
|
444 | pattern='/{repo_name:.*?[^/]}', repo_route=True) | |
385 |
|
445 | |||
386 | # Scan module for configuration decorators. |
|
446 | # Scan module for configuration decorators. | |
387 | config.scan('.views', ignore='.tests') |
|
447 | config.scan('.views', ignore='.tests') |
@@ -1,122 +1,148 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2017 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import pytest |
|
21 | import pytest | |
22 |
|
22 | |||
23 | from rhodecode.lib.utils2 import md5 |
|
23 | from rhodecode.lib.utils2 import md5 | |
24 | from rhodecode.model.db import Repository |
|
24 | from rhodecode.model.db import Repository | |
25 | from rhodecode.model.meta import Session |
|
25 | from rhodecode.model.meta import Session | |
26 | from rhodecode.model.settings import SettingsModel, IssueTrackerSettingsModel |
|
26 | from rhodecode.model.settings import SettingsModel, IssueTrackerSettingsModel | |
27 | from rhodecode.tests import url |
|
27 | ||
|
28 | ||||
|
29 | def route_path(name, params=None, **kwargs): | |||
|
30 | import urllib | |||
|
31 | ||||
|
32 | base_url = { | |||
|
33 | 'repo_summary': '/{repo_name}', | |||
|
34 | 'edit_repo_issuetracker': '/{repo_name}/settings/issue_trackers', | |||
|
35 | 'edit_repo_issuetracker_test': '/{repo_name}/settings/issue_trackers/test', | |||
|
36 | 'edit_repo_issuetracker_delete': '/{repo_name}/settings/issue_trackers/delete', | |||
|
37 | 'edit_repo_issuetracker_update': '/{repo_name}/settings/issue_trackers/update', | |||
|
38 | }[name].format(**kwargs) | |||
|
39 | ||||
|
40 | if params: | |||
|
41 | base_url = '{}?{}'.format(base_url, urllib.urlencode(params)) | |||
|
42 | return base_url | |||
28 |
|
43 | |||
29 |
|
44 | |||
30 | @pytest.mark.usefixtures("app") |
|
45 | @pytest.mark.usefixtures("app") | |
31 | class TestAdminRepos: |
|
46 | class TestRepoIssueTracker(object): | |
32 | def test_issuetracker_index(self, autologin_user, backend): |
|
47 | def test_issuetracker_index(self, autologin_user, backend): | |
33 | repo = backend.create_repo() |
|
48 | repo = backend.create_repo() | |
34 |
response = self.app.get( |
|
49 | response = self.app.get(route_path('edit_repo_issuetracker', | |
35 | repo_name=repo.repo_name)) |
|
50 | repo_name=repo.repo_name)) | |
36 | assert response.status_code == 200 |
|
51 | assert response.status_code == 200 | |
37 |
|
52 | |||
38 | def test_add_issuetracker_patterns( |
|
53 | def test_add_and_test_issuetracker_patterns( | |
39 | self, autologin_user, backend, csrf_token, request): |
|
54 | self, autologin_user, backend, csrf_token, request, xhr_header): | |
40 | pattern = 'issuetracker_pat' |
|
55 | pattern = 'issuetracker_pat' | |
41 | another_pattern = pattern+'1' |
|
56 | another_pattern = pattern+'1' | |
42 | post_url = url('repo_issuetracker_save', |
|
57 | post_url = route_path( | |
43 |
|
|
58 | 'edit_repo_issuetracker_update', repo_name=backend.repo.repo_name) | |
44 | post_data = { |
|
59 | post_data = { | |
45 | 'new_pattern_pattern_0': pattern, |
|
60 | 'new_pattern_pattern_0': pattern, | |
46 | 'new_pattern_url_0': 'url', |
|
61 | 'new_pattern_url_0': 'url', | |
47 | 'new_pattern_prefix_0': 'prefix', |
|
62 | 'new_pattern_prefix_0': 'prefix', | |
48 | 'new_pattern_description_0': 'description', |
|
63 | 'new_pattern_description_0': 'description', | |
49 | 'new_pattern_pattern_1': another_pattern, |
|
64 | 'new_pattern_pattern_1': another_pattern, | |
50 | 'new_pattern_url_1': 'url1', |
|
65 | 'new_pattern_url_1': 'url1', | |
51 | 'new_pattern_prefix_1': 'prefix1', |
|
66 | 'new_pattern_prefix_1': 'prefix1', | |
52 | 'new_pattern_description_1': 'description1', |
|
67 | 'new_pattern_description_1': 'description1', | |
53 | 'csrf_token': csrf_token |
|
68 | 'csrf_token': csrf_token | |
54 | } |
|
69 | } | |
55 | self.app.post(post_url, post_data, status=302) |
|
70 | self.app.post(post_url, post_data, status=302) | |
56 | self.settings_model = IssueTrackerSettingsModel(repo=backend.repo) |
|
71 | self.settings_model = IssueTrackerSettingsModel(repo=backend.repo) | |
57 | settings = self.settings_model.get_repo_settings() |
|
72 | settings = self.settings_model.get_repo_settings() | |
58 | self.uid = md5(pattern) |
|
73 | self.uid = md5(pattern) | |
59 | assert settings[self.uid]['pat'] == pattern |
|
74 | assert settings[self.uid]['pat'] == pattern | |
60 | self.another_uid = md5(another_pattern) |
|
75 | self.another_uid = md5(another_pattern) | |
61 | assert settings[self.another_uid]['pat'] == another_pattern |
|
76 | assert settings[self.another_uid]['pat'] == another_pattern | |
62 |
|
77 | |||
|
78 | # test pattern | |||
|
79 | data = {'test_text': 'example of issuetracker_pat replacement', | |||
|
80 | 'csrf_token': csrf_token} | |||
|
81 | response = self.app.post( | |||
|
82 | route_path('edit_repo_issuetracker_test', | |||
|
83 | repo_name=backend.repo.repo_name), | |||
|
84 | extra_environ=xhr_header, params=data) | |||
|
85 | ||||
|
86 | assert response.body == \ | |||
|
87 | 'example of <a class="issue-tracker-link" href="url">prefix</a> replacement' | |||
|
88 | ||||
63 | @request.addfinalizer |
|
89 | @request.addfinalizer | |
64 | def cleanup(): |
|
90 | def cleanup(): | |
65 | self.settings_model.delete_entries(self.uid) |
|
91 | self.settings_model.delete_entries(self.uid) | |
66 | self.settings_model.delete_entries(self.another_uid) |
|
92 | self.settings_model.delete_entries(self.another_uid) | |
67 |
|
93 | |||
68 | def test_edit_issuetracker_pattern( |
|
94 | def test_edit_issuetracker_pattern( | |
69 | self, autologin_user, backend, csrf_token, request): |
|
95 | self, autologin_user, backend, csrf_token, request): | |
70 | entry_key = 'issuetracker_pat_' |
|
96 | entry_key = 'issuetracker_pat_' | |
71 | pattern = 'issuetracker_pat2' |
|
97 | pattern = 'issuetracker_pat2' | |
72 | old_pattern = 'issuetracker_pat' |
|
98 | old_pattern = 'issuetracker_pat' | |
73 | old_uid = md5(old_pattern) |
|
99 | old_uid = md5(old_pattern) | |
74 |
|
100 | |||
75 | sett = SettingsModel(repo=backend.repo).create_or_update_setting( |
|
101 | sett = SettingsModel(repo=backend.repo).create_or_update_setting( | |
76 | entry_key+old_uid, old_pattern, 'unicode') |
|
102 | entry_key+old_uid, old_pattern, 'unicode') | |
77 | Session().add(sett) |
|
103 | Session().add(sett) | |
78 | Session().commit() |
|
104 | Session().commit() | |
79 | post_url = url('repo_issuetracker_save', |
|
105 | post_url = route_path( | |
80 |
|
|
106 | 'edit_repo_issuetracker_update', repo_name=backend.repo.repo_name) | |
81 | post_data = { |
|
107 | post_data = { | |
82 | 'new_pattern_pattern_0': pattern, |
|
108 | 'new_pattern_pattern_0': pattern, | |
83 | 'new_pattern_url_0': 'url', |
|
109 | 'new_pattern_url_0': 'url', | |
84 | 'new_pattern_prefix_0': 'prefix', |
|
110 | 'new_pattern_prefix_0': 'prefix', | |
85 | 'new_pattern_description_0': 'description', |
|
111 | 'new_pattern_description_0': 'description', | |
86 | 'uid': old_uid, |
|
112 | 'uid': old_uid, | |
87 | 'csrf_token': csrf_token |
|
113 | 'csrf_token': csrf_token | |
88 | } |
|
114 | } | |
89 | self.app.post(post_url, post_data, status=302) |
|
115 | self.app.post(post_url, post_data, status=302) | |
90 | self.settings_model = IssueTrackerSettingsModel(repo=backend.repo) |
|
116 | self.settings_model = IssueTrackerSettingsModel(repo=backend.repo) | |
91 | settings = self.settings_model.get_repo_settings() |
|
117 | settings = self.settings_model.get_repo_settings() | |
92 | self.uid = md5(pattern) |
|
118 | self.uid = md5(pattern) | |
93 | assert settings[self.uid]['pat'] == pattern |
|
119 | assert settings[self.uid]['pat'] == pattern | |
94 | with pytest.raises(KeyError): |
|
120 | with pytest.raises(KeyError): | |
95 | settings[old_uid] |
|
121 | key = settings[old_uid] | |
96 |
|
122 | |||
97 | @request.addfinalizer |
|
123 | @request.addfinalizer | |
98 | def cleanup(): |
|
124 | def cleanup(): | |
99 | self.settings_model.delete_entries(self.uid) |
|
125 | self.settings_model.delete_entries(self.uid) | |
100 |
|
126 | |||
101 | def test_delete_issuetracker_pattern( |
|
127 | def test_delete_issuetracker_pattern( | |
102 | self, autologin_user, backend, csrf_token, settings_util): |
|
128 | self, autologin_user, backend, csrf_token, settings_util): | |
103 | repo = backend.create_repo() |
|
129 | repo = backend.create_repo() | |
104 | repo_name = repo.repo_name |
|
130 | repo_name = repo.repo_name | |
105 | entry_key = 'issuetracker_pat_' |
|
131 | entry_key = 'issuetracker_pat_' | |
106 | pattern = 'issuetracker_pat3' |
|
132 | pattern = 'issuetracker_pat3' | |
107 | uid = md5(pattern) |
|
133 | uid = md5(pattern) | |
108 | settings_util.create_repo_rhodecode_setting( |
|
134 | settings_util.create_repo_rhodecode_setting( | |
109 | repo=backend.repo, name=entry_key+uid, |
|
135 | repo=backend.repo, name=entry_key+uid, | |
110 | value=entry_key, type_='unicode', cleanup=False) |
|
136 | value=entry_key, type_='unicode', cleanup=False) | |
111 |
|
137 | |||
112 | self.app.post( |
|
138 | self.app.post( | |
113 | url('repo_issuetracker_delete', |
|
139 | route_path( | |
|
140 | 'edit_repo_issuetracker_delete', | |||
114 | repo_name=backend.repo.repo_name), |
|
141 | repo_name=backend.repo.repo_name), | |
115 | { |
|
142 | { | |
116 | '_method': 'delete', |
|
|||
117 | 'uid': uid, |
|
143 | 'uid': uid, | |
118 | 'csrf_token': csrf_token |
|
144 | 'csrf_token': csrf_token | |
119 | }, status=302) |
|
145 | }, status=302) | |
120 | settings = IssueTrackerSettingsModel( |
|
146 | settings = IssueTrackerSettingsModel( | |
121 | repo=Repository.get_by_repo_name(repo_name)).get_repo_settings() |
|
147 | repo=Repository.get_by_repo_name(repo_name)).get_repo_settings() | |
122 | assert 'rhodecode_%s%s' % (entry_key, uid) not in settings |
|
148 | assert 'rhodecode_%s%s' % (entry_key, uid) not in settings |
@@ -1,233 +1,232 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2017 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import mock |
|
21 | import mock | |
22 | import pytest |
|
22 | import pytest | |
23 |
|
23 | |||
24 | from rhodecode.lib.utils2 import str2bool |
|
24 | from rhodecode.lib.utils2 import str2bool | |
25 | from rhodecode.lib.vcs.exceptions import RepositoryRequirementError |
|
25 | from rhodecode.lib.vcs.exceptions import RepositoryRequirementError | |
26 | from rhodecode.model.db import Repository, UserRepoToPerm, Permission, User |
|
26 | from rhodecode.model.db import Repository, UserRepoToPerm, Permission, User | |
27 | from rhodecode.model.meta import Session |
|
27 | from rhodecode.model.meta import Session | |
28 | from rhodecode.tests import ( |
|
28 | from rhodecode.tests import ( | |
29 |
|
|
29 | TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, assert_session_flash) | |
30 | assert_session_flash) |
|
|||
31 | from rhodecode.tests.fixture import Fixture |
|
30 | from rhodecode.tests.fixture import Fixture | |
32 |
|
31 | |||
33 | fixture = Fixture() |
|
32 | fixture = Fixture() | |
34 |
|
33 | |||
35 |
|
34 | |||
36 | def route_path(name, params=None, **kwargs): |
|
35 | def route_path(name, params=None, **kwargs): | |
37 | import urllib |
|
36 | import urllib | |
38 |
|
37 | |||
39 | base_url = { |
|
38 | base_url = { | |
40 | 'edit_repo': '/{repo_name}/settings', |
|
39 | 'edit_repo': '/{repo_name}/settings', | |
41 | 'edit_repo_advanced': '/{repo_name}/settings/advanced', |
|
40 | 'edit_repo_advanced': '/{repo_name}/settings/advanced', | |
42 | 'edit_repo_caches': '/{repo_name}/settings/caches', |
|
41 | 'edit_repo_caches': '/{repo_name}/settings/caches', | |
43 | 'edit_repo_perms': '/{repo_name}/settings/permissions', |
|
42 | 'edit_repo_perms': '/{repo_name}/settings/permissions', | |
|
43 | 'edit_repo_vcs': '/{repo_name}/settings/vcs', | |||
|
44 | 'edit_repo_issuetracker': '/{repo_name}/settings/issue_trackers', | |||
|
45 | 'edit_repo_fields': '/{repo_name}/settings/fields', | |||
|
46 | 'edit_repo_remote': '/{repo_name}/settings/remote', | |||
|
47 | 'edit_repo_statistics': '/{repo_name}/settings/statistics', | |||
44 | }[name].format(**kwargs) |
|
48 | }[name].format(**kwargs) | |
45 |
|
49 | |||
46 | if params: |
|
50 | if params: | |
47 | base_url = '{}?{}'.format(base_url, urllib.urlencode(params)) |
|
51 | base_url = '{}?{}'.format(base_url, urllib.urlencode(params)) | |
48 | return base_url |
|
52 | return base_url | |
49 |
|
53 | |||
50 |
|
54 | |||
51 | def _get_permission_for_user(user, repo): |
|
55 | def _get_permission_for_user(user, repo): | |
52 | perm = UserRepoToPerm.query()\ |
|
56 | perm = UserRepoToPerm.query()\ | |
53 | .filter(UserRepoToPerm.repository == |
|
57 | .filter(UserRepoToPerm.repository == | |
54 | Repository.get_by_repo_name(repo))\ |
|
58 | Repository.get_by_repo_name(repo))\ | |
55 | .filter(UserRepoToPerm.user == User.get_by_username(user))\ |
|
59 | .filter(UserRepoToPerm.user == User.get_by_username(user))\ | |
56 | .all() |
|
60 | .all() | |
57 | return perm |
|
61 | return perm | |
58 |
|
62 | |||
59 |
|
63 | |||
60 | @pytest.mark.usefixtures('autologin_user', 'app') |
|
64 | @pytest.mark.usefixtures('autologin_user', 'app') | |
61 | class TestAdminRepoSettings(object): |
|
65 | class TestAdminRepoSettings(object): | |
62 | @pytest.mark.parametrize('urlname', [ |
|
66 | @pytest.mark.parametrize('urlname', [ | |
63 | 'edit_repo', |
|
67 | 'edit_repo', | |
64 | 'edit_repo_caches', |
|
68 | 'edit_repo_caches', | |
65 | 'edit_repo_perms', |
|
69 | 'edit_repo_perms', | |
66 | 'edit_repo_advanced', |
|
70 | 'edit_repo_advanced', | |
|
71 | 'edit_repo_vcs', | |||
|
72 | 'edit_repo_issuetracker', | |||
|
73 | 'edit_repo_fields', | |||
|
74 | 'edit_repo_remote', | |||
|
75 | 'edit_repo_statistics', | |||
67 | ]) |
|
76 | ]) | |
68 | def test_show_page(self, urlname, app, backend): |
|
77 | def test_show_page(self, urlname, app, backend): | |
69 | app.get(route_path(urlname, repo_name=backend.repo_name), status=200) |
|
78 | app.get(route_path(urlname, repo_name=backend.repo_name), status=200) | |
70 |
|
79 | |||
71 | def test_edit_accessible_when_missing_requirements( |
|
80 | def test_edit_accessible_when_missing_requirements( | |
72 | self, backend_hg, autologin_user): |
|
81 | self, backend_hg, autologin_user): | |
73 | scm_patcher = mock.patch.object( |
|
82 | scm_patcher = mock.patch.object( | |
74 | Repository, 'scm_instance', side_effect=RepositoryRequirementError) |
|
83 | Repository, 'scm_instance', side_effect=RepositoryRequirementError) | |
75 | with scm_patcher: |
|
84 | with scm_patcher: | |
76 | self.app.get(route_path('edit_repo', repo_name=backend_hg.repo_name)) |
|
85 | self.app.get(route_path('edit_repo', repo_name=backend_hg.repo_name)) | |
77 |
|
86 | |||
78 | @pytest.mark.parametrize('urlname', [ |
|
|||
79 | 'repo_vcs_settings', |
|
|||
80 | 'repo_settings_issuetracker', |
|
|||
81 | 'edit_repo_fields', |
|
|||
82 | 'edit_repo_remote', |
|
|||
83 | 'edit_repo_statistics', |
|
|||
84 | ]) |
|
|||
85 | def test_show_page_pylons(self, urlname, app): |
|
|||
86 | app.get(url(urlname, repo_name=HG_REPO)) |
|
|||
87 |
|
||||
88 | @pytest.mark.parametrize('update_settings', [ |
|
87 | @pytest.mark.parametrize('update_settings', [ | |
89 | {'repo_description': 'alter-desc'}, |
|
88 | {'repo_description': 'alter-desc'}, | |
90 | {'repo_owner': TEST_USER_REGULAR_LOGIN}, |
|
89 | {'repo_owner': TEST_USER_REGULAR_LOGIN}, | |
91 | {'repo_private': 'true'}, |
|
90 | {'repo_private': 'true'}, | |
92 | {'repo_enable_locking': 'true'}, |
|
91 | {'repo_enable_locking': 'true'}, | |
93 | {'repo_enable_downloads': 'true'}, |
|
92 | {'repo_enable_downloads': 'true'}, | |
94 | ]) |
|
93 | ]) | |
95 | def test_update_repo_settings(self, update_settings, csrf_token, backend, user_util): |
|
94 | def test_update_repo_settings(self, update_settings, csrf_token, backend, user_util): | |
96 | repo = user_util.create_repo(repo_type=backend.alias) |
|
95 | repo = user_util.create_repo(repo_type=backend.alias) | |
97 | repo_name = repo.repo_name |
|
96 | repo_name = repo.repo_name | |
98 |
|
97 | |||
99 | params = fixture._get_repo_create_params( |
|
98 | params = fixture._get_repo_create_params( | |
100 | csrf_token=csrf_token, |
|
99 | csrf_token=csrf_token, | |
101 | repo_name=repo_name, |
|
100 | repo_name=repo_name, | |
102 | repo_type=backend.alias, |
|
101 | repo_type=backend.alias, | |
103 | repo_owner=TEST_USER_ADMIN_LOGIN, |
|
102 | repo_owner=TEST_USER_ADMIN_LOGIN, | |
104 | repo_description='DESC', |
|
103 | repo_description='DESC', | |
105 |
|
104 | |||
106 | repo_private='false', |
|
105 | repo_private='false', | |
107 | repo_enable_locking='false', |
|
106 | repo_enable_locking='false', | |
108 | repo_enable_downloads='false') |
|
107 | repo_enable_downloads='false') | |
109 | params.update(update_settings) |
|
108 | params.update(update_settings) | |
110 | self.app.post( |
|
109 | self.app.post( | |
111 | route_path('edit_repo', repo_name=repo_name), |
|
110 | route_path('edit_repo', repo_name=repo_name), | |
112 | params=params, status=302) |
|
111 | params=params, status=302) | |
113 |
|
112 | |||
114 | repo = Repository.get_by_repo_name(repo_name) |
|
113 | repo = Repository.get_by_repo_name(repo_name) | |
115 | assert repo.user.username == \ |
|
114 | assert repo.user.username == \ | |
116 | update_settings.get('repo_owner', repo.user.username) |
|
115 | update_settings.get('repo_owner', repo.user.username) | |
117 |
|
116 | |||
118 | assert repo.description == \ |
|
117 | assert repo.description == \ | |
119 | update_settings.get('repo_description', repo.description) |
|
118 | update_settings.get('repo_description', repo.description) | |
120 |
|
119 | |||
121 | assert repo.private == \ |
|
120 | assert repo.private == \ | |
122 | str2bool(update_settings.get( |
|
121 | str2bool(update_settings.get( | |
123 | 'repo_private', repo.private)) |
|
122 | 'repo_private', repo.private)) | |
124 |
|
123 | |||
125 | assert repo.enable_locking == \ |
|
124 | assert repo.enable_locking == \ | |
126 | str2bool(update_settings.get( |
|
125 | str2bool(update_settings.get( | |
127 | 'repo_enable_locking', repo.enable_locking)) |
|
126 | 'repo_enable_locking', repo.enable_locking)) | |
128 |
|
127 | |||
129 | assert repo.enable_downloads == \ |
|
128 | assert repo.enable_downloads == \ | |
130 | str2bool(update_settings.get( |
|
129 | str2bool(update_settings.get( | |
131 | 'repo_enable_downloads', repo.enable_downloads)) |
|
130 | 'repo_enable_downloads', repo.enable_downloads)) | |
132 |
|
131 | |||
133 | def test_update_repo_name_via_settings(self, csrf_token, user_util, backend): |
|
132 | def test_update_repo_name_via_settings(self, csrf_token, user_util, backend): | |
134 | repo = user_util.create_repo(repo_type=backend.alias) |
|
133 | repo = user_util.create_repo(repo_type=backend.alias) | |
135 | repo_name = repo.repo_name |
|
134 | repo_name = repo.repo_name | |
136 |
|
135 | |||
137 | repo_group = user_util.create_repo_group() |
|
136 | repo_group = user_util.create_repo_group() | |
138 | repo_group_name = repo_group.group_name |
|
137 | repo_group_name = repo_group.group_name | |
139 | new_name = repo_group_name + '_' + repo_name |
|
138 | new_name = repo_group_name + '_' + repo_name | |
140 |
|
139 | |||
141 | params = fixture._get_repo_create_params( |
|
140 | params = fixture._get_repo_create_params( | |
142 | csrf_token=csrf_token, |
|
141 | csrf_token=csrf_token, | |
143 | repo_name=new_name, |
|
142 | repo_name=new_name, | |
144 | repo_type=backend.alias, |
|
143 | repo_type=backend.alias, | |
145 | repo_owner=TEST_USER_ADMIN_LOGIN, |
|
144 | repo_owner=TEST_USER_ADMIN_LOGIN, | |
146 | repo_description='DESC', |
|
145 | repo_description='DESC', | |
147 | repo_private='false', |
|
146 | repo_private='false', | |
148 | repo_enable_locking='false', |
|
147 | repo_enable_locking='false', | |
149 | repo_enable_downloads='false') |
|
148 | repo_enable_downloads='false') | |
150 | self.app.post( |
|
149 | self.app.post( | |
151 | route_path('edit_repo', repo_name=repo_name), |
|
150 | route_path('edit_repo', repo_name=repo_name), | |
152 | params=params, status=302) |
|
151 | params=params, status=302) | |
153 | repo = Repository.get_by_repo_name(new_name) |
|
152 | repo = Repository.get_by_repo_name(new_name) | |
154 | assert repo.repo_name == new_name |
|
153 | assert repo.repo_name == new_name | |
155 |
|
154 | |||
156 | def test_update_repo_group_via_settings(self, csrf_token, user_util, backend): |
|
155 | def test_update_repo_group_via_settings(self, csrf_token, user_util, backend): | |
157 | repo = user_util.create_repo(repo_type=backend.alias) |
|
156 | repo = user_util.create_repo(repo_type=backend.alias) | |
158 | repo_name = repo.repo_name |
|
157 | repo_name = repo.repo_name | |
159 |
|
158 | |||
160 | repo_group = user_util.create_repo_group() |
|
159 | repo_group = user_util.create_repo_group() | |
161 | repo_group_name = repo_group.group_name |
|
160 | repo_group_name = repo_group.group_name | |
162 | repo_group_id = repo_group.group_id |
|
161 | repo_group_id = repo_group.group_id | |
163 |
|
162 | |||
164 | new_name = repo_group_name + '/' + repo_name |
|
163 | new_name = repo_group_name + '/' + repo_name | |
165 | params = fixture._get_repo_create_params( |
|
164 | params = fixture._get_repo_create_params( | |
166 | csrf_token=csrf_token, |
|
165 | csrf_token=csrf_token, | |
167 | repo_name=repo_name, |
|
166 | repo_name=repo_name, | |
168 | repo_type=backend.alias, |
|
167 | repo_type=backend.alias, | |
169 | repo_owner=TEST_USER_ADMIN_LOGIN, |
|
168 | repo_owner=TEST_USER_ADMIN_LOGIN, | |
170 | repo_description='DESC', |
|
169 | repo_description='DESC', | |
171 | repo_group=repo_group_id, |
|
170 | repo_group=repo_group_id, | |
172 | repo_private='false', |
|
171 | repo_private='false', | |
173 | repo_enable_locking='false', |
|
172 | repo_enable_locking='false', | |
174 | repo_enable_downloads='false') |
|
173 | repo_enable_downloads='false') | |
175 | self.app.post( |
|
174 | self.app.post( | |
176 | route_path('edit_repo', repo_name=repo_name), |
|
175 | route_path('edit_repo', repo_name=repo_name), | |
177 | params=params, status=302) |
|
176 | params=params, status=302) | |
178 | repo = Repository.get_by_repo_name(new_name) |
|
177 | repo = Repository.get_by_repo_name(new_name) | |
179 | assert repo.repo_name == new_name |
|
178 | assert repo.repo_name == new_name | |
180 |
|
179 | |||
181 | def test_set_private_flag_sets_default_user_permissions_to_none( |
|
180 | def test_set_private_flag_sets_default_user_permissions_to_none( | |
182 | self, autologin_user, backend, csrf_token): |
|
181 | self, autologin_user, backend, csrf_token): | |
183 |
|
182 | |||
184 | # initially repository perm should be read |
|
183 | # initially repository perm should be read | |
185 | perm = _get_permission_for_user(user='default', repo=backend.repo_name) |
|
184 | perm = _get_permission_for_user(user='default', repo=backend.repo_name) | |
186 | assert len(perm) == 1 |
|
185 | assert len(perm) == 1 | |
187 | assert perm[0].permission.permission_name == 'repository.read' |
|
186 | assert perm[0].permission.permission_name == 'repository.read' | |
188 | assert not backend.repo.private |
|
187 | assert not backend.repo.private | |
189 |
|
188 | |||
190 | response = self.app.post( |
|
189 | response = self.app.post( | |
191 | route_path('edit_repo', repo_name=backend.repo_name), |
|
190 | route_path('edit_repo', repo_name=backend.repo_name), | |
192 | params=fixture._get_repo_create_params( |
|
191 | params=fixture._get_repo_create_params( | |
193 | repo_private='true', |
|
192 | repo_private='true', | |
194 | repo_name=backend.repo_name, |
|
193 | repo_name=backend.repo_name, | |
195 | repo_type=backend.alias, |
|
194 | repo_type=backend.alias, | |
196 | repo_owner=TEST_USER_ADMIN_LOGIN, |
|
195 | repo_owner=TEST_USER_ADMIN_LOGIN, | |
197 | csrf_token=csrf_token), status=302) |
|
196 | csrf_token=csrf_token), status=302) | |
198 |
|
197 | |||
199 | assert_session_flash( |
|
198 | assert_session_flash( | |
200 | response, |
|
199 | response, | |
201 | msg='Repository %s updated successfully' % (backend.repo_name)) |
|
200 | msg='Repository %s updated successfully' % (backend.repo_name)) | |
202 |
|
201 | |||
203 | repo = Repository.get_by_repo_name(backend.repo_name) |
|
202 | repo = Repository.get_by_repo_name(backend.repo_name) | |
204 | assert repo.private is True |
|
203 | assert repo.private is True | |
205 |
|
204 | |||
206 | # now the repo default permission should be None |
|
205 | # now the repo default permission should be None | |
207 | perm = _get_permission_for_user(user='default', repo=backend.repo_name) |
|
206 | perm = _get_permission_for_user(user='default', repo=backend.repo_name) | |
208 | assert len(perm) == 1 |
|
207 | assert len(perm) == 1 | |
209 | assert perm[0].permission.permission_name == 'repository.none' |
|
208 | assert perm[0].permission.permission_name == 'repository.none' | |
210 |
|
209 | |||
211 | response = self.app.post( |
|
210 | response = self.app.post( | |
212 | route_path('edit_repo', repo_name=backend.repo_name), |
|
211 | route_path('edit_repo', repo_name=backend.repo_name), | |
213 | params=fixture._get_repo_create_params( |
|
212 | params=fixture._get_repo_create_params( | |
214 | repo_private='false', |
|
213 | repo_private='false', | |
215 | repo_name=backend.repo_name, |
|
214 | repo_name=backend.repo_name, | |
216 | repo_type=backend.alias, |
|
215 | repo_type=backend.alias, | |
217 | repo_owner=TEST_USER_ADMIN_LOGIN, |
|
216 | repo_owner=TEST_USER_ADMIN_LOGIN, | |
218 | csrf_token=csrf_token), status=302) |
|
217 | csrf_token=csrf_token), status=302) | |
219 |
|
218 | |||
220 | assert_session_flash( |
|
219 | assert_session_flash( | |
221 | response, |
|
220 | response, | |
222 | msg='Repository %s updated successfully' % (backend.repo_name)) |
|
221 | msg='Repository %s updated successfully' % (backend.repo_name)) | |
223 | assert backend.repo.private is False |
|
222 | assert backend.repo.private is False | |
224 |
|
223 | |||
225 | # we turn off private now the repo default permission should stay None |
|
224 | # we turn off private now the repo default permission should stay None | |
226 | perm = _get_permission_for_user(user='default', repo=backend.repo_name) |
|
225 | perm = _get_permission_for_user(user='default', repo=backend.repo_name) | |
227 | assert len(perm) == 1 |
|
226 | assert len(perm) == 1 | |
228 | assert perm[0].permission.permission_name == 'repository.none' |
|
227 | assert perm[0].permission.permission_name == 'repository.none' | |
229 |
|
228 | |||
230 | # update this permission back |
|
229 | # update this permission back | |
231 | perm[0].permission = Permission.get_by_key('repository.read') |
|
230 | perm[0].permission = Permission.get_by_key('repository.read') | |
232 | Session().add(perm[0]) |
|
231 | Session().add(perm[0]) | |
233 | Session().commit() |
|
232 | Session().commit() |
@@ -1,121 +1,104 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2017 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import mock |
|
21 | import mock | |
22 | import pytest |
|
22 | import pytest | |
23 |
|
23 | |||
24 | import rhodecode |
|
24 | import rhodecode | |
25 | from rhodecode.model.db import Repository |
|
25 | from rhodecode.model.db import Repository | |
26 | from rhodecode.model.settings import SettingsModel |
|
26 | from rhodecode.model.settings import SettingsModel | |
27 | from rhodecode.tests import url |
|
|||
28 | from rhodecode.tests.utils import AssertResponse |
|
27 | from rhodecode.tests.utils import AssertResponse | |
29 |
|
28 | |||
30 |
|
29 | |||
31 | def route_path(name, params=None, **kwargs): |
|
30 | def route_path(name, params=None, **kwargs): | |
32 | import urllib |
|
31 | import urllib | |
33 |
|
32 | |||
34 | base_url = { |
|
33 | base_url = { | |
35 | 'edit_repo': '/{repo_name}/settings', |
|
34 | 'edit_repo': '/{repo_name}/settings', | |
|
35 | 'edit_repo_vcs': '/{repo_name}/settings/vcs', | |||
|
36 | 'edit_repo_vcs_update': '/{repo_name}/settings/vcs/update', | |||
36 | }[name].format(**kwargs) |
|
37 | }[name].format(**kwargs) | |
37 |
|
38 | |||
38 | if params: |
|
39 | if params: | |
39 | base_url = '{}?{}'.format(base_url, urllib.urlencode(params)) |
|
40 | base_url = '{}?{}'.format(base_url, urllib.urlencode(params)) | |
40 | return base_url |
|
41 | return base_url | |
41 |
|
42 | |||
42 |
|
43 | |||
43 | @pytest.mark.usefixtures('autologin_user', 'app') |
|
44 | @pytest.mark.usefixtures('autologin_user', 'app') | |
44 | class TestAdminRepoVcsSettings(object): |
|
45 | class TestAdminRepoVcsSettings(object): | |
45 |
|
46 | |||
46 | @pytest.mark.parametrize('setting_name, setting_backends', [ |
|
47 | @pytest.mark.parametrize('setting_name, setting_backends', [ | |
47 | ('hg_use_rebase_for_merging', ['hg']), |
|
48 | ('hg_use_rebase_for_merging', ['hg']), | |
48 | ]) |
|
49 | ]) | |
49 | def test_labs_settings_visible_if_enabled( |
|
50 | def test_labs_settings_visible_if_enabled( | |
50 | self, setting_name, setting_backends, backend): |
|
51 | self, setting_name, setting_backends, backend): | |
51 | if backend.alias not in setting_backends: |
|
52 | if backend.alias not in setting_backends: | |
52 | pytest.skip('Setting not available for backend {}'.format(backend)) |
|
53 | pytest.skip('Setting not available for backend {}'.format(backend)) | |
53 |
|
54 | |||
54 |
vcs_settings_url = |
|
55 | vcs_settings_url = route_path( | |
55 |
'repo_vcs |
|
56 | 'edit_repo_vcs', repo_name=backend.repo.repo_name) | |
56 |
|
57 | |||
57 | with mock.patch.dict( |
|
58 | with mock.patch.dict( | |
58 | rhodecode.CONFIG, {'labs_settings_active': 'true'}): |
|
59 | rhodecode.CONFIG, {'labs_settings_active': 'true'}): | |
59 | response = self.app.get(vcs_settings_url) |
|
60 | response = self.app.get(vcs_settings_url) | |
60 |
|
61 | |||
61 | assertr = AssertResponse(response) |
|
62 | assertr = AssertResponse(response) | |
62 | assertr.one_element_exists('#rhodecode_{}'.format(setting_name)) |
|
63 | assertr.one_element_exists('#rhodecode_{}'.format(setting_name)) | |
63 |
|
64 | |||
64 | @pytest.mark.parametrize('setting_name, setting_backends', [ |
|
65 | @pytest.mark.parametrize('setting_name, setting_backends', [ | |
65 | ('hg_use_rebase_for_merging', ['hg']), |
|
66 | ('hg_use_rebase_for_merging', ['hg']), | |
66 | ]) |
|
67 | ]) | |
67 | def test_labs_settings_not_visible_if_disabled( |
|
|||
68 | self, setting_name, setting_backends, backend): |
|
|||
69 | if backend.alias not in setting_backends: |
|
|||
70 | pytest.skip('Setting not available for backend {}'.format(backend)) |
|
|||
71 |
|
||||
72 | vcs_settings_url = url( |
|
|||
73 | 'repo_vcs_settings', repo_name=backend.repo.repo_name) |
|
|||
74 |
|
||||
75 | with mock.patch.dict( |
|
|||
76 | rhodecode.CONFIG, {'labs_settings_active': 'false'}): |
|
|||
77 | response = self.app.get(vcs_settings_url) |
|
|||
78 |
|
||||
79 | assertr = AssertResponse(response) |
|
|||
80 | assertr.no_element_exists('#rhodecode_{}'.format(setting_name)) |
|
|||
81 |
|
||||
82 | @pytest.mark.parametrize('setting_name, setting_backends', [ |
|
|||
83 | ('hg_use_rebase_for_merging', ['hg']), |
|
|||
84 | ]) |
|
|||
85 | def test_update_boolean_settings( |
|
68 | def test_update_boolean_settings( | |
86 | self, csrf_token, setting_name, setting_backends, backend): |
|
69 | self, csrf_token, setting_name, setting_backends, backend): | |
87 | if backend.alias not in setting_backends: |
|
70 | if backend.alias not in setting_backends: | |
88 | pytest.skip('Setting not available for backend {}'.format(backend)) |
|
71 | pytest.skip('Setting not available for backend {}'.format(backend)) | |
89 |
|
72 | |||
90 | repo = backend.create_repo() |
|
73 | repo = backend.create_repo() | |
91 | repo_name = repo.repo_name |
|
74 | repo_name = repo.repo_name | |
92 |
|
75 | |||
93 | settings_model = SettingsModel(repo=repo) |
|
76 | settings_model = SettingsModel(repo=repo) | |
94 |
vcs_settings_url = |
|
77 | vcs_settings_url = route_path( | |
95 |
'repo_vcs_ |
|
78 | 'edit_repo_vcs_update', repo_name=repo_name) | |
96 |
|
79 | |||
97 | self.app.post( |
|
80 | self.app.post( | |
98 | vcs_settings_url, |
|
81 | vcs_settings_url, | |
99 | params={ |
|
82 | params={ | |
100 | 'inherit_global_settings': False, |
|
83 | 'inherit_global_settings': False, | |
101 | 'new_svn_branch': 'dummy-value-for-testing', |
|
84 | 'new_svn_branch': 'dummy-value-for-testing', | |
102 | 'new_svn_tag': 'dummy-value-for-testing', |
|
85 | 'new_svn_tag': 'dummy-value-for-testing', | |
103 | 'rhodecode_{}'.format(setting_name): 'true', |
|
86 | 'rhodecode_{}'.format(setting_name): 'true', | |
104 | 'csrf_token': csrf_token, |
|
87 | 'csrf_token': csrf_token, | |
105 | }) |
|
88 | }) | |
106 | settings_model = SettingsModel(repo=Repository.get_by_repo_name(repo_name)) |
|
89 | settings_model = SettingsModel(repo=Repository.get_by_repo_name(repo_name)) | |
107 | setting = settings_model.get_setting_by_name(setting_name) |
|
90 | setting = settings_model.get_setting_by_name(setting_name) | |
108 | assert setting.app_settings_value |
|
91 | assert setting.app_settings_value | |
109 |
|
92 | |||
110 | self.app.post( |
|
93 | self.app.post( | |
111 | vcs_settings_url, |
|
94 | vcs_settings_url, | |
112 | params={ |
|
95 | params={ | |
113 | 'inherit_global_settings': False, |
|
96 | 'inherit_global_settings': False, | |
114 | 'new_svn_branch': 'dummy-value-for-testing', |
|
97 | 'new_svn_branch': 'dummy-value-for-testing', | |
115 | 'new_svn_tag': 'dummy-value-for-testing', |
|
98 | 'new_svn_tag': 'dummy-value-for-testing', | |
116 | 'rhodecode_{}'.format(setting_name): 'false', |
|
99 | 'rhodecode_{}'.format(setting_name): 'false', | |
117 | 'csrf_token': csrf_token, |
|
100 | 'csrf_token': csrf_token, | |
118 | }) |
|
101 | }) | |
119 | settings_model = SettingsModel(repo=Repository.get_by_repo_name(repo_name)) |
|
102 | settings_model = SettingsModel(repo=Repository.get_by_repo_name(repo_name)) | |
120 | setting = settings_model.get_setting_by_name(setting_name) |
|
103 | setting = settings_model.get_setting_by_name(setting_name) | |
121 | assert not setting.app_settings_value |
|
104 | assert not setting.app_settings_value |
@@ -1,557 +1,557 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2017 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 |
|
21 | |||
22 | import logging |
|
22 | import logging | |
23 | import collections |
|
23 | import collections | |
24 |
|
24 | |||
25 | from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest, HTTPFound |
|
25 | from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest, HTTPFound | |
26 | from pyramid.view import view_config |
|
26 | from pyramid.view import view_config | |
27 | from pyramid.renderers import render |
|
27 | from pyramid.renderers import render | |
28 | from pyramid.response import Response |
|
28 | from pyramid.response import Response | |
29 |
|
29 | |||
30 | from rhodecode.apps._base import RepoAppView |
|
30 | from rhodecode.apps._base import RepoAppView | |
31 |
|
31 | |||
32 | from rhodecode.lib import diffs, codeblocks |
|
32 | from rhodecode.lib import diffs, codeblocks | |
33 | from rhodecode.lib.auth import ( |
|
33 | from rhodecode.lib.auth import ( | |
34 | LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous, CSRFRequired) |
|
34 | LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous, CSRFRequired) | |
35 |
|
35 | |||
36 | from rhodecode.lib.compat import OrderedDict |
|
36 | from rhodecode.lib.compat import OrderedDict | |
37 | from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError |
|
37 | from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError | |
38 | import rhodecode.lib.helpers as h |
|
38 | import rhodecode.lib.helpers as h | |
39 |
from rhodecode.lib.utils2 import safe_unicode |
|
39 | from rhodecode.lib.utils2 import safe_unicode | |
40 | from rhodecode.lib.vcs.backends.base import EmptyCommit |
|
40 | from rhodecode.lib.vcs.backends.base import EmptyCommit | |
41 | from rhodecode.lib.vcs.exceptions import ( |
|
41 | from rhodecode.lib.vcs.exceptions import ( | |
42 | RepositoryError, CommitDoesNotExistError, NodeDoesNotExistError) |
|
42 | RepositoryError, CommitDoesNotExistError, NodeDoesNotExistError) | |
43 | from rhodecode.model.db import ChangesetComment, ChangesetStatus |
|
43 | from rhodecode.model.db import ChangesetComment, ChangesetStatus | |
44 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
44 | from rhodecode.model.changeset_status import ChangesetStatusModel | |
45 | from rhodecode.model.comment import CommentsModel |
|
45 | from rhodecode.model.comment import CommentsModel | |
46 | from rhodecode.model.meta import Session |
|
46 | from rhodecode.model.meta import Session | |
47 |
|
47 | |||
48 |
|
48 | |||
49 | log = logging.getLogger(__name__) |
|
49 | log = logging.getLogger(__name__) | |
50 |
|
50 | |||
51 |
|
51 | |||
52 | def _update_with_GET(params, request): |
|
52 | def _update_with_GET(params, request): | |
53 | for k in ['diff1', 'diff2', 'diff']: |
|
53 | for k in ['diff1', 'diff2', 'diff']: | |
54 | params[k] += request.GET.getall(k) |
|
54 | params[k] += request.GET.getall(k) | |
55 |
|
55 | |||
56 |
|
56 | |||
57 | def get_ignore_ws(fid, request): |
|
57 | def get_ignore_ws(fid, request): | |
58 | ig_ws_global = request.GET.get('ignorews') |
|
58 | ig_ws_global = request.GET.get('ignorews') | |
59 | ig_ws = filter(lambda k: k.startswith('WS'), request.GET.getall(fid)) |
|
59 | ig_ws = filter(lambda k: k.startswith('WS'), request.GET.getall(fid)) | |
60 | if ig_ws: |
|
60 | if ig_ws: | |
61 | try: |
|
61 | try: | |
62 | return int(ig_ws[0].split(':')[-1]) |
|
62 | return int(ig_ws[0].split(':')[-1]) | |
63 | except Exception: |
|
63 | except Exception: | |
64 | pass |
|
64 | pass | |
65 | return ig_ws_global |
|
65 | return ig_ws_global | |
66 |
|
66 | |||
67 |
|
67 | |||
68 | def _ignorews_url(request, fileid=None): |
|
68 | def _ignorews_url(request, fileid=None): | |
69 | _ = request.translate |
|
69 | _ = request.translate | |
70 | fileid = str(fileid) if fileid else None |
|
70 | fileid = str(fileid) if fileid else None | |
71 | params = collections.defaultdict(list) |
|
71 | params = collections.defaultdict(list) | |
72 | _update_with_GET(params, request) |
|
72 | _update_with_GET(params, request) | |
73 | label = _('Show whitespace') |
|
73 | label = _('Show whitespace') | |
74 | tooltiplbl = _('Show whitespace for all diffs') |
|
74 | tooltiplbl = _('Show whitespace for all diffs') | |
75 | ig_ws = get_ignore_ws(fileid, request) |
|
75 | ig_ws = get_ignore_ws(fileid, request) | |
76 | ln_ctx = get_line_ctx(fileid, request) |
|
76 | ln_ctx = get_line_ctx(fileid, request) | |
77 |
|
77 | |||
78 | if ig_ws is None: |
|
78 | if ig_ws is None: | |
79 | params['ignorews'] += [1] |
|
79 | params['ignorews'] += [1] | |
80 | label = _('Ignore whitespace') |
|
80 | label = _('Ignore whitespace') | |
81 | tooltiplbl = _('Ignore whitespace for all diffs') |
|
81 | tooltiplbl = _('Ignore whitespace for all diffs') | |
82 | ctx_key = 'context' |
|
82 | ctx_key = 'context' | |
83 | ctx_val = ln_ctx |
|
83 | ctx_val = ln_ctx | |
84 |
|
84 | |||
85 | # if we have passed in ln_ctx pass it along to our params |
|
85 | # if we have passed in ln_ctx pass it along to our params | |
86 | if ln_ctx: |
|
86 | if ln_ctx: | |
87 | params[ctx_key] += [ctx_val] |
|
87 | params[ctx_key] += [ctx_val] | |
88 |
|
88 | |||
89 | if fileid: |
|
89 | if fileid: | |
90 | params['anchor'] = 'a_' + fileid |
|
90 | params['anchor'] = 'a_' + fileid | |
91 | return h.link_to(label, request.current_route_path(_query=params), |
|
91 | return h.link_to(label, request.current_route_path(_query=params), | |
92 | title=tooltiplbl, class_='tooltip') |
|
92 | title=tooltiplbl, class_='tooltip') | |
93 |
|
93 | |||
94 |
|
94 | |||
95 | def get_line_ctx(fid, request): |
|
95 | def get_line_ctx(fid, request): | |
96 | ln_ctx_global = request.GET.get('context') |
|
96 | ln_ctx_global = request.GET.get('context') | |
97 | if fid: |
|
97 | if fid: | |
98 | ln_ctx = filter(lambda k: k.startswith('C'), request.GET.getall(fid)) |
|
98 | ln_ctx = filter(lambda k: k.startswith('C'), request.GET.getall(fid)) | |
99 | else: |
|
99 | else: | |
100 | _ln_ctx = filter(lambda k: k.startswith('C'), request.GET) |
|
100 | _ln_ctx = filter(lambda k: k.startswith('C'), request.GET) | |
101 | ln_ctx = request.GET.get(_ln_ctx[0]) if _ln_ctx else ln_ctx_global |
|
101 | ln_ctx = request.GET.get(_ln_ctx[0]) if _ln_ctx else ln_ctx_global | |
102 | if ln_ctx: |
|
102 | if ln_ctx: | |
103 | ln_ctx = [ln_ctx] |
|
103 | ln_ctx = [ln_ctx] | |
104 |
|
104 | |||
105 | if ln_ctx: |
|
105 | if ln_ctx: | |
106 | retval = ln_ctx[0].split(':')[-1] |
|
106 | retval = ln_ctx[0].split(':')[-1] | |
107 | else: |
|
107 | else: | |
108 | retval = ln_ctx_global |
|
108 | retval = ln_ctx_global | |
109 |
|
109 | |||
110 | try: |
|
110 | try: | |
111 | return int(retval) |
|
111 | return int(retval) | |
112 | except Exception: |
|
112 | except Exception: | |
113 | return 3 |
|
113 | return 3 | |
114 |
|
114 | |||
115 |
|
115 | |||
116 | def _context_url(request, fileid=None): |
|
116 | def _context_url(request, fileid=None): | |
117 | """ |
|
117 | """ | |
118 | Generates a url for context lines. |
|
118 | Generates a url for context lines. | |
119 |
|
119 | |||
120 | :param fileid: |
|
120 | :param fileid: | |
121 | """ |
|
121 | """ | |
122 |
|
122 | |||
123 | _ = request.translate |
|
123 | _ = request.translate | |
124 | fileid = str(fileid) if fileid else None |
|
124 | fileid = str(fileid) if fileid else None | |
125 | ig_ws = get_ignore_ws(fileid, request) |
|
125 | ig_ws = get_ignore_ws(fileid, request) | |
126 | ln_ctx = (get_line_ctx(fileid, request) or 3) * 2 |
|
126 | ln_ctx = (get_line_ctx(fileid, request) or 3) * 2 | |
127 |
|
127 | |||
128 | params = collections.defaultdict(list) |
|
128 | params = collections.defaultdict(list) | |
129 | _update_with_GET(params, request) |
|
129 | _update_with_GET(params, request) | |
130 |
|
130 | |||
131 | if ln_ctx > 0: |
|
131 | if ln_ctx > 0: | |
132 | params['context'] += [ln_ctx] |
|
132 | params['context'] += [ln_ctx] | |
133 |
|
133 | |||
134 | if ig_ws: |
|
134 | if ig_ws: | |
135 | ig_ws_key = 'ignorews' |
|
135 | ig_ws_key = 'ignorews' | |
136 | ig_ws_val = 1 |
|
136 | ig_ws_val = 1 | |
137 | params[ig_ws_key] += [ig_ws_val] |
|
137 | params[ig_ws_key] += [ig_ws_val] | |
138 |
|
138 | |||
139 | lbl = _('Increase context') |
|
139 | lbl = _('Increase context') | |
140 | tooltiplbl = _('Increase context for all diffs') |
|
140 | tooltiplbl = _('Increase context for all diffs') | |
141 |
|
141 | |||
142 | if fileid: |
|
142 | if fileid: | |
143 | params['anchor'] = 'a_' + fileid |
|
143 | params['anchor'] = 'a_' + fileid | |
144 | return h.link_to(lbl, request.current_route_path(_query=params), |
|
144 | return h.link_to(lbl, request.current_route_path(_query=params), | |
145 | title=tooltiplbl, class_='tooltip') |
|
145 | title=tooltiplbl, class_='tooltip') | |
146 |
|
146 | |||
147 |
|
147 | |||
148 | class RepoCommitsView(RepoAppView): |
|
148 | class RepoCommitsView(RepoAppView): | |
149 | def load_default_context(self): |
|
149 | def load_default_context(self): | |
150 | c = self._get_local_tmpl_context(include_app_defaults=True) |
|
150 | c = self._get_local_tmpl_context(include_app_defaults=True) | |
151 |
|
151 | |||
152 | # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead |
|
152 | # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead | |
153 | c.repo_info = self.db_repo |
|
153 | c.repo_info = self.db_repo | |
154 | c.rhodecode_repo = self.rhodecode_vcs_repo |
|
154 | c.rhodecode_repo = self.rhodecode_vcs_repo | |
155 |
|
155 | |||
156 | self._register_global_c(c) |
|
156 | self._register_global_c(c) | |
157 | return c |
|
157 | return c | |
158 |
|
158 | |||
159 | def _commit(self, commit_id_range, method): |
|
159 | def _commit(self, commit_id_range, method): | |
160 | _ = self.request.translate |
|
160 | _ = self.request.translate | |
161 | c = self.load_default_context() |
|
161 | c = self.load_default_context() | |
162 | c.ignorews_url = _ignorews_url |
|
162 | c.ignorews_url = _ignorews_url | |
163 | c.context_url = _context_url |
|
163 | c.context_url = _context_url | |
164 | c.fulldiff = self.request.GET.get('fulldiff') |
|
164 | c.fulldiff = self.request.GET.get('fulldiff') | |
165 |
|
165 | |||
166 | # fetch global flags of ignore ws or context lines |
|
166 | # fetch global flags of ignore ws or context lines | |
167 | context_lcl = get_line_ctx('', self.request) |
|
167 | context_lcl = get_line_ctx('', self.request) | |
168 | ign_whitespace_lcl = get_ignore_ws('', self.request) |
|
168 | ign_whitespace_lcl = get_ignore_ws('', self.request) | |
169 |
|
169 | |||
170 | # diff_limit will cut off the whole diff if the limit is applied |
|
170 | # diff_limit will cut off the whole diff if the limit is applied | |
171 | # otherwise it will just hide the big files from the front-end |
|
171 | # otherwise it will just hide the big files from the front-end | |
172 | diff_limit = c.visual.cut_off_limit_diff |
|
172 | diff_limit = c.visual.cut_off_limit_diff | |
173 | file_limit = c.visual.cut_off_limit_file |
|
173 | file_limit = c.visual.cut_off_limit_file | |
174 |
|
174 | |||
175 | # get ranges of commit ids if preset |
|
175 | # get ranges of commit ids if preset | |
176 | commit_range = commit_id_range.split('...')[:2] |
|
176 | commit_range = commit_id_range.split('...')[:2] | |
177 |
|
177 | |||
178 | try: |
|
178 | try: | |
179 | pre_load = ['affected_files', 'author', 'branch', 'date', |
|
179 | pre_load = ['affected_files', 'author', 'branch', 'date', | |
180 | 'message', 'parents'] |
|
180 | 'message', 'parents'] | |
181 |
|
181 | |||
182 | if len(commit_range) == 2: |
|
182 | if len(commit_range) == 2: | |
183 | commits = self.rhodecode_vcs_repo.get_commits( |
|
183 | commits = self.rhodecode_vcs_repo.get_commits( | |
184 | start_id=commit_range[0], end_id=commit_range[1], |
|
184 | start_id=commit_range[0], end_id=commit_range[1], | |
185 | pre_load=pre_load) |
|
185 | pre_load=pre_load) | |
186 | commits = list(commits) |
|
186 | commits = list(commits) | |
187 | else: |
|
187 | else: | |
188 | commits = [self.rhodecode_vcs_repo.get_commit( |
|
188 | commits = [self.rhodecode_vcs_repo.get_commit( | |
189 | commit_id=commit_id_range, pre_load=pre_load)] |
|
189 | commit_id=commit_id_range, pre_load=pre_load)] | |
190 |
|
190 | |||
191 | c.commit_ranges = commits |
|
191 | c.commit_ranges = commits | |
192 | if not c.commit_ranges: |
|
192 | if not c.commit_ranges: | |
193 | raise RepositoryError( |
|
193 | raise RepositoryError( | |
194 | 'The commit range returned an empty result') |
|
194 | 'The commit range returned an empty result') | |
195 | except CommitDoesNotExistError: |
|
195 | except CommitDoesNotExistError: | |
196 | msg = _('No such commit exists for this repository') |
|
196 | msg = _('No such commit exists for this repository') | |
197 | h.flash(msg, category='error') |
|
197 | h.flash(msg, category='error') | |
198 | raise HTTPNotFound() |
|
198 | raise HTTPNotFound() | |
199 | except Exception: |
|
199 | except Exception: | |
200 | log.exception("General failure") |
|
200 | log.exception("General failure") | |
201 | raise HTTPNotFound() |
|
201 | raise HTTPNotFound() | |
202 |
|
202 | |||
203 | c.changes = OrderedDict() |
|
203 | c.changes = OrderedDict() | |
204 | c.lines_added = 0 |
|
204 | c.lines_added = 0 | |
205 | c.lines_deleted = 0 |
|
205 | c.lines_deleted = 0 | |
206 |
|
206 | |||
207 | # auto collapse if we have more than limit |
|
207 | # auto collapse if we have more than limit | |
208 | collapse_limit = diffs.DiffProcessor._collapse_commits_over |
|
208 | collapse_limit = diffs.DiffProcessor._collapse_commits_over | |
209 | c.collapse_all_commits = len(c.commit_ranges) > collapse_limit |
|
209 | c.collapse_all_commits = len(c.commit_ranges) > collapse_limit | |
210 |
|
210 | |||
211 | c.commit_statuses = ChangesetStatus.STATUSES |
|
211 | c.commit_statuses = ChangesetStatus.STATUSES | |
212 | c.inline_comments = [] |
|
212 | c.inline_comments = [] | |
213 | c.files = [] |
|
213 | c.files = [] | |
214 |
|
214 | |||
215 | c.statuses = [] |
|
215 | c.statuses = [] | |
216 | c.comments = [] |
|
216 | c.comments = [] | |
217 | c.unresolved_comments = [] |
|
217 | c.unresolved_comments = [] | |
218 | if len(c.commit_ranges) == 1: |
|
218 | if len(c.commit_ranges) == 1: | |
219 | commit = c.commit_ranges[0] |
|
219 | commit = c.commit_ranges[0] | |
220 | c.comments = CommentsModel().get_comments( |
|
220 | c.comments = CommentsModel().get_comments( | |
221 | self.db_repo.repo_id, |
|
221 | self.db_repo.repo_id, | |
222 | revision=commit.raw_id) |
|
222 | revision=commit.raw_id) | |
223 | c.statuses.append(ChangesetStatusModel().get_status( |
|
223 | c.statuses.append(ChangesetStatusModel().get_status( | |
224 | self.db_repo.repo_id, commit.raw_id)) |
|
224 | self.db_repo.repo_id, commit.raw_id)) | |
225 | # comments from PR |
|
225 | # comments from PR | |
226 | statuses = ChangesetStatusModel().get_statuses( |
|
226 | statuses = ChangesetStatusModel().get_statuses( | |
227 | self.db_repo.repo_id, commit.raw_id, |
|
227 | self.db_repo.repo_id, commit.raw_id, | |
228 | with_revisions=True) |
|
228 | with_revisions=True) | |
229 | prs = set(st.pull_request for st in statuses |
|
229 | prs = set(st.pull_request for st in statuses | |
230 | if st.pull_request is not None) |
|
230 | if st.pull_request is not None) | |
231 | # from associated statuses, check the pull requests, and |
|
231 | # from associated statuses, check the pull requests, and | |
232 | # show comments from them |
|
232 | # show comments from them | |
233 | for pr in prs: |
|
233 | for pr in prs: | |
234 | c.comments.extend(pr.comments) |
|
234 | c.comments.extend(pr.comments) | |
235 |
|
235 | |||
236 | c.unresolved_comments = CommentsModel()\ |
|
236 | c.unresolved_comments = CommentsModel()\ | |
237 | .get_commit_unresolved_todos(commit.raw_id) |
|
237 | .get_commit_unresolved_todos(commit.raw_id) | |
238 |
|
238 | |||
239 | diff = None |
|
239 | diff = None | |
240 | # Iterate over ranges (default commit view is always one commit) |
|
240 | # Iterate over ranges (default commit view is always one commit) | |
241 | for commit in c.commit_ranges: |
|
241 | for commit in c.commit_ranges: | |
242 | c.changes[commit.raw_id] = [] |
|
242 | c.changes[commit.raw_id] = [] | |
243 |
|
243 | |||
244 | commit2 = commit |
|
244 | commit2 = commit | |
245 | commit1 = commit.parents[0] if commit.parents else EmptyCommit() |
|
245 | commit1 = commit.parents[0] if commit.parents else EmptyCommit() | |
246 |
|
246 | |||
247 | _diff = self.rhodecode_vcs_repo.get_diff( |
|
247 | _diff = self.rhodecode_vcs_repo.get_diff( | |
248 | commit1, commit2, |
|
248 | commit1, commit2, | |
249 | ignore_whitespace=ign_whitespace_lcl, context=context_lcl) |
|
249 | ignore_whitespace=ign_whitespace_lcl, context=context_lcl) | |
250 | diff_processor = diffs.DiffProcessor( |
|
250 | diff_processor = diffs.DiffProcessor( | |
251 | _diff, format='newdiff', diff_limit=diff_limit, |
|
251 | _diff, format='newdiff', diff_limit=diff_limit, | |
252 | file_limit=file_limit, show_full_diff=c.fulldiff) |
|
252 | file_limit=file_limit, show_full_diff=c.fulldiff) | |
253 |
|
253 | |||
254 | commit_changes = OrderedDict() |
|
254 | commit_changes = OrderedDict() | |
255 | if method == 'show': |
|
255 | if method == 'show': | |
256 | _parsed = diff_processor.prepare() |
|
256 | _parsed = diff_processor.prepare() | |
257 | c.limited_diff = isinstance(_parsed, diffs.LimitedDiffContainer) |
|
257 | c.limited_diff = isinstance(_parsed, diffs.LimitedDiffContainer) | |
258 |
|
258 | |||
259 | _parsed = diff_processor.prepare() |
|
259 | _parsed = diff_processor.prepare() | |
260 |
|
260 | |||
261 | def _node_getter(commit): |
|
261 | def _node_getter(commit): | |
262 | def get_node(fname): |
|
262 | def get_node(fname): | |
263 | try: |
|
263 | try: | |
264 | return commit.get_node(fname) |
|
264 | return commit.get_node(fname) | |
265 | except NodeDoesNotExistError: |
|
265 | except NodeDoesNotExistError: | |
266 | return None |
|
266 | return None | |
267 | return get_node |
|
267 | return get_node | |
268 |
|
268 | |||
269 | inline_comments = CommentsModel().get_inline_comments( |
|
269 | inline_comments = CommentsModel().get_inline_comments( | |
270 | self.db_repo.repo_id, revision=commit.raw_id) |
|
270 | self.db_repo.repo_id, revision=commit.raw_id) | |
271 | c.inline_cnt = CommentsModel().get_inline_comments_count( |
|
271 | c.inline_cnt = CommentsModel().get_inline_comments_count( | |
272 | inline_comments) |
|
272 | inline_comments) | |
273 |
|
273 | |||
274 | diffset = codeblocks.DiffSet( |
|
274 | diffset = codeblocks.DiffSet( | |
275 | repo_name=self.db_repo_name, |
|
275 | repo_name=self.db_repo_name, | |
276 | source_node_getter=_node_getter(commit1), |
|
276 | source_node_getter=_node_getter(commit1), | |
277 | target_node_getter=_node_getter(commit2), |
|
277 | target_node_getter=_node_getter(commit2), | |
278 | comments=inline_comments) |
|
278 | comments=inline_comments) | |
279 | diffset = diffset.render_patchset( |
|
279 | diffset = diffset.render_patchset( | |
280 | _parsed, commit1.raw_id, commit2.raw_id) |
|
280 | _parsed, commit1.raw_id, commit2.raw_id) | |
281 |
|
281 | |||
282 | c.changes[commit.raw_id] = diffset |
|
282 | c.changes[commit.raw_id] = diffset | |
283 | else: |
|
283 | else: | |
284 | # downloads/raw we only need RAW diff nothing else |
|
284 | # downloads/raw we only need RAW diff nothing else | |
285 | diff = diff_processor.as_raw() |
|
285 | diff = diff_processor.as_raw() | |
286 | c.changes[commit.raw_id] = [None, None, None, None, diff, None, None] |
|
286 | c.changes[commit.raw_id] = [None, None, None, None, diff, None, None] | |
287 |
|
287 | |||
288 | # sort comments by how they were generated |
|
288 | # sort comments by how they were generated | |
289 | c.comments = sorted(c.comments, key=lambda x: x.comment_id) |
|
289 | c.comments = sorted(c.comments, key=lambda x: x.comment_id) | |
290 |
|
290 | |||
291 | if len(c.commit_ranges) == 1: |
|
291 | if len(c.commit_ranges) == 1: | |
292 | c.commit = c.commit_ranges[0] |
|
292 | c.commit = c.commit_ranges[0] | |
293 | c.parent_tmpl = ''.join( |
|
293 | c.parent_tmpl = ''.join( | |
294 | '# Parent %s\n' % x.raw_id for x in c.commit.parents) |
|
294 | '# Parent %s\n' % x.raw_id for x in c.commit.parents) | |
295 |
|
295 | |||
296 | if method == 'download': |
|
296 | if method == 'download': | |
297 | response = Response(diff) |
|
297 | response = Response(diff) | |
298 | response.content_type = 'text/plain' |
|
298 | response.content_type = 'text/plain' | |
299 | response.content_disposition = ( |
|
299 | response.content_disposition = ( | |
300 | 'attachment; filename=%s.diff' % commit_id_range[:12]) |
|
300 | 'attachment; filename=%s.diff' % commit_id_range[:12]) | |
301 | return response |
|
301 | return response | |
302 | elif method == 'patch': |
|
302 | elif method == 'patch': | |
303 | c.diff = safe_unicode(diff) |
|
303 | c.diff = safe_unicode(diff) | |
304 | patch = render( |
|
304 | patch = render( | |
305 | 'rhodecode:templates/changeset/patch_changeset.mako', |
|
305 | 'rhodecode:templates/changeset/patch_changeset.mako', | |
306 | self._get_template_context(c), self.request) |
|
306 | self._get_template_context(c), self.request) | |
307 | response = Response(patch) |
|
307 | response = Response(patch) | |
308 | response.content_type = 'text/plain' |
|
308 | response.content_type = 'text/plain' | |
309 | return response |
|
309 | return response | |
310 | elif method == 'raw': |
|
310 | elif method == 'raw': | |
311 | response = Response(diff) |
|
311 | response = Response(diff) | |
312 | response.content_type = 'text/plain' |
|
312 | response.content_type = 'text/plain' | |
313 | return response |
|
313 | return response | |
314 | elif method == 'show': |
|
314 | elif method == 'show': | |
315 | if len(c.commit_ranges) == 1: |
|
315 | if len(c.commit_ranges) == 1: | |
316 | html = render( |
|
316 | html = render( | |
317 | 'rhodecode:templates/changeset/changeset.mako', |
|
317 | 'rhodecode:templates/changeset/changeset.mako', | |
318 | self._get_template_context(c), self.request) |
|
318 | self._get_template_context(c), self.request) | |
319 | return Response(html) |
|
319 | return Response(html) | |
320 | else: |
|
320 | else: | |
321 | c.ancestor = None |
|
321 | c.ancestor = None | |
322 | c.target_repo = self.db_repo |
|
322 | c.target_repo = self.db_repo | |
323 | html = render( |
|
323 | html = render( | |
324 | 'rhodecode:templates/changeset/changeset_range.mako', |
|
324 | 'rhodecode:templates/changeset/changeset_range.mako', | |
325 | self._get_template_context(c), self.request) |
|
325 | self._get_template_context(c), self.request) | |
326 | return Response(html) |
|
326 | return Response(html) | |
327 |
|
327 | |||
328 | raise HTTPBadRequest() |
|
328 | raise HTTPBadRequest() | |
329 |
|
329 | |||
330 | @LoginRequired() |
|
330 | @LoginRequired() | |
331 | @HasRepoPermissionAnyDecorator( |
|
331 | @HasRepoPermissionAnyDecorator( | |
332 | 'repository.read', 'repository.write', 'repository.admin') |
|
332 | 'repository.read', 'repository.write', 'repository.admin') | |
333 | @view_config( |
|
333 | @view_config( | |
334 | route_name='repo_commit', request_method='GET', |
|
334 | route_name='repo_commit', request_method='GET', | |
335 | renderer=None) |
|
335 | renderer=None) | |
336 | def repo_commit_show(self): |
|
336 | def repo_commit_show(self): | |
337 | commit_id = self.request.matchdict['commit_id'] |
|
337 | commit_id = self.request.matchdict['commit_id'] | |
338 | return self._commit(commit_id, method='show') |
|
338 | return self._commit(commit_id, method='show') | |
339 |
|
339 | |||
340 | @LoginRequired() |
|
340 | @LoginRequired() | |
341 | @HasRepoPermissionAnyDecorator( |
|
341 | @HasRepoPermissionAnyDecorator( | |
342 | 'repository.read', 'repository.write', 'repository.admin') |
|
342 | 'repository.read', 'repository.write', 'repository.admin') | |
343 | @view_config( |
|
343 | @view_config( | |
344 | route_name='repo_commit_raw', request_method='GET', |
|
344 | route_name='repo_commit_raw', request_method='GET', | |
345 | renderer=None) |
|
345 | renderer=None) | |
346 | @view_config( |
|
346 | @view_config( | |
347 | route_name='repo_commit_raw_deprecated', request_method='GET', |
|
347 | route_name='repo_commit_raw_deprecated', request_method='GET', | |
348 | renderer=None) |
|
348 | renderer=None) | |
349 | def repo_commit_raw(self): |
|
349 | def repo_commit_raw(self): | |
350 | commit_id = self.request.matchdict['commit_id'] |
|
350 | commit_id = self.request.matchdict['commit_id'] | |
351 | return self._commit(commit_id, method='raw') |
|
351 | return self._commit(commit_id, method='raw') | |
352 |
|
352 | |||
353 | @LoginRequired() |
|
353 | @LoginRequired() | |
354 | @HasRepoPermissionAnyDecorator( |
|
354 | @HasRepoPermissionAnyDecorator( | |
355 | 'repository.read', 'repository.write', 'repository.admin') |
|
355 | 'repository.read', 'repository.write', 'repository.admin') | |
356 | @view_config( |
|
356 | @view_config( | |
357 | route_name='repo_commit_patch', request_method='GET', |
|
357 | route_name='repo_commit_patch', request_method='GET', | |
358 | renderer=None) |
|
358 | renderer=None) | |
359 | def repo_commit_patch(self): |
|
359 | def repo_commit_patch(self): | |
360 | commit_id = self.request.matchdict['commit_id'] |
|
360 | commit_id = self.request.matchdict['commit_id'] | |
361 | return self._commit(commit_id, method='patch') |
|
361 | return self._commit(commit_id, method='patch') | |
362 |
|
362 | |||
363 | @LoginRequired() |
|
363 | @LoginRequired() | |
364 | @HasRepoPermissionAnyDecorator( |
|
364 | @HasRepoPermissionAnyDecorator( | |
365 | 'repository.read', 'repository.write', 'repository.admin') |
|
365 | 'repository.read', 'repository.write', 'repository.admin') | |
366 | @view_config( |
|
366 | @view_config( | |
367 | route_name='repo_commit_download', request_method='GET', |
|
367 | route_name='repo_commit_download', request_method='GET', | |
368 | renderer=None) |
|
368 | renderer=None) | |
369 | def repo_commit_download(self): |
|
369 | def repo_commit_download(self): | |
370 | commit_id = self.request.matchdict['commit_id'] |
|
370 | commit_id = self.request.matchdict['commit_id'] | |
371 | return self._commit(commit_id, method='download') |
|
371 | return self._commit(commit_id, method='download') | |
372 |
|
372 | |||
373 | @LoginRequired() |
|
373 | @LoginRequired() | |
374 | @NotAnonymous() |
|
374 | @NotAnonymous() | |
375 | @HasRepoPermissionAnyDecorator( |
|
375 | @HasRepoPermissionAnyDecorator( | |
376 | 'repository.read', 'repository.write', 'repository.admin') |
|
376 | 'repository.read', 'repository.write', 'repository.admin') | |
377 | @CSRFRequired() |
|
377 | @CSRFRequired() | |
378 | @view_config( |
|
378 | @view_config( | |
379 | route_name='repo_commit_comment_create', request_method='POST', |
|
379 | route_name='repo_commit_comment_create', request_method='POST', | |
380 | renderer='json_ext') |
|
380 | renderer='json_ext') | |
381 | def repo_commit_comment_create(self): |
|
381 | def repo_commit_comment_create(self): | |
382 | _ = self.request.translate |
|
382 | _ = self.request.translate | |
383 | commit_id = self.request.matchdict['commit_id'] |
|
383 | commit_id = self.request.matchdict['commit_id'] | |
384 |
|
384 | |||
385 | c = self.load_default_context() |
|
385 | c = self.load_default_context() | |
386 | status = self.request.POST.get('changeset_status', None) |
|
386 | status = self.request.POST.get('changeset_status', None) | |
387 | text = self.request.POST.get('text') |
|
387 | text = self.request.POST.get('text') | |
388 | comment_type = self.request.POST.get('comment_type') |
|
388 | comment_type = self.request.POST.get('comment_type') | |
389 | resolves_comment_id = self.request.POST.get('resolves_comment_id', None) |
|
389 | resolves_comment_id = self.request.POST.get('resolves_comment_id', None) | |
390 |
|
390 | |||
391 | if status: |
|
391 | if status: | |
392 | text = text or (_('Status change %(transition_icon)s %(status)s') |
|
392 | text = text or (_('Status change %(transition_icon)s %(status)s') | |
393 | % {'transition_icon': '>', |
|
393 | % {'transition_icon': '>', | |
394 | 'status': ChangesetStatus.get_status_lbl(status)}) |
|
394 | 'status': ChangesetStatus.get_status_lbl(status)}) | |
395 |
|
395 | |||
396 | multi_commit_ids = [] |
|
396 | multi_commit_ids = [] | |
397 | for _commit_id in self.request.POST.get('commit_ids', '').split(','): |
|
397 | for _commit_id in self.request.POST.get('commit_ids', '').split(','): | |
398 | if _commit_id not in ['', None, EmptyCommit.raw_id]: |
|
398 | if _commit_id not in ['', None, EmptyCommit.raw_id]: | |
399 | if _commit_id not in multi_commit_ids: |
|
399 | if _commit_id not in multi_commit_ids: | |
400 | multi_commit_ids.append(_commit_id) |
|
400 | multi_commit_ids.append(_commit_id) | |
401 |
|
401 | |||
402 | commit_ids = multi_commit_ids or [commit_id] |
|
402 | commit_ids = multi_commit_ids or [commit_id] | |
403 |
|
403 | |||
404 | comment = None |
|
404 | comment = None | |
405 | for current_id in filter(None, commit_ids): |
|
405 | for current_id in filter(None, commit_ids): | |
406 | comment = CommentsModel().create( |
|
406 | comment = CommentsModel().create( | |
407 | text=text, |
|
407 | text=text, | |
408 | repo=self.db_repo.repo_id, |
|
408 | repo=self.db_repo.repo_id, | |
409 | user=self._rhodecode_db_user.user_id, |
|
409 | user=self._rhodecode_db_user.user_id, | |
410 | commit_id=current_id, |
|
410 | commit_id=current_id, | |
411 | f_path=self.request.POST.get('f_path'), |
|
411 | f_path=self.request.POST.get('f_path'), | |
412 | line_no=self.request.POST.get('line'), |
|
412 | line_no=self.request.POST.get('line'), | |
413 | status_change=(ChangesetStatus.get_status_lbl(status) |
|
413 | status_change=(ChangesetStatus.get_status_lbl(status) | |
414 | if status else None), |
|
414 | if status else None), | |
415 | status_change_type=status, |
|
415 | status_change_type=status, | |
416 | comment_type=comment_type, |
|
416 | comment_type=comment_type, | |
417 | resolves_comment_id=resolves_comment_id |
|
417 | resolves_comment_id=resolves_comment_id | |
418 | ) |
|
418 | ) | |
419 |
|
419 | |||
420 | # get status if set ! |
|
420 | # get status if set ! | |
421 | if status: |
|
421 | if status: | |
422 | # if latest status was from pull request and it's closed |
|
422 | # if latest status was from pull request and it's closed | |
423 | # disallow changing status ! |
|
423 | # disallow changing status ! | |
424 | # dont_allow_on_closed_pull_request = True ! |
|
424 | # dont_allow_on_closed_pull_request = True ! | |
425 |
|
425 | |||
426 | try: |
|
426 | try: | |
427 | ChangesetStatusModel().set_status( |
|
427 | ChangesetStatusModel().set_status( | |
428 | self.db_repo.repo_id, |
|
428 | self.db_repo.repo_id, | |
429 | status, |
|
429 | status, | |
430 | self._rhodecode_db_user.user_id, |
|
430 | self._rhodecode_db_user.user_id, | |
431 | comment, |
|
431 | comment, | |
432 | revision=current_id, |
|
432 | revision=current_id, | |
433 | dont_allow_on_closed_pull_request=True |
|
433 | dont_allow_on_closed_pull_request=True | |
434 | ) |
|
434 | ) | |
435 | except StatusChangeOnClosedPullRequestError: |
|
435 | except StatusChangeOnClosedPullRequestError: | |
436 | msg = _('Changing the status of a commit associated with ' |
|
436 | msg = _('Changing the status of a commit associated with ' | |
437 | 'a closed pull request is not allowed') |
|
437 | 'a closed pull request is not allowed') | |
438 | log.exception(msg) |
|
438 | log.exception(msg) | |
439 | h.flash(msg, category='warning') |
|
439 | h.flash(msg, category='warning') | |
440 | raise HTTPFound(h.route_path( |
|
440 | raise HTTPFound(h.route_path( | |
441 | 'repo_commit', repo_name=self.db_repo_name, |
|
441 | 'repo_commit', repo_name=self.db_repo_name, | |
442 | commit_id=current_id)) |
|
442 | commit_id=current_id)) | |
443 |
|
443 | |||
444 | # finalize, commit and redirect |
|
444 | # finalize, commit and redirect | |
445 | Session().commit() |
|
445 | Session().commit() | |
446 |
|
446 | |||
447 | data = { |
|
447 | data = { | |
448 | 'target_id': h.safeid(h.safe_unicode( |
|
448 | 'target_id': h.safeid(h.safe_unicode( | |
449 | self.request.POST.get('f_path'))), |
|
449 | self.request.POST.get('f_path'))), | |
450 | } |
|
450 | } | |
451 | if comment: |
|
451 | if comment: | |
452 | c.co = comment |
|
452 | c.co = comment | |
453 | rendered_comment = render( |
|
453 | rendered_comment = render( | |
454 | 'rhodecode:templates/changeset/changeset_comment_block.mako', |
|
454 | 'rhodecode:templates/changeset/changeset_comment_block.mako', | |
455 | self._get_template_context(c), self.request) |
|
455 | self._get_template_context(c), self.request) | |
456 |
|
456 | |||
457 | data.update(comment.get_dict()) |
|
457 | data.update(comment.get_dict()) | |
458 | data.update({'rendered_text': rendered_comment}) |
|
458 | data.update({'rendered_text': rendered_comment}) | |
459 |
|
459 | |||
460 | return data |
|
460 | return data | |
461 |
|
461 | |||
462 | @LoginRequired() |
|
462 | @LoginRequired() | |
463 | @NotAnonymous() |
|
463 | @NotAnonymous() | |
464 | @HasRepoPermissionAnyDecorator( |
|
464 | @HasRepoPermissionAnyDecorator( | |
465 | 'repository.read', 'repository.write', 'repository.admin') |
|
465 | 'repository.read', 'repository.write', 'repository.admin') | |
466 | @CSRFRequired() |
|
466 | @CSRFRequired() | |
467 | @view_config( |
|
467 | @view_config( | |
468 | route_name='repo_commit_comment_preview', request_method='POST', |
|
468 | route_name='repo_commit_comment_preview', request_method='POST', | |
469 | renderer='string', xhr=True) |
|
469 | renderer='string', xhr=True) | |
470 | def repo_commit_comment_preview(self): |
|
470 | def repo_commit_comment_preview(self): | |
471 | # Technically a CSRF token is not needed as no state changes with this |
|
471 | # Technically a CSRF token is not needed as no state changes with this | |
472 | # call. However, as this is a POST is better to have it, so automated |
|
472 | # call. However, as this is a POST is better to have it, so automated | |
473 | # tools don't flag it as potential CSRF. |
|
473 | # tools don't flag it as potential CSRF. | |
474 | # Post is required because the payload could be bigger than the maximum |
|
474 | # Post is required because the payload could be bigger than the maximum | |
475 | # allowed by GET. |
|
475 | # allowed by GET. | |
476 |
|
476 | |||
477 | text = self.request.POST.get('text') |
|
477 | text = self.request.POST.get('text') | |
478 | renderer = self.request.POST.get('renderer') or 'rst' |
|
478 | renderer = self.request.POST.get('renderer') or 'rst' | |
479 | if text: |
|
479 | if text: | |
480 | return h.render(text, renderer=renderer, mentions=True) |
|
480 | return h.render(text, renderer=renderer, mentions=True) | |
481 | return '' |
|
481 | return '' | |
482 |
|
482 | |||
483 | @LoginRequired() |
|
483 | @LoginRequired() | |
484 | @NotAnonymous() |
|
484 | @NotAnonymous() | |
485 | @HasRepoPermissionAnyDecorator( |
|
485 | @HasRepoPermissionAnyDecorator( | |
486 | 'repository.read', 'repository.write', 'repository.admin') |
|
486 | 'repository.read', 'repository.write', 'repository.admin') | |
487 | @CSRFRequired() |
|
487 | @CSRFRequired() | |
488 | @view_config( |
|
488 | @view_config( | |
489 | route_name='repo_commit_comment_delete', request_method='POST', |
|
489 | route_name='repo_commit_comment_delete', request_method='POST', | |
490 | renderer='json_ext') |
|
490 | renderer='json_ext') | |
491 | def repo_commit_comment_delete(self): |
|
491 | def repo_commit_comment_delete(self): | |
492 | commit_id = self.request.matchdict['commit_id'] |
|
492 | commit_id = self.request.matchdict['commit_id'] | |
493 | comment_id = self.request.matchdict['comment_id'] |
|
493 | comment_id = self.request.matchdict['comment_id'] | |
494 |
|
494 | |||
495 | comment = ChangesetComment.get_or_404(comment_id) |
|
495 | comment = ChangesetComment.get_or_404(comment_id) | |
496 | if not comment: |
|
496 | if not comment: | |
497 | log.debug('Comment with id:%s not found, skipping', comment_id) |
|
497 | log.debug('Comment with id:%s not found, skipping', comment_id) | |
498 | # comment already deleted in another call probably |
|
498 | # comment already deleted in another call probably | |
499 | return True |
|
499 | return True | |
500 |
|
500 | |||
501 | is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name) |
|
501 | is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name) | |
502 | super_admin = h.HasPermissionAny('hg.admin')() |
|
502 | super_admin = h.HasPermissionAny('hg.admin')() | |
503 | comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id) |
|
503 | comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id) | |
504 | is_repo_comment = comment.repo.repo_name == self.db_repo_name |
|
504 | is_repo_comment = comment.repo.repo_name == self.db_repo_name | |
505 | comment_repo_admin = is_repo_admin and is_repo_comment |
|
505 | comment_repo_admin = is_repo_admin and is_repo_comment | |
506 |
|
506 | |||
507 | if super_admin or comment_owner or comment_repo_admin: |
|
507 | if super_admin or comment_owner or comment_repo_admin: | |
508 | CommentsModel().delete(comment=comment, user=self._rhodecode_db_user) |
|
508 | CommentsModel().delete(comment=comment, user=self._rhodecode_db_user) | |
509 | Session().commit() |
|
509 | Session().commit() | |
510 | return True |
|
510 | return True | |
511 | else: |
|
511 | else: | |
512 | log.warning('No permissions for user %s to delete comment_id: %s', |
|
512 | log.warning('No permissions for user %s to delete comment_id: %s', | |
513 | self._rhodecode_db_user, comment_id) |
|
513 | self._rhodecode_db_user, comment_id) | |
514 | raise HTTPNotFound() |
|
514 | raise HTTPNotFound() | |
515 |
|
515 | |||
516 | @LoginRequired() |
|
516 | @LoginRequired() | |
517 | @HasRepoPermissionAnyDecorator( |
|
517 | @HasRepoPermissionAnyDecorator( | |
518 | 'repository.read', 'repository.write', 'repository.admin') |
|
518 | 'repository.read', 'repository.write', 'repository.admin') | |
519 | @view_config( |
|
519 | @view_config( | |
520 | route_name='repo_commit_data', request_method='GET', |
|
520 | route_name='repo_commit_data', request_method='GET', | |
521 | renderer='json_ext', xhr=True) |
|
521 | renderer='json_ext', xhr=True) | |
522 | def repo_commit_data(self): |
|
522 | def repo_commit_data(self): | |
523 | commit_id = self.request.matchdict['commit_id'] |
|
523 | commit_id = self.request.matchdict['commit_id'] | |
524 | self.load_default_context() |
|
524 | self.load_default_context() | |
525 |
|
525 | |||
526 | try: |
|
526 | try: | |
527 | return self.rhodecode_vcs_repo.get_commit(commit_id=commit_id) |
|
527 | return self.rhodecode_vcs_repo.get_commit(commit_id=commit_id) | |
528 | except CommitDoesNotExistError as e: |
|
528 | except CommitDoesNotExistError as e: | |
529 | return EmptyCommit(message=str(e)) |
|
529 | return EmptyCommit(message=str(e)) | |
530 |
|
530 | |||
531 | @LoginRequired() |
|
531 | @LoginRequired() | |
532 | @HasRepoPermissionAnyDecorator( |
|
532 | @HasRepoPermissionAnyDecorator( | |
533 | 'repository.read', 'repository.write', 'repository.admin') |
|
533 | 'repository.read', 'repository.write', 'repository.admin') | |
534 | @view_config( |
|
534 | @view_config( | |
535 | route_name='repo_commit_children', request_method='GET', |
|
535 | route_name='repo_commit_children', request_method='GET', | |
536 | renderer='json_ext', xhr=True) |
|
536 | renderer='json_ext', xhr=True) | |
537 | def repo_commit_children(self): |
|
537 | def repo_commit_children(self): | |
538 | commit_id = self.request.matchdict['commit_id'] |
|
538 | commit_id = self.request.matchdict['commit_id'] | |
539 | self.load_default_context() |
|
539 | self.load_default_context() | |
540 |
|
540 | |||
541 | commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id) |
|
541 | commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id) | |
542 | result = {"results": commit.children} |
|
542 | result = {"results": commit.children} | |
543 | return result |
|
543 | return result | |
544 |
|
544 | |||
545 | @LoginRequired() |
|
545 | @LoginRequired() | |
546 | @HasRepoPermissionAnyDecorator( |
|
546 | @HasRepoPermissionAnyDecorator( | |
547 | 'repository.read', 'repository.write', 'repository.admin') |
|
547 | 'repository.read', 'repository.write', 'repository.admin') | |
548 | @view_config( |
|
548 | @view_config( | |
549 | route_name='repo_commit_parents', request_method='GET', |
|
549 | route_name='repo_commit_parents', request_method='GET', | |
550 | renderer='json_ext') |
|
550 | renderer='json_ext') | |
551 | def repo_commit_parents(self): |
|
551 | def repo_commit_parents(self): | |
552 | commit_id = self.request.matchdict['commit_id'] |
|
552 | commit_id = self.request.matchdict['commit_id'] | |
553 | self.load_default_context() |
|
553 | self.load_default_context() | |
554 |
|
554 | |||
555 | commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id) |
|
555 | commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id) | |
556 | result = {"results": commit.parents} |
|
556 | result = {"results": commit.parents} | |
557 | return result |
|
557 | return result |
@@ -1,203 +1,202 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2017-2017 RhodeCode GmbH |
|
3 | # Copyright (C) 2017-2017 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import pytz |
|
21 | import pytz | |
22 | import logging |
|
22 | import logging | |
23 |
|
23 | |||
24 | from beaker.cache import cache_region |
|
24 | from beaker.cache import cache_region | |
25 | from pyramid.view import view_config |
|
25 | from pyramid.view import view_config | |
26 | from pyramid.response import Response |
|
26 | from pyramid.response import Response | |
27 | from webhelpers.feedgenerator import Rss201rev2Feed, Atom1Feed |
|
27 | from webhelpers.feedgenerator import Rss201rev2Feed, Atom1Feed | |
28 |
|
28 | |||
29 | from rhodecode.apps._base import RepoAppView |
|
29 | from rhodecode.apps._base import RepoAppView | |
30 | from rhodecode.lib import audit_logger |
|
30 | from rhodecode.lib import audit_logger | |
31 | from rhodecode.lib import helpers as h |
|
31 | from rhodecode.lib import helpers as h | |
32 | from rhodecode.lib.auth import (LoginRequired, HasRepoPermissionAnyDecorator, |
|
32 | from rhodecode.lib.auth import ( | |
33 | NotAnonymous, CSRFRequired) |
|
33 | LoginRequired, HasRepoPermissionAnyDecorator) | |
34 | from rhodecode.lib.diffs import DiffProcessor, LimitedDiffContainer |
|
34 | from rhodecode.lib.diffs import DiffProcessor, LimitedDiffContainer | |
35 | from rhodecode.lib.ext_json import json |
|
|||
36 | from rhodecode.lib.utils2 import str2bool, safe_int |
|
35 | from rhodecode.lib.utils2 import str2bool, safe_int | |
37 | from rhodecode.model.db import UserApiKeys, CacheKey |
|
36 | from rhodecode.model.db import UserApiKeys, CacheKey | |
38 |
|
37 | |||
39 | log = logging.getLogger(__name__) |
|
38 | log = logging.getLogger(__name__) | |
40 |
|
39 | |||
41 |
|
40 | |||
42 | class RepoFeedView(RepoAppView): |
|
41 | class RepoFeedView(RepoAppView): | |
43 | def load_default_context(self): |
|
42 | def load_default_context(self): | |
44 | c = self._get_local_tmpl_context() |
|
43 | c = self._get_local_tmpl_context() | |
45 |
|
44 | |||
46 | # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead |
|
45 | # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead | |
47 | c.repo_info = self.db_repo |
|
46 | c.repo_info = self.db_repo | |
48 |
|
47 | |||
49 | self._register_global_c(c) |
|
48 | self._register_global_c(c) | |
50 | self._load_defaults() |
|
49 | self._load_defaults() | |
51 | return c |
|
50 | return c | |
52 |
|
51 | |||
53 | def _get_config(self): |
|
52 | def _get_config(self): | |
54 | import rhodecode |
|
53 | import rhodecode | |
55 | config = rhodecode.CONFIG |
|
54 | config = rhodecode.CONFIG | |
56 |
|
55 | |||
57 | return { |
|
56 | return { | |
58 | 'language': 'en-us', |
|
57 | 'language': 'en-us', | |
59 | 'feed_ttl': '5', # TTL of feed, |
|
58 | 'feed_ttl': '5', # TTL of feed, | |
60 | 'feed_include_diff': |
|
59 | 'feed_include_diff': | |
61 | str2bool(config.get('rss_include_diff', False)), |
|
60 | str2bool(config.get('rss_include_diff', False)), | |
62 | 'feed_items_per_page': |
|
61 | 'feed_items_per_page': | |
63 | safe_int(config.get('rss_items_per_page', 20)), |
|
62 | safe_int(config.get('rss_items_per_page', 20)), | |
64 | 'feed_diff_limit': |
|
63 | 'feed_diff_limit': | |
65 | # we need to protect from parsing huge diffs here other way |
|
64 | # we need to protect from parsing huge diffs here other way | |
66 | # we can kill the server |
|
65 | # we can kill the server | |
67 | safe_int(config.get('rss_cut_off_limit', 32 * 1024)), |
|
66 | safe_int(config.get('rss_cut_off_limit', 32 * 1024)), | |
68 | } |
|
67 | } | |
69 |
|
68 | |||
70 | def _load_defaults(self): |
|
69 | def _load_defaults(self): | |
71 | _ = self.request.translate |
|
70 | _ = self.request.translate | |
72 | config = self._get_config() |
|
71 | config = self._get_config() | |
73 | # common values for feeds |
|
72 | # common values for feeds | |
74 | self.description = _('Changes on %s repository') |
|
73 | self.description = _('Changes on %s repository') | |
75 | self.title = self.title = _('%s %s feed') % (self.db_repo_name, '%s') |
|
74 | self.title = self.title = _('%s %s feed') % (self.db_repo_name, '%s') | |
76 | self.language = config["language"] |
|
75 | self.language = config["language"] | |
77 | self.ttl = config["feed_ttl"] |
|
76 | self.ttl = config["feed_ttl"] | |
78 | self.feed_include_diff = config['feed_include_diff'] |
|
77 | self.feed_include_diff = config['feed_include_diff'] | |
79 | self.feed_diff_limit = config['feed_diff_limit'] |
|
78 | self.feed_diff_limit = config['feed_diff_limit'] | |
80 | self.feed_items_per_page = config['feed_items_per_page'] |
|
79 | self.feed_items_per_page = config['feed_items_per_page'] | |
81 |
|
80 | |||
82 | def _changes(self, commit): |
|
81 | def _changes(self, commit): | |
83 | diff_processor = DiffProcessor( |
|
82 | diff_processor = DiffProcessor( | |
84 | commit.diff(), diff_limit=self.feed_diff_limit) |
|
83 | commit.diff(), diff_limit=self.feed_diff_limit) | |
85 | _parsed = diff_processor.prepare(inline_diff=False) |
|
84 | _parsed = diff_processor.prepare(inline_diff=False) | |
86 | limited_diff = isinstance(_parsed, LimitedDiffContainer) |
|
85 | limited_diff = isinstance(_parsed, LimitedDiffContainer) | |
87 |
|
86 | |||
88 | return _parsed, limited_diff |
|
87 | return _parsed, limited_diff | |
89 |
|
88 | |||
90 | def _get_title(self, commit): |
|
89 | def _get_title(self, commit): | |
91 | return h.shorter(commit.message, 160) |
|
90 | return h.shorter(commit.message, 160) | |
92 |
|
91 | |||
93 | def _get_description(self, commit): |
|
92 | def _get_description(self, commit): | |
94 | _renderer = self.request.get_partial_renderer( |
|
93 | _renderer = self.request.get_partial_renderer( | |
95 | 'feed/atom_feed_entry.mako') |
|
94 | 'feed/atom_feed_entry.mako') | |
96 | parsed_diff, limited_diff = self._changes(commit) |
|
95 | parsed_diff, limited_diff = self._changes(commit) | |
97 | return _renderer( |
|
96 | return _renderer( | |
98 | 'body', |
|
97 | 'body', | |
99 | commit=commit, |
|
98 | commit=commit, | |
100 | parsed_diff=parsed_diff, |
|
99 | parsed_diff=parsed_diff, | |
101 | limited_diff=limited_diff, |
|
100 | limited_diff=limited_diff, | |
102 | feed_include_diff=self.feed_include_diff, |
|
101 | feed_include_diff=self.feed_include_diff, | |
103 | ) |
|
102 | ) | |
104 |
|
103 | |||
105 | def _set_timezone(self, date, tzinfo=pytz.utc): |
|
104 | def _set_timezone(self, date, tzinfo=pytz.utc): | |
106 | if not getattr(date, "tzinfo", None): |
|
105 | if not getattr(date, "tzinfo", None): | |
107 | date.replace(tzinfo=tzinfo) |
|
106 | date.replace(tzinfo=tzinfo) | |
108 | return date |
|
107 | return date | |
109 |
|
108 | |||
110 | def _get_commits(self): |
|
109 | def _get_commits(self): | |
111 | return list(self.rhodecode_vcs_repo[-self.feed_items_per_page:]) |
|
110 | return list(self.rhodecode_vcs_repo[-self.feed_items_per_page:]) | |
112 |
|
111 | |||
113 | @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED]) |
|
112 | @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED]) | |
114 | @HasRepoPermissionAnyDecorator( |
|
113 | @HasRepoPermissionAnyDecorator( | |
115 | 'repository.read', 'repository.write', 'repository.admin') |
|
114 | 'repository.read', 'repository.write', 'repository.admin') | |
116 | @view_config( |
|
115 | @view_config( | |
117 | route_name='atom_feed_home', request_method='GET', |
|
116 | route_name='atom_feed_home', request_method='GET', | |
118 | renderer=None) |
|
117 | renderer=None) | |
119 | def atom(self): |
|
118 | def atom(self): | |
120 | """ |
|
119 | """ | |
121 | Produce an atom-1.0 feed via feedgenerator module |
|
120 | Produce an atom-1.0 feed via feedgenerator module | |
122 | """ |
|
121 | """ | |
123 | self.load_default_context() |
|
122 | self.load_default_context() | |
124 |
|
123 | |||
125 | @cache_region('long_term') |
|
124 | @cache_region('long_term') | |
126 | def _generate_feed(cache_key): |
|
125 | def _generate_feed(cache_key): | |
127 | feed = Atom1Feed( |
|
126 | feed = Atom1Feed( | |
128 | title=self.title % self.db_repo_name, |
|
127 | title=self.title % self.db_repo_name, | |
129 | link=h.route_url('repo_summary', repo_name=self.db_repo_name), |
|
128 | link=h.route_url('repo_summary', repo_name=self.db_repo_name), | |
130 | description=self.description % self.db_repo_name, |
|
129 | description=self.description % self.db_repo_name, | |
131 | language=self.language, |
|
130 | language=self.language, | |
132 | ttl=self.ttl |
|
131 | ttl=self.ttl | |
133 | ) |
|
132 | ) | |
134 |
|
133 | |||
135 | for commit in reversed(self._get_commits()): |
|
134 | for commit in reversed(self._get_commits()): | |
136 | date = self._set_timezone(commit.date) |
|
135 | date = self._set_timezone(commit.date) | |
137 | feed.add_item( |
|
136 | feed.add_item( | |
138 | title=self._get_title(commit), |
|
137 | title=self._get_title(commit), | |
139 | author_name=commit.author, |
|
138 | author_name=commit.author, | |
140 | description=self._get_description(commit), |
|
139 | description=self._get_description(commit), | |
141 | link=h.route_url( |
|
140 | link=h.route_url( | |
142 | 'repo_commit', repo_name=self.db_repo_name, |
|
141 | 'repo_commit', repo_name=self.db_repo_name, | |
143 | commit_id=commit.raw_id), |
|
142 | commit_id=commit.raw_id), | |
144 | pubdate=date,) |
|
143 | pubdate=date,) | |
145 |
|
144 | |||
146 | return feed.mime_type, feed.writeString('utf-8') |
|
145 | return feed.mime_type, feed.writeString('utf-8') | |
147 |
|
146 | |||
148 | invalidator_context = CacheKey.repo_context_cache( |
|
147 | invalidator_context = CacheKey.repo_context_cache( | |
149 | _generate_feed, self.db_repo_name, CacheKey.CACHE_TYPE_ATOM) |
|
148 | _generate_feed, self.db_repo_name, CacheKey.CACHE_TYPE_ATOM) | |
150 |
|
149 | |||
151 | with invalidator_context as context: |
|
150 | with invalidator_context as context: | |
152 | context.invalidate() |
|
151 | context.invalidate() | |
153 | mime_type, feed = context.compute() |
|
152 | mime_type, feed = context.compute() | |
154 |
|
153 | |||
155 | response = Response(feed) |
|
154 | response = Response(feed) | |
156 | response.content_type = mime_type |
|
155 | response.content_type = mime_type | |
157 | return response |
|
156 | return response | |
158 |
|
157 | |||
159 | @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED]) |
|
158 | @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED]) | |
160 | @HasRepoPermissionAnyDecorator( |
|
159 | @HasRepoPermissionAnyDecorator( | |
161 | 'repository.read', 'repository.write', 'repository.admin') |
|
160 | 'repository.read', 'repository.write', 'repository.admin') | |
162 | @view_config( |
|
161 | @view_config( | |
163 | route_name='rss_feed_home', request_method='GET', |
|
162 | route_name='rss_feed_home', request_method='GET', | |
164 | renderer=None) |
|
163 | renderer=None) | |
165 | def rss(self): |
|
164 | def rss(self): | |
166 | """ |
|
165 | """ | |
167 | Produce an rss2 feed via feedgenerator module |
|
166 | Produce an rss2 feed via feedgenerator module | |
168 | """ |
|
167 | """ | |
169 | self.load_default_context() |
|
168 | self.load_default_context() | |
170 |
|
169 | |||
171 | @cache_region('long_term') |
|
170 | @cache_region('long_term') | |
172 | def _generate_feed(cache_key): |
|
171 | def _generate_feed(cache_key): | |
173 | feed = Rss201rev2Feed( |
|
172 | feed = Rss201rev2Feed( | |
174 | title=self.title % self.db_repo_name, |
|
173 | title=self.title % self.db_repo_name, | |
175 | link=h.route_url('repo_summary', repo_name=self.db_repo_name), |
|
174 | link=h.route_url('repo_summary', repo_name=self.db_repo_name), | |
176 | description=self.description % self.db_repo_name, |
|
175 | description=self.description % self.db_repo_name, | |
177 | language=self.language, |
|
176 | language=self.language, | |
178 | ttl=self.ttl |
|
177 | ttl=self.ttl | |
179 | ) |
|
178 | ) | |
180 |
|
179 | |||
181 | for commit in reversed(self._get_commits()): |
|
180 | for commit in reversed(self._get_commits()): | |
182 | date = self._set_timezone(commit.date) |
|
181 | date = self._set_timezone(commit.date) | |
183 | feed.add_item( |
|
182 | feed.add_item( | |
184 | title=self._get_title(commit), |
|
183 | title=self._get_title(commit), | |
185 | author_name=commit.author, |
|
184 | author_name=commit.author, | |
186 | description=self._get_description(commit), |
|
185 | description=self._get_description(commit), | |
187 | link=h.route_url( |
|
186 | link=h.route_url( | |
188 | 'repo_commit', repo_name=self.db_repo_name, |
|
187 | 'repo_commit', repo_name=self.db_repo_name, | |
189 | commit_id=commit.raw_id), |
|
188 | commit_id=commit.raw_id), | |
190 | pubdate=date,) |
|
189 | pubdate=date,) | |
191 |
|
190 | |||
192 | return feed.mime_type, feed.writeString('utf-8') |
|
191 | return feed.mime_type, feed.writeString('utf-8') | |
193 |
|
192 | |||
194 | invalidator_context = CacheKey.repo_context_cache( |
|
193 | invalidator_context = CacheKey.repo_context_cache( | |
195 | _generate_feed, self.db_repo_name, CacheKey.CACHE_TYPE_RSS) |
|
194 | _generate_feed, self.db_repo_name, CacheKey.CACHE_TYPE_RSS) | |
196 |
|
195 | |||
197 | with invalidator_context as context: |
|
196 | with invalidator_context as context: | |
198 | context.invalidate() |
|
197 | context.invalidate() | |
199 | mime_type, feed = context.compute() |
|
198 | mime_type, feed = context.compute() | |
200 |
|
199 | |||
201 | response = Response(feed) |
|
200 | response = Response(feed) | |
202 | response.content_type = mime_type |
|
201 | response.content_type = mime_type | |
203 | return response |
|
202 | return response |
@@ -1,68 +1,67 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2011-2017 RhodeCode GmbH |
|
3 | # Copyright (C) 2011-2017 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import logging |
|
21 | import logging | |
22 |
|
22 | |||
23 | from pyramid.view import view_config |
|
23 | from pyramid.view import view_config | |
24 |
|
24 | |||
25 | from rhodecode.apps._base import RepoAppView |
|
25 | from rhodecode.apps._base import RepoAppView | |
26 |
from rhodecode.lib.auth import |
|
26 | from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator | |
27 | NotAnonymous) |
|
|||
28 | from rhodecode.lib import repo_maintenance |
|
27 | from rhodecode.lib import repo_maintenance | |
29 |
|
28 | |||
30 | log = logging.getLogger(__name__) |
|
29 | log = logging.getLogger(__name__) | |
31 |
|
30 | |||
32 |
|
31 | |||
33 | class RepoMaintenanceView(RepoAppView): |
|
32 | class RepoMaintenanceView(RepoAppView): | |
34 | def load_default_context(self): |
|
33 | def load_default_context(self): | |
35 | c = self._get_local_tmpl_context() |
|
34 | c = self._get_local_tmpl_context() | |
36 |
|
35 | |||
37 | # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead |
|
36 | # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead | |
38 | c.repo_info = self.db_repo |
|
37 | c.repo_info = self.db_repo | |
39 |
|
38 | |||
40 | self._register_global_c(c) |
|
39 | self._register_global_c(c) | |
41 | return c |
|
40 | return c | |
42 |
|
41 | |||
43 | @LoginRequired() |
|
42 | @LoginRequired() | |
44 | @HasRepoPermissionAnyDecorator('repository.admin') |
|
43 | @HasRepoPermissionAnyDecorator('repository.admin') | |
45 | @view_config( |
|
44 | @view_config( | |
46 | route_name='repo_maintenance', request_method='GET', |
|
45 | route_name='edit_repo_maintenance', request_method='GET', | |
47 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') |
|
46 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') | |
48 | def repo_maintenance(self): |
|
47 | def repo_maintenance(self): | |
49 | c = self.load_default_context() |
|
48 | c = self.load_default_context() | |
50 | c.active = 'maintenance' |
|
49 | c.active = 'maintenance' | |
51 | maintenance = repo_maintenance.RepoMaintenance() |
|
50 | maintenance = repo_maintenance.RepoMaintenance() | |
52 | c.executable_tasks = maintenance.get_tasks_for_repo(self.db_repo) |
|
51 | c.executable_tasks = maintenance.get_tasks_for_repo(self.db_repo) | |
53 | return self._get_template_context(c) |
|
52 | return self._get_template_context(c) | |
54 |
|
53 | |||
55 | @LoginRequired() |
|
54 | @LoginRequired() | |
56 | @HasRepoPermissionAnyDecorator('repository.admin') |
|
55 | @HasRepoPermissionAnyDecorator('repository.admin') | |
57 | @view_config( |
|
56 | @view_config( | |
58 | route_name='repo_maintenance_execute', request_method='GET', |
|
57 | route_name='edit_repo_maintenance_execute', request_method='GET', | |
59 | renderer='json', xhr=True) |
|
58 | renderer='json', xhr=True) | |
60 | def repo_maintenance_execute(self): |
|
59 | def repo_maintenance_execute(self): | |
61 | c = self.load_default_context() |
|
60 | c = self.load_default_context() | |
62 | c.active = 'maintenance' |
|
61 | c.active = 'maintenance' | |
63 | _ = self.request.translate |
|
62 | _ = self.request.translate | |
64 |
|
63 | |||
65 | maintenance = repo_maintenance.RepoMaintenance() |
|
64 | maintenance = repo_maintenance.RepoMaintenance() | |
66 | executed_types = maintenance.execute(self.db_repo) |
|
65 | executed_types = maintenance.execute(self.db_repo) | |
67 |
|
66 | |||
68 | return executed_types |
|
67 | return executed_types |
@@ -1,98 +1,92 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2011-2017 RhodeCode GmbH |
|
3 | # Copyright (C) 2011-2017 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import logging |
|
21 | import logging | |
22 |
|
22 | |||
23 | import deform |
|
|||
24 | from pyramid.httpexceptions import HTTPFound |
|
23 | from pyramid.httpexceptions import HTTPFound | |
25 | from pyramid.view import view_config |
|
24 | from pyramid.view import view_config | |
26 |
|
25 | |||
27 | from rhodecode.apps._base import RepoAppView |
|
26 | from rhodecode.apps._base import RepoAppView | |
28 | from rhodecode.forms import RcForm |
|
|||
29 | from rhodecode.lib import helpers as h |
|
27 | from rhodecode.lib import helpers as h | |
30 | from rhodecode.lib import audit_logger |
|
28 | from rhodecode.lib import audit_logger | |
31 | from rhodecode.lib.auth import ( |
|
29 | from rhodecode.lib.auth import ( | |
32 | LoginRequired, HasRepoPermissionAnyDecorator, |
|
30 | LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired) | |
33 | HasRepoPermissionAllDecorator, CSRFRequired) |
|
|||
34 | from rhodecode.model.db import RepositoryField, RepoGroup |
|
|||
35 | from rhodecode.model.forms import RepoPermsForm |
|
31 | from rhodecode.model.forms import RepoPermsForm | |
36 | from rhodecode.model.meta import Session |
|
32 | from rhodecode.model.meta import Session | |
37 | from rhodecode.model.repo import RepoModel |
|
33 | from rhodecode.model.repo import RepoModel | |
38 | from rhodecode.model.scm import RepoGroupList, ScmModel |
|
|||
39 | from rhodecode.model.validation_schema.schemas import repo_schema |
|
|||
40 |
|
34 | |||
41 | log = logging.getLogger(__name__) |
|
35 | log = logging.getLogger(__name__) | |
42 |
|
36 | |||
43 |
|
37 | |||
44 | class RepoSettingsPermissionsView(RepoAppView): |
|
38 | class RepoSettingsPermissionsView(RepoAppView): | |
45 |
|
39 | |||
46 | def load_default_context(self): |
|
40 | def load_default_context(self): | |
47 | c = self._get_local_tmpl_context() |
|
41 | c = self._get_local_tmpl_context() | |
48 |
|
42 | |||
49 | # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead |
|
43 | # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead | |
50 | c.repo_info = self.db_repo |
|
44 | c.repo_info = self.db_repo | |
51 |
|
45 | |||
52 | self._register_global_c(c) |
|
46 | self._register_global_c(c) | |
53 | return c |
|
47 | return c | |
54 |
|
48 | |||
55 | @LoginRequired() |
|
49 | @LoginRequired() | |
56 | @HasRepoPermissionAnyDecorator('repository.admin') |
|
50 | @HasRepoPermissionAnyDecorator('repository.admin') | |
57 | @view_config( |
|
51 | @view_config( | |
58 | route_name='edit_repo_perms', request_method='GET', |
|
52 | route_name='edit_repo_perms', request_method='GET', | |
59 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') |
|
53 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') | |
60 | def edit_permissions(self): |
|
54 | def edit_permissions(self): | |
61 | c = self.load_default_context() |
|
55 | c = self.load_default_context() | |
62 | c.active = 'permissions' |
|
56 | c.active = 'permissions' | |
63 | return self._get_template_context(c) |
|
57 | return self._get_template_context(c) | |
64 |
|
58 | |||
65 | @LoginRequired() |
|
59 | @LoginRequired() | |
66 |
@HasRepoPermissionA |
|
60 | @HasRepoPermissionAnyDecorator('repository.admin') | |
67 | @CSRFRequired() |
|
61 | @CSRFRequired() | |
68 | @view_config( |
|
62 | @view_config( | |
69 | route_name='edit_repo_perms', request_method='POST', |
|
63 | route_name='edit_repo_perms', request_method='POST', | |
70 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') |
|
64 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') | |
71 | def edit_permissions_update(self): |
|
65 | def edit_permissions_update(self): | |
72 | _ = self.request.translate |
|
66 | _ = self.request.translate | |
73 | c = self.load_default_context() |
|
67 | c = self.load_default_context() | |
74 | c.active = 'permissions' |
|
68 | c.active = 'permissions' | |
75 | data = self.request.POST |
|
69 | data = self.request.POST | |
76 | # store private flag outside of HTML to verify if we can modify |
|
70 | # store private flag outside of HTML to verify if we can modify | |
77 |
# default user permissions, prevents submi |
|
71 | # default user permissions, prevents submission of FAKE post data | |
78 | # into the form for private repos |
|
72 | # into the form for private repos | |
79 | data['repo_private'] = self.db_repo.private |
|
73 | data['repo_private'] = self.db_repo.private | |
80 | form = RepoPermsForm()().to_python(data) |
|
74 | form = RepoPermsForm()().to_python(data) | |
81 | changes = RepoModel().update_permissions( |
|
75 | changes = RepoModel().update_permissions( | |
82 | self.db_repo_name, form['perm_additions'], form['perm_updates'], |
|
76 | self.db_repo_name, form['perm_additions'], form['perm_updates'], | |
83 | form['perm_deletions']) |
|
77 | form['perm_deletions']) | |
84 |
|
78 | |||
85 | action_data = { |
|
79 | action_data = { | |
86 | 'added': changes['added'], |
|
80 | 'added': changes['added'], | |
87 | 'updated': changes['updated'], |
|
81 | 'updated': changes['updated'], | |
88 | 'deleted': changes['deleted'], |
|
82 | 'deleted': changes['deleted'], | |
89 | } |
|
83 | } | |
90 | audit_logger.store_web( |
|
84 | audit_logger.store_web( | |
91 | 'repo.edit.permissions', action_data=action_data, |
|
85 | 'repo.edit.permissions', action_data=action_data, | |
92 | user=self._rhodecode_user, repo=self.db_repo) |
|
86 | user=self._rhodecode_user, repo=self.db_repo) | |
93 |
|
87 | |||
94 | Session().commit() |
|
88 | Session().commit() | |
95 | h.flash(_('Repository permissions updated'), category='success') |
|
89 | h.flash(_('Repository permissions updated'), category='success') | |
96 |
|
90 | |||
97 | raise HTTPFound( |
|
91 | raise HTTPFound( | |
98 |
|
|
92 | h.route_path('edit_repo_perms', repo_name=self.db_repo_name)) |
@@ -1,179 +1,254 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2011-2017 RhodeCode GmbH |
|
3 | # Copyright (C) 2011-2017 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import logging |
|
21 | import logging | |
22 |
|
22 | |||
23 | import deform |
|
23 | import deform | |
24 | from pyramid.httpexceptions import HTTPFound |
|
24 | from pyramid.httpexceptions import HTTPFound | |
25 | from pyramid.view import view_config |
|
25 | from pyramid.view import view_config | |
26 |
|
26 | |||
27 | from rhodecode.apps._base import RepoAppView |
|
27 | from rhodecode.apps._base import RepoAppView | |
28 | from rhodecode.forms import RcForm |
|
28 | from rhodecode.forms import RcForm | |
29 | from rhodecode.lib import helpers as h |
|
29 | from rhodecode.lib import helpers as h | |
30 | from rhodecode.lib import audit_logger |
|
30 | from rhodecode.lib import audit_logger | |
31 | from rhodecode.lib.auth import ( |
|
31 | from rhodecode.lib.auth import ( | |
32 | LoginRequired, HasRepoPermissionAnyDecorator, |
|
32 | LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired) | |
33 | HasRepoPermissionAllDecorator, CSRFRequired) |
|
33 | from rhodecode.model.db import RepositoryField, RepoGroup, Repository | |
34 | from rhodecode.model.db import RepositoryField, RepoGroup |
|
|||
35 | from rhodecode.model.meta import Session |
|
34 | from rhodecode.model.meta import Session | |
36 | from rhodecode.model.repo import RepoModel |
|
35 | from rhodecode.model.repo import RepoModel | |
37 | from rhodecode.model.scm import RepoGroupList, ScmModel |
|
36 | from rhodecode.model.scm import RepoGroupList, ScmModel | |
38 | from rhodecode.model.validation_schema.schemas import repo_schema |
|
37 | from rhodecode.model.validation_schema.schemas import repo_schema | |
39 |
|
38 | |||
40 | log = logging.getLogger(__name__) |
|
39 | log = logging.getLogger(__name__) | |
41 |
|
40 | |||
42 |
|
41 | |||
43 | class RepoSettingsView(RepoAppView): |
|
42 | class RepoSettingsView(RepoAppView): | |
44 |
|
43 | |||
45 | def load_default_context(self): |
|
44 | def load_default_context(self): | |
46 | c = self._get_local_tmpl_context() |
|
45 | c = self._get_local_tmpl_context() | |
47 |
|
46 | |||
48 | # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead |
|
47 | # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead | |
49 | c.repo_info = self.db_repo |
|
48 | c.repo_info = self.db_repo | |
50 |
|
49 | |||
51 | acl_groups = RepoGroupList( |
|
50 | acl_groups = RepoGroupList( | |
52 | RepoGroup.query().all(), |
|
51 | RepoGroup.query().all(), | |
53 | perm_set=['group.write', 'group.admin']) |
|
52 | perm_set=['group.write', 'group.admin']) | |
54 | c.repo_groups = RepoGroup.groups_choices(groups=acl_groups) |
|
53 | c.repo_groups = RepoGroup.groups_choices(groups=acl_groups) | |
55 | c.repo_groups_choices = map(lambda k: k[0], c.repo_groups) |
|
54 | c.repo_groups_choices = map(lambda k: k[0], c.repo_groups) | |
56 |
|
55 | |||
57 | # in case someone no longer have a group.write access to a repository |
|
56 | # in case someone no longer have a group.write access to a repository | |
58 | # pre fill the list with this entry, we don't care if this is the same |
|
57 | # pre fill the list with this entry, we don't care if this is the same | |
59 | # but it will allow saving repo data properly. |
|
58 | # but it will allow saving repo data properly. | |
60 | repo_group = self.db_repo.group |
|
59 | repo_group = self.db_repo.group | |
61 | if repo_group and repo_group.group_id not in c.repo_groups_choices: |
|
60 | if repo_group and repo_group.group_id not in c.repo_groups_choices: | |
62 | c.repo_groups_choices.append(repo_group.group_id) |
|
61 | c.repo_groups_choices.append(repo_group.group_id) | |
63 | c.repo_groups.append(RepoGroup._generate_choice(repo_group)) |
|
62 | c.repo_groups.append(RepoGroup._generate_choice(repo_group)) | |
64 |
|
63 | |||
65 | if c.repository_requirements_missing or self.rhodecode_vcs_repo is None: |
|
64 | if c.repository_requirements_missing or self.rhodecode_vcs_repo is None: | |
66 | # we might be in missing requirement state, so we load things |
|
65 | # we might be in missing requirement state, so we load things | |
67 | # without touching scm_instance() |
|
66 | # without touching scm_instance() | |
68 | c.landing_revs_choices, c.landing_revs = \ |
|
67 | c.landing_revs_choices, c.landing_revs = \ | |
69 | ScmModel().get_repo_landing_revs() |
|
68 | ScmModel().get_repo_landing_revs() | |
70 | else: |
|
69 | else: | |
71 | c.landing_revs_choices, c.landing_revs = \ |
|
70 | c.landing_revs_choices, c.landing_revs = \ | |
72 | ScmModel().get_repo_landing_revs(self.db_repo) |
|
71 | ScmModel().get_repo_landing_revs(self.db_repo) | |
73 |
|
72 | |||
74 | c.personal_repo_group = c.auth_user.personal_repo_group |
|
73 | c.personal_repo_group = c.auth_user.personal_repo_group | |
75 | c.repo_fields = RepositoryField.query()\ |
|
74 | c.repo_fields = RepositoryField.query()\ | |
76 | .filter(RepositoryField.repository == self.db_repo).all() |
|
75 | .filter(RepositoryField.repository == self.db_repo).all() | |
77 |
|
76 | |||
78 | self._register_global_c(c) |
|
77 | self._register_global_c(c) | |
79 | return c |
|
78 | return c | |
80 |
|
79 | |||
81 | def _get_schema(self, c, old_values=None): |
|
80 | def _get_schema(self, c, old_values=None): | |
82 | return repo_schema.RepoSettingsSchema().bind( |
|
81 | return repo_schema.RepoSettingsSchema().bind( | |
83 | repo_type=self.db_repo.repo_type, |
|
82 | repo_type=self.db_repo.repo_type, | |
84 | repo_type_options=[self.db_repo.repo_type], |
|
83 | repo_type_options=[self.db_repo.repo_type], | |
85 | repo_ref_options=c.landing_revs_choices, |
|
84 | repo_ref_options=c.landing_revs_choices, | |
86 | repo_ref_items=c.landing_revs, |
|
85 | repo_ref_items=c.landing_revs, | |
87 | repo_repo_group_options=c.repo_groups_choices, |
|
86 | repo_repo_group_options=c.repo_groups_choices, | |
88 | repo_repo_group_items=c.repo_groups, |
|
87 | repo_repo_group_items=c.repo_groups, | |
89 | # user caller |
|
88 | # user caller | |
90 | user=self._rhodecode_user, |
|
89 | user=self._rhodecode_user, | |
91 | old_values=old_values |
|
90 | old_values=old_values | |
92 | ) |
|
91 | ) | |
93 |
|
92 | |||
94 | @LoginRequired() |
|
93 | @LoginRequired() | |
95 | @HasRepoPermissionAnyDecorator('repository.admin') |
|
94 | @HasRepoPermissionAnyDecorator('repository.admin') | |
96 | @view_config( |
|
95 | @view_config( | |
97 | route_name='edit_repo', request_method='GET', |
|
96 | route_name='edit_repo', request_method='GET', | |
98 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') |
|
97 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') | |
99 | def edit_settings(self): |
|
98 | def edit_settings(self): | |
100 | c = self.load_default_context() |
|
99 | c = self.load_default_context() | |
101 | c.active = 'settings' |
|
100 | c.active = 'settings' | |
102 |
|
101 | |||
103 | defaults = RepoModel()._get_defaults(self.db_repo_name) |
|
102 | defaults = RepoModel()._get_defaults(self.db_repo_name) | |
104 | defaults['repo_owner'] = defaults['user'] |
|
103 | defaults['repo_owner'] = defaults['user'] | |
105 | defaults['repo_landing_commit_ref'] = defaults['repo_landing_rev'] |
|
104 | defaults['repo_landing_commit_ref'] = defaults['repo_landing_rev'] | |
106 |
|
105 | |||
107 | schema = self._get_schema(c) |
|
106 | schema = self._get_schema(c) | |
108 | c.form = RcForm(schema, appstruct=defaults) |
|
107 | c.form = RcForm(schema, appstruct=defaults) | |
109 | return self._get_template_context(c) |
|
108 | return self._get_template_context(c) | |
110 |
|
109 | |||
111 | @LoginRequired() |
|
110 | @LoginRequired() | |
112 |
@HasRepoPermissionA |
|
111 | @HasRepoPermissionAnyDecorator('repository.admin') | |
113 | @CSRFRequired() |
|
112 | @CSRFRequired() | |
114 | @view_config( |
|
113 | @view_config( | |
115 | route_name='edit_repo', request_method='POST', |
|
114 | route_name='edit_repo', request_method='POST', | |
116 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') |
|
115 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') | |
117 | def edit_settings_update(self): |
|
116 | def edit_settings_update(self): | |
118 | _ = self.request.translate |
|
117 | _ = self.request.translate | |
119 | c = self.load_default_context() |
|
118 | c = self.load_default_context() | |
120 | c.active = 'settings' |
|
119 | c.active = 'settings' | |
121 | old_repo_name = self.db_repo_name |
|
120 | old_repo_name = self.db_repo_name | |
122 |
|
121 | |||
123 | old_values = self.db_repo.get_api_data() |
|
122 | old_values = self.db_repo.get_api_data() | |
124 | schema = self._get_schema(c, old_values=old_values) |
|
123 | schema = self._get_schema(c, old_values=old_values) | |
125 |
|
124 | |||
126 | c.form = RcForm(schema) |
|
125 | c.form = RcForm(schema) | |
127 | pstruct = self.request.POST.items() |
|
126 | pstruct = self.request.POST.items() | |
128 | pstruct.append(('repo_type', self.db_repo.repo_type)) |
|
127 | pstruct.append(('repo_type', self.db_repo.repo_type)) | |
129 | try: |
|
128 | try: | |
130 | schema_data = c.form.validate(pstruct) |
|
129 | schema_data = c.form.validate(pstruct) | |
131 | except deform.ValidationFailure as err_form: |
|
130 | except deform.ValidationFailure as err_form: | |
132 | return self._get_template_context(c) |
|
131 | return self._get_template_context(c) | |
133 |
|
132 | |||
134 | # data is now VALID, proceed with updates |
|
133 | # data is now VALID, proceed with updates | |
135 | # save validated data back into the updates dict |
|
134 | # save validated data back into the updates dict | |
136 | validated_updates = dict( |
|
135 | validated_updates = dict( | |
137 | repo_name=schema_data['repo_group']['repo_name_without_group'], |
|
136 | repo_name=schema_data['repo_group']['repo_name_without_group'], | |
138 | repo_group=schema_data['repo_group']['repo_group_id'], |
|
137 | repo_group=schema_data['repo_group']['repo_group_id'], | |
139 |
|
138 | |||
140 | user=schema_data['repo_owner'], |
|
139 | user=schema_data['repo_owner'], | |
141 | repo_description=schema_data['repo_description'], |
|
140 | repo_description=schema_data['repo_description'], | |
142 | repo_private=schema_data['repo_private'], |
|
141 | repo_private=schema_data['repo_private'], | |
143 | clone_uri=schema_data['repo_clone_uri'], |
|
142 | clone_uri=schema_data['repo_clone_uri'], | |
144 | repo_landing_rev=schema_data['repo_landing_commit_ref'], |
|
143 | repo_landing_rev=schema_data['repo_landing_commit_ref'], | |
145 | repo_enable_statistics=schema_data['repo_enable_statistics'], |
|
144 | repo_enable_statistics=schema_data['repo_enable_statistics'], | |
146 | repo_enable_locking=schema_data['repo_enable_locking'], |
|
145 | repo_enable_locking=schema_data['repo_enable_locking'], | |
147 | repo_enable_downloads=schema_data['repo_enable_downloads'], |
|
146 | repo_enable_downloads=schema_data['repo_enable_downloads'], | |
148 | ) |
|
147 | ) | |
149 | # detect if CLONE URI changed, if we get OLD means we keep old values |
|
148 | # detect if CLONE URI changed, if we get OLD means we keep old values | |
150 | if schema_data['repo_clone_uri_change'] == 'OLD': |
|
149 | if schema_data['repo_clone_uri_change'] == 'OLD': | |
151 | validated_updates['clone_uri'] = self.db_repo.clone_uri |
|
150 | validated_updates['clone_uri'] = self.db_repo.clone_uri | |
152 |
|
151 | |||
153 | # use the new full name for redirect |
|
152 | # use the new full name for redirect | |
154 | new_repo_name = schema_data['repo_group']['repo_name_with_group'] |
|
153 | new_repo_name = schema_data['repo_group']['repo_name_with_group'] | |
155 |
|
154 | |||
156 | # save extra fields into our validated data |
|
155 | # save extra fields into our validated data | |
157 | for key, value in pstruct: |
|
156 | for key, value in pstruct: | |
158 | if key.startswith(RepositoryField.PREFIX): |
|
157 | if key.startswith(RepositoryField.PREFIX): | |
159 | validated_updates[key] = value |
|
158 | validated_updates[key] = value | |
160 |
|
159 | |||
161 | try: |
|
160 | try: | |
162 | RepoModel().update(self.db_repo, **validated_updates) |
|
161 | RepoModel().update(self.db_repo, **validated_updates) | |
163 | ScmModel().mark_for_invalidation(new_repo_name) |
|
162 | ScmModel().mark_for_invalidation(new_repo_name) | |
164 |
|
163 | |||
165 | audit_logger.store_web( |
|
164 | audit_logger.store_web( | |
166 | 'repo.edit', action_data={'old_data': old_values}, |
|
165 | 'repo.edit', action_data={'old_data': old_values}, | |
167 | user=self._rhodecode_user, repo=self.db_repo) |
|
166 | user=self._rhodecode_user, repo=self.db_repo) | |
168 |
|
167 | |||
169 | Session().commit() |
|
168 | Session().commit() | |
170 |
|
169 | |||
171 | h.flash(_('Repository {} updated successfully').format( |
|
170 | h.flash(_('Repository {} updated successfully').format( | |
172 | old_repo_name), category='success') |
|
171 | old_repo_name), category='success') | |
173 | except Exception: |
|
172 | except Exception: | |
174 | log.exception("Exception during update of repository") |
|
173 | log.exception("Exception during update of repository") | |
175 | h.flash(_('Error occurred during update of repository {}').format( |
|
174 | h.flash(_('Error occurred during update of repository {}').format( | |
176 | old_repo_name), category='error') |
|
175 | old_repo_name), category='error') | |
177 |
|
176 | |||
178 | raise HTTPFound( |
|
177 | raise HTTPFound( | |
179 |
|
|
178 | h.route_path('edit_repo', repo_name=new_repo_name)) | |
|
179 | ||||
|
180 | @LoginRequired() | |||
|
181 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') | |||
|
182 | @view_config( | |||
|
183 | route_name='repo_edit_toggle_locking', request_method='GET', | |||
|
184 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') | |||
|
185 | def toggle_locking(self): | |||
|
186 | """ | |||
|
187 | Toggle locking of repository by simple GET call to url | |||
|
188 | """ | |||
|
189 | _ = self.request.translate | |||
|
190 | repo = self.db_repo | |||
|
191 | ||||
|
192 | try: | |||
|
193 | if repo.enable_locking: | |||
|
194 | if repo.locked[0]: | |||
|
195 | Repository.unlock(repo) | |||
|
196 | action = _('Unlocked') | |||
|
197 | else: | |||
|
198 | Repository.lock( | |||
|
199 | repo, self._rhodecode_user.user_id, | |||
|
200 | lock_reason=Repository.LOCK_WEB) | |||
|
201 | action = _('Locked') | |||
|
202 | ||||
|
203 | h.flash(_('Repository has been %s') % action, | |||
|
204 | category='success') | |||
|
205 | except Exception: | |||
|
206 | log.exception("Exception during unlocking") | |||
|
207 | h.flash(_('An error occurred during unlocking'), | |||
|
208 | category='error') | |||
|
209 | raise HTTPFound( | |||
|
210 | h.route_path('repo_summary', repo_name=self.db_repo_name)) | |||
|
211 | ||||
|
212 | @LoginRequired() | |||
|
213 | @HasRepoPermissionAnyDecorator('repository.admin') | |||
|
214 | @view_config( | |||
|
215 | route_name='edit_repo_statistics', request_method='GET', | |||
|
216 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') | |||
|
217 | def edit_statistics_form(self): | |||
|
218 | c = self.load_default_context() | |||
|
219 | ||||
|
220 | if self.db_repo.stats: | |||
|
221 | # this is on what revision we ended up so we add +1 for count | |||
|
222 | last_rev = self.db_repo.stats.stat_on_revision + 1 | |||
|
223 | else: | |||
|
224 | last_rev = 0 | |||
|
225 | ||||
|
226 | c.active = 'statistics' | |||
|
227 | c.stats_revision = last_rev | |||
|
228 | c.repo_last_rev = self.rhodecode_vcs_repo.count() | |||
|
229 | ||||
|
230 | if last_rev == 0 or c.repo_last_rev == 0: | |||
|
231 | c.stats_percentage = 0 | |||
|
232 | else: | |||
|
233 | c.stats_percentage = '%.2f' % ( | |||
|
234 | (float((last_rev)) / c.repo_last_rev) * 100) | |||
|
235 | return self._get_template_context(c) | |||
|
236 | ||||
|
237 | @LoginRequired() | |||
|
238 | @HasRepoPermissionAnyDecorator('repository.admin') | |||
|
239 | @CSRFRequired() | |||
|
240 | @view_config( | |||
|
241 | route_name='edit_repo_statistics_reset', request_method='POST', | |||
|
242 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') | |||
|
243 | def repo_statistics_reset(self): | |||
|
244 | _ = self.request.translate | |||
|
245 | ||||
|
246 | try: | |||
|
247 | RepoModel().delete_stats(self.db_repo_name) | |||
|
248 | Session().commit() | |||
|
249 | except Exception: | |||
|
250 | log.exception('Edit statistics failure') | |||
|
251 | h.flash(_('An error occurred during deletion of repository stats'), | |||
|
252 | category='error') | |||
|
253 | raise HTTPFound( | |||
|
254 | h.route_path('edit_repo_statistics', repo_name=self.db_repo_name)) |
@@ -1,226 +1,226 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2011-2017 RhodeCode GmbH |
|
3 | # Copyright (C) 2011-2017 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import logging |
|
21 | import logging | |
22 |
|
22 | |||
23 | from pyramid.view import view_config |
|
23 | from pyramid.view import view_config | |
24 | from pyramid.httpexceptions import HTTPFound |
|
24 | from pyramid.httpexceptions import HTTPFound | |
25 |
|
25 | |||
26 | from rhodecode.apps._base import RepoAppView |
|
26 | from rhodecode.apps._base import RepoAppView | |
27 | from rhodecode.lib import helpers as h |
|
27 | from rhodecode.lib import helpers as h | |
28 | from rhodecode.lib import audit_logger |
|
28 | from rhodecode.lib import audit_logger | |
29 | from rhodecode.lib.auth import ( |
|
29 | from rhodecode.lib.auth import ( | |
30 | LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired) |
|
30 | LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired) | |
31 | from rhodecode.lib.exceptions import AttachedForksError |
|
31 | from rhodecode.lib.exceptions import AttachedForksError | |
32 | from rhodecode.lib.utils2 import safe_int |
|
32 | from rhodecode.lib.utils2 import safe_int | |
33 | from rhodecode.lib.vcs import RepositoryError |
|
33 | from rhodecode.lib.vcs import RepositoryError | |
34 | from rhodecode.model.db import Session, UserFollowing, User, Repository |
|
34 | from rhodecode.model.db import Session, UserFollowing, User, Repository | |
35 | from rhodecode.model.repo import RepoModel |
|
35 | from rhodecode.model.repo import RepoModel | |
36 | from rhodecode.model.scm import ScmModel |
|
36 | from rhodecode.model.scm import ScmModel | |
37 |
|
37 | |||
38 | log = logging.getLogger(__name__) |
|
38 | log = logging.getLogger(__name__) | |
39 |
|
39 | |||
40 |
|
40 | |||
41 | class RepoSettingsView(RepoAppView): |
|
41 | class RepoSettingsView(RepoAppView): | |
42 |
|
42 | |||
43 | def load_default_context(self): |
|
43 | def load_default_context(self): | |
44 | c = self._get_local_tmpl_context() |
|
44 | c = self._get_local_tmpl_context() | |
45 |
|
45 | |||
46 | # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead |
|
46 | # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead | |
47 | c.repo_info = self.db_repo |
|
47 | c.repo_info = self.db_repo | |
48 |
|
48 | |||
49 | self._register_global_c(c) |
|
49 | self._register_global_c(c) | |
50 | return c |
|
50 | return c | |
51 |
|
51 | |||
52 | @LoginRequired() |
|
52 | @LoginRequired() | |
53 | @HasRepoPermissionAnyDecorator('repository.admin') |
|
53 | @HasRepoPermissionAnyDecorator('repository.admin') | |
54 | @view_config( |
|
54 | @view_config( | |
55 | route_name='edit_repo_advanced', request_method='GET', |
|
55 | route_name='edit_repo_advanced', request_method='GET', | |
56 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') |
|
56 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') | |
57 | def edit_advanced(self): |
|
57 | def edit_advanced(self): | |
58 | c = self.load_default_context() |
|
58 | c = self.load_default_context() | |
59 | c.active = 'advanced' |
|
59 | c.active = 'advanced' | |
60 |
|
60 | |||
61 | c.default_user_id = User.get_default_user().user_id |
|
61 | c.default_user_id = User.get_default_user().user_id | |
62 | c.in_public_journal = UserFollowing.query() \ |
|
62 | c.in_public_journal = UserFollowing.query() \ | |
63 | .filter(UserFollowing.user_id == c.default_user_id) \ |
|
63 | .filter(UserFollowing.user_id == c.default_user_id) \ | |
64 |
.filter(UserFollowing.follows_repository == |
|
64 | .filter(UserFollowing.follows_repository == self.db_repo).scalar() | |
65 |
|
65 | |||
66 | c.has_origin_repo_read_perm = False |
|
66 | c.has_origin_repo_read_perm = False | |
67 | if self.db_repo.fork: |
|
67 | if self.db_repo.fork: | |
68 | c.has_origin_repo_read_perm = h.HasRepoPermissionAny( |
|
68 | c.has_origin_repo_read_perm = h.HasRepoPermissionAny( | |
69 | 'repository.write', 'repository.read', 'repository.admin')( |
|
69 | 'repository.write', 'repository.read', 'repository.admin')( | |
70 | self.db_repo.fork.repo_name, 'repo set as fork page') |
|
70 | self.db_repo.fork.repo_name, 'repo set as fork page') | |
71 |
|
71 | |||
72 | return self._get_template_context(c) |
|
72 | return self._get_template_context(c) | |
73 |
|
73 | |||
74 | @LoginRequired() |
|
74 | @LoginRequired() | |
75 | @HasRepoPermissionAnyDecorator('repository.admin') |
|
75 | @HasRepoPermissionAnyDecorator('repository.admin') | |
76 | @CSRFRequired() |
|
76 | @CSRFRequired() | |
77 | @view_config( |
|
77 | @view_config( | |
78 | route_name='edit_repo_advanced_delete', request_method='POST', |
|
78 | route_name='edit_repo_advanced_delete', request_method='POST', | |
79 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') |
|
79 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') | |
80 | def edit_advanced_delete(self): |
|
80 | def edit_advanced_delete(self): | |
81 | """ |
|
81 | """ | |
82 | Deletes the repository, or shows warnings if deletion is not possible |
|
82 | Deletes the repository, or shows warnings if deletion is not possible | |
83 | because of attached forks or other errors. |
|
83 | because of attached forks or other errors. | |
84 | """ |
|
84 | """ | |
85 | _ = self.request.translate |
|
85 | _ = self.request.translate | |
86 | handle_forks = self.request.POST.get('forks', None) |
|
86 | handle_forks = self.request.POST.get('forks', None) | |
87 |
|
87 | |||
88 | try: |
|
88 | try: | |
89 | _forks = self.db_repo.forks.count() |
|
89 | _forks = self.db_repo.forks.count() | |
90 | if _forks and handle_forks: |
|
90 | if _forks and handle_forks: | |
91 | if handle_forks == 'detach_forks': |
|
91 | if handle_forks == 'detach_forks': | |
92 | handle_forks = 'detach' |
|
92 | handle_forks = 'detach' | |
93 | h.flash(_('Detached %s forks') % _forks, category='success') |
|
93 | h.flash(_('Detached %s forks') % _forks, category='success') | |
94 | elif handle_forks == 'delete_forks': |
|
94 | elif handle_forks == 'delete_forks': | |
95 | handle_forks = 'delete' |
|
95 | handle_forks = 'delete' | |
96 | h.flash(_('Deleted %s forks') % _forks, category='success') |
|
96 | h.flash(_('Deleted %s forks') % _forks, category='success') | |
97 |
|
97 | |||
98 | old_data = self.db_repo.get_api_data() |
|
98 | old_data = self.db_repo.get_api_data() | |
99 | RepoModel().delete(self.db_repo, forks=handle_forks) |
|
99 | RepoModel().delete(self.db_repo, forks=handle_forks) | |
100 |
|
100 | |||
101 | repo = audit_logger.RepoWrap(repo_id=None, |
|
101 | repo = audit_logger.RepoWrap(repo_id=None, | |
102 | repo_name=self.db_repo.repo_name) |
|
102 | repo_name=self.db_repo.repo_name) | |
103 | audit_logger.store_web( |
|
103 | audit_logger.store_web( | |
104 | 'repo.delete', action_data={'old_data': old_data}, |
|
104 | 'repo.delete', action_data={'old_data': old_data}, | |
105 | user=self._rhodecode_user, repo=repo) |
|
105 | user=self._rhodecode_user, repo=repo) | |
106 |
|
106 | |||
107 | ScmModel().mark_for_invalidation(self.db_repo_name, delete=True) |
|
107 | ScmModel().mark_for_invalidation(self.db_repo_name, delete=True) | |
108 | h.flash( |
|
108 | h.flash( | |
109 | _('Deleted repository `%s`') % self.db_repo_name, |
|
109 | _('Deleted repository `%s`') % self.db_repo_name, | |
110 | category='success') |
|
110 | category='success') | |
111 | Session().commit() |
|
111 | Session().commit() | |
112 | except AttachedForksError: |
|
112 | except AttachedForksError: | |
113 | repo_advanced_url = h.route_path( |
|
113 | repo_advanced_url = h.route_path( | |
114 | 'edit_repo_advanced', repo_name=self.db_repo_name, |
|
114 | 'edit_repo_advanced', repo_name=self.db_repo_name, | |
115 | _anchor='advanced-delete') |
|
115 | _anchor='advanced-delete') | |
116 | delete_anchor = h.link_to(_('detach or delete'), repo_advanced_url) |
|
116 | delete_anchor = h.link_to(_('detach or delete'), repo_advanced_url) | |
117 | h.flash(_('Cannot delete `{repo}` it still contains attached forks. ' |
|
117 | h.flash(_('Cannot delete `{repo}` it still contains attached forks. ' | |
118 | 'Try using {delete_or_detach} option.') |
|
118 | 'Try using {delete_or_detach} option.') | |
119 | .format(repo=self.db_repo_name, delete_or_detach=delete_anchor), |
|
119 | .format(repo=self.db_repo_name, delete_or_detach=delete_anchor), | |
120 | category='warning') |
|
120 | category='warning') | |
121 |
|
121 | |||
122 | # redirect to advanced for forks handle action ? |
|
122 | # redirect to advanced for forks handle action ? | |
123 | raise HTTPFound(repo_advanced_url) |
|
123 | raise HTTPFound(repo_advanced_url) | |
124 |
|
124 | |||
125 | except Exception: |
|
125 | except Exception: | |
126 | log.exception("Exception during deletion of repository") |
|
126 | log.exception("Exception during deletion of repository") | |
127 | h.flash(_('An error occurred during deletion of `%s`') |
|
127 | h.flash(_('An error occurred during deletion of `%s`') | |
128 | % self.db_repo_name, category='error') |
|
128 | % self.db_repo_name, category='error') | |
129 | # redirect to advanced for more deletion options |
|
129 | # redirect to advanced for more deletion options | |
130 | raise HTTPFound( |
|
130 | raise HTTPFound( | |
131 | h.route_path('edit_repo_advanced', repo_name=self.db_repo_name), |
|
131 | h.route_path('edit_repo_advanced', repo_name=self.db_repo_name), | |
132 | _anchor='advanced-delete') |
|
132 | _anchor='advanced-delete') | |
133 |
|
133 | |||
134 | raise HTTPFound(h.route_path('home')) |
|
134 | raise HTTPFound(h.route_path('home')) | |
135 |
|
135 | |||
136 | @LoginRequired() |
|
136 | @LoginRequired() | |
137 | @HasRepoPermissionAnyDecorator('repository.admin') |
|
137 | @HasRepoPermissionAnyDecorator('repository.admin') | |
138 | @CSRFRequired() |
|
138 | @CSRFRequired() | |
139 | @view_config( |
|
139 | @view_config( | |
140 | route_name='edit_repo_advanced_journal', request_method='POST', |
|
140 | route_name='edit_repo_advanced_journal', request_method='POST', | |
141 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') |
|
141 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') | |
142 | def edit_advanced_journal(self): |
|
142 | def edit_advanced_journal(self): | |
143 | """ |
|
143 | """ | |
144 | Set's this repository to be visible in public journal, |
|
144 | Set's this repository to be visible in public journal, | |
145 | in other words making default user to follow this repo |
|
145 | in other words making default user to follow this repo | |
146 | """ |
|
146 | """ | |
147 | _ = self.request.translate |
|
147 | _ = self.request.translate | |
148 |
|
148 | |||
149 | try: |
|
149 | try: | |
150 | user_id = User.get_default_user().user_id |
|
150 | user_id = User.get_default_user().user_id | |
151 | ScmModel().toggle_following_repo(self.db_repo.repo_id, user_id) |
|
151 | ScmModel().toggle_following_repo(self.db_repo.repo_id, user_id) | |
152 | h.flash(_('Updated repository visibility in public journal'), |
|
152 | h.flash(_('Updated repository visibility in public journal'), | |
153 | category='success') |
|
153 | category='success') | |
154 | Session().commit() |
|
154 | Session().commit() | |
155 | except Exception: |
|
155 | except Exception: | |
156 | h.flash(_('An error occurred during setting this ' |
|
156 | h.flash(_('An error occurred during setting this ' | |
157 | 'repository in public journal'), |
|
157 | 'repository in public journal'), | |
158 | category='error') |
|
158 | category='error') | |
159 |
|
159 | |||
160 | raise HTTPFound( |
|
160 | raise HTTPFound( | |
161 | h.route_path('edit_repo_advanced', repo_name=self.db_repo_name)) |
|
161 | h.route_path('edit_repo_advanced', repo_name=self.db_repo_name)) | |
162 |
|
162 | |||
163 | @LoginRequired() |
|
163 | @LoginRequired() | |
164 | @HasRepoPermissionAnyDecorator('repository.admin') |
|
164 | @HasRepoPermissionAnyDecorator('repository.admin') | |
165 | @CSRFRequired() |
|
165 | @CSRFRequired() | |
166 | @view_config( |
|
166 | @view_config( | |
167 | route_name='edit_repo_advanced_fork', request_method='POST', |
|
167 | route_name='edit_repo_advanced_fork', request_method='POST', | |
168 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') |
|
168 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') | |
169 | def edit_advanced_fork(self): |
|
169 | def edit_advanced_fork(self): | |
170 | """ |
|
170 | """ | |
171 | Mark given repository as a fork of another |
|
171 | Mark given repository as a fork of another | |
172 | """ |
|
172 | """ | |
173 | _ = self.request.translate |
|
173 | _ = self.request.translate | |
174 |
|
174 | |||
175 | new_fork_id = self.request.POST.get('id_fork_of') |
|
175 | new_fork_id = self.request.POST.get('id_fork_of') | |
176 | try: |
|
176 | try: | |
177 |
|
177 | |||
178 | if new_fork_id and not new_fork_id.isdigit(): |
|
178 | if new_fork_id and not new_fork_id.isdigit(): | |
179 | log.error('Given fork id %s is not an INT', new_fork_id) |
|
179 | log.error('Given fork id %s is not an INT', new_fork_id) | |
180 |
|
180 | |||
181 | fork_id = safe_int(new_fork_id) |
|
181 | fork_id = safe_int(new_fork_id) | |
182 | repo = ScmModel().mark_as_fork( |
|
182 | repo = ScmModel().mark_as_fork( | |
183 | self.db_repo_name, fork_id, self._rhodecode_user.user_id) |
|
183 | self.db_repo_name, fork_id, self._rhodecode_user.user_id) | |
184 | fork = repo.fork.repo_name if repo.fork else _('Nothing') |
|
184 | fork = repo.fork.repo_name if repo.fork else _('Nothing') | |
185 | Session().commit() |
|
185 | Session().commit() | |
186 | h.flash(_('Marked repo %s as fork of %s') % (self.db_repo_name, fork), |
|
186 | h.flash(_('Marked repo %s as fork of %s') % (self.db_repo_name, fork), | |
187 | category='success') |
|
187 | category='success') | |
188 | except RepositoryError as e: |
|
188 | except RepositoryError as e: | |
189 | log.exception("Repository Error occurred") |
|
189 | log.exception("Repository Error occurred") | |
190 | h.flash(str(e), category='error') |
|
190 | h.flash(str(e), category='error') | |
191 | except Exception as e: |
|
191 | except Exception as e: | |
192 | log.exception("Exception while editing fork") |
|
192 | log.exception("Exception while editing fork") | |
193 | h.flash(_('An error occurred during this operation'), |
|
193 | h.flash(_('An error occurred during this operation'), | |
194 | category='error') |
|
194 | category='error') | |
195 |
|
195 | |||
196 | raise HTTPFound( |
|
196 | raise HTTPFound( | |
197 | h.route_path('edit_repo_advanced', repo_name=self.db_repo_name)) |
|
197 | h.route_path('edit_repo_advanced', repo_name=self.db_repo_name)) | |
198 |
|
198 | |||
199 | @LoginRequired() |
|
199 | @LoginRequired() | |
200 | @HasRepoPermissionAnyDecorator('repository.admin') |
|
200 | @HasRepoPermissionAnyDecorator('repository.admin') | |
201 | @CSRFRequired() |
|
201 | @CSRFRequired() | |
202 | @view_config( |
|
202 | @view_config( | |
203 | route_name='edit_repo_advanced_locking', request_method='POST', |
|
203 | route_name='edit_repo_advanced_locking', request_method='POST', | |
204 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') |
|
204 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') | |
205 | def edit_advanced_locking(self): |
|
205 | def edit_advanced_locking(self): | |
206 | """ |
|
206 | """ | |
207 | Toggle locking of repository |
|
207 | Toggle locking of repository | |
208 | """ |
|
208 | """ | |
209 | _ = self.request.translate |
|
209 | _ = self.request.translate | |
210 | set_lock = self.request.POST.get('set_lock') |
|
210 | set_lock = self.request.POST.get('set_lock') | |
211 | set_unlock = self.request.POST.get('set_unlock') |
|
211 | set_unlock = self.request.POST.get('set_unlock') | |
212 |
|
212 | |||
213 | try: |
|
213 | try: | |
214 | if set_lock: |
|
214 | if set_lock: | |
215 | Repository.lock(self.db_repo, self._rhodecode_user.user_id, |
|
215 | Repository.lock(self.db_repo, self._rhodecode_user.user_id, | |
216 | lock_reason=Repository.LOCK_WEB) |
|
216 | lock_reason=Repository.LOCK_WEB) | |
217 | h.flash(_('Locked repository'), category='success') |
|
217 | h.flash(_('Locked repository'), category='success') | |
218 | elif set_unlock: |
|
218 | elif set_unlock: | |
219 | Repository.unlock(self.db_repo) |
|
219 | Repository.unlock(self.db_repo) | |
220 | h.flash(_('Unlocked repository'), category='success') |
|
220 | h.flash(_('Unlocked repository'), category='success') | |
221 | except Exception as e: |
|
221 | except Exception as e: | |
222 | log.exception("Exception during unlocking") |
|
222 | log.exception("Exception during unlocking") | |
223 | h.flash(_('An error occurred during unlocking'), category='error') |
|
223 | h.flash(_('An error occurred during unlocking'), category='error') | |
224 |
|
224 | |||
225 | raise HTTPFound( |
|
225 | raise HTTPFound( | |
226 | h.route_path('edit_repo_advanced', repo_name=self.db_repo_name)) |
|
226 | h.route_path('edit_repo_advanced', repo_name=self.db_repo_name)) |
@@ -1,116 +1,116 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2017-2017 RhodeCode GmbH |
|
3 | # Copyright (C) 2017-2017 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import logging |
|
21 | import logging | |
22 | from pyramid.view import view_config |
|
22 | from pyramid.view import view_config | |
23 |
|
23 | |||
24 | from rhodecode.apps._base import RepoAppView |
|
24 | from rhodecode.apps._base import RepoAppView | |
25 | from rhodecode.lib import audit_logger |
|
25 | from rhodecode.lib import audit_logger | |
26 | from rhodecode.lib import helpers as h |
|
26 | from rhodecode.lib import helpers as h | |
27 | from rhodecode.lib.auth import (LoginRequired, HasRepoPermissionAnyDecorator, |
|
27 | from rhodecode.lib.auth import ( | |
28 | NotAnonymous, CSRFRequired) |
|
28 | LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired) | |
29 | from rhodecode.lib.ext_json import json |
|
29 | from rhodecode.lib.ext_json import json | |
30 |
|
30 | |||
31 | log = logging.getLogger(__name__) |
|
31 | log = logging.getLogger(__name__) | |
32 |
|
32 | |||
33 |
|
33 | |||
34 | class StripView(RepoAppView): |
|
34 | class StripView(RepoAppView): | |
35 | def load_default_context(self): |
|
35 | def load_default_context(self): | |
36 | c = self._get_local_tmpl_context() |
|
36 | c = self._get_local_tmpl_context() | |
37 |
|
37 | |||
38 | # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead |
|
38 | # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead | |
39 | c.repo_info = self.db_repo |
|
39 | c.repo_info = self.db_repo | |
40 |
|
40 | |||
41 | self._register_global_c(c) |
|
41 | self._register_global_c(c) | |
42 | return c |
|
42 | return c | |
43 |
|
43 | |||
44 | @LoginRequired() |
|
44 | @LoginRequired() | |
45 | @HasRepoPermissionAnyDecorator('repository.admin') |
|
45 | @HasRepoPermissionAnyDecorator('repository.admin') | |
46 | @view_config( |
|
46 | @view_config( | |
47 | route_name='strip', request_method='GET', |
|
47 | route_name='edit_repo_strip', request_method='GET', | |
48 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') |
|
48 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') | |
49 | def strip(self): |
|
49 | def strip(self): | |
50 | c = self.load_default_context() |
|
50 | c = self.load_default_context() | |
51 | c.active = 'strip' |
|
51 | c.active = 'strip' | |
52 | c.strip_limit = 10 |
|
52 | c.strip_limit = 10 | |
53 |
|
53 | |||
54 | return self._get_template_context(c) |
|
54 | return self._get_template_context(c) | |
55 |
|
55 | |||
56 | @LoginRequired() |
|
56 | @LoginRequired() | |
57 | @HasRepoPermissionAnyDecorator('repository.admin') |
|
57 | @HasRepoPermissionAnyDecorator('repository.admin') | |
58 | @CSRFRequired() |
|
58 | @CSRFRequired() | |
59 | @view_config( |
|
59 | @view_config( | |
60 | route_name='strip_check', request_method='POST', |
|
60 | route_name='strip_check', request_method='POST', | |
61 | renderer='json', xhr=True) |
|
61 | renderer='json', xhr=True) | |
62 | def strip_check(self): |
|
62 | def strip_check(self): | |
63 | from rhodecode.lib.vcs.backends.base import EmptyCommit |
|
63 | from rhodecode.lib.vcs.backends.base import EmptyCommit | |
64 | data = {} |
|
64 | data = {} | |
65 | rp = self.request.POST |
|
65 | rp = self.request.POST | |
66 | for i in range(1, 11): |
|
66 | for i in range(1, 11): | |
67 | chset = 'changeset_id-%d' % (i,) |
|
67 | chset = 'changeset_id-%d' % (i,) | |
68 | check = rp.get(chset) |
|
68 | check = rp.get(chset) | |
69 |
|
69 | |||
70 | if check: |
|
70 | if check: | |
71 | data[i] = self.db_repo.get_changeset(rp[chset]) |
|
71 | data[i] = self.db_repo.get_changeset(rp[chset]) | |
72 | if isinstance(data[i], EmptyCommit): |
|
72 | if isinstance(data[i], EmptyCommit): | |
73 | data[i] = {'rev': None, 'commit': h.escape(rp[chset])} |
|
73 | data[i] = {'rev': None, 'commit': h.escape(rp[chset])} | |
74 | else: |
|
74 | else: | |
75 | data[i] = {'rev': data[i].raw_id, 'branch': data[i].branch, |
|
75 | data[i] = {'rev': data[i].raw_id, 'branch': data[i].branch, | |
76 | 'author': data[i].author, |
|
76 | 'author': data[i].author, | |
77 | 'comment': data[i].message} |
|
77 | 'comment': data[i].message} | |
78 | else: |
|
78 | else: | |
79 | break |
|
79 | break | |
80 | return data |
|
80 | return data | |
81 |
|
81 | |||
82 | @LoginRequired() |
|
82 | @LoginRequired() | |
83 | @HasRepoPermissionAnyDecorator('repository.admin') |
|
83 | @HasRepoPermissionAnyDecorator('repository.admin') | |
84 | @CSRFRequired() |
|
84 | @CSRFRequired() | |
85 | @view_config( |
|
85 | @view_config( | |
86 | route_name='strip_execute', request_method='POST', |
|
86 | route_name='strip_execute', request_method='POST', | |
87 | renderer='json', xhr=True) |
|
87 | renderer='json', xhr=True) | |
88 | def strip_execute(self): |
|
88 | def strip_execute(self): | |
89 | from rhodecode.model.scm import ScmModel |
|
89 | from rhodecode.model.scm import ScmModel | |
90 |
|
90 | |||
91 | c = self.load_default_context() |
|
91 | c = self.load_default_context() | |
92 | user = self._rhodecode_user |
|
92 | user = self._rhodecode_user | |
93 | rp = self.request.POST |
|
93 | rp = self.request.POST | |
94 | data = {} |
|
94 | data = {} | |
95 | for idx in rp: |
|
95 | for idx in rp: | |
96 | commit = json.loads(rp[idx]) |
|
96 | commit = json.loads(rp[idx]) | |
97 | # If someone put two times the same branch |
|
97 | # If someone put two times the same branch | |
98 | if commit['branch'] in data.keys(): |
|
98 | if commit['branch'] in data.keys(): | |
99 | continue |
|
99 | continue | |
100 | try: |
|
100 | try: | |
101 | ScmModel().strip( |
|
101 | ScmModel().strip( | |
102 |
repo= |
|
102 | repo=self.db_repo, | |
103 | commit_id=commit['rev'], branch=commit['branch']) |
|
103 | commit_id=commit['rev'], branch=commit['branch']) | |
104 | log.info('Stripped commit %s from repo `%s` by %s' % ( |
|
104 | log.info('Stripped commit %s from repo `%s` by %s' % ( | |
105 |
commit['rev'], |
|
105 | commit['rev'], self.db_repo_name, user)) | |
106 | data[commit['rev']] = True |
|
106 | data[commit['rev']] = True | |
107 |
|
107 | |||
108 | audit_logger.store_web( |
|
108 | audit_logger.store_web( | |
109 | 'repo.commit.strip', action_data={'commit_id': commit['rev']}, |
|
109 | 'repo.commit.strip', action_data={'commit_id': commit['rev']}, | |
110 | repo=self.db_repo, user=self._rhodecode_user, commit=True) |
|
110 | repo=self.db_repo, user=self._rhodecode_user, commit=True) | |
111 |
|
111 | |||
112 | except Exception as e: |
|
112 | except Exception as e: | |
113 | data[commit['rev']] = False |
|
113 | data[commit['rev']] = False | |
114 | log.debug('Stripped commit %s from repo `%s` failed by %s, exeption %s' % ( |
|
114 | log.debug('Stripped commit %s from repo `%s` failed by %s, exeption %s' % ( | |
115 | commit['rev'], self.db_repo_name, user, e.message)) |
|
115 | commit['rev'], self.db_repo_name, user, e.message)) | |
116 | return data |
|
116 | return data |
@@ -1,370 +1,372 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2011-2017 RhodeCode GmbH |
|
3 | # Copyright (C) 2011-2017 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import logging |
|
21 | import logging | |
22 | import string |
|
22 | import string | |
23 |
|
23 | |||
24 | from pyramid.view import view_config |
|
24 | from pyramid.view import view_config | |
25 |
|
||||
26 | from beaker.cache import cache_region |
|
25 | from beaker.cache import cache_region | |
27 |
|
26 | |||
28 |
|
||||
29 | from rhodecode.controllers import utils |
|
27 | from rhodecode.controllers import utils | |
30 |
|
||||
31 | from rhodecode.apps._base import RepoAppView |
|
28 | from rhodecode.apps._base import RepoAppView | |
32 | from rhodecode.config.conf import (LANGUAGES_EXTENSIONS_MAP) |
|
29 | from rhodecode.config.conf import (LANGUAGES_EXTENSIONS_MAP) | |
33 | from rhodecode.lib import caches, helpers as h |
|
30 | from rhodecode.lib import caches, helpers as h | |
34 | from rhodecode.lib.helpers import RepoPage |
|
31 | from rhodecode.lib.helpers import RepoPage | |
35 | from rhodecode.lib.utils2 import safe_str, safe_int |
|
32 | from rhodecode.lib.utils2 import safe_str, safe_int | |
36 | from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator |
|
33 | from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator | |
37 | from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links |
|
34 | from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links | |
38 | from rhodecode.lib.ext_json import json |
|
35 | from rhodecode.lib.ext_json import json | |
39 | from rhodecode.lib.vcs.backends.base import EmptyCommit |
|
36 | from rhodecode.lib.vcs.backends.base import EmptyCommit | |
40 | from rhodecode.lib.vcs.exceptions import CommitError, EmptyRepositoryError |
|
37 | from rhodecode.lib.vcs.exceptions import CommitError, EmptyRepositoryError | |
41 | from rhodecode.model.db import Statistics, CacheKey, User |
|
38 | from rhodecode.model.db import Statistics, CacheKey, User | |
42 | from rhodecode.model.meta import Session |
|
39 | from rhodecode.model.meta import Session | |
43 | from rhodecode.model.repo import ReadmeFinder |
|
40 | from rhodecode.model.repo import ReadmeFinder | |
44 | from rhodecode.model.scm import ScmModel |
|
41 | from rhodecode.model.scm import ScmModel | |
45 |
|
42 | |||
46 | log = logging.getLogger(__name__) |
|
43 | log = logging.getLogger(__name__) | |
47 |
|
44 | |||
48 |
|
45 | |||
49 | class RepoSummaryView(RepoAppView): |
|
46 | class RepoSummaryView(RepoAppView): | |
50 |
|
47 | |||
51 | def load_default_context(self): |
|
48 | def load_default_context(self): | |
52 | c = self._get_local_tmpl_context(include_app_defaults=True) |
|
49 | c = self._get_local_tmpl_context(include_app_defaults=True) | |
53 |
|
50 | |||
54 | # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead |
|
51 | # TODO(marcink): remove repo_info and use c.rhodecode_db_repo instead | |
55 | c.repo_info = self.db_repo |
|
52 | c.repo_info = self.db_repo | |
56 | c.rhodecode_repo = None |
|
53 | c.rhodecode_repo = None | |
57 | if not c.repository_requirements_missing: |
|
54 | if not c.repository_requirements_missing: | |
58 | c.rhodecode_repo = self.rhodecode_vcs_repo |
|
55 | c.rhodecode_repo = self.rhodecode_vcs_repo | |
59 |
|
56 | |||
60 | self._register_global_c(c) |
|
57 | self._register_global_c(c) | |
61 | return c |
|
58 | return c | |
62 |
|
59 | |||
63 | def _get_readme_data(self, db_repo, default_renderer): |
|
60 | def _get_readme_data(self, db_repo, default_renderer): | |
64 | repo_name = db_repo.repo_name |
|
61 | repo_name = db_repo.repo_name | |
65 | log.debug('Looking for README file') |
|
62 | log.debug('Looking for README file') | |
66 |
|
63 | |||
67 | @cache_region('long_term') |
|
64 | @cache_region('long_term') | |
68 | def _generate_readme(cache_key): |
|
65 | def _generate_readme(cache_key): | |
69 | readme_data = None |
|
66 | readme_data = None | |
70 | readme_node = None |
|
67 | readme_node = None | |
71 | readme_filename = None |
|
68 | readme_filename = None | |
72 | commit = self._get_landing_commit_or_none(db_repo) |
|
69 | commit = self._get_landing_commit_or_none(db_repo) | |
73 | if commit: |
|
70 | if commit: | |
74 | log.debug("Searching for a README file.") |
|
71 | log.debug("Searching for a README file.") | |
75 | readme_node = ReadmeFinder(default_renderer).search(commit) |
|
72 | readme_node = ReadmeFinder(default_renderer).search(commit) | |
76 | if readme_node: |
|
73 | if readme_node: | |
77 |
relative_url = |
|
74 | relative_urls = { | |
|
75 | 'raw': h.route_path( | |||
78 | 'repo_file_raw', repo_name=repo_name, |
|
76 | 'repo_file_raw', repo_name=repo_name, | |
79 | commit_id=commit.raw_id, f_path=readme_node.path) |
|
77 | commit_id=commit.raw_id, f_path=readme_node.path), | |
|
78 | 'standard': h.route_path( | |||
|
79 | 'repo_files', repo_name=repo_name, | |||
|
80 | commit_id=commit.raw_id, f_path=readme_node.path), | |||
|
81 | } | |||
80 | readme_data = self._render_readme_or_none( |
|
82 | readme_data = self._render_readme_or_none( | |
81 | commit, readme_node, relative_url) |
|
83 | commit, readme_node, relative_urls) | |
82 | readme_filename = readme_node.path |
|
84 | readme_filename = readme_node.path | |
83 | return readme_data, readme_filename |
|
85 | return readme_data, readme_filename | |
84 |
|
86 | |||
85 | invalidator_context = CacheKey.repo_context_cache( |
|
87 | invalidator_context = CacheKey.repo_context_cache( | |
86 | _generate_readme, repo_name, CacheKey.CACHE_TYPE_README) |
|
88 | _generate_readme, repo_name, CacheKey.CACHE_TYPE_README) | |
87 |
|
89 | |||
88 | with invalidator_context as context: |
|
90 | with invalidator_context as context: | |
89 | context.invalidate() |
|
91 | context.invalidate() | |
90 | computed = context.compute() |
|
92 | computed = context.compute() | |
91 |
|
93 | |||
92 | return computed |
|
94 | return computed | |
93 |
|
95 | |||
94 | def _get_landing_commit_or_none(self, db_repo): |
|
96 | def _get_landing_commit_or_none(self, db_repo): | |
95 | log.debug("Getting the landing commit.") |
|
97 | log.debug("Getting the landing commit.") | |
96 | try: |
|
98 | try: | |
97 | commit = db_repo.get_landing_commit() |
|
99 | commit = db_repo.get_landing_commit() | |
98 | if not isinstance(commit, EmptyCommit): |
|
100 | if not isinstance(commit, EmptyCommit): | |
99 | return commit |
|
101 | return commit | |
100 | else: |
|
102 | else: | |
101 | log.debug("Repository is empty, no README to render.") |
|
103 | log.debug("Repository is empty, no README to render.") | |
102 | except CommitError: |
|
104 | except CommitError: | |
103 | log.exception( |
|
105 | log.exception( | |
104 | "Problem getting commit when trying to render the README.") |
|
106 | "Problem getting commit when trying to render the README.") | |
105 |
|
107 | |||
106 | def _render_readme_or_none(self, commit, readme_node, relative_url): |
|
108 | def _render_readme_or_none(self, commit, readme_node, relative_urls): | |
107 | log.debug( |
|
109 | log.debug( | |
108 | 'Found README file `%s` rendering...', readme_node.path) |
|
110 | 'Found README file `%s` rendering...', readme_node.path) | |
109 | renderer = MarkupRenderer() |
|
111 | renderer = MarkupRenderer() | |
110 | try: |
|
112 | try: | |
111 | html_source = renderer.render( |
|
113 | html_source = renderer.render( | |
112 | readme_node.content, filename=readme_node.path) |
|
114 | readme_node.content, filename=readme_node.path) | |
113 | if relative_url: |
|
115 | if relative_urls: | |
114 | return relative_links(html_source, relative_url) |
|
116 | return relative_links(html_source, relative_urls) | |
115 | return html_source |
|
117 | return html_source | |
116 | except Exception: |
|
118 | except Exception: | |
117 | log.exception( |
|
119 | log.exception( | |
118 | "Exception while trying to render the README") |
|
120 | "Exception while trying to render the README") | |
119 |
|
121 | |||
120 | def _load_commits_context(self, c): |
|
122 | def _load_commits_context(self, c): | |
121 | p = safe_int(self.request.GET.get('page'), 1) |
|
123 | p = safe_int(self.request.GET.get('page'), 1) | |
122 | size = safe_int(self.request.GET.get('size'), 10) |
|
124 | size = safe_int(self.request.GET.get('size'), 10) | |
123 |
|
125 | |||
124 | def url_generator(**kw): |
|
126 | def url_generator(**kw): | |
125 | query_params = { |
|
127 | query_params = { | |
126 | 'size': size |
|
128 | 'size': size | |
127 | } |
|
129 | } | |
128 | query_params.update(kw) |
|
130 | query_params.update(kw) | |
129 | return h.route_path( |
|
131 | return h.route_path( | |
130 | 'repo_summary_commits', |
|
132 | 'repo_summary_commits', | |
131 | repo_name=c.rhodecode_db_repo.repo_name, _query=query_params) |
|
133 | repo_name=c.rhodecode_db_repo.repo_name, _query=query_params) | |
132 |
|
134 | |||
133 | pre_load = ['author', 'branch', 'date', 'message'] |
|
135 | pre_load = ['author', 'branch', 'date', 'message'] | |
134 | try: |
|
136 | try: | |
135 | collection = self.rhodecode_vcs_repo.get_commits(pre_load=pre_load) |
|
137 | collection = self.rhodecode_vcs_repo.get_commits(pre_load=pre_load) | |
136 | except EmptyRepositoryError: |
|
138 | except EmptyRepositoryError: | |
137 | collection = self.rhodecode_vcs_repo |
|
139 | collection = self.rhodecode_vcs_repo | |
138 |
|
140 | |||
139 | c.repo_commits = RepoPage( |
|
141 | c.repo_commits = RepoPage( | |
140 | collection, page=p, items_per_page=size, url=url_generator) |
|
142 | collection, page=p, items_per_page=size, url=url_generator) | |
141 | page_ids = [x.raw_id for x in c.repo_commits] |
|
143 | page_ids = [x.raw_id for x in c.repo_commits] | |
142 | c.comments = self.db_repo.get_comments(page_ids) |
|
144 | c.comments = self.db_repo.get_comments(page_ids) | |
143 | c.statuses = self.db_repo.statuses(page_ids) |
|
145 | c.statuses = self.db_repo.statuses(page_ids) | |
144 |
|
146 | |||
145 | @LoginRequired() |
|
147 | @LoginRequired() | |
146 | @HasRepoPermissionAnyDecorator( |
|
148 | @HasRepoPermissionAnyDecorator( | |
147 | 'repository.read', 'repository.write', 'repository.admin') |
|
149 | 'repository.read', 'repository.write', 'repository.admin') | |
148 | @view_config( |
|
150 | @view_config( | |
149 | route_name='repo_summary_commits', request_method='GET', |
|
151 | route_name='repo_summary_commits', request_method='GET', | |
150 | renderer='rhodecode:templates/summary/summary_commits.mako') |
|
152 | renderer='rhodecode:templates/summary/summary_commits.mako') | |
151 | def summary_commits(self): |
|
153 | def summary_commits(self): | |
152 | c = self.load_default_context() |
|
154 | c = self.load_default_context() | |
153 | self._load_commits_context(c) |
|
155 | self._load_commits_context(c) | |
154 | return self._get_template_context(c) |
|
156 | return self._get_template_context(c) | |
155 |
|
157 | |||
156 | @LoginRequired() |
|
158 | @LoginRequired() | |
157 | @HasRepoPermissionAnyDecorator( |
|
159 | @HasRepoPermissionAnyDecorator( | |
158 | 'repository.read', 'repository.write', 'repository.admin') |
|
160 | 'repository.read', 'repository.write', 'repository.admin') | |
159 | @view_config( |
|
161 | @view_config( | |
160 | route_name='repo_summary', request_method='GET', |
|
162 | route_name='repo_summary', request_method='GET', | |
161 | renderer='rhodecode:templates/summary/summary.mako') |
|
163 | renderer='rhodecode:templates/summary/summary.mako') | |
162 | @view_config( |
|
164 | @view_config( | |
163 | route_name='repo_summary_slash', request_method='GET', |
|
165 | route_name='repo_summary_slash', request_method='GET', | |
164 | renderer='rhodecode:templates/summary/summary.mako') |
|
166 | renderer='rhodecode:templates/summary/summary.mako') | |
165 | @view_config( |
|
167 | @view_config( | |
166 | route_name='repo_summary_explicit', request_method='GET', |
|
168 | route_name='repo_summary_explicit', request_method='GET', | |
167 | renderer='rhodecode:templates/summary/summary.mako') |
|
169 | renderer='rhodecode:templates/summary/summary.mako') | |
168 | def summary(self): |
|
170 | def summary(self): | |
169 | c = self.load_default_context() |
|
171 | c = self.load_default_context() | |
170 |
|
172 | |||
171 | # Prepare the clone URL |
|
173 | # Prepare the clone URL | |
172 | username = '' |
|
174 | username = '' | |
173 | if self._rhodecode_user.username != User.DEFAULT_USER: |
|
175 | if self._rhodecode_user.username != User.DEFAULT_USER: | |
174 | username = safe_str(self._rhodecode_user.username) |
|
176 | username = safe_str(self._rhodecode_user.username) | |
175 |
|
177 | |||
176 | _def_clone_uri = _def_clone_uri_by_id = c.clone_uri_tmpl |
|
178 | _def_clone_uri = _def_clone_uri_by_id = c.clone_uri_tmpl | |
177 | if '{repo}' in _def_clone_uri: |
|
179 | if '{repo}' in _def_clone_uri: | |
178 | _def_clone_uri_by_id = _def_clone_uri.replace( |
|
180 | _def_clone_uri_by_id = _def_clone_uri.replace( | |
179 | '{repo}', '_{repoid}') |
|
181 | '{repo}', '_{repoid}') | |
180 | elif '{repoid}' in _def_clone_uri: |
|
182 | elif '{repoid}' in _def_clone_uri: | |
181 | _def_clone_uri_by_id = _def_clone_uri.replace( |
|
183 | _def_clone_uri_by_id = _def_clone_uri.replace( | |
182 | '_{repoid}', '{repo}') |
|
184 | '_{repoid}', '{repo}') | |
183 |
|
185 | |||
184 | c.clone_repo_url = self.db_repo.clone_url( |
|
186 | c.clone_repo_url = self.db_repo.clone_url( | |
185 | user=username, uri_tmpl=_def_clone_uri) |
|
187 | user=username, uri_tmpl=_def_clone_uri) | |
186 | c.clone_repo_url_id = self.db_repo.clone_url( |
|
188 | c.clone_repo_url_id = self.db_repo.clone_url( | |
187 | user=username, uri_tmpl=_def_clone_uri_by_id) |
|
189 | user=username, uri_tmpl=_def_clone_uri_by_id) | |
188 |
|
190 | |||
189 | # If enabled, get statistics data |
|
191 | # If enabled, get statistics data | |
190 |
|
192 | |||
191 | c.show_stats = bool(self.db_repo.enable_statistics) |
|
193 | c.show_stats = bool(self.db_repo.enable_statistics) | |
192 |
|
194 | |||
193 | stats = Session().query(Statistics) \ |
|
195 | stats = Session().query(Statistics) \ | |
194 | .filter(Statistics.repository == self.db_repo) \ |
|
196 | .filter(Statistics.repository == self.db_repo) \ | |
195 | .scalar() |
|
197 | .scalar() | |
196 |
|
198 | |||
197 | c.stats_percentage = 0 |
|
199 | c.stats_percentage = 0 | |
198 |
|
200 | |||
199 | if stats and stats.languages: |
|
201 | if stats and stats.languages: | |
200 | c.no_data = False is self.db_repo.enable_statistics |
|
202 | c.no_data = False is self.db_repo.enable_statistics | |
201 | lang_stats_d = json.loads(stats.languages) |
|
203 | lang_stats_d = json.loads(stats.languages) | |
202 |
|
204 | |||
203 | # Sort first by decreasing count and second by the file extension, |
|
205 | # Sort first by decreasing count and second by the file extension, | |
204 | # so we have a consistent output. |
|
206 | # so we have a consistent output. | |
205 | lang_stats_items = sorted(lang_stats_d.iteritems(), |
|
207 | lang_stats_items = sorted(lang_stats_d.iteritems(), | |
206 | key=lambda k: (-k[1], k[0]))[:10] |
|
208 | key=lambda k: (-k[1], k[0]))[:10] | |
207 | lang_stats = [(x, {"count": y, |
|
209 | lang_stats = [(x, {"count": y, | |
208 | "desc": LANGUAGES_EXTENSIONS_MAP.get(x)}) |
|
210 | "desc": LANGUAGES_EXTENSIONS_MAP.get(x)}) | |
209 | for x, y in lang_stats_items] |
|
211 | for x, y in lang_stats_items] | |
210 |
|
212 | |||
211 | c.trending_languages = json.dumps(lang_stats) |
|
213 | c.trending_languages = json.dumps(lang_stats) | |
212 | else: |
|
214 | else: | |
213 | c.no_data = True |
|
215 | c.no_data = True | |
214 | c.trending_languages = json.dumps({}) |
|
216 | c.trending_languages = json.dumps({}) | |
215 |
|
217 | |||
216 | scm_model = ScmModel() |
|
218 | scm_model = ScmModel() | |
217 | c.enable_downloads = self.db_repo.enable_downloads |
|
219 | c.enable_downloads = self.db_repo.enable_downloads | |
218 | c.repository_followers = scm_model.get_followers(self.db_repo) |
|
220 | c.repository_followers = scm_model.get_followers(self.db_repo) | |
219 | c.repository_forks = scm_model.get_forks(self.db_repo) |
|
221 | c.repository_forks = scm_model.get_forks(self.db_repo) | |
220 | c.repository_is_user_following = scm_model.is_following_repo( |
|
222 | c.repository_is_user_following = scm_model.is_following_repo( | |
221 | self.db_repo_name, self._rhodecode_user.user_id) |
|
223 | self.db_repo_name, self._rhodecode_user.user_id) | |
222 |
|
224 | |||
223 | # first interaction with the VCS instance after here... |
|
225 | # first interaction with the VCS instance after here... | |
224 | if c.repository_requirements_missing: |
|
226 | if c.repository_requirements_missing: | |
225 | self.request.override_renderer = \ |
|
227 | self.request.override_renderer = \ | |
226 | 'rhodecode:templates/summary/missing_requirements.mako' |
|
228 | 'rhodecode:templates/summary/missing_requirements.mako' | |
227 | return self._get_template_context(c) |
|
229 | return self._get_template_context(c) | |
228 |
|
230 | |||
229 | c.readme_data, c.readme_file = \ |
|
231 | c.readme_data, c.readme_file = \ | |
230 | self._get_readme_data(self.db_repo, c.visual.default_renderer) |
|
232 | self._get_readme_data(self.db_repo, c.visual.default_renderer) | |
231 |
|
233 | |||
232 | # loads the summary commits template context |
|
234 | # loads the summary commits template context | |
233 | self._load_commits_context(c) |
|
235 | self._load_commits_context(c) | |
234 |
|
236 | |||
235 | return self._get_template_context(c) |
|
237 | return self._get_template_context(c) | |
236 |
|
238 | |||
237 | def get_request_commit_id(self): |
|
239 | def get_request_commit_id(self): | |
238 | return self.request.matchdict['commit_id'] |
|
240 | return self.request.matchdict['commit_id'] | |
239 |
|
241 | |||
240 | @LoginRequired() |
|
242 | @LoginRequired() | |
241 | @HasRepoPermissionAnyDecorator( |
|
243 | @HasRepoPermissionAnyDecorator( | |
242 | 'repository.read', 'repository.write', 'repository.admin') |
|
244 | 'repository.read', 'repository.write', 'repository.admin') | |
243 | @view_config( |
|
245 | @view_config( | |
244 | route_name='repo_stats', request_method='GET', |
|
246 | route_name='repo_stats', request_method='GET', | |
245 | renderer='json_ext') |
|
247 | renderer='json_ext') | |
246 | def repo_stats(self): |
|
248 | def repo_stats(self): | |
247 | commit_id = self.get_request_commit_id() |
|
249 | commit_id = self.get_request_commit_id() | |
248 |
|
250 | |||
249 | _namespace = caches.get_repo_namespace_key( |
|
251 | _namespace = caches.get_repo_namespace_key( | |
250 | caches.SUMMARY_STATS, self.db_repo_name) |
|
252 | caches.SUMMARY_STATS, self.db_repo_name) | |
251 | show_stats = bool(self.db_repo.enable_statistics) |
|
253 | show_stats = bool(self.db_repo.enable_statistics) | |
252 | cache_manager = caches.get_cache_manager( |
|
254 | cache_manager = caches.get_cache_manager( | |
253 | 'repo_cache_long', _namespace) |
|
255 | 'repo_cache_long', _namespace) | |
254 | _cache_key = caches.compute_key_from_params( |
|
256 | _cache_key = caches.compute_key_from_params( | |
255 | self.db_repo_name, commit_id, show_stats) |
|
257 | self.db_repo_name, commit_id, show_stats) | |
256 |
|
258 | |||
257 | def compute_stats(): |
|
259 | def compute_stats(): | |
258 | code_stats = {} |
|
260 | code_stats = {} | |
259 | size = 0 |
|
261 | size = 0 | |
260 | try: |
|
262 | try: | |
261 | scm_instance = self.db_repo.scm_instance() |
|
263 | scm_instance = self.db_repo.scm_instance() | |
262 | commit = scm_instance.get_commit(commit_id) |
|
264 | commit = scm_instance.get_commit(commit_id) | |
263 |
|
265 | |||
264 | for node in commit.get_filenodes_generator(): |
|
266 | for node in commit.get_filenodes_generator(): | |
265 | size += node.size |
|
267 | size += node.size | |
266 | if not show_stats: |
|
268 | if not show_stats: | |
267 | continue |
|
269 | continue | |
268 | ext = string.lower(node.extension) |
|
270 | ext = string.lower(node.extension) | |
269 | ext_info = LANGUAGES_EXTENSIONS_MAP.get(ext) |
|
271 | ext_info = LANGUAGES_EXTENSIONS_MAP.get(ext) | |
270 | if ext_info: |
|
272 | if ext_info: | |
271 | if ext in code_stats: |
|
273 | if ext in code_stats: | |
272 | code_stats[ext]['count'] += 1 |
|
274 | code_stats[ext]['count'] += 1 | |
273 | else: |
|
275 | else: | |
274 | code_stats[ext] = {"count": 1, "desc": ext_info} |
|
276 | code_stats[ext] = {"count": 1, "desc": ext_info} | |
275 | except EmptyRepositoryError: |
|
277 | except EmptyRepositoryError: | |
276 | pass |
|
278 | pass | |
277 | return {'size': h.format_byte_size_binary(size), |
|
279 | return {'size': h.format_byte_size_binary(size), | |
278 | 'code_stats': code_stats} |
|
280 | 'code_stats': code_stats} | |
279 |
|
281 | |||
280 | stats = cache_manager.get(_cache_key, createfunc=compute_stats) |
|
282 | stats = cache_manager.get(_cache_key, createfunc=compute_stats) | |
281 | return stats |
|
283 | return stats | |
282 |
|
284 | |||
283 | @LoginRequired() |
|
285 | @LoginRequired() | |
284 | @HasRepoPermissionAnyDecorator( |
|
286 | @HasRepoPermissionAnyDecorator( | |
285 | 'repository.read', 'repository.write', 'repository.admin') |
|
287 | 'repository.read', 'repository.write', 'repository.admin') | |
286 | @view_config( |
|
288 | @view_config( | |
287 | route_name='repo_refs_data', request_method='GET', |
|
289 | route_name='repo_refs_data', request_method='GET', | |
288 | renderer='json_ext') |
|
290 | renderer='json_ext') | |
289 | def repo_refs_data(self): |
|
291 | def repo_refs_data(self): | |
290 | _ = self.request.translate |
|
292 | _ = self.request.translate | |
291 | self.load_default_context() |
|
293 | self.load_default_context() | |
292 |
|
294 | |||
293 | repo = self.rhodecode_vcs_repo |
|
295 | repo = self.rhodecode_vcs_repo | |
294 | refs_to_create = [ |
|
296 | refs_to_create = [ | |
295 | (_("Branch"), repo.branches, 'branch'), |
|
297 | (_("Branch"), repo.branches, 'branch'), | |
296 | (_("Tag"), repo.tags, 'tag'), |
|
298 | (_("Tag"), repo.tags, 'tag'), | |
297 | (_("Bookmark"), repo.bookmarks, 'book'), |
|
299 | (_("Bookmark"), repo.bookmarks, 'book'), | |
298 | ] |
|
300 | ] | |
299 | res = self._create_reference_data( |
|
301 | res = self._create_reference_data( | |
300 | repo, self.db_repo_name, refs_to_create) |
|
302 | repo, self.db_repo_name, refs_to_create) | |
301 | data = { |
|
303 | data = { | |
302 | 'more': False, |
|
304 | 'more': False, | |
303 | 'results': res |
|
305 | 'results': res | |
304 | } |
|
306 | } | |
305 | return data |
|
307 | return data | |
306 |
|
308 | |||
307 | @LoginRequired() |
|
309 | @LoginRequired() | |
308 | @HasRepoPermissionAnyDecorator( |
|
310 | @HasRepoPermissionAnyDecorator( | |
309 | 'repository.read', 'repository.write', 'repository.admin') |
|
311 | 'repository.read', 'repository.write', 'repository.admin') | |
310 | @view_config( |
|
312 | @view_config( | |
311 | route_name='repo_refs_changelog_data', request_method='GET', |
|
313 | route_name='repo_refs_changelog_data', request_method='GET', | |
312 | renderer='json_ext') |
|
314 | renderer='json_ext') | |
313 | def repo_refs_changelog_data(self): |
|
315 | def repo_refs_changelog_data(self): | |
314 | _ = self.request.translate |
|
316 | _ = self.request.translate | |
315 | self.load_default_context() |
|
317 | self.load_default_context() | |
316 |
|
318 | |||
317 | repo = self.rhodecode_vcs_repo |
|
319 | repo = self.rhodecode_vcs_repo | |
318 |
|
320 | |||
319 | refs_to_create = [ |
|
321 | refs_to_create = [ | |
320 | (_("Branches"), repo.branches, 'branch'), |
|
322 | (_("Branches"), repo.branches, 'branch'), | |
321 | (_("Closed branches"), repo.branches_closed, 'branch_closed'), |
|
323 | (_("Closed branches"), repo.branches_closed, 'branch_closed'), | |
322 | # TODO: enable when vcs can handle bookmarks filters |
|
324 | # TODO: enable when vcs can handle bookmarks filters | |
323 | # (_("Bookmarks"), repo.bookmarks, "book"), |
|
325 | # (_("Bookmarks"), repo.bookmarks, "book"), | |
324 | ] |
|
326 | ] | |
325 | res = self._create_reference_data( |
|
327 | res = self._create_reference_data( | |
326 | repo, self.db_repo_name, refs_to_create) |
|
328 | repo, self.db_repo_name, refs_to_create) | |
327 | data = { |
|
329 | data = { | |
328 | 'more': False, |
|
330 | 'more': False, | |
329 | 'results': res |
|
331 | 'results': res | |
330 | } |
|
332 | } | |
331 | return data |
|
333 | return data | |
332 |
|
334 | |||
333 | def _create_reference_data(self, repo, full_repo_name, refs_to_create): |
|
335 | def _create_reference_data(self, repo, full_repo_name, refs_to_create): | |
334 | format_ref_id = utils.get_format_ref_id(repo) |
|
336 | format_ref_id = utils.get_format_ref_id(repo) | |
335 |
|
337 | |||
336 | result = [] |
|
338 | result = [] | |
337 | for title, refs, ref_type in refs_to_create: |
|
339 | for title, refs, ref_type in refs_to_create: | |
338 | if refs: |
|
340 | if refs: | |
339 | result.append({ |
|
341 | result.append({ | |
340 | 'text': title, |
|
342 | 'text': title, | |
341 | 'children': self._create_reference_items( |
|
343 | 'children': self._create_reference_items( | |
342 | repo, full_repo_name, refs, ref_type, |
|
344 | repo, full_repo_name, refs, ref_type, | |
343 | format_ref_id), |
|
345 | format_ref_id), | |
344 | }) |
|
346 | }) | |
345 | return result |
|
347 | return result | |
346 |
|
348 | |||
347 | def _create_reference_items(self, repo, full_repo_name, refs, ref_type, |
|
349 | def _create_reference_items(self, repo, full_repo_name, refs, ref_type, | |
348 | format_ref_id): |
|
350 | format_ref_id): | |
349 | result = [] |
|
351 | result = [] | |
350 | is_svn = h.is_svn(repo) |
|
352 | is_svn = h.is_svn(repo) | |
351 | for ref_name, raw_id in refs.iteritems(): |
|
353 | for ref_name, raw_id in refs.iteritems(): | |
352 | files_url = self._create_files_url( |
|
354 | files_url = self._create_files_url( | |
353 | repo, full_repo_name, ref_name, raw_id, is_svn) |
|
355 | repo, full_repo_name, ref_name, raw_id, is_svn) | |
354 | result.append({ |
|
356 | result.append({ | |
355 | 'text': ref_name, |
|
357 | 'text': ref_name, | |
356 | 'id': format_ref_id(ref_name, raw_id), |
|
358 | 'id': format_ref_id(ref_name, raw_id), | |
357 | 'raw_id': raw_id, |
|
359 | 'raw_id': raw_id, | |
358 | 'type': ref_type, |
|
360 | 'type': ref_type, | |
359 | 'files_url': files_url, |
|
361 | 'files_url': files_url, | |
360 | }) |
|
362 | }) | |
361 | return result |
|
363 | return result | |
362 |
|
364 | |||
363 | def _create_files_url(self, repo, full_repo_name, ref_name, raw_id, is_svn): |
|
365 | def _create_files_url(self, repo, full_repo_name, ref_name, raw_id, is_svn): | |
364 | use_commit_id = '/' in ref_name or is_svn |
|
366 | use_commit_id = '/' in ref_name or is_svn | |
365 | return h.route_path( |
|
367 | return h.route_path( | |
366 | 'repo_files', |
|
368 | 'repo_files', | |
367 | repo_name=full_repo_name, |
|
369 | repo_name=full_repo_name, | |
368 | f_path=ref_name if is_svn else '', |
|
370 | f_path=ref_name if is_svn else '', | |
369 | commit_id=raw_id if use_commit_id else ref_name, |
|
371 | commit_id=raw_id if use_commit_id else ref_name, | |
370 | _query=dict(at=ref_name)) |
|
372 | _query=dict(at=ref_name)) |
@@ -1,180 +1,182 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2017 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | Pylons environment configuration |
|
22 | Pylons environment configuration | |
23 | """ |
|
23 | """ | |
24 |
|
24 | |||
25 | import os |
|
25 | import os | |
26 | import logging |
|
26 | import logging | |
27 | import rhodecode |
|
27 | import rhodecode | |
28 | import platform |
|
28 | import platform | |
29 | import re |
|
29 | import re | |
30 | import io |
|
30 | import io | |
31 |
|
31 | |||
32 | from mako.lookup import TemplateLookup |
|
32 | from mako.lookup import TemplateLookup | |
33 | from pylons.configuration import PylonsConfig |
|
33 | from pylons.configuration import PylonsConfig | |
34 | from pylons.error import handle_mako_error |
|
34 | from pylons.error import handle_mako_error | |
35 | from pyramid.settings import asbool |
|
35 | from pyramid.settings import asbool | |
36 |
|
36 | |||
37 | # ------------------------------------------------------------------------------ |
|
37 | # ------------------------------------------------------------------------------ | |
38 | # CELERY magic until refactor - issue #4163 - import order matters here: |
|
38 | # CELERY magic until refactor - issue #4163 - import order matters here: | |
39 | from rhodecode.lib import celerypylons # this must be first, celerypylons |
|
39 | from rhodecode.lib import celerypylons # this must be first, celerypylons | |
40 | # sets config settings upon import |
|
40 | # sets config settings upon import | |
41 |
|
41 | |||
42 | import rhodecode.integrations # any modules using celery task |
|
42 | import rhodecode.integrations # any modules using celery task | |
43 | # decorators should be added afterwards: |
|
43 | # decorators should be added afterwards: | |
44 | # ------------------------------------------------------------------------------ |
|
44 | # ------------------------------------------------------------------------------ | |
45 |
|
45 | |||
46 | from rhodecode.lib import app_globals |
|
46 | from rhodecode.lib import app_globals | |
47 | from rhodecode.config import utils |
|
47 | from rhodecode.config import utils | |
48 | from rhodecode.config.routing import make_map |
|
48 | from rhodecode.config.routing import make_map | |
49 | from rhodecode.config.jsroutes import generate_jsroutes_content |
|
49 | from rhodecode.config.jsroutes import generate_jsroutes_content | |
50 |
|
50 | |||
51 | from rhodecode.lib import helpers |
|
51 | from rhodecode.lib import helpers | |
52 | from rhodecode.lib.auth import set_available_permissions |
|
52 | from rhodecode.lib.auth import set_available_permissions | |
53 | from rhodecode.lib.utils import ( |
|
53 | from rhodecode.lib.utils import ( | |
54 | repo2db_mapper, make_db_config, set_rhodecode_config, |
|
54 | repo2db_mapper, make_db_config, set_rhodecode_config, | |
55 | load_rcextensions) |
|
55 | load_rcextensions) | |
56 | from rhodecode.lib.utils2 import str2bool, aslist |
|
56 | from rhodecode.lib.utils2 import str2bool, aslist | |
57 | from rhodecode.lib.vcs import connect_vcs, start_vcs_server |
|
57 | from rhodecode.lib.vcs import connect_vcs, start_vcs_server | |
58 | from rhodecode.model.scm import ScmModel |
|
58 | from rhodecode.model.scm import ScmModel | |
59 |
|
59 | |||
60 | log = logging.getLogger(__name__) |
|
60 | log = logging.getLogger(__name__) | |
61 |
|
61 | |||
62 | def load_environment(global_conf, app_conf, initial=False, |
|
62 | def load_environment(global_conf, app_conf, initial=False, | |
63 | test_env=None, test_index=None): |
|
63 | test_env=None, test_index=None): | |
64 | """ |
|
64 | """ | |
65 | Configure the Pylons environment via the ``pylons.config`` |
|
65 | Configure the Pylons environment via the ``pylons.config`` | |
66 | object |
|
66 | object | |
67 | """ |
|
67 | """ | |
68 | config = PylonsConfig() |
|
68 | config = PylonsConfig() | |
69 |
|
69 | |||
70 |
|
70 | |||
71 | # Pylons paths |
|
71 | # Pylons paths | |
72 | root = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) |
|
72 | root = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) | |
73 | paths = { |
|
73 | paths = { | |
74 | 'root': root, |
|
74 | 'root': root, | |
75 | 'controllers': os.path.join(root, 'controllers'), |
|
75 | 'controllers': os.path.join(root, 'controllers'), | |
76 | 'static_files': os.path.join(root, 'public'), |
|
76 | 'static_files': os.path.join(root, 'public'), | |
77 | 'templates': [os.path.join(root, 'templates')], |
|
77 | 'templates': [os.path.join(root, 'templates')], | |
78 | } |
|
78 | } | |
79 |
|
79 | |||
80 | # Initialize config with the basic options |
|
80 | # Initialize config with the basic options | |
81 | config.init_app(global_conf, app_conf, package='rhodecode', paths=paths) |
|
81 | config.init_app(global_conf, app_conf, package='rhodecode', paths=paths) | |
82 |
|
82 | |||
83 | # store some globals into rhodecode |
|
83 | # store some globals into rhodecode | |
84 | rhodecode.CELERY_ENABLED = str2bool(config['app_conf'].get('use_celery')) |
|
84 | rhodecode.CELERY_ENABLED = str2bool(config['app_conf'].get('use_celery')) | |
85 | rhodecode.CELERY_EAGER = str2bool( |
|
85 | rhodecode.CELERY_EAGER = str2bool( | |
86 | config['app_conf'].get('celery.always.eager')) |
|
86 | config['app_conf'].get('celery.always.eager')) | |
87 |
|
87 | |||
88 | config['routes.map'] = make_map(config) |
|
88 | config['routes.map'] = make_map(config) | |
89 |
|
89 | |||
90 | config['pylons.app_globals'] = app_globals.Globals(config) |
|
90 | config['pylons.app_globals'] = app_globals.Globals(config) | |
91 | config['pylons.h'] = helpers |
|
91 | config['pylons.h'] = helpers | |
92 | rhodecode.CONFIG = config |
|
92 | rhodecode.CONFIG = config | |
93 |
|
93 | |||
94 | load_rcextensions(root_path=config['here']) |
|
94 | load_rcextensions(root_path=config['here']) | |
95 |
|
95 | |||
96 | # Setup cache object as early as possible |
|
96 | # Setup cache object as early as possible | |
97 | import pylons |
|
97 | import pylons | |
98 | pylons.cache._push_object(config['pylons.app_globals'].cache) |
|
98 | pylons.cache._push_object(config['pylons.app_globals'].cache) | |
99 |
|
99 | |||
100 | # Create the Mako TemplateLookup, with the default auto-escaping |
|
100 | # Create the Mako TemplateLookup, with the default auto-escaping | |
101 | config['pylons.app_globals'].mako_lookup = TemplateLookup( |
|
101 | config['pylons.app_globals'].mako_lookup = TemplateLookup( | |
102 | directories=paths['templates'], |
|
102 | directories=paths['templates'], | |
103 | error_handler=handle_mako_error, |
|
103 | error_handler=handle_mako_error, | |
104 | module_directory=os.path.join(app_conf['cache_dir'], 'templates'), |
|
104 | module_directory=os.path.join(app_conf['cache_dir'], 'templates'), | |
105 | input_encoding='utf-8', default_filters=['escape'], |
|
105 | input_encoding='utf-8', default_filters=['escape'], | |
106 | imports=['from webhelpers.html import escape']) |
|
106 | imports=['from webhelpers.html import escape']) | |
107 |
|
107 | |||
108 | # sets the c attribute access when don't existing attribute are accessed |
|
108 | # sets the c attribute access when don't existing attribute are accessed | |
109 | config['pylons.strict_tmpl_context'] = True |
|
109 | config['pylons.strict_tmpl_context'] = True | |
110 |
|
110 | |||
111 | # configure channelstream |
|
111 | # configure channelstream | |
112 | config['channelstream_config'] = { |
|
112 | config['channelstream_config'] = { | |
113 | 'enabled': asbool(config.get('channelstream.enabled', False)), |
|
113 | 'enabled': asbool(config.get('channelstream.enabled', False)), | |
114 | 'server': config.get('channelstream.server'), |
|
114 | 'server': config.get('channelstream.server'), | |
115 | 'secret': config.get('channelstream.secret') |
|
115 | 'secret': config.get('channelstream.secret') | |
116 | } |
|
116 | } | |
117 |
|
117 | |||
118 | set_available_permissions(config) |
|
118 | set_available_permissions(config) | |
119 | db_cfg = make_db_config(clear_session=True) |
|
119 | db_cfg = make_db_config(clear_session=True) | |
120 |
|
120 | |||
121 | repos_path = list(db_cfg.items('paths'))[0][1] |
|
121 | repos_path = list(db_cfg.items('paths'))[0][1] | |
122 | config['base_path'] = repos_path |
|
122 | config['base_path'] = repos_path | |
123 |
|
123 | |||
124 | # store db config also in main global CONFIG |
|
124 | # store db config also in main global CONFIG | |
125 | set_rhodecode_config(config) |
|
125 | set_rhodecode_config(config) | |
126 |
|
126 | |||
127 | # configure instance id |
|
127 | # configure instance id | |
128 | utils.set_instance_id(config) |
|
128 | utils.set_instance_id(config) | |
129 |
|
129 | |||
130 | # CONFIGURATION OPTIONS HERE (note: all config options will override |
|
130 | # CONFIGURATION OPTIONS HERE (note: all config options will override | |
131 | # any Pylons config options) |
|
131 | # any Pylons config options) | |
132 |
|
132 | |||
133 | # store config reference into our module to skip import magic of pylons |
|
133 | # store config reference into our module to skip import magic of pylons | |
134 | rhodecode.CONFIG.update(config) |
|
134 | rhodecode.CONFIG.update(config) | |
135 |
|
135 | |||
136 | return config |
|
136 | return config | |
137 |
|
137 | |||
138 |
|
138 | |||
139 | def load_pyramid_environment(global_config, settings): |
|
139 | def load_pyramid_environment(global_config, settings): | |
140 | # Some parts of the code expect a merge of global and app settings. |
|
140 | # Some parts of the code expect a merge of global and app settings. | |
141 | settings_merged = global_config.copy() |
|
141 | settings_merged = global_config.copy() | |
142 | settings_merged.update(settings) |
|
142 | settings_merged.update(settings) | |
143 |
|
143 | |||
144 | # Store the settings to make them available to other modules. |
|
144 | # Store the settings to make them available to other modules. | |
145 | rhodecode.PYRAMID_SETTINGS = settings_merged |
|
145 | rhodecode.PYRAMID_SETTINGS = settings_merged | |
|
146 | # NOTE(marcink): needs to be enabled after full port to pyramid | |||
|
147 | # rhodecode.CONFIG = config | |||
146 |
|
148 | |||
147 | # If this is a test run we prepare the test environment like |
|
149 | # If this is a test run we prepare the test environment like | |
148 | # creating a test database, test search index and test repositories. |
|
150 | # creating a test database, test search index and test repositories. | |
149 | # This has to be done before the database connection is initialized. |
|
151 | # This has to be done before the database connection is initialized. | |
150 | if settings['is_test']: |
|
152 | if settings['is_test']: | |
151 | rhodecode.is_test = True |
|
153 | rhodecode.is_test = True | |
152 | rhodecode.disable_error_handler = True |
|
154 | rhodecode.disable_error_handler = True | |
153 |
|
155 | |||
154 | utils.initialize_test_environment(settings_merged) |
|
156 | utils.initialize_test_environment(settings_merged) | |
155 |
|
157 | |||
156 | # Initialize the database connection. |
|
158 | # Initialize the database connection. | |
157 | utils.initialize_database(settings_merged) |
|
159 | utils.initialize_database(settings_merged) | |
158 |
|
160 | |||
159 | # Limit backends to `vcs.backends` from configuration |
|
161 | # Limit backends to `vcs.backends` from configuration | |
160 | for alias in rhodecode.BACKENDS.keys(): |
|
162 | for alias in rhodecode.BACKENDS.keys(): | |
161 | if alias not in settings['vcs.backends']: |
|
163 | if alias not in settings['vcs.backends']: | |
162 | del rhodecode.BACKENDS[alias] |
|
164 | del rhodecode.BACKENDS[alias] | |
163 | log.info('Enabled VCS backends: %s', rhodecode.BACKENDS.keys()) |
|
165 | log.info('Enabled VCS backends: %s', rhodecode.BACKENDS.keys()) | |
164 |
|
166 | |||
165 | # initialize vcs client and optionally run the server if enabled |
|
167 | # initialize vcs client and optionally run the server if enabled | |
166 | vcs_server_uri = settings['vcs.server'] |
|
168 | vcs_server_uri = settings['vcs.server'] | |
167 | vcs_server_enabled = settings['vcs.server.enable'] |
|
169 | vcs_server_enabled = settings['vcs.server.enable'] | |
168 | start_server = ( |
|
170 | start_server = ( | |
169 | settings['vcs.start_server'] and |
|
171 | settings['vcs.start_server'] and | |
170 | not int(os.environ.get('RC_VCSSERVER_TEST_DISABLE', '0'))) |
|
172 | not int(os.environ.get('RC_VCSSERVER_TEST_DISABLE', '0'))) | |
171 |
|
173 | |||
172 | if vcs_server_enabled and start_server: |
|
174 | if vcs_server_enabled and start_server: | |
173 | log.info("Starting vcsserver") |
|
175 | log.info("Starting vcsserver") | |
174 | start_vcs_server(server_and_port=vcs_server_uri, |
|
176 | start_vcs_server(server_and_port=vcs_server_uri, | |
175 | protocol=utils.get_vcs_server_protocol(settings), |
|
177 | protocol=utils.get_vcs_server_protocol(settings), | |
176 | log_level=settings['vcs.server.log_level']) |
|
178 | log_level=settings['vcs.server.log_level']) | |
177 |
|
179 | |||
178 | utils.configure_vcs(settings) |
|
180 | utils.configure_vcs(settings) | |
179 | if vcs_server_enabled: |
|
181 | if vcs_server_enabled: | |
180 | connect_vcs(vcs_server_uri, utils.get_vcs_server_protocol(settings)) |
|
182 | connect_vcs(vcs_server_uri, utils.get_vcs_server_protocol(settings)) |
@@ -1,486 +1,393 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2017 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | Routes configuration |
|
22 | Routes configuration | |
23 |
|
23 | |||
24 | The more specific and detailed routes should be defined first so they |
|
24 | The more specific and detailed routes should be defined first so they | |
25 | may take precedent over the more generic routes. For more information |
|
25 | may take precedent over the more generic routes. For more information | |
26 | refer to the routes manual at http://routes.groovie.org/docs/ |
|
26 | refer to the routes manual at http://routes.groovie.org/docs/ | |
27 |
|
27 | |||
28 | IMPORTANT: if you change any routing here, make sure to take a look at lib/base.py |
|
28 | IMPORTANT: if you change any routing here, make sure to take a look at lib/base.py | |
29 | and _route_name variable which uses some of stored naming here to do redirects. |
|
29 | and _route_name variable which uses some of stored naming here to do redirects. | |
30 | """ |
|
30 | """ | |
31 | import os |
|
31 | import os | |
32 | import re |
|
32 | import re | |
33 | from routes import Mapper |
|
33 | from routes import Mapper | |
34 |
|
34 | |||
35 | # prefix for non repository related links needs to be prefixed with `/` |
|
35 | # prefix for non repository related links needs to be prefixed with `/` | |
36 | ADMIN_PREFIX = '/_admin' |
|
36 | ADMIN_PREFIX = '/_admin' | |
37 | STATIC_FILE_PREFIX = '/_static' |
|
37 | STATIC_FILE_PREFIX = '/_static' | |
38 |
|
38 | |||
39 | # Default requirements for URL parts |
|
39 | # Default requirements for URL parts | |
40 | URL_NAME_REQUIREMENTS = { |
|
40 | URL_NAME_REQUIREMENTS = { | |
41 | # group name can have a slash in them, but they must not end with a slash |
|
41 | # group name can have a slash in them, but they must not end with a slash | |
42 | 'group_name': r'.*?[^/]', |
|
42 | 'group_name': r'.*?[^/]', | |
43 | 'repo_group_name': r'.*?[^/]', |
|
43 | 'repo_group_name': r'.*?[^/]', | |
44 | # repo names can have a slash in them, but they must not end with a slash |
|
44 | # repo names can have a slash in them, but they must not end with a slash | |
45 | 'repo_name': r'.*?[^/]', |
|
45 | 'repo_name': r'.*?[^/]', | |
46 | # file path eats up everything at the end |
|
46 | # file path eats up everything at the end | |
47 | 'f_path': r'.*', |
|
47 | 'f_path': r'.*', | |
48 | # reference types |
|
48 | # reference types | |
49 | 'source_ref_type': '(branch|book|tag|rev|\%\(source_ref_type\)s)', |
|
49 | 'source_ref_type': '(branch|book|tag|rev|\%\(source_ref_type\)s)', | |
50 | 'target_ref_type': '(branch|book|tag|rev|\%\(target_ref_type\)s)', |
|
50 | 'target_ref_type': '(branch|book|tag|rev|\%\(target_ref_type\)s)', | |
51 | } |
|
51 | } | |
52 |
|
52 | |||
53 |
|
53 | |||
54 | class JSRoutesMapper(Mapper): |
|
54 | class JSRoutesMapper(Mapper): | |
55 | """ |
|
55 | """ | |
56 | Wrapper for routes.Mapper to make pyroutes compatible url definitions |
|
56 | Wrapper for routes.Mapper to make pyroutes compatible url definitions | |
57 | """ |
|
57 | """ | |
58 | _named_route_regex = re.compile(r'^[a-z-_0-9A-Z]+$') |
|
58 | _named_route_regex = re.compile(r'^[a-z-_0-9A-Z]+$') | |
59 | _argument_prog = re.compile('\{(.*?)\}|:\((.*)\)') |
|
59 | _argument_prog = re.compile('\{(.*?)\}|:\((.*)\)') | |
60 | def __init__(self, *args, **kw): |
|
60 | def __init__(self, *args, **kw): | |
61 | super(JSRoutesMapper, self).__init__(*args, **kw) |
|
61 | super(JSRoutesMapper, self).__init__(*args, **kw) | |
62 | self._jsroutes = [] |
|
62 | self._jsroutes = [] | |
63 |
|
63 | |||
64 | def connect(self, *args, **kw): |
|
64 | def connect(self, *args, **kw): | |
65 | """ |
|
65 | """ | |
66 | Wrapper for connect to take an extra argument jsroute=True |
|
66 | Wrapper for connect to take an extra argument jsroute=True | |
67 |
|
67 | |||
68 | :param jsroute: boolean, if True will add the route to the pyroutes list |
|
68 | :param jsroute: boolean, if True will add the route to the pyroutes list | |
69 | """ |
|
69 | """ | |
70 | if kw.pop('jsroute', False): |
|
70 | if kw.pop('jsroute', False): | |
71 | if not self._named_route_regex.match(args[0]): |
|
71 | if not self._named_route_regex.match(args[0]): | |
72 | raise Exception('only named routes can be added to pyroutes') |
|
72 | raise Exception('only named routes can be added to pyroutes') | |
73 | self._jsroutes.append(args[0]) |
|
73 | self._jsroutes.append(args[0]) | |
74 |
|
74 | |||
75 | super(JSRoutesMapper, self).connect(*args, **kw) |
|
75 | super(JSRoutesMapper, self).connect(*args, **kw) | |
76 |
|
76 | |||
77 | def _extract_route_information(self, route): |
|
77 | def _extract_route_information(self, route): | |
78 | """ |
|
78 | """ | |
79 | Convert a route into tuple(name, path, args), eg: |
|
79 | Convert a route into tuple(name, path, args), eg: | |
80 | ('show_user', '/profile/%(username)s', ['username']) |
|
80 | ('show_user', '/profile/%(username)s', ['username']) | |
81 | """ |
|
81 | """ | |
82 | routepath = route.routepath |
|
82 | routepath = route.routepath | |
83 | def replace(matchobj): |
|
83 | def replace(matchobj): | |
84 | if matchobj.group(1): |
|
84 | if matchobj.group(1): | |
85 | return "%%(%s)s" % matchobj.group(1).split(':')[0] |
|
85 | return "%%(%s)s" % matchobj.group(1).split(':')[0] | |
86 | else: |
|
86 | else: | |
87 | return "%%(%s)s" % matchobj.group(2) |
|
87 | return "%%(%s)s" % matchobj.group(2) | |
88 |
|
88 | |||
89 | routepath = self._argument_prog.sub(replace, routepath) |
|
89 | routepath = self._argument_prog.sub(replace, routepath) | |
90 | return ( |
|
90 | return ( | |
91 | route.name, |
|
91 | route.name, | |
92 | routepath, |
|
92 | routepath, | |
93 | [(arg[0].split(':')[0] if arg[0] != '' else arg[1]) |
|
93 | [(arg[0].split(':')[0] if arg[0] != '' else arg[1]) | |
94 | for arg in self._argument_prog.findall(route.routepath)] |
|
94 | for arg in self._argument_prog.findall(route.routepath)] | |
95 | ) |
|
95 | ) | |
96 |
|
96 | |||
97 | def jsroutes(self): |
|
97 | def jsroutes(self): | |
98 | """ |
|
98 | """ | |
99 | Return a list of pyroutes.js compatible routes |
|
99 | Return a list of pyroutes.js compatible routes | |
100 | """ |
|
100 | """ | |
101 | for route_name in self._jsroutes: |
|
101 | for route_name in self._jsroutes: | |
102 | yield self._extract_route_information(self._routenames[route_name]) |
|
102 | yield self._extract_route_information(self._routenames[route_name]) | |
103 |
|
103 | |||
104 |
|
104 | |||
105 | def make_map(config): |
|
105 | def make_map(config): | |
106 | """Create, configure and return the routes Mapper""" |
|
106 | """Create, configure and return the routes Mapper""" | |
107 | rmap = JSRoutesMapper( |
|
107 | rmap = JSRoutesMapper( | |
108 | directory=config['pylons.paths']['controllers'], |
|
108 | directory=config['pylons.paths']['controllers'], | |
109 | always_scan=config['debug']) |
|
109 | always_scan=config['debug']) | |
110 | rmap.minimization = False |
|
110 | rmap.minimization = False | |
111 | rmap.explicit = False |
|
111 | rmap.explicit = False | |
112 |
|
112 | |||
113 | from rhodecode.lib.utils2 import str2bool |
|
113 | from rhodecode.lib.utils2 import str2bool | |
114 | from rhodecode.model import repo, repo_group |
|
114 | from rhodecode.model import repo, repo_group | |
115 |
|
115 | |||
116 | def check_repo(environ, match_dict): |
|
116 | def check_repo(environ, match_dict): | |
117 | """ |
|
117 | """ | |
118 | check for valid repository for proper 404 handling |
|
118 | check for valid repository for proper 404 handling | |
119 |
|
119 | |||
120 | :param environ: |
|
120 | :param environ: | |
121 | :param match_dict: |
|
121 | :param match_dict: | |
122 | """ |
|
122 | """ | |
123 | repo_name = match_dict.get('repo_name') |
|
123 | repo_name = match_dict.get('repo_name') | |
124 |
|
124 | |||
125 | if match_dict.get('f_path'): |
|
125 | if match_dict.get('f_path'): | |
126 | # fix for multiple initial slashes that causes errors |
|
126 | # fix for multiple initial slashes that causes errors | |
127 | match_dict['f_path'] = match_dict['f_path'].lstrip('/') |
|
127 | match_dict['f_path'] = match_dict['f_path'].lstrip('/') | |
128 | repo_model = repo.RepoModel() |
|
128 | repo_model = repo.RepoModel() | |
129 | by_name_match = repo_model.get_by_repo_name(repo_name) |
|
129 | by_name_match = repo_model.get_by_repo_name(repo_name) | |
130 | # if we match quickly from database, short circuit the operation, |
|
130 | # if we match quickly from database, short circuit the operation, | |
131 | # and validate repo based on the type. |
|
131 | # and validate repo based on the type. | |
132 | if by_name_match: |
|
132 | if by_name_match: | |
133 | return True |
|
133 | return True | |
134 |
|
134 | |||
135 | by_id_match = repo_model.get_repo_by_id(repo_name) |
|
135 | by_id_match = repo_model.get_repo_by_id(repo_name) | |
136 | if by_id_match: |
|
136 | if by_id_match: | |
137 | repo_name = by_id_match.repo_name |
|
137 | repo_name = by_id_match.repo_name | |
138 | match_dict['repo_name'] = repo_name |
|
138 | match_dict['repo_name'] = repo_name | |
139 | return True |
|
139 | return True | |
140 |
|
140 | |||
141 | return False |
|
141 | return False | |
142 |
|
142 | |||
143 | def check_group(environ, match_dict): |
|
143 | def check_group(environ, match_dict): | |
144 | """ |
|
144 | """ | |
145 | check for valid repository group path for proper 404 handling |
|
145 | check for valid repository group path for proper 404 handling | |
146 |
|
146 | |||
147 | :param environ: |
|
147 | :param environ: | |
148 | :param match_dict: |
|
148 | :param match_dict: | |
149 | """ |
|
149 | """ | |
150 | repo_group_name = match_dict.get('group_name') |
|
150 | repo_group_name = match_dict.get('group_name') | |
151 | repo_group_model = repo_group.RepoGroupModel() |
|
151 | repo_group_model = repo_group.RepoGroupModel() | |
152 | by_name_match = repo_group_model.get_by_group_name(repo_group_name) |
|
152 | by_name_match = repo_group_model.get_by_group_name(repo_group_name) | |
153 | if by_name_match: |
|
153 | if by_name_match: | |
154 | return True |
|
154 | return True | |
155 |
|
155 | |||
156 | return False |
|
156 | return False | |
157 |
|
157 | |||
158 | def check_user_group(environ, match_dict): |
|
158 | def check_user_group(environ, match_dict): | |
159 | """ |
|
159 | """ | |
160 | check for valid user group for proper 404 handling |
|
160 | check for valid user group for proper 404 handling | |
161 |
|
161 | |||
162 | :param environ: |
|
162 | :param environ: | |
163 | :param match_dict: |
|
163 | :param match_dict: | |
164 | """ |
|
164 | """ | |
165 | return True |
|
165 | return True | |
166 |
|
166 | |||
167 | def check_int(environ, match_dict): |
|
167 | def check_int(environ, match_dict): | |
168 | return match_dict.get('id').isdigit() |
|
168 | return match_dict.get('id').isdigit() | |
169 |
|
169 | |||
170 |
|
170 | |||
171 | #========================================================================== |
|
171 | #========================================================================== | |
172 | # CUSTOM ROUTES HERE |
|
172 | # CUSTOM ROUTES HERE | |
173 | #========================================================================== |
|
173 | #========================================================================== | |
174 |
|
174 | |||
175 | # ping and pylons error test |
|
175 | # ping and pylons error test | |
176 | rmap.connect('ping', '%s/ping' % (ADMIN_PREFIX,), controller='home', action='ping') |
|
176 | rmap.connect('ping', '%s/ping' % (ADMIN_PREFIX,), controller='home', action='ping') | |
177 | rmap.connect('error_test', '%s/error_test' % (ADMIN_PREFIX,), controller='home', action='error_test') |
|
177 | rmap.connect('error_test', '%s/error_test' % (ADMIN_PREFIX,), controller='home', action='error_test') | |
178 |
|
178 | |||
179 | # ADMIN REPOSITORY ROUTES |
|
|||
180 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
|||
181 | controller='admin/repos') as m: |
|
|||
182 | m.connect('repos', '/repos', |
|
|||
183 | action='create', conditions={'method': ['POST']}) |
|
|||
184 | m.connect('repos', '/repos', |
|
|||
185 | action='index', conditions={'method': ['GET']}) |
|
|||
186 | m.connect('new_repo', '/create_repository', jsroute=True, |
|
|||
187 | action='create_repository', conditions={'method': ['GET']}) |
|
|||
188 | m.connect('delete_repo', '/repos/{repo_name}', |
|
|||
189 | action='delete', conditions={'method': ['DELETE']}, |
|
|||
190 | requirements=URL_NAME_REQUIREMENTS) |
|
|||
191 | m.connect('repo', '/repos/{repo_name}', |
|
|||
192 | action='show', conditions={'method': ['GET'], |
|
|||
193 | 'function': check_repo}, |
|
|||
194 | requirements=URL_NAME_REQUIREMENTS) |
|
|||
195 |
|
||||
196 | # ADMIN REPOSITORY GROUPS ROUTES |
|
179 | # ADMIN REPOSITORY GROUPS ROUTES | |
197 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
180 | with rmap.submapper(path_prefix=ADMIN_PREFIX, | |
198 | controller='admin/repo_groups') as m: |
|
181 | controller='admin/repo_groups') as m: | |
199 | m.connect('repo_groups', '/repo_groups', |
|
182 | m.connect('repo_groups', '/repo_groups', | |
200 | action='create', conditions={'method': ['POST']}) |
|
183 | action='create', conditions={'method': ['POST']}) | |
201 | m.connect('repo_groups', '/repo_groups', |
|
184 | m.connect('repo_groups', '/repo_groups', | |
202 | action='index', conditions={'method': ['GET']}) |
|
185 | action='index', conditions={'method': ['GET']}) | |
203 | m.connect('new_repo_group', '/repo_groups/new', |
|
186 | m.connect('new_repo_group', '/repo_groups/new', | |
204 | action='new', conditions={'method': ['GET']}) |
|
187 | action='new', conditions={'method': ['GET']}) | |
205 | m.connect('update_repo_group', '/repo_groups/{group_name}', |
|
188 | m.connect('update_repo_group', '/repo_groups/{group_name}', | |
206 | action='update', conditions={'method': ['PUT'], |
|
189 | action='update', conditions={'method': ['PUT'], | |
207 | 'function': check_group}, |
|
190 | 'function': check_group}, | |
208 | requirements=URL_NAME_REQUIREMENTS) |
|
191 | requirements=URL_NAME_REQUIREMENTS) | |
209 |
|
192 | |||
210 | # EXTRAS REPO GROUP ROUTES |
|
193 | # EXTRAS REPO GROUP ROUTES | |
211 | m.connect('edit_repo_group', '/repo_groups/{group_name}/edit', |
|
194 | m.connect('edit_repo_group', '/repo_groups/{group_name}/edit', | |
212 | action='edit', |
|
195 | action='edit', | |
213 | conditions={'method': ['GET'], 'function': check_group}, |
|
196 | conditions={'method': ['GET'], 'function': check_group}, | |
214 | requirements=URL_NAME_REQUIREMENTS) |
|
197 | requirements=URL_NAME_REQUIREMENTS) | |
215 | m.connect('edit_repo_group', '/repo_groups/{group_name}/edit', |
|
198 | m.connect('edit_repo_group', '/repo_groups/{group_name}/edit', | |
216 | action='edit', |
|
199 | action='edit', | |
217 | conditions={'method': ['PUT'], 'function': check_group}, |
|
200 | conditions={'method': ['PUT'], 'function': check_group}, | |
218 | requirements=URL_NAME_REQUIREMENTS) |
|
201 | requirements=URL_NAME_REQUIREMENTS) | |
219 |
|
202 | |||
220 | m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced', |
|
203 | m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced', | |
221 | action='edit_repo_group_advanced', |
|
204 | action='edit_repo_group_advanced', | |
222 | conditions={'method': ['GET'], 'function': check_group}, |
|
205 | conditions={'method': ['GET'], 'function': check_group}, | |
223 | requirements=URL_NAME_REQUIREMENTS) |
|
206 | requirements=URL_NAME_REQUIREMENTS) | |
224 | m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced', |
|
207 | m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced', | |
225 | action='edit_repo_group_advanced', |
|
208 | action='edit_repo_group_advanced', | |
226 | conditions={'method': ['PUT'], 'function': check_group}, |
|
209 | conditions={'method': ['PUT'], 'function': check_group}, | |
227 | requirements=URL_NAME_REQUIREMENTS) |
|
210 | requirements=URL_NAME_REQUIREMENTS) | |
228 |
|
211 | |||
229 | m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions', |
|
212 | m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions', | |
230 | action='edit_repo_group_perms', |
|
213 | action='edit_repo_group_perms', | |
231 | conditions={'method': ['GET'], 'function': check_group}, |
|
214 | conditions={'method': ['GET'], 'function': check_group}, | |
232 | requirements=URL_NAME_REQUIREMENTS) |
|
215 | requirements=URL_NAME_REQUIREMENTS) | |
233 | m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions', |
|
216 | m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions', | |
234 | action='update_perms', |
|
217 | action='update_perms', | |
235 | conditions={'method': ['PUT'], 'function': check_group}, |
|
218 | conditions={'method': ['PUT'], 'function': check_group}, | |
236 | requirements=URL_NAME_REQUIREMENTS) |
|
219 | requirements=URL_NAME_REQUIREMENTS) | |
237 |
|
220 | |||
238 | m.connect('delete_repo_group', '/repo_groups/{group_name}', |
|
221 | m.connect('delete_repo_group', '/repo_groups/{group_name}', | |
239 | action='delete', conditions={'method': ['DELETE'], |
|
222 | action='delete', conditions={'method': ['DELETE'], | |
240 | 'function': check_group}, |
|
223 | 'function': check_group}, | |
241 | requirements=URL_NAME_REQUIREMENTS) |
|
224 | requirements=URL_NAME_REQUIREMENTS) | |
242 |
|
225 | |||
243 | # ADMIN USER ROUTES |
|
226 | # ADMIN USER ROUTES | |
244 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
227 | with rmap.submapper(path_prefix=ADMIN_PREFIX, | |
245 | controller='admin/users') as m: |
|
228 | controller='admin/users') as m: | |
246 | m.connect('users', '/users', |
|
229 | m.connect('users', '/users', | |
247 | action='create', conditions={'method': ['POST']}) |
|
230 | action='create', conditions={'method': ['POST']}) | |
248 | m.connect('new_user', '/users/new', |
|
231 | m.connect('new_user', '/users/new', | |
249 | action='new', conditions={'method': ['GET']}) |
|
232 | action='new', conditions={'method': ['GET']}) | |
250 | m.connect('update_user', '/users/{user_id}', |
|
233 | m.connect('update_user', '/users/{user_id}', | |
251 | action='update', conditions={'method': ['PUT']}) |
|
234 | action='update', conditions={'method': ['PUT']}) | |
252 | m.connect('delete_user', '/users/{user_id}', |
|
235 | m.connect('delete_user', '/users/{user_id}', | |
253 | action='delete', conditions={'method': ['DELETE']}) |
|
236 | action='delete', conditions={'method': ['DELETE']}) | |
254 | m.connect('edit_user', '/users/{user_id}/edit', |
|
237 | m.connect('edit_user', '/users/{user_id}/edit', | |
255 | action='edit', conditions={'method': ['GET']}, jsroute=True) |
|
238 | action='edit', conditions={'method': ['GET']}, jsroute=True) | |
256 | m.connect('user', '/users/{user_id}', |
|
239 | m.connect('user', '/users/{user_id}', | |
257 | action='show', conditions={'method': ['GET']}) |
|
240 | action='show', conditions={'method': ['GET']}) | |
258 | m.connect('force_password_reset_user', '/users/{user_id}/password_reset', |
|
241 | m.connect('force_password_reset_user', '/users/{user_id}/password_reset', | |
259 | action='reset_password', conditions={'method': ['POST']}) |
|
242 | action='reset_password', conditions={'method': ['POST']}) | |
260 | m.connect('create_personal_repo_group', '/users/{user_id}/create_repo_group', |
|
243 | m.connect('create_personal_repo_group', '/users/{user_id}/create_repo_group', | |
261 | action='create_personal_repo_group', conditions={'method': ['POST']}) |
|
244 | action='create_personal_repo_group', conditions={'method': ['POST']}) | |
262 |
|
245 | |||
263 | # EXTRAS USER ROUTES |
|
246 | # EXTRAS USER ROUTES | |
264 | m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced', |
|
247 | m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced', | |
265 | action='edit_advanced', conditions={'method': ['GET']}) |
|
248 | action='edit_advanced', conditions={'method': ['GET']}) | |
266 | m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced', |
|
249 | m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced', | |
267 | action='update_advanced', conditions={'method': ['PUT']}) |
|
250 | action='update_advanced', conditions={'method': ['PUT']}) | |
268 |
|
251 | |||
269 | m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions', |
|
252 | m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions', | |
270 | action='edit_global_perms', conditions={'method': ['GET']}) |
|
253 | action='edit_global_perms', conditions={'method': ['GET']}) | |
271 | m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions', |
|
254 | m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions', | |
272 | action='update_global_perms', conditions={'method': ['PUT']}) |
|
255 | action='update_global_perms', conditions={'method': ['PUT']}) | |
273 |
|
256 | |||
274 | # ADMIN USER GROUPS REST ROUTES |
|
257 | # ADMIN USER GROUPS REST ROUTES | |
275 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
258 | with rmap.submapper(path_prefix=ADMIN_PREFIX, | |
276 | controller='admin/user_groups') as m: |
|
259 | controller='admin/user_groups') as m: | |
277 | m.connect('users_groups', '/user_groups', |
|
260 | m.connect('users_groups', '/user_groups', | |
278 | action='create', conditions={'method': ['POST']}) |
|
261 | action='create', conditions={'method': ['POST']}) | |
279 | m.connect('new_users_group', '/user_groups/new', |
|
262 | m.connect('new_users_group', '/user_groups/new', | |
280 | action='new', conditions={'method': ['GET']}) |
|
263 | action='new', conditions={'method': ['GET']}) | |
281 | m.connect('update_users_group', '/user_groups/{user_group_id}', |
|
264 | m.connect('update_users_group', '/user_groups/{user_group_id}', | |
282 | action='update', conditions={'method': ['PUT']}) |
|
265 | action='update', conditions={'method': ['PUT']}) | |
283 | m.connect('delete_users_group', '/user_groups/{user_group_id}', |
|
266 | m.connect('delete_users_group', '/user_groups/{user_group_id}', | |
284 | action='delete', conditions={'method': ['DELETE']}) |
|
267 | action='delete', conditions={'method': ['DELETE']}) | |
285 | m.connect('edit_users_group', '/user_groups/{user_group_id}/edit', |
|
268 | m.connect('edit_users_group', '/user_groups/{user_group_id}/edit', | |
286 | action='edit', conditions={'method': ['GET']}, |
|
269 | action='edit', conditions={'method': ['GET']}, | |
287 | function=check_user_group) |
|
270 | function=check_user_group) | |
288 |
|
271 | |||
289 | # EXTRAS USER GROUP ROUTES |
|
272 | # EXTRAS USER GROUP ROUTES | |
290 | m.connect('edit_user_group_global_perms', |
|
273 | m.connect('edit_user_group_global_perms', | |
291 | '/user_groups/{user_group_id}/edit/global_permissions', |
|
274 | '/user_groups/{user_group_id}/edit/global_permissions', | |
292 | action='edit_global_perms', conditions={'method': ['GET']}) |
|
275 | action='edit_global_perms', conditions={'method': ['GET']}) | |
293 | m.connect('edit_user_group_global_perms', |
|
276 | m.connect('edit_user_group_global_perms', | |
294 | '/user_groups/{user_group_id}/edit/global_permissions', |
|
277 | '/user_groups/{user_group_id}/edit/global_permissions', | |
295 | action='update_global_perms', conditions={'method': ['PUT']}) |
|
278 | action='update_global_perms', conditions={'method': ['PUT']}) | |
296 |
|
279 | |||
297 | m.connect('edit_user_group_perms', |
|
280 | m.connect('edit_user_group_perms', | |
298 | '/user_groups/{user_group_id}/edit/permissions', |
|
281 | '/user_groups/{user_group_id}/edit/permissions', | |
299 | action='edit_perms', conditions={'method': ['GET']}) |
|
282 | action='edit_perms', conditions={'method': ['GET']}) | |
300 | m.connect('edit_user_group_perms', |
|
283 | m.connect('edit_user_group_perms', | |
301 | '/user_groups/{user_group_id}/edit/permissions', |
|
284 | '/user_groups/{user_group_id}/edit/permissions', | |
302 | action='update_perms', conditions={'method': ['PUT']}) |
|
285 | action='update_perms', conditions={'method': ['PUT']}) | |
303 |
|
286 | |||
304 | m.connect('edit_user_group_advanced', |
|
287 | m.connect('edit_user_group_advanced', | |
305 | '/user_groups/{user_group_id}/edit/advanced', |
|
288 | '/user_groups/{user_group_id}/edit/advanced', | |
306 | action='edit_advanced', conditions={'method': ['GET']}) |
|
289 | action='edit_advanced', conditions={'method': ['GET']}) | |
307 |
|
290 | |||
308 | m.connect('edit_user_group_advanced_sync', |
|
291 | m.connect('edit_user_group_advanced_sync', | |
309 | '/user_groups/{user_group_id}/edit/advanced/sync', |
|
292 | '/user_groups/{user_group_id}/edit/advanced/sync', | |
310 | action='edit_advanced_set_synchronization', conditions={'method': ['POST']}) |
|
293 | action='edit_advanced_set_synchronization', conditions={'method': ['POST']}) | |
311 |
|
294 | |||
312 | # ADMIN DEFAULTS REST ROUTES |
|
295 | # ADMIN DEFAULTS REST ROUTES | |
313 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
296 | with rmap.submapper(path_prefix=ADMIN_PREFIX, | |
314 | controller='admin/defaults') as m: |
|
297 | controller='admin/defaults') as m: | |
315 | m.connect('admin_defaults_repositories', '/defaults/repositories', |
|
298 | m.connect('admin_defaults_repositories', '/defaults/repositories', | |
316 | action='update_repository_defaults', conditions={'method': ['POST']}) |
|
299 | action='update_repository_defaults', conditions={'method': ['POST']}) | |
317 | m.connect('admin_defaults_repositories', '/defaults/repositories', |
|
300 | m.connect('admin_defaults_repositories', '/defaults/repositories', | |
318 | action='index', conditions={'method': ['GET']}) |
|
301 | action='index', conditions={'method': ['GET']}) | |
319 |
|
302 | |||
320 | # ADMIN SETTINGS ROUTES |
|
303 | # ADMIN SETTINGS ROUTES | |
321 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
304 | with rmap.submapper(path_prefix=ADMIN_PREFIX, | |
322 | controller='admin/settings') as m: |
|
305 | controller='admin/settings') as m: | |
323 |
|
306 | |||
324 | # default |
|
307 | # default | |
325 | m.connect('admin_settings', '/settings', |
|
308 | m.connect('admin_settings', '/settings', | |
326 | action='settings_global_update', |
|
309 | action='settings_global_update', | |
327 | conditions={'method': ['POST']}) |
|
310 | conditions={'method': ['POST']}) | |
328 | m.connect('admin_settings', '/settings', |
|
311 | m.connect('admin_settings', '/settings', | |
329 | action='settings_global', conditions={'method': ['GET']}) |
|
312 | action='settings_global', conditions={'method': ['GET']}) | |
330 |
|
313 | |||
331 | m.connect('admin_settings_vcs', '/settings/vcs', |
|
314 | m.connect('admin_settings_vcs', '/settings/vcs', | |
332 | action='settings_vcs_update', |
|
315 | action='settings_vcs_update', | |
333 | conditions={'method': ['POST']}) |
|
316 | conditions={'method': ['POST']}) | |
334 | m.connect('admin_settings_vcs', '/settings/vcs', |
|
317 | m.connect('admin_settings_vcs', '/settings/vcs', | |
335 | action='settings_vcs', |
|
318 | action='settings_vcs', | |
336 | conditions={'method': ['GET']}) |
|
319 | conditions={'method': ['GET']}) | |
337 | m.connect('admin_settings_vcs', '/settings/vcs', |
|
320 | m.connect('admin_settings_vcs', '/settings/vcs', | |
338 | action='delete_svn_pattern', |
|
321 | action='delete_svn_pattern', | |
339 | conditions={'method': ['DELETE']}) |
|
322 | conditions={'method': ['DELETE']}) | |
340 |
|
323 | |||
341 | m.connect('admin_settings_mapping', '/settings/mapping', |
|
324 | m.connect('admin_settings_mapping', '/settings/mapping', | |
342 | action='settings_mapping_update', |
|
325 | action='settings_mapping_update', | |
343 | conditions={'method': ['POST']}) |
|
326 | conditions={'method': ['POST']}) | |
344 | m.connect('admin_settings_mapping', '/settings/mapping', |
|
327 | m.connect('admin_settings_mapping', '/settings/mapping', | |
345 | action='settings_mapping', conditions={'method': ['GET']}) |
|
328 | action='settings_mapping', conditions={'method': ['GET']}) | |
346 |
|
329 | |||
347 | m.connect('admin_settings_global', '/settings/global', |
|
330 | m.connect('admin_settings_global', '/settings/global', | |
348 | action='settings_global_update', |
|
331 | action='settings_global_update', | |
349 | conditions={'method': ['POST']}) |
|
332 | conditions={'method': ['POST']}) | |
350 | m.connect('admin_settings_global', '/settings/global', |
|
333 | m.connect('admin_settings_global', '/settings/global', | |
351 | action='settings_global', conditions={'method': ['GET']}) |
|
334 | action='settings_global', conditions={'method': ['GET']}) | |
352 |
|
335 | |||
353 | m.connect('admin_settings_visual', '/settings/visual', |
|
336 | m.connect('admin_settings_visual', '/settings/visual', | |
354 | action='settings_visual_update', |
|
337 | action='settings_visual_update', | |
355 | conditions={'method': ['POST']}) |
|
338 | conditions={'method': ['POST']}) | |
356 | m.connect('admin_settings_visual', '/settings/visual', |
|
339 | m.connect('admin_settings_visual', '/settings/visual', | |
357 | action='settings_visual', conditions={'method': ['GET']}) |
|
340 | action='settings_visual', conditions={'method': ['GET']}) | |
358 |
|
341 | |||
359 | m.connect('admin_settings_issuetracker', |
|
342 | m.connect('admin_settings_issuetracker', | |
360 | '/settings/issue-tracker', action='settings_issuetracker', |
|
343 | '/settings/issue-tracker', action='settings_issuetracker', | |
361 | conditions={'method': ['GET']}) |
|
344 | conditions={'method': ['GET']}) | |
362 | m.connect('admin_settings_issuetracker_save', |
|
345 | m.connect('admin_settings_issuetracker_save', | |
363 | '/settings/issue-tracker/save', |
|
346 | '/settings/issue-tracker/save', | |
364 | action='settings_issuetracker_save', |
|
347 | action='settings_issuetracker_save', | |
365 | conditions={'method': ['POST']}) |
|
348 | conditions={'method': ['POST']}) | |
366 | m.connect('admin_issuetracker_test', '/settings/issue-tracker/test', |
|
349 | m.connect('admin_issuetracker_test', '/settings/issue-tracker/test', | |
367 | action='settings_issuetracker_test', |
|
350 | action='settings_issuetracker_test', | |
368 | conditions={'method': ['POST']}) |
|
351 | conditions={'method': ['POST']}) | |
369 | m.connect('admin_issuetracker_delete', |
|
352 | m.connect('admin_issuetracker_delete', | |
370 | '/settings/issue-tracker/delete', |
|
353 | '/settings/issue-tracker/delete', | |
371 | action='settings_issuetracker_delete', |
|
354 | action='settings_issuetracker_delete', | |
372 | conditions={'method': ['DELETE']}) |
|
355 | conditions={'method': ['DELETE']}) | |
373 |
|
356 | |||
374 | m.connect('admin_settings_email', '/settings/email', |
|
357 | m.connect('admin_settings_email', '/settings/email', | |
375 | action='settings_email_update', |
|
358 | action='settings_email_update', | |
376 | conditions={'method': ['POST']}) |
|
359 | conditions={'method': ['POST']}) | |
377 | m.connect('admin_settings_email', '/settings/email', |
|
360 | m.connect('admin_settings_email', '/settings/email', | |
378 | action='settings_email', conditions={'method': ['GET']}) |
|
361 | action='settings_email', conditions={'method': ['GET']}) | |
379 |
|
362 | |||
380 | m.connect('admin_settings_hooks', '/settings/hooks', |
|
363 | m.connect('admin_settings_hooks', '/settings/hooks', | |
381 | action='settings_hooks_update', |
|
364 | action='settings_hooks_update', | |
382 | conditions={'method': ['POST', 'DELETE']}) |
|
365 | conditions={'method': ['POST', 'DELETE']}) | |
383 | m.connect('admin_settings_hooks', '/settings/hooks', |
|
366 | m.connect('admin_settings_hooks', '/settings/hooks', | |
384 | action='settings_hooks', conditions={'method': ['GET']}) |
|
367 | action='settings_hooks', conditions={'method': ['GET']}) | |
385 |
|
368 | |||
386 | m.connect('admin_settings_search', '/settings/search', |
|
369 | m.connect('admin_settings_search', '/settings/search', | |
387 | action='settings_search', conditions={'method': ['GET']}) |
|
370 | action='settings_search', conditions={'method': ['GET']}) | |
388 |
|
371 | |||
389 | m.connect('admin_settings_supervisor', '/settings/supervisor', |
|
372 | m.connect('admin_settings_supervisor', '/settings/supervisor', | |
390 | action='settings_supervisor', conditions={'method': ['GET']}) |
|
373 | action='settings_supervisor', conditions={'method': ['GET']}) | |
391 | m.connect('admin_settings_supervisor_log', '/settings/supervisor/{procid}/log', |
|
374 | m.connect('admin_settings_supervisor_log', '/settings/supervisor/{procid}/log', | |
392 | action='settings_supervisor_log', conditions={'method': ['GET']}) |
|
375 | action='settings_supervisor_log', conditions={'method': ['GET']}) | |
393 |
|
376 | |||
394 | m.connect('admin_settings_labs', '/settings/labs', |
|
377 | m.connect('admin_settings_labs', '/settings/labs', | |
395 | action='settings_labs_update', |
|
378 | action='settings_labs_update', | |
396 | conditions={'method': ['POST']}) |
|
379 | conditions={'method': ['POST']}) | |
397 | m.connect('admin_settings_labs', '/settings/labs', |
|
380 | m.connect('admin_settings_labs', '/settings/labs', | |
398 | action='settings_labs', conditions={'method': ['GET']}) |
|
381 | action='settings_labs', conditions={'method': ['GET']}) | |
399 |
|
382 | |||
400 | # ADMIN MY ACCOUNT |
|
383 | # ADMIN MY ACCOUNT | |
401 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
384 | with rmap.submapper(path_prefix=ADMIN_PREFIX, | |
402 | controller='admin/my_account') as m: |
|
385 | controller='admin/my_account') as m: | |
403 |
|
386 | |||
404 | # NOTE(marcink): this needs to be kept for password force flag to be |
|
387 | # NOTE(marcink): this needs to be kept for password force flag to be | |
405 | # handled in pylons controllers, remove after full migration to pyramid |
|
388 | # handled in pylons controllers, remove after full migration to pyramid | |
406 | m.connect('my_account_password', '/my_account/password', |
|
389 | m.connect('my_account_password', '/my_account/password', | |
407 | action='my_account_password', conditions={'method': ['GET']}) |
|
390 | action='my_account_password', conditions={'method': ['GET']}) | |
408 |
|
391 | |||
409 | #========================================================================== |
|
|||
410 | # REPOSITORY ROUTES |
|
|||
411 | #========================================================================== |
|
|||
412 |
|
||||
413 | # repo edit options |
|
|||
414 | rmap.connect('edit_repo_fields', '/{repo_name}/settings/fields', |
|
|||
415 | controller='admin/repos', action='edit_fields', |
|
|||
416 | conditions={'method': ['GET'], 'function': check_repo}, |
|
|||
417 | requirements=URL_NAME_REQUIREMENTS) |
|
|||
418 | rmap.connect('create_repo_fields', '/{repo_name}/settings/fields/new', |
|
|||
419 | controller='admin/repos', action='create_repo_field', |
|
|||
420 | conditions={'method': ['PUT'], 'function': check_repo}, |
|
|||
421 | requirements=URL_NAME_REQUIREMENTS) |
|
|||
422 | rmap.connect('delete_repo_fields', '/{repo_name}/settings/fields/{field_id}', |
|
|||
423 | controller='admin/repos', action='delete_repo_field', |
|
|||
424 | conditions={'method': ['DELETE'], 'function': check_repo}, |
|
|||
425 | requirements=URL_NAME_REQUIREMENTS) |
|
|||
426 |
|
||||
427 | rmap.connect('toggle_locking', '/{repo_name}/settings/advanced/locking_toggle', |
|
|||
428 | controller='admin/repos', action='toggle_locking', |
|
|||
429 | conditions={'method': ['GET'], 'function': check_repo}, |
|
|||
430 | requirements=URL_NAME_REQUIREMENTS) |
|
|||
431 |
|
||||
432 | rmap.connect('edit_repo_remote', '/{repo_name}/settings/remote', |
|
|||
433 | controller='admin/repos', action='edit_remote_form', |
|
|||
434 | conditions={'method': ['GET'], 'function': check_repo}, |
|
|||
435 | requirements=URL_NAME_REQUIREMENTS) |
|
|||
436 | rmap.connect('edit_repo_remote', '/{repo_name}/settings/remote', |
|
|||
437 | controller='admin/repos', action='edit_remote', |
|
|||
438 | conditions={'method': ['PUT'], 'function': check_repo}, |
|
|||
439 | requirements=URL_NAME_REQUIREMENTS) |
|
|||
440 |
|
||||
441 | rmap.connect('edit_repo_statistics', '/{repo_name}/settings/statistics', |
|
|||
442 | controller='admin/repos', action='edit_statistics_form', |
|
|||
443 | conditions={'method': ['GET'], 'function': check_repo}, |
|
|||
444 | requirements=URL_NAME_REQUIREMENTS) |
|
|||
445 | rmap.connect('edit_repo_statistics', '/{repo_name}/settings/statistics', |
|
|||
446 | controller='admin/repos', action='edit_statistics', |
|
|||
447 | conditions={'method': ['PUT'], 'function': check_repo}, |
|
|||
448 | requirements=URL_NAME_REQUIREMENTS) |
|
|||
449 | rmap.connect('repo_settings_issuetracker', |
|
|||
450 | '/{repo_name}/settings/issue-tracker', |
|
|||
451 | controller='admin/repos', action='repo_issuetracker', |
|
|||
452 | conditions={'method': ['GET'], 'function': check_repo}, |
|
|||
453 | requirements=URL_NAME_REQUIREMENTS) |
|
|||
454 | rmap.connect('repo_issuetracker_test', |
|
|||
455 | '/{repo_name}/settings/issue-tracker/test', |
|
|||
456 | controller='admin/repos', action='repo_issuetracker_test', |
|
|||
457 | conditions={'method': ['POST'], 'function': check_repo}, |
|
|||
458 | requirements=URL_NAME_REQUIREMENTS) |
|
|||
459 | rmap.connect('repo_issuetracker_delete', |
|
|||
460 | '/{repo_name}/settings/issue-tracker/delete', |
|
|||
461 | controller='admin/repos', action='repo_issuetracker_delete', |
|
|||
462 | conditions={'method': ['DELETE'], 'function': check_repo}, |
|
|||
463 | requirements=URL_NAME_REQUIREMENTS) |
|
|||
464 | rmap.connect('repo_issuetracker_save', |
|
|||
465 | '/{repo_name}/settings/issue-tracker/save', |
|
|||
466 | controller='admin/repos', action='repo_issuetracker_save', |
|
|||
467 | conditions={'method': ['POST'], 'function': check_repo}, |
|
|||
468 | requirements=URL_NAME_REQUIREMENTS) |
|
|||
469 | rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs', |
|
|||
470 | controller='admin/repos', action='repo_settings_vcs_update', |
|
|||
471 | conditions={'method': ['POST'], 'function': check_repo}, |
|
|||
472 | requirements=URL_NAME_REQUIREMENTS) |
|
|||
473 | rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs', |
|
|||
474 | controller='admin/repos', action='repo_settings_vcs', |
|
|||
475 | conditions={'method': ['GET'], 'function': check_repo}, |
|
|||
476 | requirements=URL_NAME_REQUIREMENTS) |
|
|||
477 | rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs', |
|
|||
478 | controller='admin/repos', action='repo_delete_svn_pattern', |
|
|||
479 | conditions={'method': ['DELETE'], 'function': check_repo}, |
|
|||
480 | requirements=URL_NAME_REQUIREMENTS) |
|
|||
481 | rmap.connect('repo_pullrequest_settings', '/{repo_name}/settings/pullrequest', |
|
|||
482 | controller='admin/repos', action='repo_settings_pullrequest', |
|
|||
483 | conditions={'method': ['GET', 'POST'], 'function': check_repo}, |
|
|||
484 | requirements=URL_NAME_REQUIREMENTS) |
|
|||
485 |
|
392 | |||
486 | return rmap |
|
393 | return rmap |
@@ -1,697 +1,697 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2017 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 |
|
21 | |||
22 | """ |
|
22 | """ | |
23 | settings controller for rhodecode admin |
|
23 | settings controller for rhodecode admin | |
24 | """ |
|
24 | """ | |
25 |
|
25 | |||
26 | import collections |
|
26 | import collections | |
27 | import logging |
|
27 | import logging | |
28 |
|
28 | |||
29 | import datetime |
|
29 | import datetime | |
30 | import formencode |
|
30 | import formencode | |
31 | from formencode import htmlfill |
|
31 | from formencode import htmlfill | |
32 | from pylons import request, tmpl_context as c, url, config |
|
32 | from pylons import request, tmpl_context as c, url, config | |
33 | from pylons.controllers.util import redirect |
|
33 | from pylons.controllers.util import redirect | |
34 | from pylons.i18n.translation import _ |
|
34 | from pylons.i18n.translation import _ | |
|
35 | from pylons.decorators import jsonify | |||
35 | from pyramid.threadlocal import get_current_registry |
|
36 | from pyramid.threadlocal import get_current_registry | |
36 | from webob.exc import HTTPBadRequest |
|
37 | from webob.exc import HTTPBadRequest | |
37 |
|
38 | |||
38 | import rhodecode |
|
39 | import rhodecode | |
39 | from rhodecode.apps.admin.navigation import navigation_list |
|
40 | from rhodecode.apps.admin.navigation import navigation_list | |
40 | from rhodecode.apps.svn_support.config_keys import generate_config |
|
41 | from rhodecode.apps.svn_support.config_keys import generate_config | |
41 | from rhodecode.lib import auth |
|
42 | from rhodecode.lib import auth | |
42 | from rhodecode.lib import helpers as h |
|
43 | from rhodecode.lib import helpers as h | |
43 | from rhodecode.lib.auth import LoginRequired, HasPermissionAllDecorator |
|
44 | from rhodecode.lib.auth import LoginRequired, HasPermissionAllDecorator | |
44 | from rhodecode.lib.base import BaseController, render |
|
45 | from rhodecode.lib.base import BaseController, render | |
45 | from rhodecode.lib.celerylib import tasks, run_task |
|
46 | from rhodecode.lib.celerylib import tasks, run_task | |
46 | from rhodecode.lib.utils import repo2db_mapper |
|
47 | from rhodecode.lib.utils import repo2db_mapper | |
47 | from rhodecode.lib.utils2 import ( |
|
48 | from rhodecode.lib.utils2 import ( | |
48 | str2bool, safe_unicode, AttributeDict, safe_int) |
|
49 | str2bool, safe_unicode, AttributeDict, safe_int) | |
49 | from rhodecode.lib.compat import OrderedDict |
|
50 | from rhodecode.lib.compat import OrderedDict | |
50 | from rhodecode.lib.utils import jsonify |
|
|||
51 |
|
51 | |||
52 | from rhodecode.model.db import RhodeCodeUi, Repository |
|
52 | from rhodecode.model.db import RhodeCodeUi, Repository | |
53 | from rhodecode.model.forms import ApplicationSettingsForm, \ |
|
53 | from rhodecode.model.forms import ApplicationSettingsForm, \ | |
54 | ApplicationUiSettingsForm, ApplicationVisualisationForm, \ |
|
54 | ApplicationUiSettingsForm, ApplicationVisualisationForm, \ | |
55 | LabsSettingsForm, IssueTrackerPatternsForm |
|
55 | LabsSettingsForm, IssueTrackerPatternsForm | |
56 | from rhodecode.model.repo_group import RepoGroupModel |
|
56 | from rhodecode.model.repo_group import RepoGroupModel | |
57 |
|
57 | |||
58 | from rhodecode.model.scm import ScmModel |
|
58 | from rhodecode.model.scm import ScmModel | |
59 | from rhodecode.model.notification import EmailNotificationModel |
|
59 | from rhodecode.model.notification import EmailNotificationModel | |
60 | from rhodecode.model.meta import Session |
|
60 | from rhodecode.model.meta import Session | |
61 | from rhodecode.model.settings import ( |
|
61 | from rhodecode.model.settings import ( | |
62 | IssueTrackerSettingsModel, VcsSettingsModel, SettingNotFound, |
|
62 | IssueTrackerSettingsModel, VcsSettingsModel, SettingNotFound, | |
63 | SettingsModel) |
|
63 | SettingsModel) | |
64 |
|
64 | |||
65 | from rhodecode.model.supervisor import SupervisorModel, SUPERVISOR_MASTER |
|
65 | from rhodecode.model.supervisor import SupervisorModel, SUPERVISOR_MASTER | |
66 |
|
66 | |||
67 |
|
67 | |||
68 | log = logging.getLogger(__name__) |
|
68 | log = logging.getLogger(__name__) | |
69 |
|
69 | |||
70 |
|
70 | |||
71 | class SettingsController(BaseController): |
|
71 | class SettingsController(BaseController): | |
72 | """REST Controller styled on the Atom Publishing Protocol""" |
|
72 | """REST Controller styled on the Atom Publishing Protocol""" | |
73 | # To properly map this controller, ensure your config/routing.py |
|
73 | # To properly map this controller, ensure your config/routing.py | |
74 | # file has a resource setup: |
|
74 | # file has a resource setup: | |
75 | # map.resource('setting', 'settings', controller='admin/settings', |
|
75 | # map.resource('setting', 'settings', controller='admin/settings', | |
76 | # path_prefix='/admin', name_prefix='admin_') |
|
76 | # path_prefix='/admin', name_prefix='admin_') | |
77 |
|
77 | |||
78 | @LoginRequired() |
|
78 | @LoginRequired() | |
79 | def __before__(self): |
|
79 | def __before__(self): | |
80 | super(SettingsController, self).__before__() |
|
80 | super(SettingsController, self).__before__() | |
81 | c.labs_active = str2bool( |
|
81 | c.labs_active = str2bool( | |
82 | rhodecode.CONFIG.get('labs_settings_active', 'true')) |
|
82 | rhodecode.CONFIG.get('labs_settings_active', 'true')) | |
83 | c.navlist = navigation_list(request) |
|
83 | c.navlist = navigation_list(request) | |
84 |
|
84 | |||
85 | def _get_ui_settings(self): |
|
85 | def _get_ui_settings(self): | |
86 | ret = RhodeCodeUi.query().all() |
|
86 | ret = RhodeCodeUi.query().all() | |
87 |
|
87 | |||
88 | if not ret: |
|
88 | if not ret: | |
89 | raise Exception('Could not get application ui settings !') |
|
89 | raise Exception('Could not get application ui settings !') | |
90 | settings = {} |
|
90 | settings = {} | |
91 | for each in ret: |
|
91 | for each in ret: | |
92 | k = each.ui_key |
|
92 | k = each.ui_key | |
93 | v = each.ui_value |
|
93 | v = each.ui_value | |
94 | if k == '/': |
|
94 | if k == '/': | |
95 | k = 'root_path' |
|
95 | k = 'root_path' | |
96 |
|
96 | |||
97 | if k in ['push_ssl', 'publish', 'enabled']: |
|
97 | if k in ['push_ssl', 'publish', 'enabled']: | |
98 | v = str2bool(v) |
|
98 | v = str2bool(v) | |
99 |
|
99 | |||
100 | if k.find('.') != -1: |
|
100 | if k.find('.') != -1: | |
101 | k = k.replace('.', '_') |
|
101 | k = k.replace('.', '_') | |
102 |
|
102 | |||
103 | if each.ui_section in ['hooks', 'extensions']: |
|
103 | if each.ui_section in ['hooks', 'extensions']: | |
104 | v = each.ui_active |
|
104 | v = each.ui_active | |
105 |
|
105 | |||
106 | settings[each.ui_section + '_' + k] = v |
|
106 | settings[each.ui_section + '_' + k] = v | |
107 | return settings |
|
107 | return settings | |
108 |
|
108 | |||
109 | @HasPermissionAllDecorator('hg.admin') |
|
109 | @HasPermissionAllDecorator('hg.admin') | |
110 | @auth.CSRFRequired() |
|
110 | @auth.CSRFRequired() | |
111 | @jsonify |
|
111 | @jsonify | |
112 | def delete_svn_pattern(self): |
|
112 | def delete_svn_pattern(self): | |
113 | if not request.is_xhr: |
|
113 | if not request.is_xhr: | |
114 | raise HTTPBadRequest() |
|
114 | raise HTTPBadRequest() | |
115 |
|
115 | |||
116 | delete_pattern_id = request.POST.get('delete_svn_pattern') |
|
116 | delete_pattern_id = request.POST.get('delete_svn_pattern') | |
117 | model = VcsSettingsModel() |
|
117 | model = VcsSettingsModel() | |
118 | try: |
|
118 | try: | |
119 | model.delete_global_svn_pattern(delete_pattern_id) |
|
119 | model.delete_global_svn_pattern(delete_pattern_id) | |
120 | except SettingNotFound: |
|
120 | except SettingNotFound: | |
121 | raise HTTPBadRequest() |
|
121 | raise HTTPBadRequest() | |
122 |
|
122 | |||
123 | Session().commit() |
|
123 | Session().commit() | |
124 | return True |
|
124 | return True | |
125 |
|
125 | |||
126 | @HasPermissionAllDecorator('hg.admin') |
|
126 | @HasPermissionAllDecorator('hg.admin') | |
127 | @auth.CSRFRequired() |
|
127 | @auth.CSRFRequired() | |
128 | def settings_vcs_update(self): |
|
128 | def settings_vcs_update(self): | |
129 | """POST /admin/settings: All items in the collection""" |
|
129 | """POST /admin/settings: All items in the collection""" | |
130 | # url('admin_settings_vcs') |
|
130 | # url('admin_settings_vcs') | |
131 | c.active = 'vcs' |
|
131 | c.active = 'vcs' | |
132 |
|
132 | |||
133 | model = VcsSettingsModel() |
|
133 | model = VcsSettingsModel() | |
134 | c.svn_branch_patterns = model.get_global_svn_branch_patterns() |
|
134 | c.svn_branch_patterns = model.get_global_svn_branch_patterns() | |
135 | c.svn_tag_patterns = model.get_global_svn_tag_patterns() |
|
135 | c.svn_tag_patterns = model.get_global_svn_tag_patterns() | |
136 |
|
136 | |||
137 | # TODO: Replace with request.registry after migrating to pyramid. |
|
137 | # TODO: Replace with request.registry after migrating to pyramid. | |
138 | pyramid_settings = get_current_registry().settings |
|
138 | pyramid_settings = get_current_registry().settings | |
139 | c.svn_proxy_generate_config = pyramid_settings[generate_config] |
|
139 | c.svn_proxy_generate_config = pyramid_settings[generate_config] | |
140 |
|
140 | |||
141 | application_form = ApplicationUiSettingsForm()() |
|
141 | application_form = ApplicationUiSettingsForm()() | |
142 |
|
142 | |||
143 | try: |
|
143 | try: | |
144 | form_result = application_form.to_python(dict(request.POST)) |
|
144 | form_result = application_form.to_python(dict(request.POST)) | |
145 | except formencode.Invalid as errors: |
|
145 | except formencode.Invalid as errors: | |
146 | h.flash( |
|
146 | h.flash( | |
147 | _("Some form inputs contain invalid data."), |
|
147 | _("Some form inputs contain invalid data."), | |
148 | category='error') |
|
148 | category='error') | |
149 | return htmlfill.render( |
|
149 | return htmlfill.render( | |
150 | render('admin/settings/settings.mako'), |
|
150 | render('admin/settings/settings.mako'), | |
151 | defaults=errors.value, |
|
151 | defaults=errors.value, | |
152 | errors=errors.error_dict or {}, |
|
152 | errors=errors.error_dict or {}, | |
153 | prefix_error=False, |
|
153 | prefix_error=False, | |
154 | encoding="UTF-8", |
|
154 | encoding="UTF-8", | |
155 | force_defaults=False |
|
155 | force_defaults=False | |
156 | ) |
|
156 | ) | |
157 |
|
157 | |||
158 | try: |
|
158 | try: | |
159 | if c.visual.allow_repo_location_change: |
|
159 | if c.visual.allow_repo_location_change: | |
160 | model.update_global_path_setting( |
|
160 | model.update_global_path_setting( | |
161 | form_result['paths_root_path']) |
|
161 | form_result['paths_root_path']) | |
162 |
|
162 | |||
163 | model.update_global_ssl_setting(form_result['web_push_ssl']) |
|
163 | model.update_global_ssl_setting(form_result['web_push_ssl']) | |
164 | model.update_global_hook_settings(form_result) |
|
164 | model.update_global_hook_settings(form_result) | |
165 |
|
165 | |||
166 | model.create_or_update_global_svn_settings(form_result) |
|
166 | model.create_or_update_global_svn_settings(form_result) | |
167 | model.create_or_update_global_hg_settings(form_result) |
|
167 | model.create_or_update_global_hg_settings(form_result) | |
168 | model.create_or_update_global_git_settings(form_result) |
|
168 | model.create_or_update_global_git_settings(form_result) | |
169 | model.create_or_update_global_pr_settings(form_result) |
|
169 | model.create_or_update_global_pr_settings(form_result) | |
170 | except Exception: |
|
170 | except Exception: | |
171 | log.exception("Exception while updating settings") |
|
171 | log.exception("Exception while updating settings") | |
172 | h.flash(_('Error occurred during updating ' |
|
172 | h.flash(_('Error occurred during updating ' | |
173 | 'application settings'), category='error') |
|
173 | 'application settings'), category='error') | |
174 | else: |
|
174 | else: | |
175 | Session().commit() |
|
175 | Session().commit() | |
176 | h.flash(_('Updated VCS settings'), category='success') |
|
176 | h.flash(_('Updated VCS settings'), category='success') | |
177 | return redirect(url('admin_settings_vcs')) |
|
177 | return redirect(url('admin_settings_vcs')) | |
178 |
|
178 | |||
179 | return htmlfill.render( |
|
179 | return htmlfill.render( | |
180 | render('admin/settings/settings.mako'), |
|
180 | render('admin/settings/settings.mako'), | |
181 | defaults=self._form_defaults(), |
|
181 | defaults=self._form_defaults(), | |
182 | encoding="UTF-8", |
|
182 | encoding="UTF-8", | |
183 | force_defaults=False) |
|
183 | force_defaults=False) | |
184 |
|
184 | |||
185 | @HasPermissionAllDecorator('hg.admin') |
|
185 | @HasPermissionAllDecorator('hg.admin') | |
186 | def settings_vcs(self): |
|
186 | def settings_vcs(self): | |
187 | """GET /admin/settings: All items in the collection""" |
|
187 | """GET /admin/settings: All items in the collection""" | |
188 | # url('admin_settings_vcs') |
|
188 | # url('admin_settings_vcs') | |
189 | c.active = 'vcs' |
|
189 | c.active = 'vcs' | |
190 | model = VcsSettingsModel() |
|
190 | model = VcsSettingsModel() | |
191 | c.svn_branch_patterns = model.get_global_svn_branch_patterns() |
|
191 | c.svn_branch_patterns = model.get_global_svn_branch_patterns() | |
192 | c.svn_tag_patterns = model.get_global_svn_tag_patterns() |
|
192 | c.svn_tag_patterns = model.get_global_svn_tag_patterns() | |
193 |
|
193 | |||
194 | # TODO: Replace with request.registry after migrating to pyramid. |
|
194 | # TODO: Replace with request.registry after migrating to pyramid. | |
195 | pyramid_settings = get_current_registry().settings |
|
195 | pyramid_settings = get_current_registry().settings | |
196 | c.svn_proxy_generate_config = pyramid_settings[generate_config] |
|
196 | c.svn_proxy_generate_config = pyramid_settings[generate_config] | |
197 |
|
197 | |||
198 | defaults = self._form_defaults() |
|
198 | defaults = self._form_defaults() | |
199 |
|
199 | |||
200 | model.create_largeobjects_dirs_if_needed(defaults['paths_root_path']) |
|
200 | model.create_largeobjects_dirs_if_needed(defaults['paths_root_path']) | |
201 | return htmlfill.render( |
|
201 | return htmlfill.render( | |
202 | render('admin/settings/settings.mako'), |
|
202 | render('admin/settings/settings.mako'), | |
203 | defaults=defaults, |
|
203 | defaults=defaults, | |
204 | encoding="UTF-8", |
|
204 | encoding="UTF-8", | |
205 | force_defaults=False) |
|
205 | force_defaults=False) | |
206 |
|
206 | |||
207 | @HasPermissionAllDecorator('hg.admin') |
|
207 | @HasPermissionAllDecorator('hg.admin') | |
208 | @auth.CSRFRequired() |
|
208 | @auth.CSRFRequired() | |
209 | def settings_mapping_update(self): |
|
209 | def settings_mapping_update(self): | |
210 | """POST /admin/settings/mapping: All items in the collection""" |
|
210 | """POST /admin/settings/mapping: All items in the collection""" | |
211 | # url('admin_settings_mapping') |
|
211 | # url('admin_settings_mapping') | |
212 | c.active = 'mapping' |
|
212 | c.active = 'mapping' | |
213 | rm_obsolete = request.POST.get('destroy', False) |
|
213 | rm_obsolete = request.POST.get('destroy', False) | |
214 | invalidate_cache = request.POST.get('invalidate', False) |
|
214 | invalidate_cache = request.POST.get('invalidate', False) | |
215 | log.debug( |
|
215 | log.debug( | |
216 | 'rescanning repo location with destroy obsolete=%s', rm_obsolete) |
|
216 | 'rescanning repo location with destroy obsolete=%s', rm_obsolete) | |
217 |
|
217 | |||
218 | if invalidate_cache: |
|
218 | if invalidate_cache: | |
219 | log.debug('invalidating all repositories cache') |
|
219 | log.debug('invalidating all repositories cache') | |
220 | for repo in Repository.get_all(): |
|
220 | for repo in Repository.get_all(): | |
221 | ScmModel().mark_for_invalidation(repo.repo_name, delete=True) |
|
221 | ScmModel().mark_for_invalidation(repo.repo_name, delete=True) | |
222 |
|
222 | |||
223 | filesystem_repos = ScmModel().repo_scan() |
|
223 | filesystem_repos = ScmModel().repo_scan() | |
224 | added, removed = repo2db_mapper(filesystem_repos, rm_obsolete) |
|
224 | added, removed = repo2db_mapper(filesystem_repos, rm_obsolete) | |
225 | _repr = lambda l: ', '.join(map(safe_unicode, l)) or '-' |
|
225 | _repr = lambda l: ', '.join(map(safe_unicode, l)) or '-' | |
226 | h.flash(_('Repositories successfully ' |
|
226 | h.flash(_('Repositories successfully ' | |
227 | 'rescanned added: %s ; removed: %s') % |
|
227 | 'rescanned added: %s ; removed: %s') % | |
228 | (_repr(added), _repr(removed)), |
|
228 | (_repr(added), _repr(removed)), | |
229 | category='success') |
|
229 | category='success') | |
230 | return redirect(url('admin_settings_mapping')) |
|
230 | return redirect(url('admin_settings_mapping')) | |
231 |
|
231 | |||
232 | @HasPermissionAllDecorator('hg.admin') |
|
232 | @HasPermissionAllDecorator('hg.admin') | |
233 | def settings_mapping(self): |
|
233 | def settings_mapping(self): | |
234 | """GET /admin/settings/mapping: All items in the collection""" |
|
234 | """GET /admin/settings/mapping: All items in the collection""" | |
235 | # url('admin_settings_mapping') |
|
235 | # url('admin_settings_mapping') | |
236 | c.active = 'mapping' |
|
236 | c.active = 'mapping' | |
237 |
|
237 | |||
238 | return htmlfill.render( |
|
238 | return htmlfill.render( | |
239 | render('admin/settings/settings.mako'), |
|
239 | render('admin/settings/settings.mako'), | |
240 | defaults=self._form_defaults(), |
|
240 | defaults=self._form_defaults(), | |
241 | encoding="UTF-8", |
|
241 | encoding="UTF-8", | |
242 | force_defaults=False) |
|
242 | force_defaults=False) | |
243 |
|
243 | |||
244 | @HasPermissionAllDecorator('hg.admin') |
|
244 | @HasPermissionAllDecorator('hg.admin') | |
245 | @auth.CSRFRequired() |
|
245 | @auth.CSRFRequired() | |
246 | def settings_global_update(self): |
|
246 | def settings_global_update(self): | |
247 | """POST /admin/settings/global: All items in the collection""" |
|
247 | """POST /admin/settings/global: All items in the collection""" | |
248 | # url('admin_settings_global') |
|
248 | # url('admin_settings_global') | |
249 | c.active = 'global' |
|
249 | c.active = 'global' | |
250 | c.personal_repo_group_default_pattern = RepoGroupModel()\ |
|
250 | c.personal_repo_group_default_pattern = RepoGroupModel()\ | |
251 | .get_personal_group_name_pattern() |
|
251 | .get_personal_group_name_pattern() | |
252 | application_form = ApplicationSettingsForm()() |
|
252 | application_form = ApplicationSettingsForm()() | |
253 | try: |
|
253 | try: | |
254 | form_result = application_form.to_python(dict(request.POST)) |
|
254 | form_result = application_form.to_python(dict(request.POST)) | |
255 | except formencode.Invalid as errors: |
|
255 | except formencode.Invalid as errors: | |
256 | return htmlfill.render( |
|
256 | return htmlfill.render( | |
257 | render('admin/settings/settings.mako'), |
|
257 | render('admin/settings/settings.mako'), | |
258 | defaults=errors.value, |
|
258 | defaults=errors.value, | |
259 | errors=errors.error_dict or {}, |
|
259 | errors=errors.error_dict or {}, | |
260 | prefix_error=False, |
|
260 | prefix_error=False, | |
261 | encoding="UTF-8", |
|
261 | encoding="UTF-8", | |
262 | force_defaults=False) |
|
262 | force_defaults=False) | |
263 |
|
263 | |||
264 | try: |
|
264 | try: | |
265 | settings = [ |
|
265 | settings = [ | |
266 | ('title', 'rhodecode_title', 'unicode'), |
|
266 | ('title', 'rhodecode_title', 'unicode'), | |
267 | ('realm', 'rhodecode_realm', 'unicode'), |
|
267 | ('realm', 'rhodecode_realm', 'unicode'), | |
268 | ('pre_code', 'rhodecode_pre_code', 'unicode'), |
|
268 | ('pre_code', 'rhodecode_pre_code', 'unicode'), | |
269 | ('post_code', 'rhodecode_post_code', 'unicode'), |
|
269 | ('post_code', 'rhodecode_post_code', 'unicode'), | |
270 | ('captcha_public_key', 'rhodecode_captcha_public_key', 'unicode'), |
|
270 | ('captcha_public_key', 'rhodecode_captcha_public_key', 'unicode'), | |
271 | ('captcha_private_key', 'rhodecode_captcha_private_key', 'unicode'), |
|
271 | ('captcha_private_key', 'rhodecode_captcha_private_key', 'unicode'), | |
272 | ('create_personal_repo_group', 'rhodecode_create_personal_repo_group', 'bool'), |
|
272 | ('create_personal_repo_group', 'rhodecode_create_personal_repo_group', 'bool'), | |
273 | ('personal_repo_group_pattern', 'rhodecode_personal_repo_group_pattern', 'unicode'), |
|
273 | ('personal_repo_group_pattern', 'rhodecode_personal_repo_group_pattern', 'unicode'), | |
274 | ] |
|
274 | ] | |
275 | for setting, form_key, type_ in settings: |
|
275 | for setting, form_key, type_ in settings: | |
276 | sett = SettingsModel().create_or_update_setting( |
|
276 | sett = SettingsModel().create_or_update_setting( | |
277 | setting, form_result[form_key], type_) |
|
277 | setting, form_result[form_key], type_) | |
278 | Session().add(sett) |
|
278 | Session().add(sett) | |
279 |
|
279 | |||
280 | Session().commit() |
|
280 | Session().commit() | |
281 | SettingsModel().invalidate_settings_cache() |
|
281 | SettingsModel().invalidate_settings_cache() | |
282 | h.flash(_('Updated application settings'), category='success') |
|
282 | h.flash(_('Updated application settings'), category='success') | |
283 | except Exception: |
|
283 | except Exception: | |
284 | log.exception("Exception while updating application settings") |
|
284 | log.exception("Exception while updating application settings") | |
285 | h.flash( |
|
285 | h.flash( | |
286 | _('Error occurred during updating application settings'), |
|
286 | _('Error occurred during updating application settings'), | |
287 | category='error') |
|
287 | category='error') | |
288 |
|
288 | |||
289 | return redirect(url('admin_settings_global')) |
|
289 | return redirect(url('admin_settings_global')) | |
290 |
|
290 | |||
291 | @HasPermissionAllDecorator('hg.admin') |
|
291 | @HasPermissionAllDecorator('hg.admin') | |
292 | def settings_global(self): |
|
292 | def settings_global(self): | |
293 | """GET /admin/settings/global: All items in the collection""" |
|
293 | """GET /admin/settings/global: All items in the collection""" | |
294 | # url('admin_settings_global') |
|
294 | # url('admin_settings_global') | |
295 | c.active = 'global' |
|
295 | c.active = 'global' | |
296 | c.personal_repo_group_default_pattern = RepoGroupModel()\ |
|
296 | c.personal_repo_group_default_pattern = RepoGroupModel()\ | |
297 | .get_personal_group_name_pattern() |
|
297 | .get_personal_group_name_pattern() | |
298 |
|
298 | |||
299 | return htmlfill.render( |
|
299 | return htmlfill.render( | |
300 | render('admin/settings/settings.mako'), |
|
300 | render('admin/settings/settings.mako'), | |
301 | defaults=self._form_defaults(), |
|
301 | defaults=self._form_defaults(), | |
302 | encoding="UTF-8", |
|
302 | encoding="UTF-8", | |
303 | force_defaults=False) |
|
303 | force_defaults=False) | |
304 |
|
304 | |||
305 | @HasPermissionAllDecorator('hg.admin') |
|
305 | @HasPermissionAllDecorator('hg.admin') | |
306 | @auth.CSRFRequired() |
|
306 | @auth.CSRFRequired() | |
307 | def settings_visual_update(self): |
|
307 | def settings_visual_update(self): | |
308 | """POST /admin/settings/visual: All items in the collection""" |
|
308 | """POST /admin/settings/visual: All items in the collection""" | |
309 | # url('admin_settings_visual') |
|
309 | # url('admin_settings_visual') | |
310 | c.active = 'visual' |
|
310 | c.active = 'visual' | |
311 | application_form = ApplicationVisualisationForm()() |
|
311 | application_form = ApplicationVisualisationForm()() | |
312 | try: |
|
312 | try: | |
313 | form_result = application_form.to_python(dict(request.POST)) |
|
313 | form_result = application_form.to_python(dict(request.POST)) | |
314 | except formencode.Invalid as errors: |
|
314 | except formencode.Invalid as errors: | |
315 | return htmlfill.render( |
|
315 | return htmlfill.render( | |
316 | render('admin/settings/settings.mako'), |
|
316 | render('admin/settings/settings.mako'), | |
317 | defaults=errors.value, |
|
317 | defaults=errors.value, | |
318 | errors=errors.error_dict or {}, |
|
318 | errors=errors.error_dict or {}, | |
319 | prefix_error=False, |
|
319 | prefix_error=False, | |
320 | encoding="UTF-8", |
|
320 | encoding="UTF-8", | |
321 | force_defaults=False |
|
321 | force_defaults=False | |
322 | ) |
|
322 | ) | |
323 |
|
323 | |||
324 | try: |
|
324 | try: | |
325 | settings = [ |
|
325 | settings = [ | |
326 | ('show_public_icon', 'rhodecode_show_public_icon', 'bool'), |
|
326 | ('show_public_icon', 'rhodecode_show_public_icon', 'bool'), | |
327 | ('show_private_icon', 'rhodecode_show_private_icon', 'bool'), |
|
327 | ('show_private_icon', 'rhodecode_show_private_icon', 'bool'), | |
328 | ('stylify_metatags', 'rhodecode_stylify_metatags', 'bool'), |
|
328 | ('stylify_metatags', 'rhodecode_stylify_metatags', 'bool'), | |
329 | ('repository_fields', 'rhodecode_repository_fields', 'bool'), |
|
329 | ('repository_fields', 'rhodecode_repository_fields', 'bool'), | |
330 | ('dashboard_items', 'rhodecode_dashboard_items', 'int'), |
|
330 | ('dashboard_items', 'rhodecode_dashboard_items', 'int'), | |
331 | ('admin_grid_items', 'rhodecode_admin_grid_items', 'int'), |
|
331 | ('admin_grid_items', 'rhodecode_admin_grid_items', 'int'), | |
332 | ('show_version', 'rhodecode_show_version', 'bool'), |
|
332 | ('show_version', 'rhodecode_show_version', 'bool'), | |
333 | ('use_gravatar', 'rhodecode_use_gravatar', 'bool'), |
|
333 | ('use_gravatar', 'rhodecode_use_gravatar', 'bool'), | |
334 | ('markup_renderer', 'rhodecode_markup_renderer', 'unicode'), |
|
334 | ('markup_renderer', 'rhodecode_markup_renderer', 'unicode'), | |
335 | ('gravatar_url', 'rhodecode_gravatar_url', 'unicode'), |
|
335 | ('gravatar_url', 'rhodecode_gravatar_url', 'unicode'), | |
336 | ('clone_uri_tmpl', 'rhodecode_clone_uri_tmpl', 'unicode'), |
|
336 | ('clone_uri_tmpl', 'rhodecode_clone_uri_tmpl', 'unicode'), | |
337 | ('support_url', 'rhodecode_support_url', 'unicode'), |
|
337 | ('support_url', 'rhodecode_support_url', 'unicode'), | |
338 | ('show_revision_number', 'rhodecode_show_revision_number', 'bool'), |
|
338 | ('show_revision_number', 'rhodecode_show_revision_number', 'bool'), | |
339 | ('show_sha_length', 'rhodecode_show_sha_length', 'int'), |
|
339 | ('show_sha_length', 'rhodecode_show_sha_length', 'int'), | |
340 | ] |
|
340 | ] | |
341 | for setting, form_key, type_ in settings: |
|
341 | for setting, form_key, type_ in settings: | |
342 | sett = SettingsModel().create_or_update_setting( |
|
342 | sett = SettingsModel().create_or_update_setting( | |
343 | setting, form_result[form_key], type_) |
|
343 | setting, form_result[form_key], type_) | |
344 | Session().add(sett) |
|
344 | Session().add(sett) | |
345 |
|
345 | |||
346 | Session().commit() |
|
346 | Session().commit() | |
347 | SettingsModel().invalidate_settings_cache() |
|
347 | SettingsModel().invalidate_settings_cache() | |
348 | h.flash(_('Updated visualisation settings'), category='success') |
|
348 | h.flash(_('Updated visualisation settings'), category='success') | |
349 | except Exception: |
|
349 | except Exception: | |
350 | log.exception("Exception updating visualization settings") |
|
350 | log.exception("Exception updating visualization settings") | |
351 | h.flash(_('Error occurred during updating ' |
|
351 | h.flash(_('Error occurred during updating ' | |
352 | 'visualisation settings'), |
|
352 | 'visualisation settings'), | |
353 | category='error') |
|
353 | category='error') | |
354 |
|
354 | |||
355 | return redirect(url('admin_settings_visual')) |
|
355 | return redirect(url('admin_settings_visual')) | |
356 |
|
356 | |||
357 | @HasPermissionAllDecorator('hg.admin') |
|
357 | @HasPermissionAllDecorator('hg.admin') | |
358 | def settings_visual(self): |
|
358 | def settings_visual(self): | |
359 | """GET /admin/settings/visual: All items in the collection""" |
|
359 | """GET /admin/settings/visual: All items in the collection""" | |
360 | # url('admin_settings_visual') |
|
360 | # url('admin_settings_visual') | |
361 | c.active = 'visual' |
|
361 | c.active = 'visual' | |
362 |
|
362 | |||
363 | return htmlfill.render( |
|
363 | return htmlfill.render( | |
364 | render('admin/settings/settings.mako'), |
|
364 | render('admin/settings/settings.mako'), | |
365 | defaults=self._form_defaults(), |
|
365 | defaults=self._form_defaults(), | |
366 | encoding="UTF-8", |
|
366 | encoding="UTF-8", | |
367 | force_defaults=False) |
|
367 | force_defaults=False) | |
368 |
|
368 | |||
369 | @HasPermissionAllDecorator('hg.admin') |
|
369 | @HasPermissionAllDecorator('hg.admin') | |
370 | @auth.CSRFRequired() |
|
370 | @auth.CSRFRequired() | |
371 | def settings_issuetracker_test(self): |
|
371 | def settings_issuetracker_test(self): | |
372 | if request.is_xhr: |
|
372 | if request.is_xhr: | |
373 | return h.urlify_commit_message( |
|
373 | return h.urlify_commit_message( | |
374 | request.POST.get('test_text', ''), |
|
374 | request.POST.get('test_text', ''), | |
375 | 'repo_group/test_repo1') |
|
375 | 'repo_group/test_repo1') | |
376 | else: |
|
376 | else: | |
377 | raise HTTPBadRequest() |
|
377 | raise HTTPBadRequest() | |
378 |
|
378 | |||
379 | @HasPermissionAllDecorator('hg.admin') |
|
379 | @HasPermissionAllDecorator('hg.admin') | |
380 | @auth.CSRFRequired() |
|
380 | @auth.CSRFRequired() | |
381 | def settings_issuetracker_delete(self): |
|
381 | def settings_issuetracker_delete(self): | |
382 | uid = request.POST.get('uid') |
|
382 | uid = request.POST.get('uid') | |
383 | IssueTrackerSettingsModel().delete_entries(uid) |
|
383 | IssueTrackerSettingsModel().delete_entries(uid) | |
384 | h.flash(_('Removed issue tracker entry'), category='success') |
|
384 | h.flash(_('Removed issue tracker entry'), category='success') | |
385 | return redirect(url('admin_settings_issuetracker')) |
|
385 | return redirect(url('admin_settings_issuetracker')) | |
386 |
|
386 | |||
387 | @HasPermissionAllDecorator('hg.admin') |
|
387 | @HasPermissionAllDecorator('hg.admin') | |
388 | def settings_issuetracker(self): |
|
388 | def settings_issuetracker(self): | |
389 | """GET /admin/settings/issue-tracker: All items in the collection""" |
|
389 | """GET /admin/settings/issue-tracker: All items in the collection""" | |
390 | # url('admin_settings_issuetracker') |
|
390 | # url('admin_settings_issuetracker') | |
391 | c.active = 'issuetracker' |
|
391 | c.active = 'issuetracker' | |
392 | defaults = SettingsModel().get_all_settings() |
|
392 | defaults = SettingsModel().get_all_settings() | |
393 |
|
393 | |||
394 | entry_key = 'rhodecode_issuetracker_pat_' |
|
394 | entry_key = 'rhodecode_issuetracker_pat_' | |
395 |
|
395 | |||
396 | c.issuetracker_entries = {} |
|
396 | c.issuetracker_entries = {} | |
397 | for k, v in defaults.items(): |
|
397 | for k, v in defaults.items(): | |
398 | if k.startswith(entry_key): |
|
398 | if k.startswith(entry_key): | |
399 | uid = k[len(entry_key):] |
|
399 | uid = k[len(entry_key):] | |
400 | c.issuetracker_entries[uid] = None |
|
400 | c.issuetracker_entries[uid] = None | |
401 |
|
401 | |||
402 | for uid in c.issuetracker_entries: |
|
402 | for uid in c.issuetracker_entries: | |
403 | c.issuetracker_entries[uid] = AttributeDict({ |
|
403 | c.issuetracker_entries[uid] = AttributeDict({ | |
404 | 'pat': defaults.get('rhodecode_issuetracker_pat_' + uid), |
|
404 | 'pat': defaults.get('rhodecode_issuetracker_pat_' + uid), | |
405 | 'url': defaults.get('rhodecode_issuetracker_url_' + uid), |
|
405 | 'url': defaults.get('rhodecode_issuetracker_url_' + uid), | |
406 | 'pref': defaults.get('rhodecode_issuetracker_pref_' + uid), |
|
406 | 'pref': defaults.get('rhodecode_issuetracker_pref_' + uid), | |
407 | 'desc': defaults.get('rhodecode_issuetracker_desc_' + uid), |
|
407 | 'desc': defaults.get('rhodecode_issuetracker_desc_' + uid), | |
408 | }) |
|
408 | }) | |
409 |
|
409 | |||
410 | return render('admin/settings/settings.mako') |
|
410 | return render('admin/settings/settings.mako') | |
411 |
|
411 | |||
412 | @HasPermissionAllDecorator('hg.admin') |
|
412 | @HasPermissionAllDecorator('hg.admin') | |
413 | @auth.CSRFRequired() |
|
413 | @auth.CSRFRequired() | |
414 | def settings_issuetracker_save(self): |
|
414 | def settings_issuetracker_save(self): | |
415 | settings_model = IssueTrackerSettingsModel() |
|
415 | settings_model = IssueTrackerSettingsModel() | |
416 |
|
416 | |||
417 | form = IssueTrackerPatternsForm()().to_python(request.POST) |
|
417 | form = IssueTrackerPatternsForm()().to_python(request.POST) | |
418 | if form: |
|
418 | if form: | |
419 | for uid in form.get('delete_patterns', []): |
|
419 | for uid in form.get('delete_patterns', []): | |
420 | settings_model.delete_entries(uid) |
|
420 | settings_model.delete_entries(uid) | |
421 |
|
421 | |||
422 | for pattern in form.get('patterns', []): |
|
422 | for pattern in form.get('patterns', []): | |
423 | for setting, value, type_ in pattern: |
|
423 | for setting, value, type_ in pattern: | |
424 | sett = settings_model.create_or_update_setting( |
|
424 | sett = settings_model.create_or_update_setting( | |
425 | setting, value, type_) |
|
425 | setting, value, type_) | |
426 | Session().add(sett) |
|
426 | Session().add(sett) | |
427 |
|
427 | |||
428 | Session().commit() |
|
428 | Session().commit() | |
429 |
|
429 | |||
430 | SettingsModel().invalidate_settings_cache() |
|
430 | SettingsModel().invalidate_settings_cache() | |
431 | h.flash(_('Updated issue tracker entries'), category='success') |
|
431 | h.flash(_('Updated issue tracker entries'), category='success') | |
432 | return redirect(url('admin_settings_issuetracker')) |
|
432 | return redirect(url('admin_settings_issuetracker')) | |
433 |
|
433 | |||
434 | @HasPermissionAllDecorator('hg.admin') |
|
434 | @HasPermissionAllDecorator('hg.admin') | |
435 | @auth.CSRFRequired() |
|
435 | @auth.CSRFRequired() | |
436 | def settings_email_update(self): |
|
436 | def settings_email_update(self): | |
437 | """POST /admin/settings/email: All items in the collection""" |
|
437 | """POST /admin/settings/email: All items in the collection""" | |
438 | # url('admin_settings_email') |
|
438 | # url('admin_settings_email') | |
439 | c.active = 'email' |
|
439 | c.active = 'email' | |
440 |
|
440 | |||
441 | test_email = request.POST.get('test_email') |
|
441 | test_email = request.POST.get('test_email') | |
442 |
|
442 | |||
443 | if not test_email: |
|
443 | if not test_email: | |
444 | h.flash(_('Please enter email address'), category='error') |
|
444 | h.flash(_('Please enter email address'), category='error') | |
445 | return redirect(url('admin_settings_email')) |
|
445 | return redirect(url('admin_settings_email')) | |
446 |
|
446 | |||
447 | email_kwargs = { |
|
447 | email_kwargs = { | |
448 | 'date': datetime.datetime.now(), |
|
448 | 'date': datetime.datetime.now(), | |
449 | 'user': c.rhodecode_user, |
|
449 | 'user': c.rhodecode_user, | |
450 | 'rhodecode_version': c.rhodecode_version |
|
450 | 'rhodecode_version': c.rhodecode_version | |
451 | } |
|
451 | } | |
452 |
|
452 | |||
453 | (subject, headers, email_body, |
|
453 | (subject, headers, email_body, | |
454 | email_body_plaintext) = EmailNotificationModel().render_email( |
|
454 | email_body_plaintext) = EmailNotificationModel().render_email( | |
455 | EmailNotificationModel.TYPE_EMAIL_TEST, **email_kwargs) |
|
455 | EmailNotificationModel.TYPE_EMAIL_TEST, **email_kwargs) | |
456 |
|
456 | |||
457 | recipients = [test_email] if test_email else None |
|
457 | recipients = [test_email] if test_email else None | |
458 |
|
458 | |||
459 | run_task(tasks.send_email, recipients, subject, |
|
459 | run_task(tasks.send_email, recipients, subject, | |
460 | email_body_plaintext, email_body) |
|
460 | email_body_plaintext, email_body) | |
461 |
|
461 | |||
462 | h.flash(_('Send email task created'), category='success') |
|
462 | h.flash(_('Send email task created'), category='success') | |
463 | return redirect(url('admin_settings_email')) |
|
463 | return redirect(url('admin_settings_email')) | |
464 |
|
464 | |||
465 | @HasPermissionAllDecorator('hg.admin') |
|
465 | @HasPermissionAllDecorator('hg.admin') | |
466 | def settings_email(self): |
|
466 | def settings_email(self): | |
467 | """GET /admin/settings/email: All items in the collection""" |
|
467 | """GET /admin/settings/email: All items in the collection""" | |
468 | # url('admin_settings_email') |
|
468 | # url('admin_settings_email') | |
469 | c.active = 'email' |
|
469 | c.active = 'email' | |
470 | c.rhodecode_ini = rhodecode.CONFIG |
|
470 | c.rhodecode_ini = rhodecode.CONFIG | |
471 |
|
471 | |||
472 | return htmlfill.render( |
|
472 | return htmlfill.render( | |
473 | render('admin/settings/settings.mako'), |
|
473 | render('admin/settings/settings.mako'), | |
474 | defaults=self._form_defaults(), |
|
474 | defaults=self._form_defaults(), | |
475 | encoding="UTF-8", |
|
475 | encoding="UTF-8", | |
476 | force_defaults=False) |
|
476 | force_defaults=False) | |
477 |
|
477 | |||
478 | @HasPermissionAllDecorator('hg.admin') |
|
478 | @HasPermissionAllDecorator('hg.admin') | |
479 | @auth.CSRFRequired() |
|
479 | @auth.CSRFRequired() | |
480 | def settings_hooks_update(self): |
|
480 | def settings_hooks_update(self): | |
481 | """POST or DELETE /admin/settings/hooks: All items in the collection""" |
|
481 | """POST or DELETE /admin/settings/hooks: All items in the collection""" | |
482 | # url('admin_settings_hooks') |
|
482 | # url('admin_settings_hooks') | |
483 | c.active = 'hooks' |
|
483 | c.active = 'hooks' | |
484 | if c.visual.allow_custom_hooks_settings: |
|
484 | if c.visual.allow_custom_hooks_settings: | |
485 | ui_key = request.POST.get('new_hook_ui_key') |
|
485 | ui_key = request.POST.get('new_hook_ui_key') | |
486 | ui_value = request.POST.get('new_hook_ui_value') |
|
486 | ui_value = request.POST.get('new_hook_ui_value') | |
487 |
|
487 | |||
488 | hook_id = request.POST.get('hook_id') |
|
488 | hook_id = request.POST.get('hook_id') | |
489 | new_hook = False |
|
489 | new_hook = False | |
490 |
|
490 | |||
491 | model = SettingsModel() |
|
491 | model = SettingsModel() | |
492 | try: |
|
492 | try: | |
493 | if ui_value and ui_key: |
|
493 | if ui_value and ui_key: | |
494 | model.create_or_update_hook(ui_key, ui_value) |
|
494 | model.create_or_update_hook(ui_key, ui_value) | |
495 | h.flash(_('Added new hook'), category='success') |
|
495 | h.flash(_('Added new hook'), category='success') | |
496 | new_hook = True |
|
496 | new_hook = True | |
497 | elif hook_id: |
|
497 | elif hook_id: | |
498 | RhodeCodeUi.delete(hook_id) |
|
498 | RhodeCodeUi.delete(hook_id) | |
499 | Session().commit() |
|
499 | Session().commit() | |
500 |
|
500 | |||
501 | # check for edits |
|
501 | # check for edits | |
502 | update = False |
|
502 | update = False | |
503 | _d = request.POST.dict_of_lists() |
|
503 | _d = request.POST.dict_of_lists() | |
504 | for k, v in zip(_d.get('hook_ui_key', []), |
|
504 | for k, v in zip(_d.get('hook_ui_key', []), | |
505 | _d.get('hook_ui_value_new', [])): |
|
505 | _d.get('hook_ui_value_new', [])): | |
506 | model.create_or_update_hook(k, v) |
|
506 | model.create_or_update_hook(k, v) | |
507 | update = True |
|
507 | update = True | |
508 |
|
508 | |||
509 | if update and not new_hook: |
|
509 | if update and not new_hook: | |
510 | h.flash(_('Updated hooks'), category='success') |
|
510 | h.flash(_('Updated hooks'), category='success') | |
511 | Session().commit() |
|
511 | Session().commit() | |
512 | except Exception: |
|
512 | except Exception: | |
513 | log.exception("Exception during hook creation") |
|
513 | log.exception("Exception during hook creation") | |
514 | h.flash(_('Error occurred during hook creation'), |
|
514 | h.flash(_('Error occurred during hook creation'), | |
515 | category='error') |
|
515 | category='error') | |
516 |
|
516 | |||
517 | return redirect(url('admin_settings_hooks')) |
|
517 | return redirect(url('admin_settings_hooks')) | |
518 |
|
518 | |||
519 | @HasPermissionAllDecorator('hg.admin') |
|
519 | @HasPermissionAllDecorator('hg.admin') | |
520 | def settings_hooks(self): |
|
520 | def settings_hooks(self): | |
521 | """GET /admin/settings/hooks: All items in the collection""" |
|
521 | """GET /admin/settings/hooks: All items in the collection""" | |
522 | # url('admin_settings_hooks') |
|
522 | # url('admin_settings_hooks') | |
523 | c.active = 'hooks' |
|
523 | c.active = 'hooks' | |
524 |
|
524 | |||
525 | model = SettingsModel() |
|
525 | model = SettingsModel() | |
526 | c.hooks = model.get_builtin_hooks() |
|
526 | c.hooks = model.get_builtin_hooks() | |
527 | c.custom_hooks = model.get_custom_hooks() |
|
527 | c.custom_hooks = model.get_custom_hooks() | |
528 |
|
528 | |||
529 | return htmlfill.render( |
|
529 | return htmlfill.render( | |
530 | render('admin/settings/settings.mako'), |
|
530 | render('admin/settings/settings.mako'), | |
531 | defaults=self._form_defaults(), |
|
531 | defaults=self._form_defaults(), | |
532 | encoding="UTF-8", |
|
532 | encoding="UTF-8", | |
533 | force_defaults=False) |
|
533 | force_defaults=False) | |
534 |
|
534 | |||
535 | @HasPermissionAllDecorator('hg.admin') |
|
535 | @HasPermissionAllDecorator('hg.admin') | |
536 | def settings_search(self): |
|
536 | def settings_search(self): | |
537 | """GET /admin/settings/search: All items in the collection""" |
|
537 | """GET /admin/settings/search: All items in the collection""" | |
538 | # url('admin_settings_search') |
|
538 | # url('admin_settings_search') | |
539 | c.active = 'search' |
|
539 | c.active = 'search' | |
540 |
|
540 | |||
541 | from rhodecode.lib.index import searcher_from_config |
|
541 | from rhodecode.lib.index import searcher_from_config | |
542 | searcher = searcher_from_config(config) |
|
542 | searcher = searcher_from_config(config) | |
543 | c.statistics = searcher.statistics() |
|
543 | c.statistics = searcher.statistics() | |
544 |
|
544 | |||
545 | return render('admin/settings/settings.mako') |
|
545 | return render('admin/settings/settings.mako') | |
546 |
|
546 | |||
547 | @HasPermissionAllDecorator('hg.admin') |
|
547 | @HasPermissionAllDecorator('hg.admin') | |
548 | def settings_supervisor(self): |
|
548 | def settings_supervisor(self): | |
549 | c.rhodecode_ini = rhodecode.CONFIG |
|
549 | c.rhodecode_ini = rhodecode.CONFIG | |
550 | c.active = 'supervisor' |
|
550 | c.active = 'supervisor' | |
551 |
|
551 | |||
552 | c.supervisor_procs = OrderedDict([ |
|
552 | c.supervisor_procs = OrderedDict([ | |
553 | (SUPERVISOR_MASTER, {}), |
|
553 | (SUPERVISOR_MASTER, {}), | |
554 | ]) |
|
554 | ]) | |
555 |
|
555 | |||
556 | c.log_size = 10240 |
|
556 | c.log_size = 10240 | |
557 | supervisor = SupervisorModel() |
|
557 | supervisor = SupervisorModel() | |
558 |
|
558 | |||
559 | _connection = supervisor.get_connection( |
|
559 | _connection = supervisor.get_connection( | |
560 | c.rhodecode_ini.get('supervisor.uri')) |
|
560 | c.rhodecode_ini.get('supervisor.uri')) | |
561 | c.connection_error = None |
|
561 | c.connection_error = None | |
562 | try: |
|
562 | try: | |
563 | _connection.supervisor.getAllProcessInfo() |
|
563 | _connection.supervisor.getAllProcessInfo() | |
564 | except Exception as e: |
|
564 | except Exception as e: | |
565 | c.connection_error = str(e) |
|
565 | c.connection_error = str(e) | |
566 | log.exception("Exception reading supervisor data") |
|
566 | log.exception("Exception reading supervisor data") | |
567 | return render('admin/settings/settings.mako') |
|
567 | return render('admin/settings/settings.mako') | |
568 |
|
568 | |||
569 | groupid = c.rhodecode_ini.get('supervisor.group_id') |
|
569 | groupid = c.rhodecode_ini.get('supervisor.group_id') | |
570 |
|
570 | |||
571 | # feed our group processes to the main |
|
571 | # feed our group processes to the main | |
572 | for proc in supervisor.get_group_processes(_connection, groupid): |
|
572 | for proc in supervisor.get_group_processes(_connection, groupid): | |
573 | c.supervisor_procs[proc['name']] = {} |
|
573 | c.supervisor_procs[proc['name']] = {} | |
574 |
|
574 | |||
575 | for k in c.supervisor_procs.keys(): |
|
575 | for k in c.supervisor_procs.keys(): | |
576 | try: |
|
576 | try: | |
577 | # master process info |
|
577 | # master process info | |
578 | if k == SUPERVISOR_MASTER: |
|
578 | if k == SUPERVISOR_MASTER: | |
579 | _data = supervisor.get_master_state(_connection) |
|
579 | _data = supervisor.get_master_state(_connection) | |
580 | _data['name'] = 'supervisor master' |
|
580 | _data['name'] = 'supervisor master' | |
581 | _data['description'] = 'pid %s, id: %s, ver: %s' % ( |
|
581 | _data['description'] = 'pid %s, id: %s, ver: %s' % ( | |
582 | _data['pid'], _data['id'], _data['ver']) |
|
582 | _data['pid'], _data['id'], _data['ver']) | |
583 | c.supervisor_procs[k] = _data |
|
583 | c.supervisor_procs[k] = _data | |
584 | else: |
|
584 | else: | |
585 | procid = groupid + ":" + k |
|
585 | procid = groupid + ":" + k | |
586 | c.supervisor_procs[k] = supervisor.get_process_info(_connection, procid) |
|
586 | c.supervisor_procs[k] = supervisor.get_process_info(_connection, procid) | |
587 | except Exception as e: |
|
587 | except Exception as e: | |
588 | log.exception("Exception reading supervisor data") |
|
588 | log.exception("Exception reading supervisor data") | |
589 | c.supervisor_procs[k] = {'_rhodecode_error': str(e)} |
|
589 | c.supervisor_procs[k] = {'_rhodecode_error': str(e)} | |
590 |
|
590 | |||
591 | return render('admin/settings/settings.mako') |
|
591 | return render('admin/settings/settings.mako') | |
592 |
|
592 | |||
593 | @HasPermissionAllDecorator('hg.admin') |
|
593 | @HasPermissionAllDecorator('hg.admin') | |
594 | def settings_supervisor_log(self, procid): |
|
594 | def settings_supervisor_log(self, procid): | |
595 | import rhodecode |
|
595 | import rhodecode | |
596 | c.rhodecode_ini = rhodecode.CONFIG |
|
596 | c.rhodecode_ini = rhodecode.CONFIG | |
597 | c.active = 'supervisor_tail' |
|
597 | c.active = 'supervisor_tail' | |
598 |
|
598 | |||
599 | supervisor = SupervisorModel() |
|
599 | supervisor = SupervisorModel() | |
600 | _connection = supervisor.get_connection(c.rhodecode_ini.get('supervisor.uri')) |
|
600 | _connection = supervisor.get_connection(c.rhodecode_ini.get('supervisor.uri')) | |
601 | groupid = c.rhodecode_ini.get('supervisor.group_id') |
|
601 | groupid = c.rhodecode_ini.get('supervisor.group_id') | |
602 | procid = groupid + ":" + procid if procid != SUPERVISOR_MASTER else procid |
|
602 | procid = groupid + ":" + procid if procid != SUPERVISOR_MASTER else procid | |
603 |
|
603 | |||
604 | c.log_size = 10240 |
|
604 | c.log_size = 10240 | |
605 | offset = abs(safe_int(request.GET.get('offset', c.log_size))) * -1 |
|
605 | offset = abs(safe_int(request.GET.get('offset', c.log_size))) * -1 | |
606 | c.log = supervisor.read_process_log(_connection, procid, offset, 0) |
|
606 | c.log = supervisor.read_process_log(_connection, procid, offset, 0) | |
607 |
|
607 | |||
608 | return render('admin/settings/settings.mako') |
|
608 | return render('admin/settings/settings.mako') | |
609 |
|
609 | |||
610 | @HasPermissionAllDecorator('hg.admin') |
|
610 | @HasPermissionAllDecorator('hg.admin') | |
611 | @auth.CSRFRequired() |
|
611 | @auth.CSRFRequired() | |
612 | def settings_labs_update(self): |
|
612 | def settings_labs_update(self): | |
613 | """POST /admin/settings/labs: All items in the collection""" |
|
613 | """POST /admin/settings/labs: All items in the collection""" | |
614 | # url('admin_settings/labs', method={'POST'}) |
|
614 | # url('admin_settings/labs', method={'POST'}) | |
615 | c.active = 'labs' |
|
615 | c.active = 'labs' | |
616 |
|
616 | |||
617 | application_form = LabsSettingsForm()() |
|
617 | application_form = LabsSettingsForm()() | |
618 | try: |
|
618 | try: | |
619 | form_result = application_form.to_python(dict(request.POST)) |
|
619 | form_result = application_form.to_python(dict(request.POST)) | |
620 | except formencode.Invalid as errors: |
|
620 | except formencode.Invalid as errors: | |
621 | h.flash( |
|
621 | h.flash( | |
622 | _('Some form inputs contain invalid data.'), |
|
622 | _('Some form inputs contain invalid data.'), | |
623 | category='error') |
|
623 | category='error') | |
624 | return htmlfill.render( |
|
624 | return htmlfill.render( | |
625 | render('admin/settings/settings.mako'), |
|
625 | render('admin/settings/settings.mako'), | |
626 | defaults=errors.value, |
|
626 | defaults=errors.value, | |
627 | errors=errors.error_dict or {}, |
|
627 | errors=errors.error_dict or {}, | |
628 | prefix_error=False, |
|
628 | prefix_error=False, | |
629 | encoding='UTF-8', |
|
629 | encoding='UTF-8', | |
630 | force_defaults=False |
|
630 | force_defaults=False | |
631 | ) |
|
631 | ) | |
632 |
|
632 | |||
633 | try: |
|
633 | try: | |
634 | session = Session() |
|
634 | session = Session() | |
635 | for setting in _LAB_SETTINGS: |
|
635 | for setting in _LAB_SETTINGS: | |
636 | setting_name = setting.key[len('rhodecode_'):] |
|
636 | setting_name = setting.key[len('rhodecode_'):] | |
637 | sett = SettingsModel().create_or_update_setting( |
|
637 | sett = SettingsModel().create_or_update_setting( | |
638 | setting_name, form_result[setting.key], setting.type) |
|
638 | setting_name, form_result[setting.key], setting.type) | |
639 | session.add(sett) |
|
639 | session.add(sett) | |
640 |
|
640 | |||
641 | except Exception: |
|
641 | except Exception: | |
642 | log.exception('Exception while updating lab settings') |
|
642 | log.exception('Exception while updating lab settings') | |
643 | h.flash(_('Error occurred during updating labs settings'), |
|
643 | h.flash(_('Error occurred during updating labs settings'), | |
644 | category='error') |
|
644 | category='error') | |
645 | else: |
|
645 | else: | |
646 | Session().commit() |
|
646 | Session().commit() | |
647 | SettingsModel().invalidate_settings_cache() |
|
647 | SettingsModel().invalidate_settings_cache() | |
648 | h.flash(_('Updated Labs settings'), category='success') |
|
648 | h.flash(_('Updated Labs settings'), category='success') | |
649 | return redirect(url('admin_settings_labs')) |
|
649 | return redirect(url('admin_settings_labs')) | |
650 |
|
650 | |||
651 | return htmlfill.render( |
|
651 | return htmlfill.render( | |
652 | render('admin/settings/settings.mako'), |
|
652 | render('admin/settings/settings.mako'), | |
653 | defaults=self._form_defaults(), |
|
653 | defaults=self._form_defaults(), | |
654 | encoding='UTF-8', |
|
654 | encoding='UTF-8', | |
655 | force_defaults=False) |
|
655 | force_defaults=False) | |
656 |
|
656 | |||
657 | @HasPermissionAllDecorator('hg.admin') |
|
657 | @HasPermissionAllDecorator('hg.admin') | |
658 | def settings_labs(self): |
|
658 | def settings_labs(self): | |
659 | """GET /admin/settings/labs: All items in the collection""" |
|
659 | """GET /admin/settings/labs: All items in the collection""" | |
660 | # url('admin_settings_labs') |
|
660 | # url('admin_settings_labs') | |
661 | if not c.labs_active: |
|
661 | if not c.labs_active: | |
662 | redirect(url('admin_settings')) |
|
662 | redirect(url('admin_settings')) | |
663 |
|
663 | |||
664 | c.active = 'labs' |
|
664 | c.active = 'labs' | |
665 | c.lab_settings = _LAB_SETTINGS |
|
665 | c.lab_settings = _LAB_SETTINGS | |
666 |
|
666 | |||
667 | return htmlfill.render( |
|
667 | return htmlfill.render( | |
668 | render('admin/settings/settings.mako'), |
|
668 | render('admin/settings/settings.mako'), | |
669 | defaults=self._form_defaults(), |
|
669 | defaults=self._form_defaults(), | |
670 | encoding='UTF-8', |
|
670 | encoding='UTF-8', | |
671 | force_defaults=False) |
|
671 | force_defaults=False) | |
672 |
|
672 | |||
673 | def _form_defaults(self): |
|
673 | def _form_defaults(self): | |
674 | defaults = SettingsModel().get_all_settings() |
|
674 | defaults = SettingsModel().get_all_settings() | |
675 | defaults.update(self._get_ui_settings()) |
|
675 | defaults.update(self._get_ui_settings()) | |
676 |
|
676 | |||
677 | defaults.update({ |
|
677 | defaults.update({ | |
678 | 'new_svn_branch': '', |
|
678 | 'new_svn_branch': '', | |
679 | 'new_svn_tag': '', |
|
679 | 'new_svn_tag': '', | |
680 | }) |
|
680 | }) | |
681 | return defaults |
|
681 | return defaults | |
682 |
|
682 | |||
683 |
|
683 | |||
684 | # :param key: name of the setting including the 'rhodecode_' prefix |
|
684 | # :param key: name of the setting including the 'rhodecode_' prefix | |
685 | # :param type: the RhodeCodeSetting type to use. |
|
685 | # :param type: the RhodeCodeSetting type to use. | |
686 | # :param group: the i18ned group in which we should dispaly this setting |
|
686 | # :param group: the i18ned group in which we should dispaly this setting | |
687 | # :param label: the i18ned label we should display for this setting |
|
687 | # :param label: the i18ned label we should display for this setting | |
688 | # :param help: the i18ned help we should dispaly for this setting |
|
688 | # :param help: the i18ned help we should dispaly for this setting | |
689 | LabSetting = collections.namedtuple( |
|
689 | LabSetting = collections.namedtuple( | |
690 | 'LabSetting', ('key', 'type', 'group', 'label', 'help')) |
|
690 | 'LabSetting', ('key', 'type', 'group', 'label', 'help')) | |
691 |
|
691 | |||
692 |
|
692 | |||
693 | # This list has to be kept in sync with the form |
|
693 | # This list has to be kept in sync with the form | |
694 | # rhodecode.model.forms.LabsSettingsForm. |
|
694 | # rhodecode.model.forms.LabsSettingsForm. | |
695 | _LAB_SETTINGS = [ |
|
695 | _LAB_SETTINGS = [ | |
696 |
|
696 | |||
697 | ] |
|
697 | ] |
@@ -1,419 +1,413 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2011-2017 RhodeCode GmbH |
|
3 | # Copyright (C) 2011-2017 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | User Groups crud controller for pylons |
|
22 | User Groups crud controller for pylons | |
23 | """ |
|
23 | """ | |
24 |
|
24 | |||
25 | import logging |
|
25 | import logging | |
26 | import formencode |
|
26 | import formencode | |
27 |
|
27 | |||
28 | import peppercorn |
|
28 | import peppercorn | |
29 | from formencode import htmlfill |
|
29 | from formencode import htmlfill | |
30 | from pylons import request, tmpl_context as c, url, config |
|
30 | from pylons import request, tmpl_context as c, url, config | |
31 | from pylons.controllers.util import redirect |
|
31 | from pylons.controllers.util import redirect | |
32 | from pylons.i18n.translation import _ |
|
32 | from pylons.i18n.translation import _ | |
33 |
|
33 | |||
34 | from sqlalchemy.orm import joinedload |
|
|||
35 |
|
||||
36 | from rhodecode.lib import auth |
|
34 | from rhodecode.lib import auth | |
37 | from rhodecode.lib import helpers as h |
|
35 | from rhodecode.lib import helpers as h | |
38 | from rhodecode.lib import audit_logger |
|
36 | from rhodecode.lib import audit_logger | |
39 | from rhodecode.lib.ext_json import json |
|
|||
40 | from rhodecode.lib.exceptions import UserGroupAssignedException,\ |
|
37 | from rhodecode.lib.exceptions import UserGroupAssignedException,\ | |
41 | RepoGroupAssignmentError |
|
38 | RepoGroupAssignmentError | |
42 | from rhodecode.lib.utils import jsonify |
|
|||
43 | from rhodecode.lib.utils2 import safe_unicode, str2bool, safe_int |
|
39 | from rhodecode.lib.utils2 import safe_unicode, str2bool, safe_int | |
44 | from rhodecode.lib.auth import ( |
|
40 | from rhodecode.lib.auth import ( | |
45 | LoginRequired, NotAnonymous, HasUserGroupPermissionAnyDecorator, |
|
41 | LoginRequired, NotAnonymous, HasUserGroupPermissionAnyDecorator, | |
46 | HasPermissionAnyDecorator) |
|
42 | HasPermissionAnyDecorator) | |
47 | from rhodecode.lib.base import BaseController, render |
|
43 | from rhodecode.lib.base import BaseController, render | |
48 | from rhodecode.model.permission import PermissionModel |
|
44 | from rhodecode.model.permission import PermissionModel | |
49 | from rhodecode.model.scm import UserGroupList |
|
|||
50 | from rhodecode.model.user_group import UserGroupModel |
|
45 | from rhodecode.model.user_group import UserGroupModel | |
51 |
from rhodecode.model.db import |
|
46 | from rhodecode.model.db import User, UserGroup | |
52 | User, UserGroup, UserGroupRepoToPerm, UserGroupRepoGroupToPerm) |
|
|||
53 | from rhodecode.model.forms import ( |
|
47 | from rhodecode.model.forms import ( | |
54 | UserGroupForm, UserGroupPermsForm, UserIndividualPermissionsForm, |
|
48 | UserGroupForm, UserGroupPermsForm, UserIndividualPermissionsForm, | |
55 | UserPermissionsForm) |
|
49 | UserPermissionsForm) | |
56 | from rhodecode.model.meta import Session |
|
50 | from rhodecode.model.meta import Session | |
57 |
|
51 | |||
58 |
|
52 | |||
59 | log = logging.getLogger(__name__) |
|
53 | log = logging.getLogger(__name__) | |
60 |
|
54 | |||
61 |
|
55 | |||
62 | class UserGroupsController(BaseController): |
|
56 | class UserGroupsController(BaseController): | |
63 | """REST Controller styled on the Atom Publishing Protocol""" |
|
57 | """REST Controller styled on the Atom Publishing Protocol""" | |
64 |
|
58 | |||
65 | @LoginRequired() |
|
59 | @LoginRequired() | |
66 | def __before__(self): |
|
60 | def __before__(self): | |
67 | super(UserGroupsController, self).__before__() |
|
61 | super(UserGroupsController, self).__before__() | |
68 | c.available_permissions = config['available_permissions'] |
|
62 | c.available_permissions = config['available_permissions'] | |
69 | PermissionModel().set_global_permission_choices(c, gettext_translator=_) |
|
63 | PermissionModel().set_global_permission_choices(c, gettext_translator=_) | |
70 |
|
64 | |||
71 | def __load_data(self, user_group_id): |
|
65 | def __load_data(self, user_group_id): | |
72 | c.group_members_obj = [x.user for x in c.user_group.members] |
|
66 | c.group_members_obj = [x.user for x in c.user_group.members] | |
73 | c.group_members_obj.sort(key=lambda u: u.username.lower()) |
|
67 | c.group_members_obj.sort(key=lambda u: u.username.lower()) | |
74 | c.group_members = [(x.user_id, x.username) for x in c.group_members_obj] |
|
68 | c.group_members = [(x.user_id, x.username) for x in c.group_members_obj] | |
75 |
|
69 | |||
76 | def __load_defaults(self, user_group_id): |
|
70 | def __load_defaults(self, user_group_id): | |
77 | """ |
|
71 | """ | |
78 | Load defaults settings for edit, and update |
|
72 | Load defaults settings for edit, and update | |
79 |
|
73 | |||
80 | :param user_group_id: |
|
74 | :param user_group_id: | |
81 | """ |
|
75 | """ | |
82 | user_group = UserGroup.get_or_404(user_group_id) |
|
76 | user_group = UserGroup.get_or_404(user_group_id) | |
83 | data = user_group.get_dict() |
|
77 | data = user_group.get_dict() | |
84 | # fill owner |
|
78 | # fill owner | |
85 | if user_group.user: |
|
79 | if user_group.user: | |
86 | data.update({'user': user_group.user.username}) |
|
80 | data.update({'user': user_group.user.username}) | |
87 | else: |
|
81 | else: | |
88 | replacement_user = User.get_first_super_admin().username |
|
82 | replacement_user = User.get_first_super_admin().username | |
89 | data.update({'user': replacement_user}) |
|
83 | data.update({'user': replacement_user}) | |
90 | return data |
|
84 | return data | |
91 |
|
85 | |||
92 | def _revoke_perms_on_yourself(self, form_result): |
|
86 | def _revoke_perms_on_yourself(self, form_result): | |
93 | _updates = filter(lambda u: c.rhodecode_user.user_id == int(u[0]), |
|
87 | _updates = filter(lambda u: c.rhodecode_user.user_id == int(u[0]), | |
94 | form_result['perm_updates']) |
|
88 | form_result['perm_updates']) | |
95 | _additions = filter(lambda u: c.rhodecode_user.user_id == int(u[0]), |
|
89 | _additions = filter(lambda u: c.rhodecode_user.user_id == int(u[0]), | |
96 | form_result['perm_additions']) |
|
90 | form_result['perm_additions']) | |
97 | _deletions = filter(lambda u: c.rhodecode_user.user_id == int(u[0]), |
|
91 | _deletions = filter(lambda u: c.rhodecode_user.user_id == int(u[0]), | |
98 | form_result['perm_deletions']) |
|
92 | form_result['perm_deletions']) | |
99 | admin_perm = 'usergroup.admin' |
|
93 | admin_perm = 'usergroup.admin' | |
100 | if _updates and _updates[0][1] != admin_perm or \ |
|
94 | if _updates and _updates[0][1] != admin_perm or \ | |
101 | _additions and _additions[0][1] != admin_perm or \ |
|
95 | _additions and _additions[0][1] != admin_perm or \ | |
102 | _deletions and _deletions[0][1] != admin_perm: |
|
96 | _deletions and _deletions[0][1] != admin_perm: | |
103 | return True |
|
97 | return True | |
104 | return False |
|
98 | return False | |
105 |
|
99 | |||
106 | @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true') |
|
100 | @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true') | |
107 | @auth.CSRFRequired() |
|
101 | @auth.CSRFRequired() | |
108 | def create(self): |
|
102 | def create(self): | |
109 |
|
103 | |||
110 | users_group_form = UserGroupForm()() |
|
104 | users_group_form = UserGroupForm()() | |
111 | try: |
|
105 | try: | |
112 | form_result = users_group_form.to_python(dict(request.POST)) |
|
106 | form_result = users_group_form.to_python(dict(request.POST)) | |
113 | user_group = UserGroupModel().create( |
|
107 | user_group = UserGroupModel().create( | |
114 | name=form_result['users_group_name'], |
|
108 | name=form_result['users_group_name'], | |
115 | description=form_result['user_group_description'], |
|
109 | description=form_result['user_group_description'], | |
116 | owner=c.rhodecode_user.user_id, |
|
110 | owner=c.rhodecode_user.user_id, | |
117 | active=form_result['users_group_active']) |
|
111 | active=form_result['users_group_active']) | |
118 | Session().flush() |
|
112 | Session().flush() | |
119 | creation_data = user_group.get_api_data() |
|
113 | creation_data = user_group.get_api_data() | |
120 | user_group_name = form_result['users_group_name'] |
|
114 | user_group_name = form_result['users_group_name'] | |
121 |
|
115 | |||
122 | audit_logger.store_web( |
|
116 | audit_logger.store_web( | |
123 | 'user_group.create', action_data={'data': creation_data}, |
|
117 | 'user_group.create', action_data={'data': creation_data}, | |
124 | user=c.rhodecode_user) |
|
118 | user=c.rhodecode_user) | |
125 |
|
119 | |||
126 | user_group_link = h.link_to( |
|
120 | user_group_link = h.link_to( | |
127 | h.escape(user_group_name), |
|
121 | h.escape(user_group_name), | |
128 | url('edit_users_group', user_group_id=user_group.users_group_id)) |
|
122 | url('edit_users_group', user_group_id=user_group.users_group_id)) | |
129 | h.flash(h.literal(_('Created user group %(user_group_link)s') |
|
123 | h.flash(h.literal(_('Created user group %(user_group_link)s') | |
130 | % {'user_group_link': user_group_link}), |
|
124 | % {'user_group_link': user_group_link}), | |
131 | category='success') |
|
125 | category='success') | |
132 | Session().commit() |
|
126 | Session().commit() | |
133 | except formencode.Invalid as errors: |
|
127 | except formencode.Invalid as errors: | |
134 | return htmlfill.render( |
|
128 | return htmlfill.render( | |
135 | render('admin/user_groups/user_group_add.mako'), |
|
129 | render('admin/user_groups/user_group_add.mako'), | |
136 | defaults=errors.value, |
|
130 | defaults=errors.value, | |
137 | errors=errors.error_dict or {}, |
|
131 | errors=errors.error_dict or {}, | |
138 | prefix_error=False, |
|
132 | prefix_error=False, | |
139 | encoding="UTF-8", |
|
133 | encoding="UTF-8", | |
140 | force_defaults=False) |
|
134 | force_defaults=False) | |
141 | except Exception: |
|
135 | except Exception: | |
142 | log.exception("Exception creating user group") |
|
136 | log.exception("Exception creating user group") | |
143 | h.flash(_('Error occurred during creation of user group %s') \ |
|
137 | h.flash(_('Error occurred during creation of user group %s') \ | |
144 | % request.POST.get('users_group_name'), category='error') |
|
138 | % request.POST.get('users_group_name'), category='error') | |
145 |
|
139 | |||
146 | return redirect( |
|
140 | return redirect( | |
147 | url('edit_users_group', user_group_id=user_group.users_group_id)) |
|
141 | url('edit_users_group', user_group_id=user_group.users_group_id)) | |
148 |
|
142 | |||
149 | @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true') |
|
143 | @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true') | |
150 | def new(self): |
|
144 | def new(self): | |
151 | """GET /user_groups/new: Form to create a new item""" |
|
145 | """GET /user_groups/new: Form to create a new item""" | |
152 | # url('new_users_group') |
|
146 | # url('new_users_group') | |
153 | return render('admin/user_groups/user_group_add.mako') |
|
147 | return render('admin/user_groups/user_group_add.mako') | |
154 |
|
148 | |||
155 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') |
|
149 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') | |
156 | @auth.CSRFRequired() |
|
150 | @auth.CSRFRequired() | |
157 | def update(self, user_group_id): |
|
151 | def update(self, user_group_id): | |
158 |
|
152 | |||
159 | user_group_id = safe_int(user_group_id) |
|
153 | user_group_id = safe_int(user_group_id) | |
160 | c.user_group = UserGroup.get_or_404(user_group_id) |
|
154 | c.user_group = UserGroup.get_or_404(user_group_id) | |
161 | c.active = 'settings' |
|
155 | c.active = 'settings' | |
162 | self.__load_data(user_group_id) |
|
156 | self.__load_data(user_group_id) | |
163 |
|
157 | |||
164 | users_group_form = UserGroupForm( |
|
158 | users_group_form = UserGroupForm( | |
165 | edit=True, old_data=c.user_group.get_dict(), allow_disabled=True)() |
|
159 | edit=True, old_data=c.user_group.get_dict(), allow_disabled=True)() | |
166 |
|
160 | |||
167 | old_values = c.user_group.get_api_data() |
|
161 | old_values = c.user_group.get_api_data() | |
168 | try: |
|
162 | try: | |
169 | form_result = users_group_form.to_python(request.POST) |
|
163 | form_result = users_group_form.to_python(request.POST) | |
170 | pstruct = peppercorn.parse(request.POST.items()) |
|
164 | pstruct = peppercorn.parse(request.POST.items()) | |
171 | form_result['users_group_members'] = pstruct['user_group_members'] |
|
165 | form_result['users_group_members'] = pstruct['user_group_members'] | |
172 |
|
166 | |||
173 | user_group, added_members, removed_members = \ |
|
167 | user_group, added_members, removed_members = \ | |
174 | UserGroupModel().update(c.user_group, form_result) |
|
168 | UserGroupModel().update(c.user_group, form_result) | |
175 | updated_user_group = form_result['users_group_name'] |
|
169 | updated_user_group = form_result['users_group_name'] | |
176 |
|
170 | |||
177 | audit_logger.store_web( |
|
171 | audit_logger.store_web( | |
178 | 'user_group.edit', action_data={'old_data': old_values}, |
|
172 | 'user_group.edit', action_data={'old_data': old_values}, | |
179 | user=c.rhodecode_user) |
|
173 | user=c.rhodecode_user) | |
180 |
|
174 | |||
181 | # TODO(marcink): use added/removed to set user_group.edit.member.add |
|
175 | # TODO(marcink): use added/removed to set user_group.edit.member.add | |
182 |
|
176 | |||
183 | h.flash(_('Updated user group %s') % updated_user_group, |
|
177 | h.flash(_('Updated user group %s') % updated_user_group, | |
184 | category='success') |
|
178 | category='success') | |
185 | Session().commit() |
|
179 | Session().commit() | |
186 | except formencode.Invalid as errors: |
|
180 | except formencode.Invalid as errors: | |
187 | defaults = errors.value |
|
181 | defaults = errors.value | |
188 | e = errors.error_dict or {} |
|
182 | e = errors.error_dict or {} | |
189 |
|
183 | |||
190 | return htmlfill.render( |
|
184 | return htmlfill.render( | |
191 | render('admin/user_groups/user_group_edit.mako'), |
|
185 | render('admin/user_groups/user_group_edit.mako'), | |
192 | defaults=defaults, |
|
186 | defaults=defaults, | |
193 | errors=e, |
|
187 | errors=e, | |
194 | prefix_error=False, |
|
188 | prefix_error=False, | |
195 | encoding="UTF-8", |
|
189 | encoding="UTF-8", | |
196 | force_defaults=False) |
|
190 | force_defaults=False) | |
197 | except Exception: |
|
191 | except Exception: | |
198 | log.exception("Exception during update of user group") |
|
192 | log.exception("Exception during update of user group") | |
199 | h.flash(_('Error occurred during update of user group %s') |
|
193 | h.flash(_('Error occurred during update of user group %s') | |
200 | % request.POST.get('users_group_name'), category='error') |
|
194 | % request.POST.get('users_group_name'), category='error') | |
201 |
|
195 | |||
202 | return redirect(url('edit_users_group', user_group_id=user_group_id)) |
|
196 | return redirect(url('edit_users_group', user_group_id=user_group_id)) | |
203 |
|
197 | |||
204 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') |
|
198 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') | |
205 | @auth.CSRFRequired() |
|
199 | @auth.CSRFRequired() | |
206 | def delete(self, user_group_id): |
|
200 | def delete(self, user_group_id): | |
207 | user_group_id = safe_int(user_group_id) |
|
201 | user_group_id = safe_int(user_group_id) | |
208 | c.user_group = UserGroup.get_or_404(user_group_id) |
|
202 | c.user_group = UserGroup.get_or_404(user_group_id) | |
209 | force = str2bool(request.POST.get('force')) |
|
203 | force = str2bool(request.POST.get('force')) | |
210 |
|
204 | |||
211 | old_values = c.user_group.get_api_data() |
|
205 | old_values = c.user_group.get_api_data() | |
212 | try: |
|
206 | try: | |
213 | UserGroupModel().delete(c.user_group, force=force) |
|
207 | UserGroupModel().delete(c.user_group, force=force) | |
214 | audit_logger.store_web( |
|
208 | audit_logger.store_web( | |
215 | 'user.delete', action_data={'old_data': old_values}, |
|
209 | 'user.delete', action_data={'old_data': old_values}, | |
216 | user=c.rhodecode_user) |
|
210 | user=c.rhodecode_user) | |
217 | Session().commit() |
|
211 | Session().commit() | |
218 | h.flash(_('Successfully deleted user group'), category='success') |
|
212 | h.flash(_('Successfully deleted user group'), category='success') | |
219 | except UserGroupAssignedException as e: |
|
213 | except UserGroupAssignedException as e: | |
220 | h.flash(str(e), category='error') |
|
214 | h.flash(str(e), category='error') | |
221 | except Exception: |
|
215 | except Exception: | |
222 | log.exception("Exception during deletion of user group") |
|
216 | log.exception("Exception during deletion of user group") | |
223 | h.flash(_('An error occurred during deletion of user group'), |
|
217 | h.flash(_('An error occurred during deletion of user group'), | |
224 | category='error') |
|
218 | category='error') | |
225 | return redirect(url('users_groups')) |
|
219 | return redirect(url('users_groups')) | |
226 |
|
220 | |||
227 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') |
|
221 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') | |
228 | def edit(self, user_group_id): |
|
222 | def edit(self, user_group_id): | |
229 | """GET /user_groups/user_group_id/edit: Form to edit an existing item""" |
|
223 | """GET /user_groups/user_group_id/edit: Form to edit an existing item""" | |
230 | # url('edit_users_group', user_group_id=ID) |
|
224 | # url('edit_users_group', user_group_id=ID) | |
231 |
|
225 | |||
232 | user_group_id = safe_int(user_group_id) |
|
226 | user_group_id = safe_int(user_group_id) | |
233 | c.user_group = UserGroup.get_or_404(user_group_id) |
|
227 | c.user_group = UserGroup.get_or_404(user_group_id) | |
234 | c.active = 'settings' |
|
228 | c.active = 'settings' | |
235 | self.__load_data(user_group_id) |
|
229 | self.__load_data(user_group_id) | |
236 |
|
230 | |||
237 | defaults = self.__load_defaults(user_group_id) |
|
231 | defaults = self.__load_defaults(user_group_id) | |
238 |
|
232 | |||
239 | return htmlfill.render( |
|
233 | return htmlfill.render( | |
240 | render('admin/user_groups/user_group_edit.mako'), |
|
234 | render('admin/user_groups/user_group_edit.mako'), | |
241 | defaults=defaults, |
|
235 | defaults=defaults, | |
242 | encoding="UTF-8", |
|
236 | encoding="UTF-8", | |
243 | force_defaults=False |
|
237 | force_defaults=False | |
244 | ) |
|
238 | ) | |
245 |
|
239 | |||
246 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') |
|
240 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') | |
247 | def edit_perms(self, user_group_id): |
|
241 | def edit_perms(self, user_group_id): | |
248 | user_group_id = safe_int(user_group_id) |
|
242 | user_group_id = safe_int(user_group_id) | |
249 | c.user_group = UserGroup.get_or_404(user_group_id) |
|
243 | c.user_group = UserGroup.get_or_404(user_group_id) | |
250 | c.active = 'perms' |
|
244 | c.active = 'perms' | |
251 |
|
245 | |||
252 | defaults = {} |
|
246 | defaults = {} | |
253 | # fill user group users |
|
247 | # fill user group users | |
254 | for p in c.user_group.user_user_group_to_perm: |
|
248 | for p in c.user_group.user_user_group_to_perm: | |
255 | defaults.update({'u_perm_%s' % p.user.user_id: |
|
249 | defaults.update({'u_perm_%s' % p.user.user_id: | |
256 | p.permission.permission_name}) |
|
250 | p.permission.permission_name}) | |
257 |
|
251 | |||
258 | for p in c.user_group.user_group_user_group_to_perm: |
|
252 | for p in c.user_group.user_group_user_group_to_perm: | |
259 | defaults.update({'g_perm_%s' % p.user_group.users_group_id: |
|
253 | defaults.update({'g_perm_%s' % p.user_group.users_group_id: | |
260 | p.permission.permission_name}) |
|
254 | p.permission.permission_name}) | |
261 |
|
255 | |||
262 | return htmlfill.render( |
|
256 | return htmlfill.render( | |
263 | render('admin/user_groups/user_group_edit.mako'), |
|
257 | render('admin/user_groups/user_group_edit.mako'), | |
264 | defaults=defaults, |
|
258 | defaults=defaults, | |
265 | encoding="UTF-8", |
|
259 | encoding="UTF-8", | |
266 | force_defaults=False |
|
260 | force_defaults=False | |
267 | ) |
|
261 | ) | |
268 |
|
262 | |||
269 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') |
|
263 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') | |
270 | @auth.CSRFRequired() |
|
264 | @auth.CSRFRequired() | |
271 | def update_perms(self, user_group_id): |
|
265 | def update_perms(self, user_group_id): | |
272 | """ |
|
266 | """ | |
273 | grant permission for given usergroup |
|
267 | grant permission for given usergroup | |
274 |
|
268 | |||
275 | :param user_group_id: |
|
269 | :param user_group_id: | |
276 | """ |
|
270 | """ | |
277 | user_group_id = safe_int(user_group_id) |
|
271 | user_group_id = safe_int(user_group_id) | |
278 | c.user_group = UserGroup.get_or_404(user_group_id) |
|
272 | c.user_group = UserGroup.get_or_404(user_group_id) | |
279 | form = UserGroupPermsForm()().to_python(request.POST) |
|
273 | form = UserGroupPermsForm()().to_python(request.POST) | |
280 |
|
274 | |||
281 | if not c.rhodecode_user.is_admin: |
|
275 | if not c.rhodecode_user.is_admin: | |
282 | if self._revoke_perms_on_yourself(form): |
|
276 | if self._revoke_perms_on_yourself(form): | |
283 | msg = _('Cannot change permission for yourself as admin') |
|
277 | msg = _('Cannot change permission for yourself as admin') | |
284 | h.flash(msg, category='warning') |
|
278 | h.flash(msg, category='warning') | |
285 | return redirect(url('edit_user_group_perms', user_group_id=user_group_id)) |
|
279 | return redirect(url('edit_user_group_perms', user_group_id=user_group_id)) | |
286 |
|
280 | |||
287 | try: |
|
281 | try: | |
288 | UserGroupModel().update_permissions(user_group_id, |
|
282 | UserGroupModel().update_permissions(user_group_id, | |
289 | form['perm_additions'], form['perm_updates'], form['perm_deletions']) |
|
283 | form['perm_additions'], form['perm_updates'], form['perm_deletions']) | |
290 | except RepoGroupAssignmentError: |
|
284 | except RepoGroupAssignmentError: | |
291 | h.flash(_('Target group cannot be the same'), category='error') |
|
285 | h.flash(_('Target group cannot be the same'), category='error') | |
292 | return redirect(url('edit_user_group_perms', user_group_id=user_group_id)) |
|
286 | return redirect(url('edit_user_group_perms', user_group_id=user_group_id)) | |
293 |
|
287 | |||
294 | # TODO(marcink): implement global permissions |
|
288 | # TODO(marcink): implement global permissions | |
295 | # audit_log.store_web('user_group.edit.permissions') |
|
289 | # audit_log.store_web('user_group.edit.permissions') | |
296 | Session().commit() |
|
290 | Session().commit() | |
297 | h.flash(_('User Group permissions updated'), category='success') |
|
291 | h.flash(_('User Group permissions updated'), category='success') | |
298 | return redirect(url('edit_user_group_perms', user_group_id=user_group_id)) |
|
292 | return redirect(url('edit_user_group_perms', user_group_id=user_group_id)) | |
299 |
|
293 | |||
300 |
|
294 | |||
301 |
|
295 | |||
302 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') |
|
296 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') | |
303 | def edit_global_perms(self, user_group_id): |
|
297 | def edit_global_perms(self, user_group_id): | |
304 | user_group_id = safe_int(user_group_id) |
|
298 | user_group_id = safe_int(user_group_id) | |
305 | c.user_group = UserGroup.get_or_404(user_group_id) |
|
299 | c.user_group = UserGroup.get_or_404(user_group_id) | |
306 | c.active = 'global_perms' |
|
300 | c.active = 'global_perms' | |
307 |
|
301 | |||
308 | c.default_user = User.get_default_user() |
|
302 | c.default_user = User.get_default_user() | |
309 | defaults = c.user_group.get_dict() |
|
303 | defaults = c.user_group.get_dict() | |
310 | defaults.update(c.default_user.get_default_perms(suffix='_inherited')) |
|
304 | defaults.update(c.default_user.get_default_perms(suffix='_inherited')) | |
311 | defaults.update(c.user_group.get_default_perms()) |
|
305 | defaults.update(c.user_group.get_default_perms()) | |
312 |
|
306 | |||
313 | return htmlfill.render( |
|
307 | return htmlfill.render( | |
314 | render('admin/user_groups/user_group_edit.mako'), |
|
308 | render('admin/user_groups/user_group_edit.mako'), | |
315 | defaults=defaults, |
|
309 | defaults=defaults, | |
316 | encoding="UTF-8", |
|
310 | encoding="UTF-8", | |
317 | force_defaults=False |
|
311 | force_defaults=False | |
318 | ) |
|
312 | ) | |
319 |
|
313 | |||
320 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') |
|
314 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') | |
321 | @auth.CSRFRequired() |
|
315 | @auth.CSRFRequired() | |
322 | def update_global_perms(self, user_group_id): |
|
316 | def update_global_perms(self, user_group_id): | |
323 | user_group_id = safe_int(user_group_id) |
|
317 | user_group_id = safe_int(user_group_id) | |
324 | user_group = UserGroup.get_or_404(user_group_id) |
|
318 | user_group = UserGroup.get_or_404(user_group_id) | |
325 | c.active = 'global_perms' |
|
319 | c.active = 'global_perms' | |
326 |
|
320 | |||
327 | try: |
|
321 | try: | |
328 | # first stage that verifies the checkbox |
|
322 | # first stage that verifies the checkbox | |
329 | _form = UserIndividualPermissionsForm() |
|
323 | _form = UserIndividualPermissionsForm() | |
330 | form_result = _form.to_python(dict(request.POST)) |
|
324 | form_result = _form.to_python(dict(request.POST)) | |
331 | inherit_perms = form_result['inherit_default_permissions'] |
|
325 | inherit_perms = form_result['inherit_default_permissions'] | |
332 | user_group.inherit_default_permissions = inherit_perms |
|
326 | user_group.inherit_default_permissions = inherit_perms | |
333 | Session().add(user_group) |
|
327 | Session().add(user_group) | |
334 |
|
328 | |||
335 | if not inherit_perms: |
|
329 | if not inherit_perms: | |
336 | # only update the individual ones if we un check the flag |
|
330 | # only update the individual ones if we un check the flag | |
337 | _form = UserPermissionsForm( |
|
331 | _form = UserPermissionsForm( | |
338 | [x[0] for x in c.repo_create_choices], |
|
332 | [x[0] for x in c.repo_create_choices], | |
339 | [x[0] for x in c.repo_create_on_write_choices], |
|
333 | [x[0] for x in c.repo_create_on_write_choices], | |
340 | [x[0] for x in c.repo_group_create_choices], |
|
334 | [x[0] for x in c.repo_group_create_choices], | |
341 | [x[0] for x in c.user_group_create_choices], |
|
335 | [x[0] for x in c.user_group_create_choices], | |
342 | [x[0] for x in c.fork_choices], |
|
336 | [x[0] for x in c.fork_choices], | |
343 | [x[0] for x in c.inherit_default_permission_choices])() |
|
337 | [x[0] for x in c.inherit_default_permission_choices])() | |
344 |
|
338 | |||
345 | form_result = _form.to_python(dict(request.POST)) |
|
339 | form_result = _form.to_python(dict(request.POST)) | |
346 | form_result.update({'perm_user_group_id': user_group.users_group_id}) |
|
340 | form_result.update({'perm_user_group_id': user_group.users_group_id}) | |
347 |
|
341 | |||
348 | PermissionModel().update_user_group_permissions(form_result) |
|
342 | PermissionModel().update_user_group_permissions(form_result) | |
349 |
|
343 | |||
350 | Session().commit() |
|
344 | Session().commit() | |
351 | h.flash(_('User Group global permissions updated successfully'), |
|
345 | h.flash(_('User Group global permissions updated successfully'), | |
352 | category='success') |
|
346 | category='success') | |
353 |
|
347 | |||
354 | except formencode.Invalid as errors: |
|
348 | except formencode.Invalid as errors: | |
355 | defaults = errors.value |
|
349 | defaults = errors.value | |
356 | c.user_group = user_group |
|
350 | c.user_group = user_group | |
357 | return htmlfill.render( |
|
351 | return htmlfill.render( | |
358 | render('admin/user_groups/user_group_edit.mako'), |
|
352 | render('admin/user_groups/user_group_edit.mako'), | |
359 | defaults=defaults, |
|
353 | defaults=defaults, | |
360 | errors=errors.error_dict or {}, |
|
354 | errors=errors.error_dict or {}, | |
361 | prefix_error=False, |
|
355 | prefix_error=False, | |
362 | encoding="UTF-8", |
|
356 | encoding="UTF-8", | |
363 | force_defaults=False) |
|
357 | force_defaults=False) | |
364 | except Exception: |
|
358 | except Exception: | |
365 | log.exception("Exception during permissions saving") |
|
359 | log.exception("Exception during permissions saving") | |
366 | h.flash(_('An error occurred during permissions saving'), |
|
360 | h.flash(_('An error occurred during permissions saving'), | |
367 | category='error') |
|
361 | category='error') | |
368 |
|
362 | |||
369 | return redirect(url('edit_user_group_global_perms', user_group_id=user_group_id)) |
|
363 | return redirect(url('edit_user_group_global_perms', user_group_id=user_group_id)) | |
370 |
|
364 | |||
371 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') |
|
365 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') | |
372 | def edit_advanced(self, user_group_id): |
|
366 | def edit_advanced(self, user_group_id): | |
373 | user_group_id = safe_int(user_group_id) |
|
367 | user_group_id = safe_int(user_group_id) | |
374 | c.user_group = UserGroup.get_or_404(user_group_id) |
|
368 | c.user_group = UserGroup.get_or_404(user_group_id) | |
375 | c.active = 'advanced' |
|
369 | c.active = 'advanced' | |
376 | c.group_members_obj = sorted( |
|
370 | c.group_members_obj = sorted( | |
377 | (x.user for x in c.user_group.members), |
|
371 | (x.user for x in c.user_group.members), | |
378 | key=lambda u: u.username.lower()) |
|
372 | key=lambda u: u.username.lower()) | |
379 |
|
373 | |||
380 | c.group_to_repos = sorted( |
|
374 | c.group_to_repos = sorted( | |
381 | (x.repository for x in c.user_group.users_group_repo_to_perm), |
|
375 | (x.repository for x in c.user_group.users_group_repo_to_perm), | |
382 | key=lambda u: u.repo_name.lower()) |
|
376 | key=lambda u: u.repo_name.lower()) | |
383 |
|
377 | |||
384 | c.group_to_repo_groups = sorted( |
|
378 | c.group_to_repo_groups = sorted( | |
385 | (x.group for x in c.user_group.users_group_repo_group_to_perm), |
|
379 | (x.group for x in c.user_group.users_group_repo_group_to_perm), | |
386 | key=lambda u: u.group_name.lower()) |
|
380 | key=lambda u: u.group_name.lower()) | |
387 |
|
381 | |||
388 | return render('admin/user_groups/user_group_edit.mako') |
|
382 | return render('admin/user_groups/user_group_edit.mako') | |
389 |
|
383 | |||
390 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') |
|
384 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') | |
391 | def edit_advanced_set_synchronization(self, user_group_id): |
|
385 | def edit_advanced_set_synchronization(self, user_group_id): | |
392 | user_group_id = safe_int(user_group_id) |
|
386 | user_group_id = safe_int(user_group_id) | |
393 | user_group = UserGroup.get_or_404(user_group_id) |
|
387 | user_group = UserGroup.get_or_404(user_group_id) | |
394 |
|
388 | |||
395 | existing = user_group.group_data.get('extern_type') |
|
389 | existing = user_group.group_data.get('extern_type') | |
396 |
|
390 | |||
397 | if existing: |
|
391 | if existing: | |
398 | new_state = user_group.group_data |
|
392 | new_state = user_group.group_data | |
399 | new_state['extern_type'] = None |
|
393 | new_state['extern_type'] = None | |
400 | else: |
|
394 | else: | |
401 | new_state = user_group.group_data |
|
395 | new_state = user_group.group_data | |
402 | new_state['extern_type'] = 'manual' |
|
396 | new_state['extern_type'] = 'manual' | |
403 | new_state['extern_type_set_by'] = c.rhodecode_user.username |
|
397 | new_state['extern_type_set_by'] = c.rhodecode_user.username | |
404 |
|
398 | |||
405 | try: |
|
399 | try: | |
406 | user_group.group_data = new_state |
|
400 | user_group.group_data = new_state | |
407 | Session().add(user_group) |
|
401 | Session().add(user_group) | |
408 | Session().commit() |
|
402 | Session().commit() | |
409 |
|
403 | |||
410 | h.flash(_('User Group synchronization updated successfully'), |
|
404 | h.flash(_('User Group synchronization updated successfully'), | |
411 | category='success') |
|
405 | category='success') | |
412 | except Exception: |
|
406 | except Exception: | |
413 | log.exception("Exception during sync settings saving") |
|
407 | log.exception("Exception during sync settings saving") | |
414 | h.flash(_('An error occurred during synchronization update'), |
|
408 | h.flash(_('An error occurred during synchronization update'), | |
415 | category='error') |
|
409 | category='error') | |
416 |
|
410 | |||
417 | return redirect( |
|
411 | return redirect( | |
418 | url('edit_user_group_advanced', user_group_id=user_group_id)) |
|
412 | url('edit_user_group_advanced', user_group_id=user_group_id)) | |
419 |
|
413 |
@@ -1,712 +1,617 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2017 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | The base Controller API |
|
22 | The base Controller API | |
23 | Provides the BaseController class for subclassing. And usage in different |
|
23 | Provides the BaseController class for subclassing. And usage in different | |
24 | controllers |
|
24 | controllers | |
25 | """ |
|
25 | """ | |
26 |
|
26 | |||
27 | import logging |
|
27 | import logging | |
28 | import socket |
|
28 | import socket | |
29 |
|
29 | |||
30 | import markupsafe |
|
30 | import markupsafe | |
31 | import ipaddress |
|
31 | import ipaddress | |
32 | import pyramid.threadlocal |
|
32 | import pyramid.threadlocal | |
33 |
|
33 | |||
34 | from paste.auth.basic import AuthBasicAuthenticator |
|
34 | from paste.auth.basic import AuthBasicAuthenticator | |
35 | from paste.httpexceptions import HTTPUnauthorized, HTTPForbidden, get_exception |
|
35 | from paste.httpexceptions import HTTPUnauthorized, HTTPForbidden, get_exception | |
36 | from paste.httpheaders import WWW_AUTHENTICATE, AUTHORIZATION |
|
36 | from paste.httpheaders import WWW_AUTHENTICATE, AUTHORIZATION | |
37 | from pylons import tmpl_context as c, request, url |
|
|||
38 | from pylons.controllers import WSGIController |
|
|||
39 | from pylons.controllers.util import redirect |
|
|||
40 | from pylons.i18n import translation |
|
|||
41 | # marcink: don't remove this import |
|
|||
42 | from pylons.templating import render_mako, pylons_globals, literal, cached_template |
|
|||
43 | from pylons.i18n.translation import _ |
|
|||
44 | from webob.exc import HTTPFound |
|
|||
45 |
|
||||
46 |
|
37 | |||
47 | import rhodecode |
|
38 | import rhodecode | |
48 | from rhodecode.authentication.base import VCS_TYPE |
|
39 | from rhodecode.authentication.base import VCS_TYPE | |
49 | from rhodecode.lib import auth, utils2 |
|
40 | from rhodecode.lib import auth, utils2 | |
50 | from rhodecode.lib import helpers as h |
|
41 | from rhodecode.lib import helpers as h | |
51 | from rhodecode.lib.auth import AuthUser, CookieStoreWrapper |
|
42 | from rhodecode.lib.auth import AuthUser, CookieStoreWrapper | |
52 | from rhodecode.lib.exceptions import UserCreationError |
|
43 | from rhodecode.lib.exceptions import UserCreationError | |
53 | from rhodecode.lib.utils import ( |
|
44 | from rhodecode.lib.utils import ( | |
54 | get_repo_slug, set_rhodecode_config, password_changed, |
|
45 | get_repo_slug, set_rhodecode_config, password_changed, | |
55 | get_enabled_hook_classes) |
|
46 | get_enabled_hook_classes) | |
56 | from rhodecode.lib.utils2 import ( |
|
47 | from rhodecode.lib.utils2 import ( | |
57 | str2bool, safe_unicode, AttributeDict, safe_int, md5, aslist) |
|
48 | str2bool, safe_unicode, AttributeDict, safe_int, md5, aslist) | |
58 | from rhodecode.lib.vcs.exceptions import RepositoryRequirementError |
|
|||
59 | from rhodecode.model import meta |
|
49 | from rhodecode.model import meta | |
60 | from rhodecode.model.db import Repository, User, ChangesetComment |
|
50 | from rhodecode.model.db import Repository, User, ChangesetComment | |
61 | from rhodecode.model.notification import NotificationModel |
|
51 | from rhodecode.model.notification import NotificationModel | |
62 | from rhodecode.model.scm import ScmModel |
|
52 | from rhodecode.model.scm import ScmModel | |
63 | from rhodecode.model.settings import VcsSettingsModel, SettingsModel |
|
53 | from rhodecode.model.settings import VcsSettingsModel, SettingsModel | |
64 |
|
54 | |||
|
55 | # NOTE(marcink): remove after base controller is no longer required | |||
|
56 | from pylons.controllers import WSGIController | |||
|
57 | from pylons.i18n import translation | |||
65 |
|
58 | |||
66 | log = logging.getLogger(__name__) |
|
59 | log = logging.getLogger(__name__) | |
67 |
|
60 | |||
68 |
|
61 | |||
69 | # hack to make the migration to pyramid easier |
|
62 | # hack to make the migration to pyramid easier | |
70 | def render(template_name, extra_vars=None, cache_key=None, |
|
63 | def render(template_name, extra_vars=None, cache_key=None, | |
71 | cache_type=None, cache_expire=None): |
|
64 | cache_type=None, cache_expire=None): | |
72 | """Render a template with Mako |
|
65 | """Render a template with Mako | |
73 |
|
66 | |||
74 | Accepts the cache options ``cache_key``, ``cache_type``, and |
|
67 | Accepts the cache options ``cache_key``, ``cache_type``, and | |
75 | ``cache_expire``. |
|
68 | ``cache_expire``. | |
76 |
|
69 | |||
77 | """ |
|
70 | """ | |
|
71 | from pylons.templating import literal | |||
|
72 | from pylons.templating import cached_template, pylons_globals | |||
|
73 | ||||
78 | # Create a render callable for the cache function |
|
74 | # Create a render callable for the cache function | |
79 | def render_template(): |
|
75 | def render_template(): | |
80 | # Pull in extra vars if needed |
|
76 | # Pull in extra vars if needed | |
81 | globs = extra_vars or {} |
|
77 | globs = extra_vars or {} | |
82 |
|
78 | |||
83 | # Second, get the globals |
|
79 | # Second, get the globals | |
84 | globs.update(pylons_globals()) |
|
80 | globs.update(pylons_globals()) | |
85 |
|
81 | |||
86 | globs['_ungettext'] = globs['ungettext'] |
|
82 | globs['_ungettext'] = globs['ungettext'] | |
87 | # Grab a template reference |
|
83 | # Grab a template reference | |
88 | template = globs['app_globals'].mako_lookup.get_template(template_name) |
|
84 | template = globs['app_globals'].mako_lookup.get_template(template_name) | |
89 |
|
85 | |||
90 | return literal(template.render_unicode(**globs)) |
|
86 | return literal(template.render_unicode(**globs)) | |
91 |
|
87 | |||
92 | return cached_template(template_name, render_template, cache_key=cache_key, |
|
88 | return cached_template(template_name, render_template, cache_key=cache_key, | |
93 | cache_type=cache_type, cache_expire=cache_expire) |
|
89 | cache_type=cache_type, cache_expire=cache_expire) | |
94 |
|
90 | |||
95 | def _filter_proxy(ip): |
|
91 | def _filter_proxy(ip): | |
96 | """ |
|
92 | """ | |
97 | Passed in IP addresses in HEADERS can be in a special format of multiple |
|
93 | Passed in IP addresses in HEADERS can be in a special format of multiple | |
98 | ips. Those comma separated IPs are passed from various proxies in the |
|
94 | ips. Those comma separated IPs are passed from various proxies in the | |
99 | chain of request processing. The left-most being the original client. |
|
95 | chain of request processing. The left-most being the original client. | |
100 | We only care about the first IP which came from the org. client. |
|
96 | We only care about the first IP which came from the org. client. | |
101 |
|
97 | |||
102 | :param ip: ip string from headers |
|
98 | :param ip: ip string from headers | |
103 | """ |
|
99 | """ | |
104 | if ',' in ip: |
|
100 | if ',' in ip: | |
105 | _ips = ip.split(',') |
|
101 | _ips = ip.split(',') | |
106 | _first_ip = _ips[0].strip() |
|
102 | _first_ip = _ips[0].strip() | |
107 | log.debug('Got multiple IPs %s, using %s', ','.join(_ips), _first_ip) |
|
103 | log.debug('Got multiple IPs %s, using %s', ','.join(_ips), _first_ip) | |
108 | return _first_ip |
|
104 | return _first_ip | |
109 | return ip |
|
105 | return ip | |
110 |
|
106 | |||
111 |
|
107 | |||
112 | def _filter_port(ip): |
|
108 | def _filter_port(ip): | |
113 | """ |
|
109 | """ | |
114 | Removes a port from ip, there are 4 main cases to handle here. |
|
110 | Removes a port from ip, there are 4 main cases to handle here. | |
115 | - ipv4 eg. 127.0.0.1 |
|
111 | - ipv4 eg. 127.0.0.1 | |
116 | - ipv6 eg. ::1 |
|
112 | - ipv6 eg. ::1 | |
117 | - ipv4+port eg. 127.0.0.1:8080 |
|
113 | - ipv4+port eg. 127.0.0.1:8080 | |
118 | - ipv6+port eg. [::1]:8080 |
|
114 | - ipv6+port eg. [::1]:8080 | |
119 |
|
115 | |||
120 | :param ip: |
|
116 | :param ip: | |
121 | """ |
|
117 | """ | |
122 | def is_ipv6(ip_addr): |
|
118 | def is_ipv6(ip_addr): | |
123 | if hasattr(socket, 'inet_pton'): |
|
119 | if hasattr(socket, 'inet_pton'): | |
124 | try: |
|
120 | try: | |
125 | socket.inet_pton(socket.AF_INET6, ip_addr) |
|
121 | socket.inet_pton(socket.AF_INET6, ip_addr) | |
126 | except socket.error: |
|
122 | except socket.error: | |
127 | return False |
|
123 | return False | |
128 | else: |
|
124 | else: | |
129 | # fallback to ipaddress |
|
125 | # fallback to ipaddress | |
130 | try: |
|
126 | try: | |
131 | ipaddress.IPv6Address(safe_unicode(ip_addr)) |
|
127 | ipaddress.IPv6Address(safe_unicode(ip_addr)) | |
132 | except Exception: |
|
128 | except Exception: | |
133 | return False |
|
129 | return False | |
134 | return True |
|
130 | return True | |
135 |
|
131 | |||
136 | if ':' not in ip: # must be ipv4 pure ip |
|
132 | if ':' not in ip: # must be ipv4 pure ip | |
137 | return ip |
|
133 | return ip | |
138 |
|
134 | |||
139 | if '[' in ip and ']' in ip: # ipv6 with port |
|
135 | if '[' in ip and ']' in ip: # ipv6 with port | |
140 | return ip.split(']')[0][1:].lower() |
|
136 | return ip.split(']')[0][1:].lower() | |
141 |
|
137 | |||
142 | # must be ipv6 or ipv4 with port |
|
138 | # must be ipv6 or ipv4 with port | |
143 | if is_ipv6(ip): |
|
139 | if is_ipv6(ip): | |
144 | return ip |
|
140 | return ip | |
145 | else: |
|
141 | else: | |
146 | ip, _port = ip.split(':')[:2] # means ipv4+port |
|
142 | ip, _port = ip.split(':')[:2] # means ipv4+port | |
147 | return ip |
|
143 | return ip | |
148 |
|
144 | |||
149 |
|
145 | |||
150 | def get_ip_addr(environ): |
|
146 | def get_ip_addr(environ): | |
151 | proxy_key = 'HTTP_X_REAL_IP' |
|
147 | proxy_key = 'HTTP_X_REAL_IP' | |
152 | proxy_key2 = 'HTTP_X_FORWARDED_FOR' |
|
148 | proxy_key2 = 'HTTP_X_FORWARDED_FOR' | |
153 | def_key = 'REMOTE_ADDR' |
|
149 | def_key = 'REMOTE_ADDR' | |
154 | _filters = lambda x: _filter_port(_filter_proxy(x)) |
|
150 | _filters = lambda x: _filter_port(_filter_proxy(x)) | |
155 |
|
151 | |||
156 | ip = environ.get(proxy_key) |
|
152 | ip = environ.get(proxy_key) | |
157 | if ip: |
|
153 | if ip: | |
158 | return _filters(ip) |
|
154 | return _filters(ip) | |
159 |
|
155 | |||
160 | ip = environ.get(proxy_key2) |
|
156 | ip = environ.get(proxy_key2) | |
161 | if ip: |
|
157 | if ip: | |
162 | return _filters(ip) |
|
158 | return _filters(ip) | |
163 |
|
159 | |||
164 | ip = environ.get(def_key, '0.0.0.0') |
|
160 | ip = environ.get(def_key, '0.0.0.0') | |
165 | return _filters(ip) |
|
161 | return _filters(ip) | |
166 |
|
162 | |||
167 |
|
163 | |||
168 | def get_server_ip_addr(environ, log_errors=True): |
|
164 | def get_server_ip_addr(environ, log_errors=True): | |
169 | hostname = environ.get('SERVER_NAME') |
|
165 | hostname = environ.get('SERVER_NAME') | |
170 | try: |
|
166 | try: | |
171 | return socket.gethostbyname(hostname) |
|
167 | return socket.gethostbyname(hostname) | |
172 | except Exception as e: |
|
168 | except Exception as e: | |
173 | if log_errors: |
|
169 | if log_errors: | |
174 | # in some cases this lookup is not possible, and we don't want to |
|
170 | # in some cases this lookup is not possible, and we don't want to | |
175 | # make it an exception in logs |
|
171 | # make it an exception in logs | |
176 | log.exception('Could not retrieve server ip address: %s', e) |
|
172 | log.exception('Could not retrieve server ip address: %s', e) | |
177 | return hostname |
|
173 | return hostname | |
178 |
|
174 | |||
179 |
|
175 | |||
180 | def get_server_port(environ): |
|
176 | def get_server_port(environ): | |
181 | return environ.get('SERVER_PORT') |
|
177 | return environ.get('SERVER_PORT') | |
182 |
|
178 | |||
183 |
|
179 | |||
184 | def get_access_path(environ): |
|
180 | def get_access_path(environ): | |
185 | path = environ.get('PATH_INFO') |
|
181 | path = environ.get('PATH_INFO') | |
186 | org_req = environ.get('pylons.original_request') |
|
182 | org_req = environ.get('pylons.original_request') | |
187 | if org_req: |
|
183 | if org_req: | |
188 | path = org_req.environ.get('PATH_INFO') |
|
184 | path = org_req.environ.get('PATH_INFO') | |
189 | return path |
|
185 | return path | |
190 |
|
186 | |||
191 |
|
187 | |||
192 | def get_user_agent(environ): |
|
188 | def get_user_agent(environ): | |
193 | return environ.get('HTTP_USER_AGENT') |
|
189 | return environ.get('HTTP_USER_AGENT') | |
194 |
|
190 | |||
195 |
|
191 | |||
196 | def vcs_operation_context( |
|
192 | def vcs_operation_context( | |
197 | environ, repo_name, username, action, scm, check_locking=True, |
|
193 | environ, repo_name, username, action, scm, check_locking=True, | |
198 | is_shadow_repo=False): |
|
194 | is_shadow_repo=False): | |
199 | """ |
|
195 | """ | |
200 | Generate the context for a vcs operation, e.g. push or pull. |
|
196 | Generate the context for a vcs operation, e.g. push or pull. | |
201 |
|
197 | |||
202 | This context is passed over the layers so that hooks triggered by the |
|
198 | This context is passed over the layers so that hooks triggered by the | |
203 | vcs operation know details like the user, the user's IP address etc. |
|
199 | vcs operation know details like the user, the user's IP address etc. | |
204 |
|
200 | |||
205 | :param check_locking: Allows to switch of the computation of the locking |
|
201 | :param check_locking: Allows to switch of the computation of the locking | |
206 | data. This serves mainly the need of the simplevcs middleware to be |
|
202 | data. This serves mainly the need of the simplevcs middleware to be | |
207 | able to disable this for certain operations. |
|
203 | able to disable this for certain operations. | |
208 |
|
204 | |||
209 | """ |
|
205 | """ | |
210 | # Tri-state value: False: unlock, None: nothing, True: lock |
|
206 | # Tri-state value: False: unlock, None: nothing, True: lock | |
211 | make_lock = None |
|
207 | make_lock = None | |
212 | locked_by = [None, None, None] |
|
208 | locked_by = [None, None, None] | |
213 | is_anonymous = username == User.DEFAULT_USER |
|
209 | is_anonymous = username == User.DEFAULT_USER | |
214 | if not is_anonymous and check_locking: |
|
210 | if not is_anonymous and check_locking: | |
215 | log.debug('Checking locking on repository "%s"', repo_name) |
|
211 | log.debug('Checking locking on repository "%s"', repo_name) | |
216 | user = User.get_by_username(username) |
|
212 | user = User.get_by_username(username) | |
217 | repo = Repository.get_by_repo_name(repo_name) |
|
213 | repo = Repository.get_by_repo_name(repo_name) | |
218 | make_lock, __, locked_by = repo.get_locking_state( |
|
214 | make_lock, __, locked_by = repo.get_locking_state( | |
219 | action, user.user_id) |
|
215 | action, user.user_id) | |
220 |
|
216 | |||
221 | settings_model = VcsSettingsModel(repo=repo_name) |
|
217 | settings_model = VcsSettingsModel(repo=repo_name) | |
222 | ui_settings = settings_model.get_ui_settings() |
|
218 | ui_settings = settings_model.get_ui_settings() | |
223 |
|
219 | |||
224 | extras = { |
|
220 | extras = { | |
225 | 'ip': get_ip_addr(environ), |
|
221 | 'ip': get_ip_addr(environ), | |
226 | 'username': username, |
|
222 | 'username': username, | |
227 | 'action': action, |
|
223 | 'action': action, | |
228 | 'repository': repo_name, |
|
224 | 'repository': repo_name, | |
229 | 'scm': scm, |
|
225 | 'scm': scm, | |
230 | 'config': rhodecode.CONFIG['__file__'], |
|
226 | 'config': rhodecode.CONFIG['__file__'], | |
231 | 'make_lock': make_lock, |
|
227 | 'make_lock': make_lock, | |
232 | 'locked_by': locked_by, |
|
228 | 'locked_by': locked_by, | |
233 | 'server_url': utils2.get_server_url(environ), |
|
229 | 'server_url': utils2.get_server_url(environ), | |
234 | 'user_agent': get_user_agent(environ), |
|
230 | 'user_agent': get_user_agent(environ), | |
235 | 'hooks': get_enabled_hook_classes(ui_settings), |
|
231 | 'hooks': get_enabled_hook_classes(ui_settings), | |
236 | 'is_shadow_repo': is_shadow_repo, |
|
232 | 'is_shadow_repo': is_shadow_repo, | |
237 | } |
|
233 | } | |
238 | return extras |
|
234 | return extras | |
239 |
|
235 | |||
240 |
|
236 | |||
241 | class BasicAuth(AuthBasicAuthenticator): |
|
237 | class BasicAuth(AuthBasicAuthenticator): | |
242 |
|
238 | |||
243 | def __init__(self, realm, authfunc, registry, auth_http_code=None, |
|
239 | def __init__(self, realm, authfunc, registry, auth_http_code=None, | |
244 | initial_call_detection=False, acl_repo_name=None): |
|
240 | initial_call_detection=False, acl_repo_name=None): | |
245 | self.realm = realm |
|
241 | self.realm = realm | |
246 | self.initial_call = initial_call_detection |
|
242 | self.initial_call = initial_call_detection | |
247 | self.authfunc = authfunc |
|
243 | self.authfunc = authfunc | |
248 | self.registry = registry |
|
244 | self.registry = registry | |
249 | self.acl_repo_name = acl_repo_name |
|
245 | self.acl_repo_name = acl_repo_name | |
250 | self._rc_auth_http_code = auth_http_code |
|
246 | self._rc_auth_http_code = auth_http_code | |
251 |
|
247 | |||
252 | def _get_response_from_code(self, http_code): |
|
248 | def _get_response_from_code(self, http_code): | |
253 | try: |
|
249 | try: | |
254 | return get_exception(safe_int(http_code)) |
|
250 | return get_exception(safe_int(http_code)) | |
255 | except Exception: |
|
251 | except Exception: | |
256 | log.exception('Failed to fetch response for code %s' % http_code) |
|
252 | log.exception('Failed to fetch response for code %s' % http_code) | |
257 | return HTTPForbidden |
|
253 | return HTTPForbidden | |
258 |
|
254 | |||
259 | def build_authentication(self): |
|
255 | def build_authentication(self): | |
260 | head = WWW_AUTHENTICATE.tuples('Basic realm="%s"' % self.realm) |
|
256 | head = WWW_AUTHENTICATE.tuples('Basic realm="%s"' % self.realm) | |
261 | if self._rc_auth_http_code and not self.initial_call: |
|
257 | if self._rc_auth_http_code and not self.initial_call: | |
262 | # return alternative HTTP code if alternative http return code |
|
258 | # return alternative HTTP code if alternative http return code | |
263 | # is specified in RhodeCode config, but ONLY if it's not the |
|
259 | # is specified in RhodeCode config, but ONLY if it's not the | |
264 | # FIRST call |
|
260 | # FIRST call | |
265 | custom_response_klass = self._get_response_from_code( |
|
261 | custom_response_klass = self._get_response_from_code( | |
266 | self._rc_auth_http_code) |
|
262 | self._rc_auth_http_code) | |
267 | return custom_response_klass(headers=head) |
|
263 | return custom_response_klass(headers=head) | |
268 | return HTTPUnauthorized(headers=head) |
|
264 | return HTTPUnauthorized(headers=head) | |
269 |
|
265 | |||
270 | def authenticate(self, environ): |
|
266 | def authenticate(self, environ): | |
271 | authorization = AUTHORIZATION(environ) |
|
267 | authorization = AUTHORIZATION(environ) | |
272 | if not authorization: |
|
268 | if not authorization: | |
273 | return self.build_authentication() |
|
269 | return self.build_authentication() | |
274 | (authmeth, auth) = authorization.split(' ', 1) |
|
270 | (authmeth, auth) = authorization.split(' ', 1) | |
275 | if 'basic' != authmeth.lower(): |
|
271 | if 'basic' != authmeth.lower(): | |
276 | return self.build_authentication() |
|
272 | return self.build_authentication() | |
277 | auth = auth.strip().decode('base64') |
|
273 | auth = auth.strip().decode('base64') | |
278 | _parts = auth.split(':', 1) |
|
274 | _parts = auth.split(':', 1) | |
279 | if len(_parts) == 2: |
|
275 | if len(_parts) == 2: | |
280 | username, password = _parts |
|
276 | username, password = _parts | |
281 | if self.authfunc( |
|
277 | if self.authfunc( | |
282 | username, password, environ, VCS_TYPE, |
|
278 | username, password, environ, VCS_TYPE, | |
283 | registry=self.registry, acl_repo_name=self.acl_repo_name): |
|
279 | registry=self.registry, acl_repo_name=self.acl_repo_name): | |
284 | return username |
|
280 | return username | |
285 | if username and password: |
|
281 | if username and password: | |
286 | # we mark that we actually executed authentication once, at |
|
282 | # we mark that we actually executed authentication once, at | |
287 | # that point we can use the alternative auth code |
|
283 | # that point we can use the alternative auth code | |
288 | self.initial_call = False |
|
284 | self.initial_call = False | |
289 |
|
285 | |||
290 | return self.build_authentication() |
|
286 | return self.build_authentication() | |
291 |
|
287 | |||
292 | __call__ = authenticate |
|
288 | __call__ = authenticate | |
293 |
|
289 | |||
294 |
|
290 | |||
295 | def calculate_version_hash(config): |
|
291 | def calculate_version_hash(config): | |
296 | return md5( |
|
292 | return md5( | |
297 | config.get('beaker.session.secret', '') + |
|
293 | config.get('beaker.session.secret', '') + | |
298 | rhodecode.__version__)[:8] |
|
294 | rhodecode.__version__)[:8] | |
299 |
|
295 | |||
300 |
|
296 | |||
301 | def get_current_lang(request): |
|
297 | def get_current_lang(request): | |
302 | # NOTE(marcink): remove after pyramid move |
|
298 | # NOTE(marcink): remove after pyramid move | |
303 | try: |
|
299 | try: | |
304 | return translation.get_lang()[0] |
|
300 | return translation.get_lang()[0] | |
305 | except: |
|
301 | except: | |
306 | pass |
|
302 | pass | |
307 |
|
303 | |||
308 | return getattr(request, '_LOCALE_', request.locale_name) |
|
304 | return getattr(request, '_LOCALE_', request.locale_name) | |
309 |
|
305 | |||
310 |
|
306 | |||
311 | def attach_context_attributes(context, request, user_id): |
|
307 | def attach_context_attributes(context, request, user_id): | |
312 | """ |
|
308 | """ | |
313 | Attach variables into template context called `c`, please note that |
|
309 | Attach variables into template context called `c`, please note that | |
314 | request could be pylons or pyramid request in here. |
|
310 | request could be pylons or pyramid request in here. | |
315 | """ |
|
311 | """ | |
316 | # NOTE(marcink): remove check after pyramid migration |
|
312 | # NOTE(marcink): remove check after pyramid migration | |
317 | if hasattr(request, 'registry'): |
|
313 | if hasattr(request, 'registry'): | |
318 | config = request.registry.settings |
|
314 | config = request.registry.settings | |
319 | else: |
|
315 | else: | |
320 | from pylons import config |
|
316 | from pylons import config | |
321 |
|
317 | |||
322 | rc_config = SettingsModel().get_all_settings(cache=True) |
|
318 | rc_config = SettingsModel().get_all_settings(cache=True) | |
323 |
|
319 | |||
324 | context.rhodecode_version = rhodecode.__version__ |
|
320 | context.rhodecode_version = rhodecode.__version__ | |
325 | context.rhodecode_edition = config.get('rhodecode.edition') |
|
321 | context.rhodecode_edition = config.get('rhodecode.edition') | |
326 | # unique secret + version does not leak the version but keep consistency |
|
322 | # unique secret + version does not leak the version but keep consistency | |
327 | context.rhodecode_version_hash = calculate_version_hash(config) |
|
323 | context.rhodecode_version_hash = calculate_version_hash(config) | |
328 |
|
324 | |||
329 | # Default language set for the incoming request |
|
325 | # Default language set for the incoming request | |
330 | context.language = get_current_lang(request) |
|
326 | context.language = get_current_lang(request) | |
331 |
|
327 | |||
332 | # Visual options |
|
328 | # Visual options | |
333 | context.visual = AttributeDict({}) |
|
329 | context.visual = AttributeDict({}) | |
334 |
|
330 | |||
335 | # DB stored Visual Items |
|
331 | # DB stored Visual Items | |
336 | context.visual.show_public_icon = str2bool( |
|
332 | context.visual.show_public_icon = str2bool( | |
337 | rc_config.get('rhodecode_show_public_icon')) |
|
333 | rc_config.get('rhodecode_show_public_icon')) | |
338 | context.visual.show_private_icon = str2bool( |
|
334 | context.visual.show_private_icon = str2bool( | |
339 | rc_config.get('rhodecode_show_private_icon')) |
|
335 | rc_config.get('rhodecode_show_private_icon')) | |
340 | context.visual.stylify_metatags = str2bool( |
|
336 | context.visual.stylify_metatags = str2bool( | |
341 | rc_config.get('rhodecode_stylify_metatags')) |
|
337 | rc_config.get('rhodecode_stylify_metatags')) | |
342 | context.visual.dashboard_items = safe_int( |
|
338 | context.visual.dashboard_items = safe_int( | |
343 | rc_config.get('rhodecode_dashboard_items', 100)) |
|
339 | rc_config.get('rhodecode_dashboard_items', 100)) | |
344 | context.visual.admin_grid_items = safe_int( |
|
340 | context.visual.admin_grid_items = safe_int( | |
345 | rc_config.get('rhodecode_admin_grid_items', 100)) |
|
341 | rc_config.get('rhodecode_admin_grid_items', 100)) | |
346 | context.visual.repository_fields = str2bool( |
|
342 | context.visual.repository_fields = str2bool( | |
347 | rc_config.get('rhodecode_repository_fields')) |
|
343 | rc_config.get('rhodecode_repository_fields')) | |
348 | context.visual.show_version = str2bool( |
|
344 | context.visual.show_version = str2bool( | |
349 | rc_config.get('rhodecode_show_version')) |
|
345 | rc_config.get('rhodecode_show_version')) | |
350 | context.visual.use_gravatar = str2bool( |
|
346 | context.visual.use_gravatar = str2bool( | |
351 | rc_config.get('rhodecode_use_gravatar')) |
|
347 | rc_config.get('rhodecode_use_gravatar')) | |
352 | context.visual.gravatar_url = rc_config.get('rhodecode_gravatar_url') |
|
348 | context.visual.gravatar_url = rc_config.get('rhodecode_gravatar_url') | |
353 | context.visual.default_renderer = rc_config.get( |
|
349 | context.visual.default_renderer = rc_config.get( | |
354 | 'rhodecode_markup_renderer', 'rst') |
|
350 | 'rhodecode_markup_renderer', 'rst') | |
355 | context.visual.comment_types = ChangesetComment.COMMENT_TYPES |
|
351 | context.visual.comment_types = ChangesetComment.COMMENT_TYPES | |
356 | context.visual.rhodecode_support_url = \ |
|
352 | context.visual.rhodecode_support_url = \ | |
357 | rc_config.get('rhodecode_support_url') or h.route_url('rhodecode_support') |
|
353 | rc_config.get('rhodecode_support_url') or h.route_url('rhodecode_support') | |
358 |
|
354 | |||
359 | context.visual.affected_files_cut_off = 60 |
|
355 | context.visual.affected_files_cut_off = 60 | |
360 |
|
356 | |||
361 | context.pre_code = rc_config.get('rhodecode_pre_code') |
|
357 | context.pre_code = rc_config.get('rhodecode_pre_code') | |
362 | context.post_code = rc_config.get('rhodecode_post_code') |
|
358 | context.post_code = rc_config.get('rhodecode_post_code') | |
363 | context.rhodecode_name = rc_config.get('rhodecode_title') |
|
359 | context.rhodecode_name = rc_config.get('rhodecode_title') | |
364 | context.default_encodings = aslist(config.get('default_encoding'), sep=',') |
|
360 | context.default_encodings = aslist(config.get('default_encoding'), sep=',') | |
365 | # if we have specified default_encoding in the request, it has more |
|
361 | # if we have specified default_encoding in the request, it has more | |
366 | # priority |
|
362 | # priority | |
367 | if request.GET.get('default_encoding'): |
|
363 | if request.GET.get('default_encoding'): | |
368 | context.default_encodings.insert(0, request.GET.get('default_encoding')) |
|
364 | context.default_encodings.insert(0, request.GET.get('default_encoding')) | |
369 | context.clone_uri_tmpl = rc_config.get('rhodecode_clone_uri_tmpl') |
|
365 | context.clone_uri_tmpl = rc_config.get('rhodecode_clone_uri_tmpl') | |
370 |
|
366 | |||
371 | # INI stored |
|
367 | # INI stored | |
372 | context.labs_active = str2bool( |
|
368 | context.labs_active = str2bool( | |
373 | config.get('labs_settings_active', 'false')) |
|
369 | config.get('labs_settings_active', 'false')) | |
374 | context.visual.allow_repo_location_change = str2bool( |
|
370 | context.visual.allow_repo_location_change = str2bool( | |
375 | config.get('allow_repo_location_change', True)) |
|
371 | config.get('allow_repo_location_change', True)) | |
376 | context.visual.allow_custom_hooks_settings = str2bool( |
|
372 | context.visual.allow_custom_hooks_settings = str2bool( | |
377 | config.get('allow_custom_hooks_settings', True)) |
|
373 | config.get('allow_custom_hooks_settings', True)) | |
378 | context.debug_style = str2bool(config.get('debug_style', False)) |
|
374 | context.debug_style = str2bool(config.get('debug_style', False)) | |
379 |
|
375 | |||
380 | context.rhodecode_instanceid = config.get('instance_id') |
|
376 | context.rhodecode_instanceid = config.get('instance_id') | |
381 |
|
377 | |||
382 | context.visual.cut_off_limit_diff = safe_int( |
|
378 | context.visual.cut_off_limit_diff = safe_int( | |
383 | config.get('cut_off_limit_diff')) |
|
379 | config.get('cut_off_limit_diff')) | |
384 | context.visual.cut_off_limit_file = safe_int( |
|
380 | context.visual.cut_off_limit_file = safe_int( | |
385 | config.get('cut_off_limit_file')) |
|
381 | config.get('cut_off_limit_file')) | |
386 |
|
382 | |||
387 | # AppEnlight |
|
383 | # AppEnlight | |
388 | context.appenlight_enabled = str2bool(config.get('appenlight', 'false')) |
|
384 | context.appenlight_enabled = str2bool(config.get('appenlight', 'false')) | |
389 | context.appenlight_api_public_key = config.get( |
|
385 | context.appenlight_api_public_key = config.get( | |
390 | 'appenlight.api_public_key', '') |
|
386 | 'appenlight.api_public_key', '') | |
391 | context.appenlight_server_url = config.get('appenlight.server_url', '') |
|
387 | context.appenlight_server_url = config.get('appenlight.server_url', '') | |
392 |
|
388 | |||
393 | # JS template context |
|
389 | # JS template context | |
394 | context.template_context = { |
|
390 | context.template_context = { | |
395 | 'repo_name': None, |
|
391 | 'repo_name': None, | |
396 | 'repo_type': None, |
|
392 | 'repo_type': None, | |
397 | 'repo_landing_commit': None, |
|
393 | 'repo_landing_commit': None, | |
398 | 'rhodecode_user': { |
|
394 | 'rhodecode_user': { | |
399 | 'username': None, |
|
395 | 'username': None, | |
400 | 'email': None, |
|
396 | 'email': None, | |
401 | 'notification_status': False |
|
397 | 'notification_status': False | |
402 | }, |
|
398 | }, | |
403 | 'visual': { |
|
399 | 'visual': { | |
404 | 'default_renderer': None |
|
400 | 'default_renderer': None | |
405 | }, |
|
401 | }, | |
406 | 'commit_data': { |
|
402 | 'commit_data': { | |
407 | 'commit_id': None |
|
403 | 'commit_id': None | |
408 | }, |
|
404 | }, | |
409 | 'pull_request_data': {'pull_request_id': None}, |
|
405 | 'pull_request_data': {'pull_request_id': None}, | |
410 | 'timeago': { |
|
406 | 'timeago': { | |
411 | 'refresh_time': 120 * 1000, |
|
407 | 'refresh_time': 120 * 1000, | |
412 | 'cutoff_limit': 1000 * 60 * 60 * 24 * 7 |
|
408 | 'cutoff_limit': 1000 * 60 * 60 * 24 * 7 | |
413 | }, |
|
409 | }, | |
414 | 'pylons_dispatch': { |
|
|||
415 | # 'controller': request.environ['pylons.routes_dict']['controller'], |
|
|||
416 | # 'action': request.environ['pylons.routes_dict']['action'], |
|
|||
417 | }, |
|
|||
418 | 'pyramid_dispatch': { |
|
410 | 'pyramid_dispatch': { | |
419 |
|
411 | |||
420 | }, |
|
412 | }, | |
421 | 'extra': {'plugins': {}} |
|
413 | 'extra': {'plugins': {}} | |
422 | } |
|
414 | } | |
423 | # END CONFIG VARS |
|
415 | # END CONFIG VARS | |
424 |
|
416 | |||
425 | # TODO: This dosn't work when called from pylons compatibility tween. |
|
417 | # TODO: This dosn't work when called from pylons compatibility tween. | |
426 | # Fix this and remove it from base controller. |
|
418 | # Fix this and remove it from base controller. | |
427 | # context.repo_name = get_repo_slug(request) # can be empty |
|
419 | # context.repo_name = get_repo_slug(request) # can be empty | |
428 |
|
420 | |||
429 | diffmode = 'sideside' |
|
421 | diffmode = 'sideside' | |
430 | if request.GET.get('diffmode'): |
|
422 | if request.GET.get('diffmode'): | |
431 | if request.GET['diffmode'] == 'unified': |
|
423 | if request.GET['diffmode'] == 'unified': | |
432 | diffmode = 'unified' |
|
424 | diffmode = 'unified' | |
433 | elif request.session.get('diffmode'): |
|
425 | elif request.session.get('diffmode'): | |
434 | diffmode = request.session['diffmode'] |
|
426 | diffmode = request.session['diffmode'] | |
435 |
|
427 | |||
436 | context.diffmode = diffmode |
|
428 | context.diffmode = diffmode | |
437 |
|
429 | |||
438 | if request.session.get('diffmode') != diffmode: |
|
430 | if request.session.get('diffmode') != diffmode: | |
439 | request.session['diffmode'] = diffmode |
|
431 | request.session['diffmode'] = diffmode | |
440 |
|
432 | |||
441 | context.csrf_token = auth.get_csrf_token(session=request.session) |
|
433 | context.csrf_token = auth.get_csrf_token(session=request.session) | |
442 | context.backends = rhodecode.BACKENDS.keys() |
|
434 | context.backends = rhodecode.BACKENDS.keys() | |
443 | context.backends.sort() |
|
435 | context.backends.sort() | |
444 | context.unread_notifications = NotificationModel().get_unread_cnt_for_user(user_id) |
|
436 | context.unread_notifications = NotificationModel().get_unread_cnt_for_user(user_id) | |
445 |
|
437 | |||
446 | # NOTE(marcink): when migrated to pyramid we don't need to set this anymore, |
|
438 | # NOTE(marcink): when migrated to pyramid we don't need to set this anymore, | |
447 | # given request will ALWAYS be pyramid one |
|
439 | # given request will ALWAYS be pyramid one | |
448 | pyramid_request = pyramid.threadlocal.get_current_request() |
|
440 | pyramid_request = pyramid.threadlocal.get_current_request() | |
449 | context.pyramid_request = pyramid_request |
|
441 | context.pyramid_request = pyramid_request | |
450 |
|
442 | |||
451 | # web case |
|
443 | # web case | |
452 | if hasattr(pyramid_request, 'user'): |
|
444 | if hasattr(pyramid_request, 'user'): | |
453 | context.auth_user = pyramid_request.user |
|
445 | context.auth_user = pyramid_request.user | |
454 | context.rhodecode_user = pyramid_request.user |
|
446 | context.rhodecode_user = pyramid_request.user | |
455 |
|
447 | |||
456 | # api case |
|
448 | # api case | |
457 | if hasattr(pyramid_request, 'rpc_user'): |
|
449 | if hasattr(pyramid_request, 'rpc_user'): | |
458 | context.auth_user = pyramid_request.rpc_user |
|
450 | context.auth_user = pyramid_request.rpc_user | |
459 | context.rhodecode_user = pyramid_request.rpc_user |
|
451 | context.rhodecode_user = pyramid_request.rpc_user | |
460 |
|
452 | |||
461 | # attach the whole call context to the request |
|
453 | # attach the whole call context to the request | |
462 | request.call_context = context |
|
454 | request.call_context = context | |
463 |
|
455 | |||
464 |
|
456 | |||
465 | def get_auth_user(request): |
|
457 | def get_auth_user(request): | |
466 | environ = request.environ |
|
458 | environ = request.environ | |
467 | session = request.session |
|
459 | session = request.session | |
468 |
|
460 | |||
469 | ip_addr = get_ip_addr(environ) |
|
461 | ip_addr = get_ip_addr(environ) | |
470 | # make sure that we update permissions each time we call controller |
|
462 | # make sure that we update permissions each time we call controller | |
471 | _auth_token = (request.GET.get('auth_token', '') or |
|
463 | _auth_token = (request.GET.get('auth_token', '') or | |
472 | request.GET.get('api_key', '')) |
|
464 | request.GET.get('api_key', '')) | |
473 |
|
465 | |||
474 | if _auth_token: |
|
466 | if _auth_token: | |
475 | # when using API_KEY we assume user exists, and |
|
467 | # when using API_KEY we assume user exists, and | |
476 | # doesn't need auth based on cookies. |
|
468 | # doesn't need auth based on cookies. | |
477 | auth_user = AuthUser(api_key=_auth_token, ip_addr=ip_addr) |
|
469 | auth_user = AuthUser(api_key=_auth_token, ip_addr=ip_addr) | |
478 | authenticated = False |
|
470 | authenticated = False | |
479 | else: |
|
471 | else: | |
480 | cookie_store = CookieStoreWrapper(session.get('rhodecode_user')) |
|
472 | cookie_store = CookieStoreWrapper(session.get('rhodecode_user')) | |
481 | try: |
|
473 | try: | |
482 | auth_user = AuthUser(user_id=cookie_store.get('user_id', None), |
|
474 | auth_user = AuthUser(user_id=cookie_store.get('user_id', None), | |
483 | ip_addr=ip_addr) |
|
475 | ip_addr=ip_addr) | |
484 | except UserCreationError as e: |
|
476 | except UserCreationError as e: | |
485 | h.flash(e, 'error') |
|
477 | h.flash(e, 'error') | |
486 | # container auth or other auth functions that create users |
|
478 | # container auth or other auth functions that create users | |
487 | # on the fly can throw this exception signaling that there's |
|
479 | # on the fly can throw this exception signaling that there's | |
488 | # issue with user creation, explanation should be provided |
|
480 | # issue with user creation, explanation should be provided | |
489 | # in Exception itself. We then create a simple blank |
|
481 | # in Exception itself. We then create a simple blank | |
490 | # AuthUser |
|
482 | # AuthUser | |
491 | auth_user = AuthUser(ip_addr=ip_addr) |
|
483 | auth_user = AuthUser(ip_addr=ip_addr) | |
492 |
|
484 | |||
493 | if password_changed(auth_user, session): |
|
485 | if password_changed(auth_user, session): | |
494 | session.invalidate() |
|
486 | session.invalidate() | |
495 | cookie_store = CookieStoreWrapper(session.get('rhodecode_user')) |
|
487 | cookie_store = CookieStoreWrapper(session.get('rhodecode_user')) | |
496 | auth_user = AuthUser(ip_addr=ip_addr) |
|
488 | auth_user = AuthUser(ip_addr=ip_addr) | |
497 |
|
489 | |||
498 | authenticated = cookie_store.get('is_authenticated') |
|
490 | authenticated = cookie_store.get('is_authenticated') | |
499 |
|
491 | |||
500 | if not auth_user.is_authenticated and auth_user.is_user_object: |
|
492 | if not auth_user.is_authenticated and auth_user.is_user_object: | |
501 | # user is not authenticated and not empty |
|
493 | # user is not authenticated and not empty | |
502 | auth_user.set_authenticated(authenticated) |
|
494 | auth_user.set_authenticated(authenticated) | |
503 |
|
495 | |||
504 | return auth_user |
|
496 | return auth_user | |
505 |
|
497 | |||
506 |
|
498 | |||
507 | class BaseController(WSGIController): |
|
499 | class BaseController(WSGIController): | |
508 |
|
500 | |||
509 | def __before__(self): |
|
501 | def __before__(self): | |
510 | """ |
|
502 | """ | |
511 | __before__ is called before controller methods and after __call__ |
|
503 | __before__ is called before controller methods and after __call__ | |
512 | """ |
|
504 | """ | |
513 | # on each call propagate settings calls into global settings. |
|
505 | # on each call propagate settings calls into global settings. | |
514 | from pylons import config |
|
506 | from pylons import config | |
|
507 | from pylons import tmpl_context as c, request, url | |||
515 | set_rhodecode_config(config) |
|
508 | set_rhodecode_config(config) | |
516 | attach_context_attributes(c, request, self._rhodecode_user.user_id) |
|
509 | attach_context_attributes(c, request, self._rhodecode_user.user_id) | |
517 |
|
510 | |||
518 | # TODO: Remove this when fixed in attach_context_attributes() |
|
511 | # TODO: Remove this when fixed in attach_context_attributes() | |
519 | c.repo_name = get_repo_slug(request) # can be empty |
|
512 | c.repo_name = get_repo_slug(request) # can be empty | |
520 |
|
513 | |||
521 | self.cut_off_limit_diff = safe_int(config.get('cut_off_limit_diff')) |
|
514 | self.cut_off_limit_diff = safe_int(config.get('cut_off_limit_diff')) | |
522 | self.cut_off_limit_file = safe_int(config.get('cut_off_limit_file')) |
|
515 | self.cut_off_limit_file = safe_int(config.get('cut_off_limit_file')) | |
523 | self.sa = meta.Session |
|
516 | self.sa = meta.Session | |
524 | self.scm_model = ScmModel(self.sa) |
|
517 | self.scm_model = ScmModel(self.sa) | |
525 |
|
518 | |||
526 | # set user language |
|
519 | # set user language | |
527 | user_lang = getattr(c.pyramid_request, '_LOCALE_', None) |
|
520 | user_lang = getattr(c.pyramid_request, '_LOCALE_', None) | |
528 | if user_lang: |
|
521 | if user_lang: | |
529 | translation.set_lang(user_lang) |
|
522 | translation.set_lang(user_lang) | |
530 | log.debug('set language to %s for user %s', |
|
523 | log.debug('set language to %s for user %s', | |
531 | user_lang, self._rhodecode_user) |
|
524 | user_lang, self._rhodecode_user) | |
532 |
|
525 | |||
533 | def _dispatch_redirect(self, with_url, environ, start_response): |
|
526 | def _dispatch_redirect(self, with_url, environ, start_response): | |
|
527 | from webob.exc import HTTPFound | |||
534 | resp = HTTPFound(with_url) |
|
528 | resp = HTTPFound(with_url) | |
535 | environ['SCRIPT_NAME'] = '' # handle prefix middleware |
|
529 | environ['SCRIPT_NAME'] = '' # handle prefix middleware | |
536 | environ['PATH_INFO'] = with_url |
|
530 | environ['PATH_INFO'] = with_url | |
537 | return resp(environ, start_response) |
|
531 | return resp(environ, start_response) | |
538 |
|
532 | |||
539 | def __call__(self, environ, start_response): |
|
533 | def __call__(self, environ, start_response): | |
540 | """Invoke the Controller""" |
|
534 | """Invoke the Controller""" | |
541 | # WSGIController.__call__ dispatches to the Controller method |
|
535 | # WSGIController.__call__ dispatches to the Controller method | |
542 | # the request is routed to. This routing information is |
|
536 | # the request is routed to. This routing information is | |
543 | # available in environ['pylons.routes_dict'] |
|
537 | # available in environ['pylons.routes_dict'] | |
544 | from rhodecode.lib import helpers as h |
|
538 | from rhodecode.lib import helpers as h | |
|
539 | from pylons import tmpl_context as c, request, url | |||
545 |
|
540 | |||
546 | # Provide the Pylons context to Pyramid's debugtoolbar if it asks |
|
541 | # Provide the Pylons context to Pyramid's debugtoolbar if it asks | |
547 | if environ.get('debugtoolbar.wants_pylons_context', False): |
|
542 | if environ.get('debugtoolbar.wants_pylons_context', False): | |
548 | environ['debugtoolbar.pylons_context'] = c._current_obj() |
|
543 | environ['debugtoolbar.pylons_context'] = c._current_obj() | |
549 |
|
544 | |||
550 | _route_name = '.'.join([environ['pylons.routes_dict']['controller'], |
|
545 | _route_name = '.'.join([environ['pylons.routes_dict']['controller'], | |
551 | environ['pylons.routes_dict']['action']]) |
|
546 | environ['pylons.routes_dict']['action']]) | |
552 |
|
547 | |||
553 | self.rc_config = SettingsModel().get_all_settings(cache=True) |
|
548 | self.rc_config = SettingsModel().get_all_settings(cache=True) | |
554 | self.ip_addr = get_ip_addr(environ) |
|
549 | self.ip_addr = get_ip_addr(environ) | |
555 |
|
550 | |||
556 | # The rhodecode auth user is looked up and passed through the |
|
551 | # The rhodecode auth user is looked up and passed through the | |
557 | # environ by the pylons compatibility tween in pyramid. |
|
552 | # environ by the pylons compatibility tween in pyramid. | |
558 | # So we can just grab it from there. |
|
553 | # So we can just grab it from there. | |
559 | auth_user = environ['rc_auth_user'] |
|
554 | auth_user = environ['rc_auth_user'] | |
560 |
|
555 | |||
561 | # set globals for auth user |
|
556 | # set globals for auth user | |
562 | request.user = auth_user |
|
557 | request.user = auth_user | |
563 | self._rhodecode_user = auth_user |
|
558 | self._rhodecode_user = auth_user | |
564 |
|
559 | |||
565 | log.info('IP: %s User: %s accessed %s [%s]' % ( |
|
560 | log.info('IP: %s User: %s accessed %s [%s]' % ( | |
566 | self.ip_addr, auth_user, safe_unicode(get_access_path(environ)), |
|
561 | self.ip_addr, auth_user, safe_unicode(get_access_path(environ)), | |
567 | _route_name) |
|
562 | _route_name) | |
568 | ) |
|
563 | ) | |
569 |
|
564 | |||
570 | user_obj = auth_user.get_instance() |
|
565 | user_obj = auth_user.get_instance() | |
571 | if user_obj and user_obj.user_data.get('force_password_change'): |
|
566 | if user_obj and user_obj.user_data.get('force_password_change'): | |
572 | h.flash('You are required to change your password', 'warning', |
|
567 | h.flash('You are required to change your password', 'warning', | |
573 | ignore_duplicate=True) |
|
568 | ignore_duplicate=True) | |
574 | return self._dispatch_redirect( |
|
569 | return self._dispatch_redirect( | |
575 | url('my_account_password'), environ, start_response) |
|
570 | url('my_account_password'), environ, start_response) | |
576 |
|
571 | |||
577 | return WSGIController.__call__(self, environ, start_response) |
|
572 | return WSGIController.__call__(self, environ, start_response) | |
578 |
|
573 | |||
579 |
|
574 | |||
580 | def h_filter(s): |
|
575 | def h_filter(s): | |
581 | """ |
|
576 | """ | |
582 | Custom filter for Mako templates. Mako by standard uses `markupsafe.escape` |
|
577 | Custom filter for Mako templates. Mako by standard uses `markupsafe.escape` | |
583 | we wrap this with additional functionality that converts None to empty |
|
578 | we wrap this with additional functionality that converts None to empty | |
584 | strings |
|
579 | strings | |
585 | """ |
|
580 | """ | |
586 | if s is None: |
|
581 | if s is None: | |
587 | return markupsafe.Markup() |
|
582 | return markupsafe.Markup() | |
588 | return markupsafe.escape(s) |
|
583 | return markupsafe.escape(s) | |
589 |
|
584 | |||
590 |
|
585 | |||
591 | def add_events_routes(config): |
|
586 | def add_events_routes(config): | |
592 | """ |
|
587 | """ | |
593 | Adds routing that can be used in events. Because some events are triggered |
|
588 | Adds routing that can be used in events. Because some events are triggered | |
594 | outside of pyramid context, we need to bootstrap request with some |
|
589 | outside of pyramid context, we need to bootstrap request with some | |
595 | routing registered |
|
590 | routing registered | |
596 | """ |
|
591 | """ | |
597 | config.add_route(name='home', pattern='/') |
|
592 | config.add_route(name='home', pattern='/') | |
598 |
|
593 | |||
599 | config.add_route(name='repo_summary', pattern='/{repo_name}') |
|
594 | config.add_route(name='repo_summary', pattern='/{repo_name}') | |
600 | config.add_route(name='repo_summary_explicit', pattern='/{repo_name}/summary') |
|
595 | config.add_route(name='repo_summary_explicit', pattern='/{repo_name}/summary') | |
601 | config.add_route(name='repo_group_home', pattern='/{repo_group_name}') |
|
596 | config.add_route(name='repo_group_home', pattern='/{repo_group_name}') | |
602 |
|
597 | |||
603 | config.add_route(name='pullrequest_show', |
|
598 | config.add_route(name='pullrequest_show', | |
604 | pattern='/{repo_name}/pull-request/{pull_request_id}') |
|
599 | pattern='/{repo_name}/pull-request/{pull_request_id}') | |
605 | config.add_route(name='pull_requests_global', |
|
600 | config.add_route(name='pull_requests_global', | |
606 | pattern='/pull-request/{pull_request_id}') |
|
601 | pattern='/pull-request/{pull_request_id}') | |
607 |
|
602 | |||
608 | config.add_route(name='repo_commit', |
|
603 | config.add_route(name='repo_commit', | |
609 | pattern='/{repo_name}/changeset/{commit_id}') |
|
604 | pattern='/{repo_name}/changeset/{commit_id}') | |
610 | config.add_route(name='repo_files', |
|
605 | config.add_route(name='repo_files', | |
611 | pattern='/{repo_name}/files/{commit_id}/{f_path}') |
|
606 | pattern='/{repo_name}/files/{commit_id}/{f_path}') | |
612 |
|
607 | |||
613 |
|
608 | |||
614 | def bootstrap_request(**kwargs): |
|
609 | def bootstrap_request(**kwargs): | |
615 | import pyramid.testing |
|
610 | import pyramid.testing | |
616 | request = pyramid.testing.DummyRequest(**kwargs) |
|
611 | request = pyramid.testing.DummyRequest(**kwargs) | |
617 | request.application_url = kwargs.pop('application_url', 'http://example.com') |
|
612 | request.application_url = kwargs.pop('application_url', 'http://example.com') | |
618 | request.host = kwargs.pop('host', 'example.com:80') |
|
613 | request.host = kwargs.pop('host', 'example.com:80') | |
619 | request.domain = kwargs.pop('domain', 'example.com') |
|
614 | request.domain = kwargs.pop('domain', 'example.com') | |
620 |
|
615 | |||
621 | config = pyramid.testing.setUp(request=request) |
|
616 | config = pyramid.testing.setUp(request=request) | |
622 | add_events_routes(config) |
|
617 | add_events_routes(config) | |
623 |
|
||||
624 |
|
||||
625 | class BaseRepoController(BaseController): |
|
|||
626 | """ |
|
|||
627 | Base class for controllers responsible for loading all needed data for |
|
|||
628 | repository loaded items are |
|
|||
629 |
|
||||
630 | c.rhodecode_repo: instance of scm repository |
|
|||
631 | c.rhodecode_db_repo: instance of db |
|
|||
632 | c.repository_requirements_missing: shows that repository specific data |
|
|||
633 | could not be displayed due to the missing requirements |
|
|||
634 | c.repository_pull_requests: show number of open pull requests |
|
|||
635 | """ |
|
|||
636 |
|
||||
637 | def __before__(self): |
|
|||
638 | super(BaseRepoController, self).__before__() |
|
|||
639 | if c.repo_name: # extracted from routes |
|
|||
640 | db_repo = Repository.get_by_repo_name(c.repo_name) |
|
|||
641 | if not db_repo: |
|
|||
642 | return |
|
|||
643 |
|
||||
644 | log.debug( |
|
|||
645 | 'Found repository in database %s with state `%s`', |
|
|||
646 | safe_unicode(db_repo), safe_unicode(db_repo.repo_state)) |
|
|||
647 | route = getattr(request.environ.get('routes.route'), 'name', '') |
|
|||
648 |
|
||||
649 | # allow to delete repos that are somehow damages in filesystem |
|
|||
650 | if route in ['delete_repo']: |
|
|||
651 | return |
|
|||
652 |
|
||||
653 | if db_repo.repo_state in [Repository.STATE_PENDING]: |
|
|||
654 | if route in ['repo_creating_home']: |
|
|||
655 | return |
|
|||
656 | check_url = url('repo_creating_home', repo_name=c.repo_name) |
|
|||
657 | return redirect(check_url) |
|
|||
658 |
|
||||
659 | self.rhodecode_db_repo = db_repo |
|
|||
660 |
|
||||
661 | missing_requirements = False |
|
|||
662 | try: |
|
|||
663 | self.rhodecode_repo = self.rhodecode_db_repo.scm_instance() |
|
|||
664 | except RepositoryRequirementError as e: |
|
|||
665 | missing_requirements = True |
|
|||
666 | self._handle_missing_requirements(e) |
|
|||
667 |
|
||||
668 | if self.rhodecode_repo is None and not missing_requirements: |
|
|||
669 | log.error('%s this repository is present in database but it ' |
|
|||
670 | 'cannot be created as an scm instance', c.repo_name) |
|
|||
671 |
|
||||
672 | h.flash(_( |
|
|||
673 | "The repository at %(repo_name)s cannot be located.") % |
|
|||
674 | {'repo_name': c.repo_name}, |
|
|||
675 | category='error', ignore_duplicate=True) |
|
|||
676 | redirect(h.route_path('home')) |
|
|||
677 |
|
||||
678 | # update last change according to VCS data |
|
|||
679 | if not missing_requirements: |
|
|||
680 | commit = db_repo.get_commit( |
|
|||
681 | pre_load=["author", "date", "message", "parents"]) |
|
|||
682 | db_repo.update_commit_cache(commit) |
|
|||
683 |
|
||||
684 | # Prepare context |
|
|||
685 | c.rhodecode_db_repo = db_repo |
|
|||
686 | c.rhodecode_repo = self.rhodecode_repo |
|
|||
687 | c.repository_requirements_missing = missing_requirements |
|
|||
688 |
|
||||
689 | self._update_global_counters(self.scm_model, db_repo) |
|
|||
690 |
|
||||
691 | def _update_global_counters(self, scm_model, db_repo): |
|
|||
692 | """ |
|
|||
693 | Base variables that are exposed to every page of repository |
|
|||
694 | """ |
|
|||
695 | c.repository_pull_requests = scm_model.get_pull_requests(db_repo) |
|
|||
696 |
|
||||
697 | def _handle_missing_requirements(self, error): |
|
|||
698 | self.rhodecode_repo = None |
|
|||
699 | log.error( |
|
|||
700 | 'Requirements are missing for repository %s: %s', |
|
|||
701 | c.repo_name, error.message) |
|
|||
702 |
|
||||
703 | summary_url = h.route_path('repo_summary', repo_name=c.repo_name) |
|
|||
704 | statistics_url = url('edit_repo_statistics', repo_name=c.repo_name) |
|
|||
705 | settings_update_url = url('repo', repo_name=c.repo_name) |
|
|||
706 | path = request.path |
|
|||
707 | should_redirect = ( |
|
|||
708 | path not in (summary_url, settings_update_url) |
|
|||
709 | and '/settings' not in path or path == statistics_url |
|
|||
710 | ) |
|
|||
711 | if should_redirect: |
|
|||
712 | redirect(summary_url) |
|
@@ -1,997 +1,973 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2017 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | Utilities library for RhodeCode |
|
22 | Utilities library for RhodeCode | |
23 | """ |
|
23 | """ | |
24 |
|
24 | |||
25 | import datetime |
|
25 | import datetime | |
26 | import decorator |
|
26 | import decorator | |
27 | import json |
|
27 | import json | |
28 | import logging |
|
28 | import logging | |
29 | import os |
|
29 | import os | |
30 | import re |
|
30 | import re | |
31 | import shutil |
|
31 | import shutil | |
32 | import tempfile |
|
32 | import tempfile | |
33 | import traceback |
|
33 | import traceback | |
34 | import tarfile |
|
34 | import tarfile | |
35 | import warnings |
|
35 | import warnings | |
36 | import hashlib |
|
36 | import hashlib | |
37 | from os.path import join as jn |
|
37 | from os.path import join as jn | |
38 |
|
38 | |||
39 | import paste |
|
39 | import paste | |
40 | import pkg_resources |
|
40 | import pkg_resources | |
41 | from paste.script.command import Command, BadCommand |
|
41 | from paste.script.command import Command, BadCommand | |
42 | from webhelpers.text import collapse, remove_formatting, strip_tags |
|
42 | from webhelpers.text import collapse, remove_formatting, strip_tags | |
43 | from mako import exceptions |
|
43 | from mako import exceptions | |
44 | from pyramid.threadlocal import get_current_registry |
|
44 | from pyramid.threadlocal import get_current_registry | |
45 | from pyramid.request import Request |
|
45 | from pyramid.request import Request | |
46 |
|
46 | |||
47 | from rhodecode.lib.fakemod import create_module |
|
47 | from rhodecode.lib.fakemod import create_module | |
48 | from rhodecode.lib.vcs.backends.base import Config |
|
48 | from rhodecode.lib.vcs.backends.base import Config | |
49 | from rhodecode.lib.vcs.exceptions import VCSError |
|
49 | from rhodecode.lib.vcs.exceptions import VCSError | |
50 | from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend |
|
50 | from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend | |
51 | from rhodecode.lib.utils2 import ( |
|
51 | from rhodecode.lib.utils2 import ( | |
52 | safe_str, safe_unicode, get_current_rhodecode_user, md5) |
|
52 | safe_str, safe_unicode, get_current_rhodecode_user, md5) | |
53 | from rhodecode.model import meta |
|
53 | from rhodecode.model import meta | |
54 | from rhodecode.model.db import ( |
|
54 | from rhodecode.model.db import ( | |
55 | Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup) |
|
55 | Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup) | |
56 | from rhodecode.model.meta import Session |
|
56 | from rhodecode.model.meta import Session | |
57 |
|
57 | |||
58 |
|
58 | |||
59 | log = logging.getLogger(__name__) |
|
59 | log = logging.getLogger(__name__) | |
60 |
|
60 | |||
61 | REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*') |
|
61 | REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*') | |
62 |
|
62 | |||
63 | # String which contains characters that are not allowed in slug names for |
|
63 | # String which contains characters that are not allowed in slug names for | |
64 | # repositories or repository groups. It is properly escaped to use it in |
|
64 | # repositories or repository groups. It is properly escaped to use it in | |
65 | # regular expressions. |
|
65 | # regular expressions. | |
66 | SLUG_BAD_CHARS = re.escape('`?=[]\;\'"<>,/~!@#$%^&*()+{}|:') |
|
66 | SLUG_BAD_CHARS = re.escape('`?=[]\;\'"<>,/~!@#$%^&*()+{}|:') | |
67 |
|
67 | |||
68 | # Regex that matches forbidden characters in repo/group slugs. |
|
68 | # Regex that matches forbidden characters in repo/group slugs. | |
69 | SLUG_BAD_CHAR_RE = re.compile('[{}]'.format(SLUG_BAD_CHARS)) |
|
69 | SLUG_BAD_CHAR_RE = re.compile('[{}]'.format(SLUG_BAD_CHARS)) | |
70 |
|
70 | |||
71 | # Regex that matches allowed characters in repo/group slugs. |
|
71 | # Regex that matches allowed characters in repo/group slugs. | |
72 | SLUG_GOOD_CHAR_RE = re.compile('[^{}]'.format(SLUG_BAD_CHARS)) |
|
72 | SLUG_GOOD_CHAR_RE = re.compile('[^{}]'.format(SLUG_BAD_CHARS)) | |
73 |
|
73 | |||
74 | # Regex that matches whole repo/group slugs. |
|
74 | # Regex that matches whole repo/group slugs. | |
75 | SLUG_RE = re.compile('[^{}]+'.format(SLUG_BAD_CHARS)) |
|
75 | SLUG_RE = re.compile('[^{}]+'.format(SLUG_BAD_CHARS)) | |
76 |
|
76 | |||
77 | _license_cache = None |
|
77 | _license_cache = None | |
78 |
|
78 | |||
79 |
|
79 | |||
80 | def repo_name_slug(value): |
|
80 | def repo_name_slug(value): | |
81 | """ |
|
81 | """ | |
82 | Return slug of name of repository |
|
82 | Return slug of name of repository | |
83 | This function is called on each creation/modification |
|
83 | This function is called on each creation/modification | |
84 | of repository to prevent bad names in repo |
|
84 | of repository to prevent bad names in repo | |
85 | """ |
|
85 | """ | |
86 | replacement_char = '-' |
|
86 | replacement_char = '-' | |
87 |
|
87 | |||
88 | slug = remove_formatting(value) |
|
88 | slug = remove_formatting(value) | |
89 | slug = SLUG_BAD_CHAR_RE.sub('', slug) |
|
89 | slug = SLUG_BAD_CHAR_RE.sub('', slug) | |
90 | slug = re.sub('[\s]+', '-', slug) |
|
90 | slug = re.sub('[\s]+', '-', slug) | |
91 | slug = collapse(slug, replacement_char) |
|
91 | slug = collapse(slug, replacement_char) | |
92 | return slug |
|
92 | return slug | |
93 |
|
93 | |||
94 |
|
94 | |||
95 | #============================================================================== |
|
95 | #============================================================================== | |
96 | # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS |
|
96 | # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS | |
97 | #============================================================================== |
|
97 | #============================================================================== | |
98 | def get_repo_slug(request): |
|
98 | def get_repo_slug(request): | |
99 | _repo = '' |
|
99 | _repo = '' | |
100 | if isinstance(request, Request): |
|
100 | if isinstance(request, Request): | |
101 | if hasattr(request, 'db_repo'): |
|
101 | if hasattr(request, 'db_repo'): | |
102 | # if our requests has set db reference use it for name, this |
|
102 | # if our requests has set db reference use it for name, this | |
103 | # translates the example.com/_<id> into proper repo names |
|
103 | # translates the example.com/_<id> into proper repo names | |
104 | _repo = request.db_repo.repo_name |
|
104 | _repo = request.db_repo.repo_name | |
105 | elif getattr(request, 'matchdict', None): |
|
105 | elif getattr(request, 'matchdict', None): | |
106 | # pyramid |
|
106 | # pyramid | |
107 | _repo = request.matchdict.get('repo_name') |
|
107 | _repo = request.matchdict.get('repo_name') | |
108 |
|
108 | |||
109 | # TODO(marcink): remove after pylons migration... |
|
109 | # TODO(marcink): remove after pylons migration... | |
110 | if not _repo: |
|
110 | if not _repo: | |
111 | _repo = request.environ['pylons.routes_dict'].get('repo_name') |
|
111 | _repo = request.environ['pylons.routes_dict'].get('repo_name') | |
112 |
|
112 | |||
113 | if _repo: |
|
113 | if _repo: | |
114 | _repo = _repo.rstrip('/') |
|
114 | _repo = _repo.rstrip('/') | |
115 | return _repo |
|
115 | return _repo | |
116 |
|
116 | |||
117 |
|
117 | |||
def get_repo_group_slug(request):
    """
    Resolve the repository group name for a given request object.

    Prefers the attached db reference (``db_repo_group``), then the
    pyramid ``matchdict``, and finally the legacy pylons routing dict.
    Any trailing slash is stripped from the returned name.
    """
    group_name = ''
    if isinstance(request, Request):
        if hasattr(request, 'db_repo_group'):
            # the db reference yields canonical names; this translates
            # example.com/_<id> style urls into proper repo group names
            group_name = request.db_repo_group.group_name
        elif getattr(request, 'matchdict', None):
            # pyramid
            group_name = request.matchdict.get('repo_group_name')

    # TODO(marcink): remove after pylons migration...
    if not group_name:
        group_name = request.environ['pylons.routes_dict'].get('group_name')

    return group_name.rstrip('/') if group_name else group_name
136 |
|
136 | |||
137 |
|
137 | |||
def get_user_group_slug(request):
    """
    Resolve the user group name for a given request object.

    Looks up the user group id from pyramid or legacy pylons routing,
    then translates it to a name via the database. Returns ``None`` on
    any lookup failure.
    """
    if isinstance(request, Request) and getattr(request, 'matchdict', None):
        # pyramid
        group_id = request.matchdict.get('user_group_id')
    else:
        # legacy pylons routing
        group_id = request.environ['pylons.routes_dict'].get('user_group_id')

    try:
        user_group = UserGroup.get(group_id)
        if user_group:
            return user_group.users_group_name
        return user_group
    except Exception:
        log.exception('Failed to get user group by id')
        # catch all failures here
        return None
155 |
|
155 | |||
156 |
|
156 | |||
def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
    """
    Scans given path for repos and return (name,(type,path)) tuple

    :param path: path to scan for repositories
    :param recursive: recursive search and return names with subdirs in front
    :param skip_removed_repos: ignore directories matching the
        removed-repo naming pattern
    """
    # remove ending slash for better results
    path = path.rstrip(os.sep)
    log.debug('now scanning in %s location recursive:%s...', path, recursive)

    def _scan(root):
        entries = _get_dirpaths(root)
        if not _is_dir_writable(root):
            log.warning('repo path without write access: %s', root)

        for entry in entries:
            if os.path.isfile(os.path.join(root, entry)):
                continue
            candidate = os.path.join(root, entry)

            # skip removed repos
            if skip_removed_repos and REMOVED_REPO_PAT.match(entry):
                continue

            # skip .<something> dirs
            if entry.startswith('.'):
                continue

            try:
                scm_info = get_scm(candidate)
                # name is the path relative to the scan root
                yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
            except VCSError:
                if not recursive:
                    continue
                # not a repo itself; maybe it contains repos — recurse
                if os.path.isdir(candidate):
                    for nested in _scan(candidate):
                        yield nested

    return _scan(path)
200 |
|
200 | |||
201 |
|
201 | |||
202 | def _get_dirpaths(p): |
|
202 | def _get_dirpaths(p): | |
203 | try: |
|
203 | try: | |
204 | # OS-independable way of checking if we have at least read-only |
|
204 | # OS-independable way of checking if we have at least read-only | |
205 | # access or not. |
|
205 | # access or not. | |
206 | dirpaths = os.listdir(p) |
|
206 | dirpaths = os.listdir(p) | |
207 | except OSError: |
|
207 | except OSError: | |
208 | log.warning('ignoring repo path without read access: %s', p) |
|
208 | log.warning('ignoring repo path without read access: %s', p) | |
209 | return [] |
|
209 | return [] | |
210 |
|
210 | |||
211 | # os.listpath has a tweak: If a unicode is passed into it, then it tries to |
|
211 | # os.listpath has a tweak: If a unicode is passed into it, then it tries to | |
212 | # decode paths and suddenly returns unicode objects itself. The items it |
|
212 | # decode paths and suddenly returns unicode objects itself. The items it | |
213 | # cannot decode are returned as strings and cause issues. |
|
213 | # cannot decode are returned as strings and cause issues. | |
214 | # |
|
214 | # | |
215 | # Those paths are ignored here until a solid solution for path handling has |
|
215 | # Those paths are ignored here until a solid solution for path handling has | |
216 | # been built. |
|
216 | # been built. | |
217 | expected_type = type(p) |
|
217 | expected_type = type(p) | |
218 |
|
218 | |||
219 | def _has_correct_type(item): |
|
219 | def _has_correct_type(item): | |
220 | if type(item) is not expected_type: |
|
220 | if type(item) is not expected_type: | |
221 | log.error( |
|
221 | log.error( | |
222 | u"Ignoring path %s since it cannot be decoded into unicode.", |
|
222 | u"Ignoring path %s since it cannot be decoded into unicode.", | |
223 | # Using "repr" to make sure that we see the byte value in case |
|
223 | # Using "repr" to make sure that we see the byte value in case | |
224 | # of support. |
|
224 | # of support. | |
225 | repr(item)) |
|
225 | repr(item)) | |
226 | return False |
|
226 | return False | |
227 | return True |
|
227 | return True | |
228 |
|
228 | |||
229 | dirpaths = [item for item in dirpaths if _has_correct_type(item)] |
|
229 | dirpaths = [item for item in dirpaths if _has_correct_type(item)] | |
230 |
|
230 | |||
231 | return dirpaths |
|
231 | return dirpaths | |
232 |
|
232 | |||
233 |
|
233 | |||
234 | def _is_dir_writable(path): |
|
234 | def _is_dir_writable(path): | |
235 | """ |
|
235 | """ | |
236 | Probe if `path` is writable. |
|
236 | Probe if `path` is writable. | |
237 |
|
237 | |||
238 | Due to trouble on Cygwin / Windows, this is actually probing if it is |
|
238 | Due to trouble on Cygwin / Windows, this is actually probing if it is | |
239 | possible to create a file inside of `path`, stat does not produce reliable |
|
239 | possible to create a file inside of `path`, stat does not produce reliable | |
240 | results in this case. |
|
240 | results in this case. | |
241 | """ |
|
241 | """ | |
242 | try: |
|
242 | try: | |
243 | with tempfile.TemporaryFile(dir=path): |
|
243 | with tempfile.TemporaryFile(dir=path): | |
244 | pass |
|
244 | pass | |
245 | except OSError: |
|
245 | except OSError: | |
246 | return False |
|
246 | return False | |
247 | return True |
|
247 | return True | |
248 |
|
248 | |||
249 |
|
249 | |||
def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None):
    """
    Returns True if given path is a valid repository False otherwise.
    If expect_scm param is given also, compare if given scm is the same
    as expected from scm parameter. If explicit_scm is given don't try to
    detect the scm, just use the given one to check if repo is valid

    :param repo_name: repository name relative to `base_path`
    :param base_path: filesystem root the repo lives under
    :param expect_scm: scm alias the detected type must match
    :param explicit_scm: skip detection and use this backend directly

    :return True: if given path is a valid repository
    """
    full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
    log.debug('Checking if `%s` is a valid path for repository. '
              'Explicit type: %s', repo_name, explicit_scm)

    try:
        if explicit_scm:
            detected = [get_scm_backend(explicit_scm)]
        else:
            detected = get_scm(full_path)

        if expect_scm:
            return detected[0] == expect_scm
        log.debug('path: %s is an vcs object:%s', full_path, detected)
        return True
    except VCSError:
        log.debug('path: %s is not a valid repo !', full_path)
        return False
281 |
|
281 | |||
282 |
|
282 | |||
def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
    """
    Returns True if given path is a repository group, False otherwise

    :param repo_group_name: group name relative to `base_path`
    :param base_path: filesystem root the repo groups live under
    :param skip_path_check: accept the group without requiring its
        directory to exist on disk
    """
    full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
    log.debug('Checking if `%s` is a valid path for repository group',
              repo_group_name)

    # check if it's not a repo
    if is_valid_repo(repo_group_name, base_path):
        # NOTE: lazy %s args (not eager %) so no formatting work happens
        # when debug logging is disabled — consistent with the rest of
        # this module
        log.debug('Repo called %s exist, it is not a valid repo group',
                  repo_group_name)
        return False

    try:
        # we need to check bare git repos at higher level
        # since we might match branches/hooks/info/objects or possible
        # other things inside bare git repo
        scm_ = get_scm(os.path.dirname(full_path))
        log.debug('path: %s is a vcs object:%s, not valid repo group',
                  full_path, scm_)
        return False
    except VCSError:
        pass

    # check if it's a valid path
    if skip_path_check or os.path.isdir(full_path):
        log.debug('path: %s is a valid repo group !', full_path)
        return True

    log.debug('path: %s is not a valid repo group !', full_path)
    return False
318 |
|
318 | |||
319 |
|
319 | |||
def ask_ok(prompt, retries=4, complaint='[y]es or [n]o please!'):
    """
    Interactively ask a yes/no question on stdin.

    Accepts a few yes/no spellings; after `retries` additional invalid
    answers an IOError is raised.
    """
    while True:
        answer = raw_input(prompt).lower()
        if answer in ('y', 'ye', 'yes'):
            return True
        if answer in ('n', 'no', 'nop', 'nope'):
            return False
        retries -= 1
        if retries < 0:
            raise IOError
        print(complaint)
331 |
|
331 | |||
# known Mercurial config (.hgrc) section names;
# propagated from mercurial documentation
ui_sections = [
    'alias', 'auth',
    'decode/encode', 'defaults',
    'diff', 'email',
    'extensions', 'format',
    'merge-patterns', 'merge-tools',
    'hooks', 'http_proxy',
    'smtp', 'patch',
    'paths', 'profiling',
    'server', 'trusted',
    'ui', 'web', ]
344 |
|
344 | |||
345 |
|
345 | |||
def config_data_from_db(clear_session=True, repo=None):
    """
    Read the configuration data from the database and return configuration
    tuples.
    """
    from rhodecode.model.settings import VcsSettingsModel

    sa = meta.Session()
    settings_model = VcsSettingsModel(repo=repo, sa=sa)
    ui_settings = settings_model.get_ui_settings()

    config = []
    for setting in ui_settings:
        if not setting.active:
            continue
        log.debug(
            'settings ui from db: [%s] %s=%s',
            setting.section, setting.key, setting.value)
        config.append((
            safe_str(setting.section), safe_str(setting.key),
            safe_str(setting.value)))
        if setting.key == 'push_ssl':
            # force set push_ssl requirement to False, rhodecode
            # handles that
            config.append((
                safe_str(setting.section), safe_str(setting.key), False))

    if clear_session:
        meta.Session.remove()

    # TODO: mikhail: probably it makes no sense to re-read hooks information.
    # It's already there and activated/deactivated
    skip_entries = []
    enabled_hook_classes = get_enabled_hook_classes(ui_settings)
    if 'pull' not in enabled_hook_classes:
        skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL))
    if 'push' not in enabled_hook_classes:
        skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH))
        skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRETX_PUSH))
        skip_entries.append(('hooks', RhodeCodeUi.HOOK_PUSH_KEY))

    return [entry for entry in config if entry[:2] not in skip_entries]
390 |
|
390 | |||
391 |
|
391 | |||
def make_db_config(clear_session=True, repo=None):
    """
    Create a :class:`Config` instance based on the values in the database.
    """
    config = Config()
    for section, option, value in config_data_from_db(
            clear_session=clear_session, repo=repo):
        config.set(section, option, value)
    return config
401 |
|
401 | |||
402 |
|
402 | |||
def get_enabled_hook_classes(ui_settings):
    """
    Return the enabled hook classes.

    :param ui_settings: List of ui_settings as returned
        by :meth:`VcsSettingsModel.get_ui_settings`

    :return: a list with the enabled hook classes. The order is not guaranteed.
    :rtype: list
    """
    hook_names = {
        RhodeCodeUi.HOOK_PUSH: 'push',
        RhodeCodeUi.HOOK_PULL: 'pull',
        RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size'
    }
    # keep only active entries from the 'hooks' section that map to a
    # known hook class
    return [
        hook_names[key]
        for section, key, value, active in ui_settings
        if section == 'hooks' and active and key in hook_names]
430 |
|
430 | |||
431 |
|
431 | |||
def set_rhodecode_config(config):
    """
    Updates pylons config with new settings from database

    :param config: dict-like config object updated in place
    """
    from rhodecode.model.settings import SettingsModel
    for key, value in SettingsModel().get_all_settings().items():
        config[key] = value
443 |
|
443 | |||
444 |
|
444 | |||
def get_rhodecode_realm():
    """
    Return the rhodecode realm from database.
    """
    from rhodecode.model.settings import SettingsModel
    realm_setting = SettingsModel().get_setting_by_name('realm')
    return safe_str(realm_setting.app_settings_value)
452 |
|
452 | |||
453 |
|
453 | |||
def get_rhodecode_base_path():
    """
    Returns the base path. The base path is the filesystem path which points
    to the repository store.
    """
    from rhodecode.model.settings import SettingsModel
    base_path_ui = SettingsModel().get_ui_by_section_and_key('paths', '/')
    return safe_str(base_path_ui.ui_value)
462 |
|
462 | |||
463 |
|
463 | |||
def map_groups(path):
    """
    Given a full path to a repository, create all nested groups that this
    repo is inside. This function creates parent-child relationships between
    groups and creates default perms for all new groups.

    :param path: full path to repository
    :returns: the innermost :class:`RepoGroup` on the path, or ``None``
        when the repo sits at the top level (no groups on its path)
    """
    from rhodecode.model.repo_group import RepoGroupModel
    sa = meta.Session()
    groups = path.split(Repository.NAME_SEP)
    parent = None
    group = None

    # last element is repo in nested groups structure
    groups = groups[:-1]
    rgm = RepoGroupModel(sa)
    owner = User.get_first_super_admin()
    # walk outermost -> innermost, creating each missing group level;
    # group_name is rebuilt as the cumulative path up to this level
    for lvl, group_name in enumerate(groups):
        group_name = '/'.join(groups[:lvl] + [group_name])
        group = RepoGroup.get_by_group_name(group_name)
        desc = '%s group' % group_name

        # skip folders that are now removed repos
        if REMOVED_REPO_PAT.match(group_name):
            break

        if group is None:
            log.debug('creating group level: %s group_name: %s',
                      lvl, group_name)
            group = RepoGroup(group_name, parent)
            group.group_description = desc
            group.user = owner
            sa.add(group)
            perm_obj = rgm._create_default_perms(group)
            sa.add(perm_obj)
            # flush so the new group gets an id before it is used as the
            # parent of the next level
            sa.flush()

        parent = group
    return group
504 |
|
504 | |||
505 |
|
505 | |||
506 | def repo2db_mapper(initial_repo_list, remove_obsolete=False): |
|
506 | def repo2db_mapper(initial_repo_list, remove_obsolete=False): | |
507 | """ |
|
507 | """ | |
508 | maps all repos given in initial_repo_list, non existing repositories |
|
508 | maps all repos given in initial_repo_list, non existing repositories | |
509 | are created, if remove_obsolete is True it also checks for db entries |
|
509 | are created, if remove_obsolete is True it also checks for db entries | |
510 | that are not in initial_repo_list and removes them. |
|
510 | that are not in initial_repo_list and removes them. | |
511 |
|
511 | |||
512 | :param initial_repo_list: list of repositories found by scanning methods |
|
512 | :param initial_repo_list: list of repositories found by scanning methods | |
513 | :param remove_obsolete: check for obsolete entries in database |
|
513 | :param remove_obsolete: check for obsolete entries in database | |
514 | """ |
|
514 | """ | |
515 | from rhodecode.model.repo import RepoModel |
|
515 | from rhodecode.model.repo import RepoModel | |
516 | from rhodecode.model.scm import ScmModel |
|
516 | from rhodecode.model.scm import ScmModel | |
517 | from rhodecode.model.repo_group import RepoGroupModel |
|
517 | from rhodecode.model.repo_group import RepoGroupModel | |
518 | from rhodecode.model.settings import SettingsModel |
|
518 | from rhodecode.model.settings import SettingsModel | |
519 |
|
519 | |||
520 | sa = meta.Session() |
|
520 | sa = meta.Session() | |
521 | repo_model = RepoModel() |
|
521 | repo_model = RepoModel() | |
522 | user = User.get_first_super_admin() |
|
522 | user = User.get_first_super_admin() | |
523 | added = [] |
|
523 | added = [] | |
524 |
|
524 | |||
525 | # creation defaults |
|
525 | # creation defaults | |
526 | defs = SettingsModel().get_default_repo_settings(strip_prefix=True) |
|
526 | defs = SettingsModel().get_default_repo_settings(strip_prefix=True) | |
527 | enable_statistics = defs.get('repo_enable_statistics') |
|
527 | enable_statistics = defs.get('repo_enable_statistics') | |
528 | enable_locking = defs.get('repo_enable_locking') |
|
528 | enable_locking = defs.get('repo_enable_locking') | |
529 | enable_downloads = defs.get('repo_enable_downloads') |
|
529 | enable_downloads = defs.get('repo_enable_downloads') | |
530 | private = defs.get('repo_private') |
|
530 | private = defs.get('repo_private') | |
531 |
|
531 | |||
532 | for name, repo in initial_repo_list.items(): |
|
532 | for name, repo in initial_repo_list.items(): | |
533 | group = map_groups(name) |
|
533 | group = map_groups(name) | |
534 | unicode_name = safe_unicode(name) |
|
534 | unicode_name = safe_unicode(name) | |
535 | db_repo = repo_model.get_by_repo_name(unicode_name) |
|
535 | db_repo = repo_model.get_by_repo_name(unicode_name) | |
536 | # found repo that is on filesystem not in RhodeCode database |
|
536 | # found repo that is on filesystem not in RhodeCode database | |
537 | if not db_repo: |
|
537 | if not db_repo: | |
538 | log.info('repository %s not found, creating now', name) |
|
538 | log.info('repository %s not found, creating now', name) | |
539 | added.append(name) |
|
539 | added.append(name) | |
540 | desc = (repo.description |
|
540 | desc = (repo.description | |
541 | if repo.description != 'unknown' |
|
541 | if repo.description != 'unknown' | |
542 | else '%s repository' % name) |
|
542 | else '%s repository' % name) | |
543 |
|
543 | |||
544 | db_repo = repo_model._create_repo( |
|
544 | db_repo = repo_model._create_repo( | |
545 | repo_name=name, |
|
545 | repo_name=name, | |
546 | repo_type=repo.alias, |
|
546 | repo_type=repo.alias, | |
547 | description=desc, |
|
547 | description=desc, | |
548 | repo_group=getattr(group, 'group_id', None), |
|
548 | repo_group=getattr(group, 'group_id', None), | |
549 | owner=user, |
|
549 | owner=user, | |
550 | enable_locking=enable_locking, |
|
550 | enable_locking=enable_locking, | |
551 | enable_downloads=enable_downloads, |
|
551 | enable_downloads=enable_downloads, | |
552 | enable_statistics=enable_statistics, |
|
552 | enable_statistics=enable_statistics, | |
553 | private=private, |
|
553 | private=private, | |
554 | state=Repository.STATE_CREATED |
|
554 | state=Repository.STATE_CREATED | |
555 | ) |
|
555 | ) | |
556 | sa.commit() |
|
556 | sa.commit() | |
557 | # we added that repo just now, and make sure we updated server info |
|
557 | # we added that repo just now, and make sure we updated server info | |
558 | if db_repo.repo_type == 'git': |
|
558 | if db_repo.repo_type == 'git': | |
559 | git_repo = db_repo.scm_instance() |
|
559 | git_repo = db_repo.scm_instance() | |
560 | # update repository server-info |
|
560 | # update repository server-info | |
561 | log.debug('Running update server info') |
|
561 | log.debug('Running update server info') | |
562 | git_repo._update_server_info() |
|
562 | git_repo._update_server_info() | |
563 |
|
563 | |||
564 | db_repo.update_commit_cache() |
|
564 | db_repo.update_commit_cache() | |
565 |
|
565 | |||
566 | config = db_repo._config |
|
566 | config = db_repo._config | |
567 | config.set('extensions', 'largefiles', '') |
|
567 | config.set('extensions', 'largefiles', '') | |
568 | ScmModel().install_hooks( |
|
568 | ScmModel().install_hooks( | |
569 | db_repo.scm_instance(config=config), |
|
569 | db_repo.scm_instance(config=config), | |
570 | repo_type=db_repo.repo_type) |
|
570 | repo_type=db_repo.repo_type) | |
571 |
|
571 | |||
572 | removed = [] |
|
572 | removed = [] | |
573 | if remove_obsolete: |
|
573 | if remove_obsolete: | |
574 | # remove from database those repositories that are not in the filesystem |
|
574 | # remove from database those repositories that are not in the filesystem | |
575 | for repo in sa.query(Repository).all(): |
|
575 | for repo in sa.query(Repository).all(): | |
576 | if repo.repo_name not in initial_repo_list.keys(): |
|
576 | if repo.repo_name not in initial_repo_list.keys(): | |
577 | log.debug("Removing non-existing repository found in db `%s`", |
|
577 | log.debug("Removing non-existing repository found in db `%s`", | |
578 | repo.repo_name) |
|
578 | repo.repo_name) | |
579 | try: |
|
579 | try: | |
580 | RepoModel(sa).delete(repo, forks='detach', fs_remove=False) |
|
580 | RepoModel(sa).delete(repo, forks='detach', fs_remove=False) | |
581 | sa.commit() |
|
581 | sa.commit() | |
582 | removed.append(repo.repo_name) |
|
582 | removed.append(repo.repo_name) | |
583 | except Exception: |
|
583 | except Exception: | |
584 | # don't hold further removals on error |
|
584 | # don't hold further removals on error | |
585 | log.error(traceback.format_exc()) |
|
585 | log.error(traceback.format_exc()) | |
586 | sa.rollback() |
|
586 | sa.rollback() | |
587 |
|
587 | |||
588 | def splitter(full_repo_name): |
|
588 | def splitter(full_repo_name): | |
589 | _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1) |
|
589 | _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1) | |
590 | gr_name = None |
|
590 | gr_name = None | |
591 | if len(_parts) == 2: |
|
591 | if len(_parts) == 2: | |
592 | gr_name = _parts[0] |
|
592 | gr_name = _parts[0] | |
593 | return gr_name |
|
593 | return gr_name | |
594 |
|
594 | |||
595 | initial_repo_group_list = [splitter(x) for x in |
|
595 | initial_repo_group_list = [splitter(x) for x in | |
596 | initial_repo_list.keys() if splitter(x)] |
|
596 | initial_repo_list.keys() if splitter(x)] | |
597 |
|
597 | |||
598 | # remove from database those repository groups that are not in the |
|
598 | # remove from database those repository groups that are not in the | |
599 | # filesystem due to parent child relationships we need to delete them |
|
599 | # filesystem due to parent child relationships we need to delete them | |
600 | # in a specific order of most nested first |
|
600 | # in a specific order of most nested first | |
601 | all_groups = [x.group_name for x in sa.query(RepoGroup).all()] |
|
601 | all_groups = [x.group_name for x in sa.query(RepoGroup).all()] | |
602 | nested_sort = lambda gr: len(gr.split('/')) |
|
602 | nested_sort = lambda gr: len(gr.split('/')) | |
603 | for group_name in sorted(all_groups, key=nested_sort, reverse=True): |
|
603 | for group_name in sorted(all_groups, key=nested_sort, reverse=True): | |
604 | if group_name not in initial_repo_group_list: |
|
604 | if group_name not in initial_repo_group_list: | |
605 | repo_group = RepoGroup.get_by_group_name(group_name) |
|
605 | repo_group = RepoGroup.get_by_group_name(group_name) | |
606 | if (repo_group.children.all() or |
|
606 | if (repo_group.children.all() or | |
607 | not RepoGroupModel().check_exist_filesystem( |
|
607 | not RepoGroupModel().check_exist_filesystem( | |
608 | group_name=group_name, exc_on_failure=False)): |
|
608 | group_name=group_name, exc_on_failure=False)): | |
609 | continue |
|
609 | continue | |
610 |
|
610 | |||
611 | log.info( |
|
611 | log.info( | |
612 | 'Removing non-existing repository group found in db `%s`', |
|
612 | 'Removing non-existing repository group found in db `%s`', | |
613 | group_name) |
|
613 | group_name) | |
614 | try: |
|
614 | try: | |
615 | RepoGroupModel(sa).delete(group_name, fs_remove=False) |
|
615 | RepoGroupModel(sa).delete(group_name, fs_remove=False) | |
616 | sa.commit() |
|
616 | sa.commit() | |
617 | removed.append(group_name) |
|
617 | removed.append(group_name) | |
618 | except Exception: |
|
618 | except Exception: | |
619 | # don't hold further removals on error |
|
619 | # don't hold further removals on error | |
620 | log.exception( |
|
620 | log.exception( | |
621 | 'Unable to remove repository group `%s`', |
|
621 | 'Unable to remove repository group `%s`', | |
622 | group_name) |
|
622 | group_name) | |
623 | sa.rollback() |
|
623 | sa.rollback() | |
624 | raise |
|
624 | raise | |
625 |
|
625 | |||
626 | return added, removed |
|
626 | return added, removed | |
627 |
|
627 | |||
628 |
|
628 | |||
629 | def get_default_cache_settings(settings): |
|
629 | def get_default_cache_settings(settings): | |
630 | cache_settings = {} |
|
630 | cache_settings = {} | |
631 | for key in settings.keys(): |
|
631 | for key in settings.keys(): | |
632 | for prefix in ['beaker.cache.', 'cache.']: |
|
632 | for prefix in ['beaker.cache.', 'cache.']: | |
633 | if key.startswith(prefix): |
|
633 | if key.startswith(prefix): | |
634 | name = key.split(prefix)[1].strip() |
|
634 | name = key.split(prefix)[1].strip() | |
635 | cache_settings[name] = settings[key].strip() |
|
635 | cache_settings[name] = settings[key].strip() | |
636 | return cache_settings |
|
636 | return cache_settings | |
637 |
|
637 | |||
638 |
|
638 | |||
639 | # set cache regions for beaker so celery can utilise it |
|
639 | # set cache regions for beaker so celery can utilise it | |
640 | def add_cache(settings): |
|
640 | def add_cache(settings): | |
641 | from rhodecode.lib import caches |
|
641 | from rhodecode.lib import caches | |
642 | cache_settings = {'regions': None} |
|
642 | cache_settings = {'regions': None} | |
643 | # main cache settings used as default ... |
|
643 | # main cache settings used as default ... | |
644 | cache_settings.update(get_default_cache_settings(settings)) |
|
644 | cache_settings.update(get_default_cache_settings(settings)) | |
645 |
|
645 | |||
646 | if cache_settings['regions']: |
|
646 | if cache_settings['regions']: | |
647 | for region in cache_settings['regions'].split(','): |
|
647 | for region in cache_settings['regions'].split(','): | |
648 | region = region.strip() |
|
648 | region = region.strip() | |
649 | region_settings = {} |
|
649 | region_settings = {} | |
650 | for key, value in cache_settings.items(): |
|
650 | for key, value in cache_settings.items(): | |
651 | if key.startswith(region): |
|
651 | if key.startswith(region): | |
652 | region_settings[key.split('.')[1]] = value |
|
652 | region_settings[key.split('.')[1]] = value | |
653 |
|
653 | |||
654 | caches.configure_cache_region( |
|
654 | caches.configure_cache_region( | |
655 | region, region_settings, cache_settings) |
|
655 | region, region_settings, cache_settings) | |
656 |
|
656 | |||
657 |
|
657 | |||
658 | def load_rcextensions(root_path): |
|
658 | def load_rcextensions(root_path): | |
659 | import rhodecode |
|
659 | import rhodecode | |
660 | from rhodecode.config import conf |
|
660 | from rhodecode.config import conf | |
661 |
|
661 | |||
662 | path = os.path.join(root_path, 'rcextensions', '__init__.py') |
|
662 | path = os.path.join(root_path, 'rcextensions', '__init__.py') | |
663 | if os.path.isfile(path): |
|
663 | if os.path.isfile(path): | |
664 | rcext = create_module('rc', path) |
|
664 | rcext = create_module('rc', path) | |
665 | EXT = rhodecode.EXTENSIONS = rcext |
|
665 | EXT = rhodecode.EXTENSIONS = rcext | |
666 | log.debug('Found rcextensions now loading %s...', rcext) |
|
666 | log.debug('Found rcextensions now loading %s...', rcext) | |
667 |
|
667 | |||
668 | # Additional mappings that are not present in the pygments lexers |
|
668 | # Additional mappings that are not present in the pygments lexers | |
669 | conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {})) |
|
669 | conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {})) | |
670 |
|
670 | |||
671 | # auto check if the module is not missing any data, set to default if is |
|
671 | # auto check if the module is not missing any data, set to default if is | |
672 | # this will help autoupdate new feature of rcext module |
|
672 | # this will help autoupdate new feature of rcext module | |
673 | #from rhodecode.config import rcextensions |
|
673 | #from rhodecode.config import rcextensions | |
674 | #for k in dir(rcextensions): |
|
674 | #for k in dir(rcextensions): | |
675 | # if not k.startswith('_') and not hasattr(EXT, k): |
|
675 | # if not k.startswith('_') and not hasattr(EXT, k): | |
676 | # setattr(EXT, k, getattr(rcextensions, k)) |
|
676 | # setattr(EXT, k, getattr(rcextensions, k)) | |
677 |
|
677 | |||
678 |
|
678 | |||
679 | def get_custom_lexer(extension): |
|
679 | def get_custom_lexer(extension): | |
680 | """ |
|
680 | """ | |
681 | returns a custom lexer if it is defined in rcextensions module, or None |
|
681 | returns a custom lexer if it is defined in rcextensions module, or None | |
682 | if there's no custom lexer defined |
|
682 | if there's no custom lexer defined | |
683 | """ |
|
683 | """ | |
684 | import rhodecode |
|
684 | import rhodecode | |
685 | from pygments import lexers |
|
685 | from pygments import lexers | |
686 |
|
686 | |||
687 | # custom override made by RhodeCode |
|
687 | # custom override made by RhodeCode | |
688 | if extension in ['mako']: |
|
688 | if extension in ['mako']: | |
689 | return lexers.get_lexer_by_name('html+mako') |
|
689 | return lexers.get_lexer_by_name('html+mako') | |
690 |
|
690 | |||
691 | # check if we didn't define this extension as other lexer |
|
691 | # check if we didn't define this extension as other lexer | |
692 | extensions = rhodecode.EXTENSIONS and getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None) |
|
692 | extensions = rhodecode.EXTENSIONS and getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None) | |
693 | if extensions and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS: |
|
693 | if extensions and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS: | |
694 | _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension] |
|
694 | _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension] | |
695 | return lexers.get_lexer_by_name(_lexer_name) |
|
695 | return lexers.get_lexer_by_name(_lexer_name) | |
696 |
|
696 | |||
697 |
|
697 | |||
698 | #============================================================================== |
|
698 | #============================================================================== | |
699 | # TEST FUNCTIONS AND CREATORS |
|
699 | # TEST FUNCTIONS AND CREATORS | |
700 | #============================================================================== |
|
700 | #============================================================================== | |
701 | def create_test_index(repo_location, config): |
|
701 | def create_test_index(repo_location, config): | |
702 | """ |
|
702 | """ | |
703 | Makes default test index. |
|
703 | Makes default test index. | |
704 | """ |
|
704 | """ | |
705 | import rc_testdata |
|
705 | import rc_testdata | |
706 |
|
706 | |||
707 | rc_testdata.extract_search_index( |
|
707 | rc_testdata.extract_search_index( | |
708 | 'vcs_search_index', os.path.dirname(config['search.location'])) |
|
708 | 'vcs_search_index', os.path.dirname(config['search.location'])) | |
709 |
|
709 | |||
710 |
|
710 | |||
711 | def create_test_directory(test_path): |
|
711 | def create_test_directory(test_path): | |
712 | """ |
|
712 | """ | |
713 | Create test directory if it doesn't exist. |
|
713 | Create test directory if it doesn't exist. | |
714 | """ |
|
714 | """ | |
715 | if not os.path.isdir(test_path): |
|
715 | if not os.path.isdir(test_path): | |
716 | log.debug('Creating testdir %s', test_path) |
|
716 | log.debug('Creating testdir %s', test_path) | |
717 | os.makedirs(test_path) |
|
717 | os.makedirs(test_path) | |
718 |
|
718 | |||
719 |
|
719 | |||
720 | def create_test_database(test_path, config): |
|
720 | def create_test_database(test_path, config): | |
721 | """ |
|
721 | """ | |
722 | Makes a fresh database. |
|
722 | Makes a fresh database. | |
723 | """ |
|
723 | """ | |
724 | from rhodecode.lib.db_manage import DbManage |
|
724 | from rhodecode.lib.db_manage import DbManage | |
725 |
|
725 | |||
726 | # PART ONE create db |
|
726 | # PART ONE create db | |
727 | dbconf = config['sqlalchemy.db1.url'] |
|
727 | dbconf = config['sqlalchemy.db1.url'] | |
728 | log.debug('making test db %s', dbconf) |
|
728 | log.debug('making test db %s', dbconf) | |
729 |
|
729 | |||
730 | dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'], |
|
730 | dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'], | |
731 | tests=True, cli_args={'force_ask': True}) |
|
731 | tests=True, cli_args={'force_ask': True}) | |
732 | dbmanage.create_tables(override=True) |
|
732 | dbmanage.create_tables(override=True) | |
733 | dbmanage.set_db_version() |
|
733 | dbmanage.set_db_version() | |
734 | # for tests dynamically set new root paths based on generated content |
|
734 | # for tests dynamically set new root paths based on generated content | |
735 | dbmanage.create_settings(dbmanage.config_prompt(test_path)) |
|
735 | dbmanage.create_settings(dbmanage.config_prompt(test_path)) | |
736 | dbmanage.create_default_user() |
|
736 | dbmanage.create_default_user() | |
737 | dbmanage.create_test_admin_and_users() |
|
737 | dbmanage.create_test_admin_and_users() | |
738 | dbmanage.create_permissions() |
|
738 | dbmanage.create_permissions() | |
739 | dbmanage.populate_default_permissions() |
|
739 | dbmanage.populate_default_permissions() | |
740 | Session().commit() |
|
740 | Session().commit() | |
741 |
|
741 | |||
742 |
|
742 | |||
743 | def create_test_repositories(test_path, config): |
|
743 | def create_test_repositories(test_path, config): | |
744 | """ |
|
744 | """ | |
745 | Creates test repositories in the temporary directory. Repositories are |
|
745 | Creates test repositories in the temporary directory. Repositories are | |
746 | extracted from archives within the rc_testdata package. |
|
746 | extracted from archives within the rc_testdata package. | |
747 | """ |
|
747 | """ | |
748 | import rc_testdata |
|
748 | import rc_testdata | |
749 | from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO |
|
749 | from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO | |
750 |
|
750 | |||
751 | log.debug('making test vcs repositories') |
|
751 | log.debug('making test vcs repositories') | |
752 |
|
752 | |||
753 | idx_path = config['search.location'] |
|
753 | idx_path = config['search.location'] | |
754 | data_path = config['cache_dir'] |
|
754 | data_path = config['cache_dir'] | |
755 |
|
755 | |||
756 | # clean index and data |
|
756 | # clean index and data | |
757 | if idx_path and os.path.exists(idx_path): |
|
757 | if idx_path and os.path.exists(idx_path): | |
758 | log.debug('remove %s', idx_path) |
|
758 | log.debug('remove %s', idx_path) | |
759 | shutil.rmtree(idx_path) |
|
759 | shutil.rmtree(idx_path) | |
760 |
|
760 | |||
761 | if data_path and os.path.exists(data_path): |
|
761 | if data_path and os.path.exists(data_path): | |
762 | log.debug('remove %s', data_path) |
|
762 | log.debug('remove %s', data_path) | |
763 | shutil.rmtree(data_path) |
|
763 | shutil.rmtree(data_path) | |
764 |
|
764 | |||
765 | rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO)) |
|
765 | rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO)) | |
766 | rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO)) |
|
766 | rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO)) | |
767 |
|
767 | |||
768 | # Note: Subversion is in the process of being integrated with the system, |
|
768 | # Note: Subversion is in the process of being integrated with the system, | |
769 | # until we have a properly packed version of the test svn repository, this |
|
769 | # until we have a properly packed version of the test svn repository, this | |
770 | # tries to copy over the repo from a package "rc_testdata" |
|
770 | # tries to copy over the repo from a package "rc_testdata" | |
771 | svn_repo_path = rc_testdata.get_svn_repo_archive() |
|
771 | svn_repo_path = rc_testdata.get_svn_repo_archive() | |
772 | with tarfile.open(svn_repo_path) as tar: |
|
772 | with tarfile.open(svn_repo_path) as tar: | |
773 | tar.extractall(jn(test_path, SVN_REPO)) |
|
773 | tar.extractall(jn(test_path, SVN_REPO)) | |
774 |
|
774 | |||
775 |
|
775 | |||
776 | #============================================================================== |
|
776 | #============================================================================== | |
777 | # PASTER COMMANDS |
|
777 | # PASTER COMMANDS | |
778 | #============================================================================== |
|
778 | #============================================================================== | |
779 | class BasePasterCommand(Command): |
|
779 | class BasePasterCommand(Command): | |
780 | """ |
|
780 | """ | |
781 | Abstract Base Class for paster commands. |
|
781 | Abstract Base Class for paster commands. | |
782 |
|
782 | |||
783 | The celery commands are somewhat aggressive about loading |
|
783 | The celery commands are somewhat aggressive about loading | |
784 | celery.conf, and since our module sets the `CELERY_LOADER` |
|
784 | celery.conf, and since our module sets the `CELERY_LOADER` | |
785 | environment variable to our loader, we have to bootstrap a bit and |
|
785 | environment variable to our loader, we have to bootstrap a bit and | |
786 | make sure we've had a chance to load the pylons config off of the |
|
786 | make sure we've had a chance to load the pylons config off of the | |
787 | command line, otherwise everything fails. |
|
787 | command line, otherwise everything fails. | |
788 | """ |
|
788 | """ | |
789 | min_args = 1 |
|
789 | min_args = 1 | |
790 | min_args_error = "Please provide a paster config file as an argument." |
|
790 | min_args_error = "Please provide a paster config file as an argument." | |
791 | takes_config_file = 1 |
|
791 | takes_config_file = 1 | |
792 | requires_config_file = True |
|
792 | requires_config_file = True | |
793 |
|
793 | |||
794 | def notify_msg(self, msg, log=False): |
|
794 | def notify_msg(self, msg, log=False): | |
795 | """Make a notification to user, additionally if logger is passed |
|
795 | """Make a notification to user, additionally if logger is passed | |
796 | it logs this action using given logger |
|
796 | it logs this action using given logger | |
797 |
|
797 | |||
798 | :param msg: message that will be printed to user |
|
798 | :param msg: message that will be printed to user | |
799 | :param log: logging instance, to use to additionally log this message |
|
799 | :param log: logging instance, to use to additionally log this message | |
800 |
|
800 | |||
801 | """ |
|
801 | """ | |
802 | if log and isinstance(log, logging): |
|
802 | if log and isinstance(log, logging): | |
803 | log(msg) |
|
803 | log(msg) | |
804 |
|
804 | |||
805 | def run(self, args): |
|
805 | def run(self, args): | |
806 | """ |
|
806 | """ | |
807 | Overrides Command.run |
|
807 | Overrides Command.run | |
808 |
|
808 | |||
809 | Checks for a config file argument and loads it. |
|
809 | Checks for a config file argument and loads it. | |
810 | """ |
|
810 | """ | |
811 | if len(args) < self.min_args: |
|
811 | if len(args) < self.min_args: | |
812 | raise BadCommand( |
|
812 | raise BadCommand( | |
813 | self.min_args_error % {'min_args': self.min_args, |
|
813 | self.min_args_error % {'min_args': self.min_args, | |
814 | 'actual_args': len(args)}) |
|
814 | 'actual_args': len(args)}) | |
815 |
|
815 | |||
816 | # Decrement because we're going to lob off the first argument. |
|
816 | # Decrement because we're going to lob off the first argument. | |
817 | # @@ This is hacky |
|
817 | # @@ This is hacky | |
818 | self.min_args -= 1 |
|
818 | self.min_args -= 1 | |
819 | self.bootstrap_config(args[0]) |
|
819 | self.bootstrap_config(args[0]) | |
820 | self.update_parser() |
|
820 | self.update_parser() | |
821 | return super(BasePasterCommand, self).run(args[1:]) |
|
821 | return super(BasePasterCommand, self).run(args[1:]) | |
822 |
|
822 | |||
823 | def update_parser(self): |
|
823 | def update_parser(self): | |
824 | """ |
|
824 | """ | |
825 | Abstract method. Allows for the class' parser to be updated |
|
825 | Abstract method. Allows for the class' parser to be updated | |
826 | before the superclass' `run` method is called. Necessary to |
|
826 | before the superclass' `run` method is called. Necessary to | |
827 | allow options/arguments to be passed through to the underlying |
|
827 | allow options/arguments to be passed through to the underlying | |
828 | celery command. |
|
828 | celery command. | |
829 | """ |
|
829 | """ | |
830 | raise NotImplementedError("Abstract Method.") |
|
830 | raise NotImplementedError("Abstract Method.") | |
831 |
|
831 | |||
832 | def bootstrap_config(self, conf): |
|
832 | def bootstrap_config(self, conf): | |
833 | """ |
|
833 | """ | |
834 | Loads the pylons configuration. |
|
834 | Loads the pylons configuration. | |
835 | """ |
|
835 | """ | |
836 | from pylons import config as pylonsconfig |
|
836 | from pylons import config as pylonsconfig | |
837 |
|
837 | |||
838 | self.path_to_ini_file = os.path.realpath(conf) |
|
838 | self.path_to_ini_file = os.path.realpath(conf) | |
839 | conf = paste.deploy.appconfig('config:' + self.path_to_ini_file) |
|
839 | conf = paste.deploy.appconfig('config:' + self.path_to_ini_file) | |
840 | pylonsconfig.init_app(conf.global_conf, conf.local_conf) |
|
840 | pylonsconfig.init_app(conf.global_conf, conf.local_conf) | |
841 |
|
841 | |||
842 | def _init_session(self): |
|
842 | def _init_session(self): | |
843 | """ |
|
843 | """ | |
844 | Inits SqlAlchemy Session |
|
844 | Inits SqlAlchemy Session | |
845 | """ |
|
845 | """ | |
846 | logging.config.fileConfig(self.path_to_ini_file) |
|
846 | logging.config.fileConfig(self.path_to_ini_file) | |
847 | from pylons import config |
|
847 | from pylons import config | |
848 | from rhodecode.config.utils import initialize_database |
|
848 | from rhodecode.config.utils import initialize_database | |
849 |
|
849 | |||
850 | # get to remove repos !! |
|
850 | # get to remove repos !! | |
851 | add_cache(config) |
|
851 | add_cache(config) | |
852 | initialize_database(config) |
|
852 | initialize_database(config) | |
853 |
|
853 | |||
854 |
|
854 | |||
855 | @decorator.decorator |
|
|||
856 | def jsonify(func, *args, **kwargs): |
|
|||
857 | """Action decorator that formats output for JSON |
|
|||
858 |
|
||||
859 | Given a function that will return content, this decorator will turn |
|
|||
860 | the result into JSON, with a content-type of 'application/json' and |
|
|||
861 | output it. |
|
|||
862 |
|
||||
863 | """ |
|
|||
864 | from pylons.decorators.util import get_pylons |
|
|||
865 | from rhodecode.lib.ext_json import json |
|
|||
866 | pylons = get_pylons(args) |
|
|||
867 | pylons.response.headers['Content-Type'] = 'application/json; charset=utf-8' |
|
|||
868 | data = func(*args, **kwargs) |
|
|||
869 | if isinstance(data, (list, tuple)): |
|
|||
870 | msg = "JSON responses with Array envelopes are susceptible to " \ |
|
|||
871 | "cross-site data leak attacks, see " \ |
|
|||
872 | "http://wiki.pylonshq.com/display/pylonsfaq/Warnings" |
|
|||
873 | warnings.warn(msg, Warning, 2) |
|
|||
874 | log.warning(msg) |
|
|||
875 | log.debug("Returning JSON wrapped action output") |
|
|||
876 | return json.dumps(data, encoding='utf-8') |
|
|||
877 |
|
||||
878 |
|
||||
879 | class PartialRenderer(object): |
|
855 | class PartialRenderer(object): | |
880 | """ |
|
856 | """ | |
881 | Partial renderer used to render chunks of html used in datagrids |
|
857 | Partial renderer used to render chunks of html used in datagrids | |
882 | use like:: |
|
858 | use like:: | |
883 |
|
859 | |||
884 | _render = PartialRenderer('data_table/_dt_elements.mako') |
|
860 | _render = PartialRenderer('data_table/_dt_elements.mako') | |
885 | _render('quick_menu', args, kwargs) |
|
861 | _render('quick_menu', args, kwargs) | |
886 | PartialRenderer.h, |
|
862 | PartialRenderer.h, | |
887 | c, |
|
863 | c, | |
888 | _, |
|
864 | _, | |
889 | ungettext |
|
865 | ungettext | |
890 | are the template stuff initialized inside and can be re-used later |
|
866 | are the template stuff initialized inside and can be re-used later | |
891 |
|
867 | |||
892 | :param tmpl_name: template path relate to /templates/ dir |
|
868 | :param tmpl_name: template path relate to /templates/ dir | |
893 | """ |
|
869 | """ | |
894 |
|
870 | |||
895 | def __init__(self, tmpl_name): |
|
871 | def __init__(self, tmpl_name): | |
896 | import rhodecode |
|
872 | import rhodecode | |
897 | from pylons import request, tmpl_context as c |
|
873 | from pylons import request, tmpl_context as c | |
898 | from pylons.i18n.translation import _, ungettext |
|
874 | from pylons.i18n.translation import _, ungettext | |
899 | from rhodecode.lib import helpers as h |
|
875 | from rhodecode.lib import helpers as h | |
900 |
|
876 | |||
901 | self.tmpl_name = tmpl_name |
|
877 | self.tmpl_name = tmpl_name | |
902 | self.rhodecode = rhodecode |
|
878 | self.rhodecode = rhodecode | |
903 | self.c = c |
|
879 | self.c = c | |
904 | self._ = _ |
|
880 | self._ = _ | |
905 | self.ungettext = ungettext |
|
881 | self.ungettext = ungettext | |
906 | self.h = h |
|
882 | self.h = h | |
907 | self.request = request |
|
883 | self.request = request | |
908 |
|
884 | |||
909 | def _mako_lookup(self): |
|
885 | def _mako_lookup(self): | |
910 | _tmpl_lookup = self.rhodecode.CONFIG['pylons.app_globals'].mako_lookup |
|
886 | _tmpl_lookup = self.rhodecode.CONFIG['pylons.app_globals'].mako_lookup | |
911 | return _tmpl_lookup.get_template(self.tmpl_name) |
|
887 | return _tmpl_lookup.get_template(self.tmpl_name) | |
912 |
|
888 | |||
913 | def _update_kwargs_for_render(self, kwargs): |
|
889 | def _update_kwargs_for_render(self, kwargs): | |
914 | """ |
|
890 | """ | |
915 | Inject params required for Mako rendering |
|
891 | Inject params required for Mako rendering | |
916 | """ |
|
892 | """ | |
917 | _kwargs = { |
|
893 | _kwargs = { | |
918 | '_': self._, |
|
894 | '_': self._, | |
919 | 'h': self.h, |
|
895 | 'h': self.h, | |
920 | 'c': self.c, |
|
896 | 'c': self.c, | |
921 | 'request': self.request, |
|
897 | 'request': self.request, | |
922 | '_ungettext': self.ungettext, |
|
898 | '_ungettext': self.ungettext, | |
923 | } |
|
899 | } | |
924 | _kwargs.update(kwargs) |
|
900 | _kwargs.update(kwargs) | |
925 | return _kwargs |
|
901 | return _kwargs | |
926 |
|
902 | |||
927 | def _render_with_exc(self, render_func, args, kwargs): |
|
903 | def _render_with_exc(self, render_func, args, kwargs): | |
928 | try: |
|
904 | try: | |
929 | return render_func.render(*args, **kwargs) |
|
905 | return render_func.render(*args, **kwargs) | |
930 | except: |
|
906 | except: | |
931 | log.error(exceptions.text_error_template().render()) |
|
907 | log.error(exceptions.text_error_template().render()) | |
932 | raise |
|
908 | raise | |
933 |
|
909 | |||
934 | def _get_template(self, template_obj, def_name): |
|
910 | def _get_template(self, template_obj, def_name): | |
935 | if def_name: |
|
911 | if def_name: | |
936 | tmpl = template_obj.get_def(def_name) |
|
912 | tmpl = template_obj.get_def(def_name) | |
937 | else: |
|
913 | else: | |
938 | tmpl = template_obj |
|
914 | tmpl = template_obj | |
939 | return tmpl |
|
915 | return tmpl | |
940 |
|
916 | |||
941 | def render(self, def_name, *args, **kwargs): |
|
917 | def render(self, def_name, *args, **kwargs): | |
942 | lookup_obj = self._mako_lookup() |
|
918 | lookup_obj = self._mako_lookup() | |
943 | tmpl = self._get_template(lookup_obj, def_name=def_name) |
|
919 | tmpl = self._get_template(lookup_obj, def_name=def_name) | |
944 | kwargs = self._update_kwargs_for_render(kwargs) |
|
920 | kwargs = self._update_kwargs_for_render(kwargs) | |
945 | return self._render_with_exc(tmpl, args, kwargs) |
|
921 | return self._render_with_exc(tmpl, args, kwargs) | |
946 |
|
922 | |||
947 | def __call__(self, tmpl, *args, **kwargs): |
|
923 | def __call__(self, tmpl, *args, **kwargs): | |
948 | return self.render(tmpl, *args, **kwargs) |
|
924 | return self.render(tmpl, *args, **kwargs) | |
949 |
|
925 | |||
950 |
|
926 | |||
951 | def password_changed(auth_user, session): |
|
927 | def password_changed(auth_user, session): | |
952 | # Never report password change in case of default user or anonymous user. |
|
928 | # Never report password change in case of default user or anonymous user. | |
953 | if auth_user.username == User.DEFAULT_USER or auth_user.user_id is None: |
|
929 | if auth_user.username == User.DEFAULT_USER or auth_user.user_id is None: | |
954 | return False |
|
930 | return False | |
955 |
|
931 | |||
956 | password_hash = md5(auth_user.password) if auth_user.password else None |
|
932 | password_hash = md5(auth_user.password) if auth_user.password else None | |
957 | rhodecode_user = session.get('rhodecode_user', {}) |
|
933 | rhodecode_user = session.get('rhodecode_user', {}) | |
958 | session_password_hash = rhodecode_user.get('password', '') |
|
934 | session_password_hash = rhodecode_user.get('password', '') | |
959 | return password_hash != session_password_hash |
|
935 | return password_hash != session_password_hash | |
960 |
|
936 | |||
961 |
|
937 | |||
962 | def read_opensource_licenses(): |
|
938 | def read_opensource_licenses(): | |
963 | global _license_cache |
|
939 | global _license_cache | |
964 |
|
940 | |||
965 | if not _license_cache: |
|
941 | if not _license_cache: | |
966 | licenses = pkg_resources.resource_string( |
|
942 | licenses = pkg_resources.resource_string( | |
967 | 'rhodecode', 'config/licenses.json') |
|
943 | 'rhodecode', 'config/licenses.json') | |
968 | _license_cache = json.loads(licenses) |
|
944 | _license_cache = json.loads(licenses) | |
969 |
|
945 | |||
970 | return _license_cache |
|
946 | return _license_cache | |
971 |
|
947 | |||
972 |
|
948 | |||
973 | def get_registry(request): |
|
949 | def get_registry(request): | |
974 | """ |
|
950 | """ | |
975 | Utility to get the pyramid registry from a request. During migration to |
|
951 | Utility to get the pyramid registry from a request. During migration to | |
976 | pyramid we sometimes want to use the pyramid registry from pylons context. |
|
952 | pyramid we sometimes want to use the pyramid registry from pylons context. | |
977 | Therefore this utility returns `request.registry` for pyramid requests and |
|
953 | Therefore this utility returns `request.registry` for pyramid requests and | |
978 | uses `get_current_registry()` for pylons requests. |
|
954 | uses `get_current_registry()` for pylons requests. | |
979 | """ |
|
955 | """ | |
980 | try: |
|
956 | try: | |
981 | return request.registry |
|
957 | return request.registry | |
982 | except AttributeError: |
|
958 | except AttributeError: | |
983 | return get_current_registry() |
|
959 | return get_current_registry() | |
984 |
|
960 | |||
985 |
|
961 | |||
986 | def generate_platform_uuid(): |
|
962 | def generate_platform_uuid(): | |
987 | """ |
|
963 | """ | |
988 | Generates platform UUID based on it's name |
|
964 | Generates platform UUID based on it's name | |
989 | """ |
|
965 | """ | |
990 | import platform |
|
966 | import platform | |
991 |
|
967 | |||
992 | try: |
|
968 | try: | |
993 | uuid_list = [platform.platform()] |
|
969 | uuid_list = [platform.platform()] | |
994 | return hashlib.sha256(':'.join(uuid_list)).hexdigest() |
|
970 | return hashlib.sha256(':'.join(uuid_list)).hexdigest() | |
995 | except Exception as e: |
|
971 | except Exception as e: | |
996 | log.error('Failed to generate host uuid: %s' % e) |
|
972 | log.error('Failed to generate host uuid: %s' % e) | |
997 | return 'UNDEFINED' |
|
973 | return 'UNDEFINED' |
@@ -1,1029 +1,1025 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2017 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
|||
22 | Repository model for rhodecode |
|
|||
23 | """ |
|
|||
24 |
|
||||
25 | import logging |
|
21 | import logging | |
26 | import os |
|
22 | import os | |
27 | import re |
|
23 | import re | |
28 | import shutil |
|
24 | import shutil | |
29 | import time |
|
25 | import time | |
30 | import traceback |
|
26 | import traceback | |
31 | import datetime |
|
27 | import datetime | |
32 |
|
28 | |||
33 | from pyramid.threadlocal import get_current_request |
|
29 | from pyramid.threadlocal import get_current_request | |
34 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
30 | from zope.cachedescriptors.property import Lazy as LazyProperty | |
35 |
|
31 | |||
36 | from rhodecode import events |
|
32 | from rhodecode import events | |
37 | from rhodecode.lib import helpers as h |
|
33 | from rhodecode.lib import helpers as h | |
38 | from rhodecode.lib.auth import HasUserGroupPermissionAny |
|
34 | from rhodecode.lib.auth import HasUserGroupPermissionAny | |
39 | from rhodecode.lib.caching_query import FromCache |
|
35 | from rhodecode.lib.caching_query import FromCache | |
40 | from rhodecode.lib.exceptions import AttachedForksError |
|
36 | from rhodecode.lib.exceptions import AttachedForksError | |
41 | from rhodecode.lib.hooks_base import log_delete_repository |
|
37 | from rhodecode.lib.hooks_base import log_delete_repository | |
42 | from rhodecode.lib.utils import make_db_config |
|
38 | from rhodecode.lib.utils import make_db_config | |
43 | from rhodecode.lib.utils2 import ( |
|
39 | from rhodecode.lib.utils2 import ( | |
44 | safe_str, safe_unicode, remove_prefix, obfuscate_url_pw, |
|
40 | safe_str, safe_unicode, remove_prefix, obfuscate_url_pw, | |
45 | get_current_rhodecode_user, safe_int, datetime_to_time, action_logger_generic) |
|
41 | get_current_rhodecode_user, safe_int, datetime_to_time, action_logger_generic) | |
46 | from rhodecode.lib.vcs.backends import get_backend |
|
42 | from rhodecode.lib.vcs.backends import get_backend | |
47 | from rhodecode.model import BaseModel |
|
43 | from rhodecode.model import BaseModel | |
48 | from rhodecode.model.db import (_hash_key, |
|
44 | from rhodecode.model.db import (_hash_key, | |
49 | Repository, UserRepoToPerm, UserGroupRepoToPerm, UserRepoGroupToPerm, |
|
45 | Repository, UserRepoToPerm, UserGroupRepoToPerm, UserRepoGroupToPerm, | |
50 | UserGroupRepoGroupToPerm, User, Permission, Statistics, UserGroup, |
|
46 | UserGroupRepoGroupToPerm, User, Permission, Statistics, UserGroup, | |
51 | RepoGroup, RepositoryField) |
|
47 | RepoGroup, RepositoryField) | |
52 |
|
48 | |||
53 | from rhodecode.model.settings import VcsSettingsModel |
|
49 | from rhodecode.model.settings import VcsSettingsModel | |
54 |
|
50 | |||
55 |
|
51 | |||
56 | log = logging.getLogger(__name__) |
|
52 | log = logging.getLogger(__name__) | |
57 |
|
53 | |||
58 |
|
54 | |||
59 | class RepoModel(BaseModel): |
|
55 | class RepoModel(BaseModel): | |
60 |
|
56 | |||
61 | cls = Repository |
|
57 | cls = Repository | |
62 |
|
58 | |||
63 | def _get_user_group(self, users_group): |
|
59 | def _get_user_group(self, users_group): | |
64 | return self._get_instance(UserGroup, users_group, |
|
60 | return self._get_instance(UserGroup, users_group, | |
65 | callback=UserGroup.get_by_group_name) |
|
61 | callback=UserGroup.get_by_group_name) | |
66 |
|
62 | |||
67 | def _get_repo_group(self, repo_group): |
|
63 | def _get_repo_group(self, repo_group): | |
68 | return self._get_instance(RepoGroup, repo_group, |
|
64 | return self._get_instance(RepoGroup, repo_group, | |
69 | callback=RepoGroup.get_by_group_name) |
|
65 | callback=RepoGroup.get_by_group_name) | |
70 |
|
66 | |||
71 | def _create_default_perms(self, repository, private): |
|
67 | def _create_default_perms(self, repository, private): | |
72 | # create default permission |
|
68 | # create default permission | |
73 | default = 'repository.read' |
|
69 | default = 'repository.read' | |
74 | def_user = User.get_default_user() |
|
70 | def_user = User.get_default_user() | |
75 | for p in def_user.user_perms: |
|
71 | for p in def_user.user_perms: | |
76 | if p.permission.permission_name.startswith('repository.'): |
|
72 | if p.permission.permission_name.startswith('repository.'): | |
77 | default = p.permission.permission_name |
|
73 | default = p.permission.permission_name | |
78 | break |
|
74 | break | |
79 |
|
75 | |||
80 | default_perm = 'repository.none' if private else default |
|
76 | default_perm = 'repository.none' if private else default | |
81 |
|
77 | |||
82 | repo_to_perm = UserRepoToPerm() |
|
78 | repo_to_perm = UserRepoToPerm() | |
83 | repo_to_perm.permission = Permission.get_by_key(default_perm) |
|
79 | repo_to_perm.permission = Permission.get_by_key(default_perm) | |
84 |
|
80 | |||
85 | repo_to_perm.repository = repository |
|
81 | repo_to_perm.repository = repository | |
86 | repo_to_perm.user_id = def_user.user_id |
|
82 | repo_to_perm.user_id = def_user.user_id | |
87 |
|
83 | |||
88 | return repo_to_perm |
|
84 | return repo_to_perm | |
89 |
|
85 | |||
90 | @LazyProperty |
|
86 | @LazyProperty | |
91 | def repos_path(self): |
|
87 | def repos_path(self): | |
92 | """ |
|
88 | """ | |
93 | Gets the repositories root path from database |
|
89 | Gets the repositories root path from database | |
94 | """ |
|
90 | """ | |
95 | settings_model = VcsSettingsModel(sa=self.sa) |
|
91 | settings_model = VcsSettingsModel(sa=self.sa) | |
96 | return settings_model.get_repos_location() |
|
92 | return settings_model.get_repos_location() | |
97 |
|
93 | |||
98 | def get(self, repo_id, cache=False): |
|
94 | def get(self, repo_id, cache=False): | |
99 | repo = self.sa.query(Repository) \ |
|
95 | repo = self.sa.query(Repository) \ | |
100 | .filter(Repository.repo_id == repo_id) |
|
96 | .filter(Repository.repo_id == repo_id) | |
101 |
|
97 | |||
102 | if cache: |
|
98 | if cache: | |
103 | repo = repo.options( |
|
99 | repo = repo.options( | |
104 | FromCache("sql_cache_short", "get_repo_%s" % repo_id)) |
|
100 | FromCache("sql_cache_short", "get_repo_%s" % repo_id)) | |
105 | return repo.scalar() |
|
101 | return repo.scalar() | |
106 |
|
102 | |||
107 | def get_repo(self, repository): |
|
103 | def get_repo(self, repository): | |
108 | return self._get_repo(repository) |
|
104 | return self._get_repo(repository) | |
109 |
|
105 | |||
110 | def get_by_repo_name(self, repo_name, cache=False): |
|
106 | def get_by_repo_name(self, repo_name, cache=False): | |
111 | repo = self.sa.query(Repository) \ |
|
107 | repo = self.sa.query(Repository) \ | |
112 | .filter(Repository.repo_name == repo_name) |
|
108 | .filter(Repository.repo_name == repo_name) | |
113 |
|
109 | |||
114 | if cache: |
|
110 | if cache: | |
115 | name_key = _hash_key(repo_name) |
|
111 | name_key = _hash_key(repo_name) | |
116 | repo = repo.options( |
|
112 | repo = repo.options( | |
117 | FromCache("sql_cache_short", "get_repo_%s" % name_key)) |
|
113 | FromCache("sql_cache_short", "get_repo_%s" % name_key)) | |
118 | return repo.scalar() |
|
114 | return repo.scalar() | |
119 |
|
115 | |||
120 | def _extract_id_from_repo_name(self, repo_name): |
|
116 | def _extract_id_from_repo_name(self, repo_name): | |
121 | if repo_name.startswith('/'): |
|
117 | if repo_name.startswith('/'): | |
122 | repo_name = repo_name.lstrip('/') |
|
118 | repo_name = repo_name.lstrip('/') | |
123 | by_id_match = re.match(r'^_(\d{1,})', repo_name) |
|
119 | by_id_match = re.match(r'^_(\d{1,})', repo_name) | |
124 | if by_id_match: |
|
120 | if by_id_match: | |
125 | return by_id_match.groups()[0] |
|
121 | return by_id_match.groups()[0] | |
126 |
|
122 | |||
127 | def get_repo_by_id(self, repo_name): |
|
123 | def get_repo_by_id(self, repo_name): | |
128 | """ |
|
124 | """ | |
129 | Extracts repo_name by id from special urls. |
|
125 | Extracts repo_name by id from special urls. | |
130 | Example url is _11/repo_name |
|
126 | Example url is _11/repo_name | |
131 |
|
127 | |||
132 | :param repo_name: |
|
128 | :param repo_name: | |
133 | :return: repo object if matched else None |
|
129 | :return: repo object if matched else None | |
134 | """ |
|
130 | """ | |
135 |
|
131 | |||
136 | try: |
|
132 | try: | |
137 | _repo_id = self._extract_id_from_repo_name(repo_name) |
|
133 | _repo_id = self._extract_id_from_repo_name(repo_name) | |
138 | if _repo_id: |
|
134 | if _repo_id: | |
139 | return self.get(_repo_id) |
|
135 | return self.get(_repo_id) | |
140 | except Exception: |
|
136 | except Exception: | |
141 | log.exception('Failed to extract repo_name from URL') |
|
137 | log.exception('Failed to extract repo_name from URL') | |
142 |
|
138 | |||
143 | return None |
|
139 | return None | |
144 |
|
140 | |||
145 | def get_repos_for_root(self, root, traverse=False): |
|
141 | def get_repos_for_root(self, root, traverse=False): | |
146 | if traverse: |
|
142 | if traverse: | |
147 | like_expression = u'{}%'.format(safe_unicode(root)) |
|
143 | like_expression = u'{}%'.format(safe_unicode(root)) | |
148 | repos = Repository.query().filter( |
|
144 | repos = Repository.query().filter( | |
149 | Repository.repo_name.like(like_expression)).all() |
|
145 | Repository.repo_name.like(like_expression)).all() | |
150 | else: |
|
146 | else: | |
151 | if root and not isinstance(root, RepoGroup): |
|
147 | if root and not isinstance(root, RepoGroup): | |
152 | raise ValueError( |
|
148 | raise ValueError( | |
153 | 'Root must be an instance ' |
|
149 | 'Root must be an instance ' | |
154 | 'of RepoGroup, got:{} instead'.format(type(root))) |
|
150 | 'of RepoGroup, got:{} instead'.format(type(root))) | |
155 | repos = Repository.query().filter(Repository.group == root).all() |
|
151 | repos = Repository.query().filter(Repository.group == root).all() | |
156 | return repos |
|
152 | return repos | |
157 |
|
153 | |||
158 | def get_url(self, repo, request=None, permalink=False): |
|
154 | def get_url(self, repo, request=None, permalink=False): | |
159 | if not request: |
|
155 | if not request: | |
160 | request = get_current_request() |
|
156 | request = get_current_request() | |
161 |
|
157 | |||
162 | if not request: |
|
158 | if not request: | |
163 | return |
|
159 | return | |
164 |
|
160 | |||
165 | if permalink: |
|
161 | if permalink: | |
166 | return request.route_url( |
|
162 | return request.route_url( | |
167 | 'repo_summary', repo_name=safe_str(repo.repo_id)) |
|
163 | 'repo_summary', repo_name=safe_str(repo.repo_id)) | |
168 | else: |
|
164 | else: | |
169 | return request.route_url( |
|
165 | return request.route_url( | |
170 | 'repo_summary', repo_name=safe_str(repo.repo_name)) |
|
166 | 'repo_summary', repo_name=safe_str(repo.repo_name)) | |
171 |
|
167 | |||
172 | def get_commit_url(self, repo, commit_id, request=None, permalink=False): |
|
168 | def get_commit_url(self, repo, commit_id, request=None, permalink=False): | |
173 | if not request: |
|
169 | if not request: | |
174 | request = get_current_request() |
|
170 | request = get_current_request() | |
175 |
|
171 | |||
176 | if not request: |
|
172 | if not request: | |
177 | return |
|
173 | return | |
178 |
|
174 | |||
179 | if permalink: |
|
175 | if permalink: | |
180 | return request.route_url( |
|
176 | return request.route_url( | |
181 | 'repo_commit', repo_name=safe_str(repo.repo_id), |
|
177 | 'repo_commit', repo_name=safe_str(repo.repo_id), | |
182 | commit_id=commit_id) |
|
178 | commit_id=commit_id) | |
183 |
|
179 | |||
184 | else: |
|
180 | else: | |
185 | return request.route_url( |
|
181 | return request.route_url( | |
186 | 'repo_commit', repo_name=safe_str(repo.repo_name), |
|
182 | 'repo_commit', repo_name=safe_str(repo.repo_name), | |
187 | commit_id=commit_id) |
|
183 | commit_id=commit_id) | |
188 |
|
184 | |||
189 | @classmethod |
|
185 | @classmethod | |
190 | def update_repoinfo(cls, repositories=None): |
|
186 | def update_repoinfo(cls, repositories=None): | |
191 | if not repositories: |
|
187 | if not repositories: | |
192 | repositories = Repository.getAll() |
|
188 | repositories = Repository.getAll() | |
193 | for repo in repositories: |
|
189 | for repo in repositories: | |
194 | repo.update_commit_cache() |
|
190 | repo.update_commit_cache() | |
195 |
|
191 | |||
196 | def get_repos_as_dict(self, repo_list=None, admin=False, |
|
192 | def get_repos_as_dict(self, repo_list=None, admin=False, | |
197 | super_user_actions=False): |
|
193 | super_user_actions=False): | |
198 | _render = get_current_request().get_partial_renderer( |
|
194 | _render = get_current_request().get_partial_renderer( | |
199 | 'data_table/_dt_elements.mako') |
|
195 | 'data_table/_dt_elements.mako') | |
200 | c = _render.get_call_context() |
|
196 | c = _render.get_call_context() | |
201 |
|
197 | |||
202 | def quick_menu(repo_name): |
|
198 | def quick_menu(repo_name): | |
203 | return _render('quick_menu', repo_name) |
|
199 | return _render('quick_menu', repo_name) | |
204 |
|
200 | |||
205 | def repo_lnk(name, rtype, rstate, private, fork_of): |
|
201 | def repo_lnk(name, rtype, rstate, private, fork_of): | |
206 | return _render('repo_name', name, rtype, rstate, private, fork_of, |
|
202 | return _render('repo_name', name, rtype, rstate, private, fork_of, | |
207 | short_name=not admin, admin=False) |
|
203 | short_name=not admin, admin=False) | |
208 |
|
204 | |||
209 | def last_change(last_change): |
|
205 | def last_change(last_change): | |
210 | if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo: |
|
206 | if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo: | |
211 | last_change = last_change + datetime.timedelta(seconds= |
|
207 | last_change = last_change + datetime.timedelta(seconds= | |
212 | (datetime.datetime.now() - datetime.datetime.utcnow()).seconds) |
|
208 | (datetime.datetime.now() - datetime.datetime.utcnow()).seconds) | |
213 | return _render("last_change", last_change) |
|
209 | return _render("last_change", last_change) | |
214 |
|
210 | |||
215 | def rss_lnk(repo_name): |
|
211 | def rss_lnk(repo_name): | |
216 | return _render("rss", repo_name) |
|
212 | return _render("rss", repo_name) | |
217 |
|
213 | |||
218 | def atom_lnk(repo_name): |
|
214 | def atom_lnk(repo_name): | |
219 | return _render("atom", repo_name) |
|
215 | return _render("atom", repo_name) | |
220 |
|
216 | |||
221 | def last_rev(repo_name, cs_cache): |
|
217 | def last_rev(repo_name, cs_cache): | |
222 | return _render('revision', repo_name, cs_cache.get('revision'), |
|
218 | return _render('revision', repo_name, cs_cache.get('revision'), | |
223 | cs_cache.get('raw_id'), cs_cache.get('author'), |
|
219 | cs_cache.get('raw_id'), cs_cache.get('author'), | |
224 | cs_cache.get('message')) |
|
220 | cs_cache.get('message')) | |
225 |
|
221 | |||
226 | def desc(desc): |
|
222 | def desc(desc): | |
227 | if c.visual.stylify_metatags: |
|
223 | if c.visual.stylify_metatags: | |
228 | desc = h.urlify_text(h.escaped_stylize(desc)) |
|
224 | desc = h.urlify_text(h.escaped_stylize(desc)) | |
229 | else: |
|
225 | else: | |
230 | desc = h.urlify_text(h.html_escape(desc)) |
|
226 | desc = h.urlify_text(h.html_escape(desc)) | |
231 |
|
227 | |||
232 | return _render('repo_desc', desc) |
|
228 | return _render('repo_desc', desc) | |
233 |
|
229 | |||
234 | def state(repo_state): |
|
230 | def state(repo_state): | |
235 | return _render("repo_state", repo_state) |
|
231 | return _render("repo_state", repo_state) | |
236 |
|
232 | |||
237 | def repo_actions(repo_name): |
|
233 | def repo_actions(repo_name): | |
238 | return _render('repo_actions', repo_name, super_user_actions) |
|
234 | return _render('repo_actions', repo_name, super_user_actions) | |
239 |
|
235 | |||
240 | def user_profile(username): |
|
236 | def user_profile(username): | |
241 | return _render('user_profile', username) |
|
237 | return _render('user_profile', username) | |
242 |
|
238 | |||
243 | repos_data = [] |
|
239 | repos_data = [] | |
244 | for repo in repo_list: |
|
240 | for repo in repo_list: | |
245 | cs_cache = repo.changeset_cache |
|
241 | cs_cache = repo.changeset_cache | |
246 | row = { |
|
242 | row = { | |
247 | "menu": quick_menu(repo.repo_name), |
|
243 | "menu": quick_menu(repo.repo_name), | |
248 |
|
244 | |||
249 | "name": repo_lnk(repo.repo_name, repo.repo_type, |
|
245 | "name": repo_lnk(repo.repo_name, repo.repo_type, | |
250 | repo.repo_state, repo.private, repo.fork), |
|
246 | repo.repo_state, repo.private, repo.fork), | |
251 | "name_raw": repo.repo_name.lower(), |
|
247 | "name_raw": repo.repo_name.lower(), | |
252 |
|
248 | |||
253 | "last_change": last_change(repo.last_db_change), |
|
249 | "last_change": last_change(repo.last_db_change), | |
254 | "last_change_raw": datetime_to_time(repo.last_db_change), |
|
250 | "last_change_raw": datetime_to_time(repo.last_db_change), | |
255 |
|
251 | |||
256 | "last_changeset": last_rev(repo.repo_name, cs_cache), |
|
252 | "last_changeset": last_rev(repo.repo_name, cs_cache), | |
257 | "last_changeset_raw": cs_cache.get('revision'), |
|
253 | "last_changeset_raw": cs_cache.get('revision'), | |
258 |
|
254 | |||
259 | "desc": desc(repo.description_safe), |
|
255 | "desc": desc(repo.description_safe), | |
260 | "owner": user_profile(repo.user.username), |
|
256 | "owner": user_profile(repo.user.username), | |
261 |
|
257 | |||
262 | "state": state(repo.repo_state), |
|
258 | "state": state(repo.repo_state), | |
263 | "rss": rss_lnk(repo.repo_name), |
|
259 | "rss": rss_lnk(repo.repo_name), | |
264 |
|
260 | |||
265 | "atom": atom_lnk(repo.repo_name), |
|
261 | "atom": atom_lnk(repo.repo_name), | |
266 | } |
|
262 | } | |
267 | if admin: |
|
263 | if admin: | |
268 | row.update({ |
|
264 | row.update({ | |
269 | "action": repo_actions(repo.repo_name), |
|
265 | "action": repo_actions(repo.repo_name), | |
270 | }) |
|
266 | }) | |
271 | repos_data.append(row) |
|
267 | repos_data.append(row) | |
272 |
|
268 | |||
273 | return repos_data |
|
269 | return repos_data | |
274 |
|
270 | |||
275 | def _get_defaults(self, repo_name): |
|
271 | def _get_defaults(self, repo_name): | |
276 | """ |
|
272 | """ | |
277 | Gets information about repository, and returns a dict for |
|
273 | Gets information about repository, and returns a dict for | |
278 | usage in forms |
|
274 | usage in forms | |
279 |
|
275 | |||
280 | :param repo_name: |
|
276 | :param repo_name: | |
281 | """ |
|
277 | """ | |
282 |
|
278 | |||
283 | repo_info = Repository.get_by_repo_name(repo_name) |
|
279 | repo_info = Repository.get_by_repo_name(repo_name) | |
284 |
|
280 | |||
285 | if repo_info is None: |
|
281 | if repo_info is None: | |
286 | return None |
|
282 | return None | |
287 |
|
283 | |||
288 | defaults = repo_info.get_dict() |
|
284 | defaults = repo_info.get_dict() | |
289 | defaults['repo_name'] = repo_info.just_name |
|
285 | defaults['repo_name'] = repo_info.just_name | |
290 |
|
286 | |||
291 | groups = repo_info.groups_with_parents |
|
287 | groups = repo_info.groups_with_parents | |
292 | parent_group = groups[-1] if groups else None |
|
288 | parent_group = groups[-1] if groups else None | |
293 |
|
289 | |||
294 | # we use -1 as this is how in HTML, we mark an empty group |
|
290 | # we use -1 as this is how in HTML, we mark an empty group | |
295 | defaults['repo_group'] = getattr(parent_group, 'group_id', -1) |
|
291 | defaults['repo_group'] = getattr(parent_group, 'group_id', -1) | |
296 |
|
292 | |||
297 | keys_to_process = ( |
|
293 | keys_to_process = ( | |
298 | {'k': 'repo_type', 'strip': False}, |
|
294 | {'k': 'repo_type', 'strip': False}, | |
299 | {'k': 'repo_enable_downloads', 'strip': True}, |
|
295 | {'k': 'repo_enable_downloads', 'strip': True}, | |
300 | {'k': 'repo_description', 'strip': True}, |
|
296 | {'k': 'repo_description', 'strip': True}, | |
301 | {'k': 'repo_enable_locking', 'strip': True}, |
|
297 | {'k': 'repo_enable_locking', 'strip': True}, | |
302 | {'k': 'repo_landing_rev', 'strip': True}, |
|
298 | {'k': 'repo_landing_rev', 'strip': True}, | |
303 | {'k': 'clone_uri', 'strip': False}, |
|
299 | {'k': 'clone_uri', 'strip': False}, | |
304 | {'k': 'repo_private', 'strip': True}, |
|
300 | {'k': 'repo_private', 'strip': True}, | |
305 | {'k': 'repo_enable_statistics', 'strip': True} |
|
301 | {'k': 'repo_enable_statistics', 'strip': True} | |
306 | ) |
|
302 | ) | |
307 |
|
303 | |||
308 | for item in keys_to_process: |
|
304 | for item in keys_to_process: | |
309 | attr = item['k'] |
|
305 | attr = item['k'] | |
310 | if item['strip']: |
|
306 | if item['strip']: | |
311 | attr = remove_prefix(item['k'], 'repo_') |
|
307 | attr = remove_prefix(item['k'], 'repo_') | |
312 |
|
308 | |||
313 | val = defaults[attr] |
|
309 | val = defaults[attr] | |
314 | if item['k'] == 'repo_landing_rev': |
|
310 | if item['k'] == 'repo_landing_rev': | |
315 | val = ':'.join(defaults[attr]) |
|
311 | val = ':'.join(defaults[attr]) | |
316 | defaults[item['k']] = val |
|
312 | defaults[item['k']] = val | |
317 | if item['k'] == 'clone_uri': |
|
313 | if item['k'] == 'clone_uri': | |
318 | defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden |
|
314 | defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden | |
319 |
|
315 | |||
320 | # fill owner |
|
316 | # fill owner | |
321 | if repo_info.user: |
|
317 | if repo_info.user: | |
322 | defaults.update({'user': repo_info.user.username}) |
|
318 | defaults.update({'user': repo_info.user.username}) | |
323 | else: |
|
319 | else: | |
324 | replacement_user = User.get_first_super_admin().username |
|
320 | replacement_user = User.get_first_super_admin().username | |
325 | defaults.update({'user': replacement_user}) |
|
321 | defaults.update({'user': replacement_user}) | |
326 |
|
322 | |||
327 | return defaults |
|
323 | return defaults | |
328 |
|
324 | |||
329 | def update(self, repo, **kwargs): |
|
325 | def update(self, repo, **kwargs): | |
330 | try: |
|
326 | try: | |
331 | cur_repo = self._get_repo(repo) |
|
327 | cur_repo = self._get_repo(repo) | |
332 | source_repo_name = cur_repo.repo_name |
|
328 | source_repo_name = cur_repo.repo_name | |
333 | if 'user' in kwargs: |
|
329 | if 'user' in kwargs: | |
334 | cur_repo.user = User.get_by_username(kwargs['user']) |
|
330 | cur_repo.user = User.get_by_username(kwargs['user']) | |
335 |
|
331 | |||
336 | if 'repo_group' in kwargs: |
|
332 | if 'repo_group' in kwargs: | |
337 | cur_repo.group = RepoGroup.get(kwargs['repo_group']) |
|
333 | cur_repo.group = RepoGroup.get(kwargs['repo_group']) | |
338 | log.debug('Updating repo %s with params:%s', cur_repo, kwargs) |
|
334 | log.debug('Updating repo %s with params:%s', cur_repo, kwargs) | |
339 |
|
335 | |||
340 | update_keys = [ |
|
336 | update_keys = [ | |
341 | (1, 'repo_description'), |
|
337 | (1, 'repo_description'), | |
342 | (1, 'repo_landing_rev'), |
|
338 | (1, 'repo_landing_rev'), | |
343 | (1, 'repo_private'), |
|
339 | (1, 'repo_private'), | |
344 | (1, 'repo_enable_downloads'), |
|
340 | (1, 'repo_enable_downloads'), | |
345 | (1, 'repo_enable_locking'), |
|
341 | (1, 'repo_enable_locking'), | |
346 | (1, 'repo_enable_statistics'), |
|
342 | (1, 'repo_enable_statistics'), | |
347 | (0, 'clone_uri'), |
|
343 | (0, 'clone_uri'), | |
348 | (0, 'fork_id') |
|
344 | (0, 'fork_id') | |
349 | ] |
|
345 | ] | |
350 | for strip, k in update_keys: |
|
346 | for strip, k in update_keys: | |
351 | if k in kwargs: |
|
347 | if k in kwargs: | |
352 | val = kwargs[k] |
|
348 | val = kwargs[k] | |
353 | if strip: |
|
349 | if strip: | |
354 | k = remove_prefix(k, 'repo_') |
|
350 | k = remove_prefix(k, 'repo_') | |
355 |
|
351 | |||
356 | setattr(cur_repo, k, val) |
|
352 | setattr(cur_repo, k, val) | |
357 |
|
353 | |||
358 | new_name = cur_repo.get_new_name(kwargs['repo_name']) |
|
354 | new_name = cur_repo.get_new_name(kwargs['repo_name']) | |
359 | cur_repo.repo_name = new_name |
|
355 | cur_repo.repo_name = new_name | |
360 |
|
356 | |||
361 | # if private flag is set, reset default permission to NONE |
|
357 | # if private flag is set, reset default permission to NONE | |
362 | if kwargs.get('repo_private'): |
|
358 | if kwargs.get('repo_private'): | |
363 | EMPTY_PERM = 'repository.none' |
|
359 | EMPTY_PERM = 'repository.none' | |
364 | RepoModel().grant_user_permission( |
|
360 | RepoModel().grant_user_permission( | |
365 | repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM |
|
361 | repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM | |
366 | ) |
|
362 | ) | |
367 |
|
363 | |||
368 | # handle extra fields |
|
364 | # handle extra fields | |
369 | for field in filter(lambda k: k.startswith(RepositoryField.PREFIX), |
|
365 | for field in filter(lambda k: k.startswith(RepositoryField.PREFIX), | |
370 | kwargs): |
|
366 | kwargs): | |
371 | k = RepositoryField.un_prefix_key(field) |
|
367 | k = RepositoryField.un_prefix_key(field) | |
372 | ex_field = RepositoryField.get_by_key_name( |
|
368 | ex_field = RepositoryField.get_by_key_name( | |
373 | key=k, repo=cur_repo) |
|
369 | key=k, repo=cur_repo) | |
374 | if ex_field: |
|
370 | if ex_field: | |
375 | ex_field.field_value = kwargs[field] |
|
371 | ex_field.field_value = kwargs[field] | |
376 | self.sa.add(ex_field) |
|
372 | self.sa.add(ex_field) | |
377 | cur_repo.updated_on = datetime.datetime.now() |
|
373 | cur_repo.updated_on = datetime.datetime.now() | |
378 | self.sa.add(cur_repo) |
|
374 | self.sa.add(cur_repo) | |
379 |
|
375 | |||
380 | if source_repo_name != new_name: |
|
376 | if source_repo_name != new_name: | |
381 | # rename repository |
|
377 | # rename repository | |
382 | self._rename_filesystem_repo( |
|
378 | self._rename_filesystem_repo( | |
383 | old=source_repo_name, new=new_name) |
|
379 | old=source_repo_name, new=new_name) | |
384 |
|
380 | |||
385 | return cur_repo |
|
381 | return cur_repo | |
386 | except Exception: |
|
382 | except Exception: | |
387 | log.error(traceback.format_exc()) |
|
383 | log.error(traceback.format_exc()) | |
388 | raise |
|
384 | raise | |
389 |
|
385 | |||
390 | def _create_repo(self, repo_name, repo_type, description, owner, |
|
386 | def _create_repo(self, repo_name, repo_type, description, owner, | |
391 | private=False, clone_uri=None, repo_group=None, |
|
387 | private=False, clone_uri=None, repo_group=None, | |
392 | landing_rev='rev:tip', fork_of=None, |
|
388 | landing_rev='rev:tip', fork_of=None, | |
393 | copy_fork_permissions=False, enable_statistics=False, |
|
389 | copy_fork_permissions=False, enable_statistics=False, | |
394 | enable_locking=False, enable_downloads=False, |
|
390 | enable_locking=False, enable_downloads=False, | |
395 | copy_group_permissions=False, |
|
391 | copy_group_permissions=False, | |
396 | state=Repository.STATE_PENDING): |
|
392 | state=Repository.STATE_PENDING): | |
397 | """ |
|
393 | """ | |
398 | Create repository inside database with PENDING state, this should be |
|
394 | Create repository inside database with PENDING state, this should be | |
399 | only executed by create() repo. With exception of importing existing |
|
395 | only executed by create() repo. With exception of importing existing | |
400 | repos |
|
396 | repos | |
401 | """ |
|
397 | """ | |
402 | from rhodecode.model.scm import ScmModel |
|
398 | from rhodecode.model.scm import ScmModel | |
403 |
|
399 | |||
404 | owner = self._get_user(owner) |
|
400 | owner = self._get_user(owner) | |
405 | fork_of = self._get_repo(fork_of) |
|
401 | fork_of = self._get_repo(fork_of) | |
406 | repo_group = self._get_repo_group(safe_int(repo_group)) |
|
402 | repo_group = self._get_repo_group(safe_int(repo_group)) | |
407 |
|
403 | |||
408 | try: |
|
404 | try: | |
409 | repo_name = safe_unicode(repo_name) |
|
405 | repo_name = safe_unicode(repo_name) | |
410 | description = safe_unicode(description) |
|
406 | description = safe_unicode(description) | |
411 | # repo name is just a name of repository |
|
407 | # repo name is just a name of repository | |
412 | # while repo_name_full is a full qualified name that is combined |
|
408 | # while repo_name_full is a full qualified name that is combined | |
413 | # with name and path of group |
|
409 | # with name and path of group | |
414 | repo_name_full = repo_name |
|
410 | repo_name_full = repo_name | |
415 | repo_name = repo_name.split(Repository.NAME_SEP)[-1] |
|
411 | repo_name = repo_name.split(Repository.NAME_SEP)[-1] | |
416 |
|
412 | |||
417 | new_repo = Repository() |
|
413 | new_repo = Repository() | |
418 | new_repo.repo_state = state |
|
414 | new_repo.repo_state = state | |
419 | new_repo.enable_statistics = False |
|
415 | new_repo.enable_statistics = False | |
420 | new_repo.repo_name = repo_name_full |
|
416 | new_repo.repo_name = repo_name_full | |
421 | new_repo.repo_type = repo_type |
|
417 | new_repo.repo_type = repo_type | |
422 | new_repo.user = owner |
|
418 | new_repo.user = owner | |
423 | new_repo.group = repo_group |
|
419 | new_repo.group = repo_group | |
424 | new_repo.description = description or repo_name |
|
420 | new_repo.description = description or repo_name | |
425 | new_repo.private = private |
|
421 | new_repo.private = private | |
426 | new_repo.clone_uri = clone_uri |
|
422 | new_repo.clone_uri = clone_uri | |
427 | new_repo.landing_rev = landing_rev |
|
423 | new_repo.landing_rev = landing_rev | |
428 |
|
424 | |||
429 | new_repo.enable_statistics = enable_statistics |
|
425 | new_repo.enable_statistics = enable_statistics | |
430 | new_repo.enable_locking = enable_locking |
|
426 | new_repo.enable_locking = enable_locking | |
431 | new_repo.enable_downloads = enable_downloads |
|
427 | new_repo.enable_downloads = enable_downloads | |
432 |
|
428 | |||
433 | if repo_group: |
|
429 | if repo_group: | |
434 | new_repo.enable_locking = repo_group.enable_locking |
|
430 | new_repo.enable_locking = repo_group.enable_locking | |
435 |
|
431 | |||
436 | if fork_of: |
|
432 | if fork_of: | |
437 | parent_repo = fork_of |
|
433 | parent_repo = fork_of | |
438 | new_repo.fork = parent_repo |
|
434 | new_repo.fork = parent_repo | |
439 |
|
435 | |||
440 | events.trigger(events.RepoPreCreateEvent(new_repo)) |
|
436 | events.trigger(events.RepoPreCreateEvent(new_repo)) | |
441 |
|
437 | |||
442 | self.sa.add(new_repo) |
|
438 | self.sa.add(new_repo) | |
443 |
|
439 | |||
444 | EMPTY_PERM = 'repository.none' |
|
440 | EMPTY_PERM = 'repository.none' | |
445 | if fork_of and copy_fork_permissions: |
|
441 | if fork_of and copy_fork_permissions: | |
446 | repo = fork_of |
|
442 | repo = fork_of | |
447 | user_perms = UserRepoToPerm.query() \ |
|
443 | user_perms = UserRepoToPerm.query() \ | |
448 | .filter(UserRepoToPerm.repository == repo).all() |
|
444 | .filter(UserRepoToPerm.repository == repo).all() | |
449 | group_perms = UserGroupRepoToPerm.query() \ |
|
445 | group_perms = UserGroupRepoToPerm.query() \ | |
450 | .filter(UserGroupRepoToPerm.repository == repo).all() |
|
446 | .filter(UserGroupRepoToPerm.repository == repo).all() | |
451 |
|
447 | |||
452 | for perm in user_perms: |
|
448 | for perm in user_perms: | |
453 | UserRepoToPerm.create( |
|
449 | UserRepoToPerm.create( | |
454 | perm.user, new_repo, perm.permission) |
|
450 | perm.user, new_repo, perm.permission) | |
455 |
|
451 | |||
456 | for perm in group_perms: |
|
452 | for perm in group_perms: | |
457 | UserGroupRepoToPerm.create( |
|
453 | UserGroupRepoToPerm.create( | |
458 | perm.users_group, new_repo, perm.permission) |
|
454 | perm.users_group, new_repo, perm.permission) | |
459 | # in case we copy permissions and also set this repo to private |
|
455 | # in case we copy permissions and also set this repo to private | |
460 | # override the default user permission to make it a private |
|
456 | # override the default user permission to make it a private | |
461 | # repo |
|
457 | # repo | |
462 | if private: |
|
458 | if private: | |
463 | RepoModel(self.sa).grant_user_permission( |
|
459 | RepoModel(self.sa).grant_user_permission( | |
464 | repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM) |
|
460 | repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM) | |
465 |
|
461 | |||
466 | elif repo_group and copy_group_permissions: |
|
462 | elif repo_group and copy_group_permissions: | |
467 | user_perms = UserRepoGroupToPerm.query() \ |
|
463 | user_perms = UserRepoGroupToPerm.query() \ | |
468 | .filter(UserRepoGroupToPerm.group == repo_group).all() |
|
464 | .filter(UserRepoGroupToPerm.group == repo_group).all() | |
469 |
|
465 | |||
470 | group_perms = UserGroupRepoGroupToPerm.query() \ |
|
466 | group_perms = UserGroupRepoGroupToPerm.query() \ | |
471 | .filter(UserGroupRepoGroupToPerm.group == repo_group).all() |
|
467 | .filter(UserGroupRepoGroupToPerm.group == repo_group).all() | |
472 |
|
468 | |||
473 | for perm in user_perms: |
|
469 | for perm in user_perms: | |
474 | perm_name = perm.permission.permission_name.replace( |
|
470 | perm_name = perm.permission.permission_name.replace( | |
475 | 'group.', 'repository.') |
|
471 | 'group.', 'repository.') | |
476 | perm_obj = Permission.get_by_key(perm_name) |
|
472 | perm_obj = Permission.get_by_key(perm_name) | |
477 | UserRepoToPerm.create(perm.user, new_repo, perm_obj) |
|
473 | UserRepoToPerm.create(perm.user, new_repo, perm_obj) | |
478 |
|
474 | |||
479 | for perm in group_perms: |
|
475 | for perm in group_perms: | |
480 | perm_name = perm.permission.permission_name.replace( |
|
476 | perm_name = perm.permission.permission_name.replace( | |
481 | 'group.', 'repository.') |
|
477 | 'group.', 'repository.') | |
482 | perm_obj = Permission.get_by_key(perm_name) |
|
478 | perm_obj = Permission.get_by_key(perm_name) | |
483 | UserGroupRepoToPerm.create( |
|
479 | UserGroupRepoToPerm.create( | |
484 | perm.users_group, new_repo, perm_obj) |
|
480 | perm.users_group, new_repo, perm_obj) | |
485 |
|
481 | |||
486 | if private: |
|
482 | if private: | |
487 | RepoModel(self.sa).grant_user_permission( |
|
483 | RepoModel(self.sa).grant_user_permission( | |
488 | repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM) |
|
484 | repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM) | |
489 |
|
485 | |||
490 | else: |
|
486 | else: | |
491 | perm_obj = self._create_default_perms(new_repo, private) |
|
487 | perm_obj = self._create_default_perms(new_repo, private) | |
492 | self.sa.add(perm_obj) |
|
488 | self.sa.add(perm_obj) | |
493 |
|
489 | |||
494 | # now automatically start following this repository as owner |
|
490 | # now automatically start following this repository as owner | |
495 | ScmModel(self.sa).toggle_following_repo(new_repo.repo_id, |
|
491 | ScmModel(self.sa).toggle_following_repo(new_repo.repo_id, | |
496 | owner.user_id) |
|
492 | owner.user_id) | |
497 |
|
493 | |||
498 | # we need to flush here, in order to check if database won't |
|
494 | # we need to flush here, in order to check if database won't | |
499 | # throw any exceptions, create filesystem dirs at the very end |
|
495 | # throw any exceptions, create filesystem dirs at the very end | |
500 | self.sa.flush() |
|
496 | self.sa.flush() | |
501 | events.trigger(events.RepoCreateEvent(new_repo)) |
|
497 | events.trigger(events.RepoCreateEvent(new_repo)) | |
502 | return new_repo |
|
498 | return new_repo | |
503 |
|
499 | |||
504 | except Exception: |
|
500 | except Exception: | |
505 | log.error(traceback.format_exc()) |
|
501 | log.error(traceback.format_exc()) | |
506 | raise |
|
502 | raise | |
507 |
|
503 | |||
508 | def create(self, form_data, cur_user): |
|
504 | def create(self, form_data, cur_user): | |
509 | """ |
|
505 | """ | |
510 | Create repository using celery tasks |
|
506 | Create repository using celery tasks | |
511 |
|
507 | |||
512 | :param form_data: |
|
508 | :param form_data: | |
513 | :param cur_user: |
|
509 | :param cur_user: | |
514 | """ |
|
510 | """ | |
515 | from rhodecode.lib.celerylib import tasks, run_task |
|
511 | from rhodecode.lib.celerylib import tasks, run_task | |
516 | return run_task(tasks.create_repo, form_data, cur_user) |
|
512 | return run_task(tasks.create_repo, form_data, cur_user) | |
517 |
|
513 | |||
518 | def update_permissions(self, repo, perm_additions=None, perm_updates=None, |
|
514 | def update_permissions(self, repo, perm_additions=None, perm_updates=None, | |
519 | perm_deletions=None, check_perms=True, |
|
515 | perm_deletions=None, check_perms=True, | |
520 | cur_user=None): |
|
516 | cur_user=None): | |
521 | if not perm_additions: |
|
517 | if not perm_additions: | |
522 | perm_additions = [] |
|
518 | perm_additions = [] | |
523 | if not perm_updates: |
|
519 | if not perm_updates: | |
524 | perm_updates = [] |
|
520 | perm_updates = [] | |
525 | if not perm_deletions: |
|
521 | if not perm_deletions: | |
526 | perm_deletions = [] |
|
522 | perm_deletions = [] | |
527 |
|
523 | |||
528 | req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin') |
|
524 | req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin') | |
529 |
|
525 | |||
530 | changes = { |
|
526 | changes = { | |
531 | 'added': [], |
|
527 | 'added': [], | |
532 | 'updated': [], |
|
528 | 'updated': [], | |
533 | 'deleted': [] |
|
529 | 'deleted': [] | |
534 | } |
|
530 | } | |
535 | # update permissions |
|
531 | # update permissions | |
536 | for member_id, perm, member_type in perm_updates: |
|
532 | for member_id, perm, member_type in perm_updates: | |
537 | member_id = int(member_id) |
|
533 | member_id = int(member_id) | |
538 | if member_type == 'user': |
|
534 | if member_type == 'user': | |
539 | member_name = User.get(member_id).username |
|
535 | member_name = User.get(member_id).username | |
540 | # this updates also current one if found |
|
536 | # this updates also current one if found | |
541 | self.grant_user_permission( |
|
537 | self.grant_user_permission( | |
542 | repo=repo, user=member_id, perm=perm) |
|
538 | repo=repo, user=member_id, perm=perm) | |
543 | else: # set for user group |
|
539 | else: # set for user group | |
544 | # check if we have permissions to alter this usergroup |
|
540 | # check if we have permissions to alter this usergroup | |
545 | member_name = UserGroup.get(member_id).users_group_name |
|
541 | member_name = UserGroup.get(member_id).users_group_name | |
546 | if not check_perms or HasUserGroupPermissionAny( |
|
542 | if not check_perms or HasUserGroupPermissionAny( | |
547 | *req_perms)(member_name, user=cur_user): |
|
543 | *req_perms)(member_name, user=cur_user): | |
548 | self.grant_user_group_permission( |
|
544 | self.grant_user_group_permission( | |
549 | repo=repo, group_name=member_id, perm=perm) |
|
545 | repo=repo, group_name=member_id, perm=perm) | |
550 |
|
546 | |||
551 | changes['updated'].append({'type': member_type, 'id': member_id, |
|
547 | changes['updated'].append({'type': member_type, 'id': member_id, | |
552 | 'name': member_name, 'new_perm': perm}) |
|
548 | 'name': member_name, 'new_perm': perm}) | |
553 |
|
549 | |||
554 | # set new permissions |
|
550 | # set new permissions | |
555 | for member_id, perm, member_type in perm_additions: |
|
551 | for member_id, perm, member_type in perm_additions: | |
556 | member_id = int(member_id) |
|
552 | member_id = int(member_id) | |
557 | if member_type == 'user': |
|
553 | if member_type == 'user': | |
558 | member_name = User.get(member_id).username |
|
554 | member_name = User.get(member_id).username | |
559 | self.grant_user_permission( |
|
555 | self.grant_user_permission( | |
560 | repo=repo, user=member_id, perm=perm) |
|
556 | repo=repo, user=member_id, perm=perm) | |
561 | else: # set for user group |
|
557 | else: # set for user group | |
562 | # check if we have permissions to alter this usergroup |
|
558 | # check if we have permissions to alter this usergroup | |
563 | member_name = UserGroup.get(member_id).users_group_name |
|
559 | member_name = UserGroup.get(member_id).users_group_name | |
564 | if not check_perms or HasUserGroupPermissionAny( |
|
560 | if not check_perms or HasUserGroupPermissionAny( | |
565 | *req_perms)(member_name, user=cur_user): |
|
561 | *req_perms)(member_name, user=cur_user): | |
566 | self.grant_user_group_permission( |
|
562 | self.grant_user_group_permission( | |
567 | repo=repo, group_name=member_id, perm=perm) |
|
563 | repo=repo, group_name=member_id, perm=perm) | |
568 | changes['added'].append({'type': member_type, 'id': member_id, |
|
564 | changes['added'].append({'type': member_type, 'id': member_id, | |
569 | 'name': member_name, 'new_perm': perm}) |
|
565 | 'name': member_name, 'new_perm': perm}) | |
570 | # delete permissions |
|
566 | # delete permissions | |
571 | for member_id, perm, member_type in perm_deletions: |
|
567 | for member_id, perm, member_type in perm_deletions: | |
572 | member_id = int(member_id) |
|
568 | member_id = int(member_id) | |
573 | if member_type == 'user': |
|
569 | if member_type == 'user': | |
574 | member_name = User.get(member_id).username |
|
570 | member_name = User.get(member_id).username | |
575 | self.revoke_user_permission(repo=repo, user=member_id) |
|
571 | self.revoke_user_permission(repo=repo, user=member_id) | |
576 | else: # set for user group |
|
572 | else: # set for user group | |
577 | # check if we have permissions to alter this usergroup |
|
573 | # check if we have permissions to alter this usergroup | |
578 | member_name = UserGroup.get(member_id).users_group_name |
|
574 | member_name = UserGroup.get(member_id).users_group_name | |
579 | if not check_perms or HasUserGroupPermissionAny( |
|
575 | if not check_perms or HasUserGroupPermissionAny( | |
580 | *req_perms)(member_name, user=cur_user): |
|
576 | *req_perms)(member_name, user=cur_user): | |
581 | self.revoke_user_group_permission( |
|
577 | self.revoke_user_group_permission( | |
582 | repo=repo, group_name=member_id) |
|
578 | repo=repo, group_name=member_id) | |
583 |
|
579 | |||
584 | changes['deleted'].append({'type': member_type, 'id': member_id, |
|
580 | changes['deleted'].append({'type': member_type, 'id': member_id, | |
585 | 'name': member_name, 'new_perm': perm}) |
|
581 | 'name': member_name, 'new_perm': perm}) | |
586 | return changes |
|
582 | return changes | |
587 |
|
583 | |||
588 | def create_fork(self, form_data, cur_user): |
|
584 | def create_fork(self, form_data, cur_user): | |
589 | """ |
|
585 | """ | |
590 | Simple wrapper into executing celery task for fork creation |
|
586 | Simple wrapper into executing celery task for fork creation | |
591 |
|
587 | |||
592 | :param form_data: |
|
588 | :param form_data: | |
593 | :param cur_user: |
|
589 | :param cur_user: | |
594 | """ |
|
590 | """ | |
595 | from rhodecode.lib.celerylib import tasks, run_task |
|
591 | from rhodecode.lib.celerylib import tasks, run_task | |
596 | return run_task(tasks.create_repo_fork, form_data, cur_user) |
|
592 | return run_task(tasks.create_repo_fork, form_data, cur_user) | |
597 |
|
593 | |||
598 | def delete(self, repo, forks=None, fs_remove=True, cur_user=None): |
|
594 | def delete(self, repo, forks=None, fs_remove=True, cur_user=None): | |
599 | """ |
|
595 | """ | |
600 | Delete given repository, forks parameter defines what do do with |
|
596 | Delete given repository, forks parameter defines what do do with | |
601 | attached forks. Throws AttachedForksError if deleted repo has attached |
|
597 | attached forks. Throws AttachedForksError if deleted repo has attached | |
602 | forks |
|
598 | forks | |
603 |
|
599 | |||
604 | :param repo: |
|
600 | :param repo: | |
605 | :param forks: str 'delete' or 'detach' |
|
601 | :param forks: str 'delete' or 'detach' | |
606 | :param fs_remove: remove(archive) repo from filesystem |
|
602 | :param fs_remove: remove(archive) repo from filesystem | |
607 | """ |
|
603 | """ | |
608 | if not cur_user: |
|
604 | if not cur_user: | |
609 | cur_user = getattr(get_current_rhodecode_user(), 'username', None) |
|
605 | cur_user = getattr(get_current_rhodecode_user(), 'username', None) | |
610 | repo = self._get_repo(repo) |
|
606 | repo = self._get_repo(repo) | |
611 | if repo: |
|
607 | if repo: | |
612 | if forks == 'detach': |
|
608 | if forks == 'detach': | |
613 | for r in repo.forks: |
|
609 | for r in repo.forks: | |
614 | r.fork = None |
|
610 | r.fork = None | |
615 | self.sa.add(r) |
|
611 | self.sa.add(r) | |
616 | elif forks == 'delete': |
|
612 | elif forks == 'delete': | |
617 | for r in repo.forks: |
|
613 | for r in repo.forks: | |
618 | self.delete(r, forks='delete') |
|
614 | self.delete(r, forks='delete') | |
619 | elif [f for f in repo.forks]: |
|
615 | elif [f for f in repo.forks]: | |
620 | raise AttachedForksError() |
|
616 | raise AttachedForksError() | |
621 |
|
617 | |||
622 | old_repo_dict = repo.get_dict() |
|
618 | old_repo_dict = repo.get_dict() | |
623 | events.trigger(events.RepoPreDeleteEvent(repo)) |
|
619 | events.trigger(events.RepoPreDeleteEvent(repo)) | |
624 | try: |
|
620 | try: | |
625 | self.sa.delete(repo) |
|
621 | self.sa.delete(repo) | |
626 | if fs_remove: |
|
622 | if fs_remove: | |
627 | self._delete_filesystem_repo(repo) |
|
623 | self._delete_filesystem_repo(repo) | |
628 | else: |
|
624 | else: | |
629 | log.debug('skipping removal from filesystem') |
|
625 | log.debug('skipping removal from filesystem') | |
630 | old_repo_dict.update({ |
|
626 | old_repo_dict.update({ | |
631 | 'deleted_by': cur_user, |
|
627 | 'deleted_by': cur_user, | |
632 | 'deleted_on': time.time(), |
|
628 | 'deleted_on': time.time(), | |
633 | }) |
|
629 | }) | |
634 | log_delete_repository(**old_repo_dict) |
|
630 | log_delete_repository(**old_repo_dict) | |
635 | events.trigger(events.RepoDeleteEvent(repo)) |
|
631 | events.trigger(events.RepoDeleteEvent(repo)) | |
636 | except Exception: |
|
632 | except Exception: | |
637 | log.error(traceback.format_exc()) |
|
633 | log.error(traceback.format_exc()) | |
638 | raise |
|
634 | raise | |
639 |
|
635 | |||
640 | def grant_user_permission(self, repo, user, perm): |
|
636 | def grant_user_permission(self, repo, user, perm): | |
641 | """ |
|
637 | """ | |
642 | Grant permission for user on given repository, or update existing one |
|
638 | Grant permission for user on given repository, or update existing one | |
643 | if found |
|
639 | if found | |
644 |
|
640 | |||
645 | :param repo: Instance of Repository, repository_id, or repository name |
|
641 | :param repo: Instance of Repository, repository_id, or repository name | |
646 | :param user: Instance of User, user_id or username |
|
642 | :param user: Instance of User, user_id or username | |
647 | :param perm: Instance of Permission, or permission_name |
|
643 | :param perm: Instance of Permission, or permission_name | |
648 | """ |
|
644 | """ | |
649 | user = self._get_user(user) |
|
645 | user = self._get_user(user) | |
650 | repo = self._get_repo(repo) |
|
646 | repo = self._get_repo(repo) | |
651 | permission = self._get_perm(perm) |
|
647 | permission = self._get_perm(perm) | |
652 |
|
648 | |||
653 | # check if we have that permission already |
|
649 | # check if we have that permission already | |
654 | obj = self.sa.query(UserRepoToPerm) \ |
|
650 | obj = self.sa.query(UserRepoToPerm) \ | |
655 | .filter(UserRepoToPerm.user == user) \ |
|
651 | .filter(UserRepoToPerm.user == user) \ | |
656 | .filter(UserRepoToPerm.repository == repo) \ |
|
652 | .filter(UserRepoToPerm.repository == repo) \ | |
657 | .scalar() |
|
653 | .scalar() | |
658 | if obj is None: |
|
654 | if obj is None: | |
659 | # create new ! |
|
655 | # create new ! | |
660 | obj = UserRepoToPerm() |
|
656 | obj = UserRepoToPerm() | |
661 | obj.repository = repo |
|
657 | obj.repository = repo | |
662 | obj.user = user |
|
658 | obj.user = user | |
663 | obj.permission = permission |
|
659 | obj.permission = permission | |
664 | self.sa.add(obj) |
|
660 | self.sa.add(obj) | |
665 | log.debug('Granted perm %s to %s on %s', perm, user, repo) |
|
661 | log.debug('Granted perm %s to %s on %s', perm, user, repo) | |
666 | action_logger_generic( |
|
662 | action_logger_generic( | |
667 | 'granted permission: {} to user: {} on repo: {}'.format( |
|
663 | 'granted permission: {} to user: {} on repo: {}'.format( | |
668 | perm, user, repo), namespace='security.repo') |
|
664 | perm, user, repo), namespace='security.repo') | |
669 | return obj |
|
665 | return obj | |
670 |
|
666 | |||
671 | def revoke_user_permission(self, repo, user): |
|
667 | def revoke_user_permission(self, repo, user): | |
672 | """ |
|
668 | """ | |
673 | Revoke permission for user on given repository |
|
669 | Revoke permission for user on given repository | |
674 |
|
670 | |||
675 | :param repo: Instance of Repository, repository_id, or repository name |
|
671 | :param repo: Instance of Repository, repository_id, or repository name | |
676 | :param user: Instance of User, user_id or username |
|
672 | :param user: Instance of User, user_id or username | |
677 | """ |
|
673 | """ | |
678 |
|
674 | |||
679 | user = self._get_user(user) |
|
675 | user = self._get_user(user) | |
680 | repo = self._get_repo(repo) |
|
676 | repo = self._get_repo(repo) | |
681 |
|
677 | |||
682 | obj = self.sa.query(UserRepoToPerm) \ |
|
678 | obj = self.sa.query(UserRepoToPerm) \ | |
683 | .filter(UserRepoToPerm.repository == repo) \ |
|
679 | .filter(UserRepoToPerm.repository == repo) \ | |
684 | .filter(UserRepoToPerm.user == user) \ |
|
680 | .filter(UserRepoToPerm.user == user) \ | |
685 | .scalar() |
|
681 | .scalar() | |
686 | if obj: |
|
682 | if obj: | |
687 | self.sa.delete(obj) |
|
683 | self.sa.delete(obj) | |
688 | log.debug('Revoked perm on %s on %s', repo, user) |
|
684 | log.debug('Revoked perm on %s on %s', repo, user) | |
689 | action_logger_generic( |
|
685 | action_logger_generic( | |
690 | 'revoked permission from user: {} on repo: {}'.format( |
|
686 | 'revoked permission from user: {} on repo: {}'.format( | |
691 | user, repo), namespace='security.repo') |
|
687 | user, repo), namespace='security.repo') | |
692 |
|
688 | |||
693 | def grant_user_group_permission(self, repo, group_name, perm): |
|
689 | def grant_user_group_permission(self, repo, group_name, perm): | |
694 | """ |
|
690 | """ | |
695 | Grant permission for user group on given repository, or update |
|
691 | Grant permission for user group on given repository, or update | |
696 | existing one if found |
|
692 | existing one if found | |
697 |
|
693 | |||
698 | :param repo: Instance of Repository, repository_id, or repository name |
|
694 | :param repo: Instance of Repository, repository_id, or repository name | |
699 | :param group_name: Instance of UserGroup, users_group_id, |
|
695 | :param group_name: Instance of UserGroup, users_group_id, | |
700 | or user group name |
|
696 | or user group name | |
701 | :param perm: Instance of Permission, or permission_name |
|
697 | :param perm: Instance of Permission, or permission_name | |
702 | """ |
|
698 | """ | |
703 | repo = self._get_repo(repo) |
|
699 | repo = self._get_repo(repo) | |
704 | group_name = self._get_user_group(group_name) |
|
700 | group_name = self._get_user_group(group_name) | |
705 | permission = self._get_perm(perm) |
|
701 | permission = self._get_perm(perm) | |
706 |
|
702 | |||
707 | # check if we have that permission already |
|
703 | # check if we have that permission already | |
708 | obj = self.sa.query(UserGroupRepoToPerm) \ |
|
704 | obj = self.sa.query(UserGroupRepoToPerm) \ | |
709 | .filter(UserGroupRepoToPerm.users_group == group_name) \ |
|
705 | .filter(UserGroupRepoToPerm.users_group == group_name) \ | |
710 | .filter(UserGroupRepoToPerm.repository == repo) \ |
|
706 | .filter(UserGroupRepoToPerm.repository == repo) \ | |
711 | .scalar() |
|
707 | .scalar() | |
712 |
|
708 | |||
713 | if obj is None: |
|
709 | if obj is None: | |
714 | # create new |
|
710 | # create new | |
715 | obj = UserGroupRepoToPerm() |
|
711 | obj = UserGroupRepoToPerm() | |
716 |
|
712 | |||
717 | obj.repository = repo |
|
713 | obj.repository = repo | |
718 | obj.users_group = group_name |
|
714 | obj.users_group = group_name | |
719 | obj.permission = permission |
|
715 | obj.permission = permission | |
720 | self.sa.add(obj) |
|
716 | self.sa.add(obj) | |
721 | log.debug('Granted perm %s to %s on %s', perm, group_name, repo) |
|
717 | log.debug('Granted perm %s to %s on %s', perm, group_name, repo) | |
722 | action_logger_generic( |
|
718 | action_logger_generic( | |
723 | 'granted permission: {} to usergroup: {} on repo: {}'.format( |
|
719 | 'granted permission: {} to usergroup: {} on repo: {}'.format( | |
724 | perm, group_name, repo), namespace='security.repo') |
|
720 | perm, group_name, repo), namespace='security.repo') | |
725 |
|
721 | |||
726 | return obj |
|
722 | return obj | |
727 |
|
723 | |||
728 | def revoke_user_group_permission(self, repo, group_name): |
|
724 | def revoke_user_group_permission(self, repo, group_name): | |
729 | """ |
|
725 | """ | |
730 | Revoke permission for user group on given repository |
|
726 | Revoke permission for user group on given repository | |
731 |
|
727 | |||
732 | :param repo: Instance of Repository, repository_id, or repository name |
|
728 | :param repo: Instance of Repository, repository_id, or repository name | |
733 | :param group_name: Instance of UserGroup, users_group_id, |
|
729 | :param group_name: Instance of UserGroup, users_group_id, | |
734 | or user group name |
|
730 | or user group name | |
735 | """ |
|
731 | """ | |
736 | repo = self._get_repo(repo) |
|
732 | repo = self._get_repo(repo) | |
737 | group_name = self._get_user_group(group_name) |
|
733 | group_name = self._get_user_group(group_name) | |
738 |
|
734 | |||
739 | obj = self.sa.query(UserGroupRepoToPerm) \ |
|
735 | obj = self.sa.query(UserGroupRepoToPerm) \ | |
740 | .filter(UserGroupRepoToPerm.repository == repo) \ |
|
736 | .filter(UserGroupRepoToPerm.repository == repo) \ | |
741 | .filter(UserGroupRepoToPerm.users_group == group_name) \ |
|
737 | .filter(UserGroupRepoToPerm.users_group == group_name) \ | |
742 | .scalar() |
|
738 | .scalar() | |
743 | if obj: |
|
739 | if obj: | |
744 | self.sa.delete(obj) |
|
740 | self.sa.delete(obj) | |
745 | log.debug('Revoked perm to %s on %s', repo, group_name) |
|
741 | log.debug('Revoked perm to %s on %s', repo, group_name) | |
746 | action_logger_generic( |
|
742 | action_logger_generic( | |
747 | 'revoked permission from usergroup: {} on repo: {}'.format( |
|
743 | 'revoked permission from usergroup: {} on repo: {}'.format( | |
748 | group_name, repo), namespace='security.repo') |
|
744 | group_name, repo), namespace='security.repo') | |
749 |
|
745 | |||
750 | def delete_stats(self, repo_name): |
|
746 | def delete_stats(self, repo_name): | |
751 | """ |
|
747 | """ | |
752 | removes stats for given repo |
|
748 | removes stats for given repo | |
753 |
|
749 | |||
754 | :param repo_name: |
|
750 | :param repo_name: | |
755 | """ |
|
751 | """ | |
756 | repo = self._get_repo(repo_name) |
|
752 | repo = self._get_repo(repo_name) | |
757 | try: |
|
753 | try: | |
758 | obj = self.sa.query(Statistics) \ |
|
754 | obj = self.sa.query(Statistics) \ | |
759 | .filter(Statistics.repository == repo).scalar() |
|
755 | .filter(Statistics.repository == repo).scalar() | |
760 | if obj: |
|
756 | if obj: | |
761 | self.sa.delete(obj) |
|
757 | self.sa.delete(obj) | |
762 | except Exception: |
|
758 | except Exception: | |
763 | log.error(traceback.format_exc()) |
|
759 | log.error(traceback.format_exc()) | |
764 | raise |
|
760 | raise | |
765 |
|
761 | |||
766 | def add_repo_field(self, repo_name, field_key, field_label, field_value='', |
|
762 | def add_repo_field(self, repo_name, field_key, field_label, field_value='', | |
767 | field_type='str', field_desc=''): |
|
763 | field_type='str', field_desc=''): | |
768 |
|
764 | |||
769 | repo = self._get_repo(repo_name) |
|
765 | repo = self._get_repo(repo_name) | |
770 |
|
766 | |||
771 | new_field = RepositoryField() |
|
767 | new_field = RepositoryField() | |
772 | new_field.repository = repo |
|
768 | new_field.repository = repo | |
773 | new_field.field_key = field_key |
|
769 | new_field.field_key = field_key | |
774 | new_field.field_type = field_type # python type |
|
770 | new_field.field_type = field_type # python type | |
775 | new_field.field_value = field_value |
|
771 | new_field.field_value = field_value | |
776 | new_field.field_desc = field_desc |
|
772 | new_field.field_desc = field_desc | |
777 | new_field.field_label = field_label |
|
773 | new_field.field_label = field_label | |
778 | self.sa.add(new_field) |
|
774 | self.sa.add(new_field) | |
779 | return new_field |
|
775 | return new_field | |
780 |
|
776 | |||
781 | def delete_repo_field(self, repo_name, field_key): |
|
777 | def delete_repo_field(self, repo_name, field_key): | |
782 | repo = self._get_repo(repo_name) |
|
778 | repo = self._get_repo(repo_name) | |
783 | field = RepositoryField.get_by_key_name(field_key, repo) |
|
779 | field = RepositoryField.get_by_key_name(field_key, repo) | |
784 | if field: |
|
780 | if field: | |
785 | self.sa.delete(field) |
|
781 | self.sa.delete(field) | |
786 |
|
782 | |||
787 | def _create_filesystem_repo(self, repo_name, repo_type, repo_group, |
|
783 | def _create_filesystem_repo(self, repo_name, repo_type, repo_group, | |
788 | clone_uri=None, repo_store_location=None, |
|
784 | clone_uri=None, repo_store_location=None, | |
789 | use_global_config=False): |
|
785 | use_global_config=False): | |
790 | """ |
|
786 | """ | |
791 | makes repository on filesystem. It's group aware means it'll create |
|
787 | makes repository on filesystem. It's group aware means it'll create | |
792 | a repository within a group, and alter the paths accordingly of |
|
788 | a repository within a group, and alter the paths accordingly of | |
793 | group location |
|
789 | group location | |
794 |
|
790 | |||
795 | :param repo_name: |
|
791 | :param repo_name: | |
796 | :param alias: |
|
792 | :param alias: | |
797 | :param parent: |
|
793 | :param parent: | |
798 | :param clone_uri: |
|
794 | :param clone_uri: | |
799 | :param repo_store_location: |
|
795 | :param repo_store_location: | |
800 | """ |
|
796 | """ | |
801 | from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group |
|
797 | from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group | |
802 | from rhodecode.model.scm import ScmModel |
|
798 | from rhodecode.model.scm import ScmModel | |
803 |
|
799 | |||
804 | if Repository.NAME_SEP in repo_name: |
|
800 | if Repository.NAME_SEP in repo_name: | |
805 | raise ValueError( |
|
801 | raise ValueError( | |
806 | 'repo_name must not contain groups got `%s`' % repo_name) |
|
802 | 'repo_name must not contain groups got `%s`' % repo_name) | |
807 |
|
803 | |||
808 | if isinstance(repo_group, RepoGroup): |
|
804 | if isinstance(repo_group, RepoGroup): | |
809 | new_parent_path = os.sep.join(repo_group.full_path_splitted) |
|
805 | new_parent_path = os.sep.join(repo_group.full_path_splitted) | |
810 | else: |
|
806 | else: | |
811 | new_parent_path = repo_group or '' |
|
807 | new_parent_path = repo_group or '' | |
812 |
|
808 | |||
813 | if repo_store_location: |
|
809 | if repo_store_location: | |
814 | _paths = [repo_store_location] |
|
810 | _paths = [repo_store_location] | |
815 | else: |
|
811 | else: | |
816 | _paths = [self.repos_path, new_parent_path, repo_name] |
|
812 | _paths = [self.repos_path, new_parent_path, repo_name] | |
817 | # we need to make it str for mercurial |
|
813 | # we need to make it str for mercurial | |
818 | repo_path = os.path.join(*map(lambda x: safe_str(x), _paths)) |
|
814 | repo_path = os.path.join(*map(lambda x: safe_str(x), _paths)) | |
819 |
|
815 | |||
820 | # check if this path is not a repository |
|
816 | # check if this path is not a repository | |
821 | if is_valid_repo(repo_path, self.repos_path): |
|
817 | if is_valid_repo(repo_path, self.repos_path): | |
822 | raise Exception('This path %s is a valid repository' % repo_path) |
|
818 | raise Exception('This path %s is a valid repository' % repo_path) | |
823 |
|
819 | |||
824 | # check if this path is a group |
|
820 | # check if this path is a group | |
825 | if is_valid_repo_group(repo_path, self.repos_path): |
|
821 | if is_valid_repo_group(repo_path, self.repos_path): | |
826 | raise Exception('This path %s is a valid group' % repo_path) |
|
822 | raise Exception('This path %s is a valid group' % repo_path) | |
827 |
|
823 | |||
828 | log.info('creating repo %s in %s from url: `%s`', |
|
824 | log.info('creating repo %s in %s from url: `%s`', | |
829 | repo_name, safe_unicode(repo_path), |
|
825 | repo_name, safe_unicode(repo_path), | |
830 | obfuscate_url_pw(clone_uri)) |
|
826 | obfuscate_url_pw(clone_uri)) | |
831 |
|
827 | |||
832 | backend = get_backend(repo_type) |
|
828 | backend = get_backend(repo_type) | |
833 |
|
829 | |||
834 | config_repo = None if use_global_config else repo_name |
|
830 | config_repo = None if use_global_config else repo_name | |
835 | if config_repo and new_parent_path: |
|
831 | if config_repo and new_parent_path: | |
836 | config_repo = Repository.NAME_SEP.join( |
|
832 | config_repo = Repository.NAME_SEP.join( | |
837 | (new_parent_path, config_repo)) |
|
833 | (new_parent_path, config_repo)) | |
838 | config = make_db_config(clear_session=False, repo=config_repo) |
|
834 | config = make_db_config(clear_session=False, repo=config_repo) | |
839 | config.set('extensions', 'largefiles', '') |
|
835 | config.set('extensions', 'largefiles', '') | |
840 |
|
836 | |||
841 | # patch and reset hooks section of UI config to not run any |
|
837 | # patch and reset hooks section of UI config to not run any | |
842 | # hooks on creating remote repo |
|
838 | # hooks on creating remote repo | |
843 | config.clear_section('hooks') |
|
839 | config.clear_section('hooks') | |
844 |
|
840 | |||
845 | # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice |
|
841 | # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice | |
846 | if repo_type == 'git': |
|
842 | if repo_type == 'git': | |
847 | repo = backend( |
|
843 | repo = backend( | |
848 | repo_path, config=config, create=True, src_url=clone_uri, |
|
844 | repo_path, config=config, create=True, src_url=clone_uri, | |
849 | bare=True) |
|
845 | bare=True) | |
850 | else: |
|
846 | else: | |
851 | repo = backend( |
|
847 | repo = backend( | |
852 | repo_path, config=config, create=True, src_url=clone_uri) |
|
848 | repo_path, config=config, create=True, src_url=clone_uri) | |
853 |
|
849 | |||
854 | ScmModel().install_hooks(repo, repo_type=repo_type) |
|
850 | ScmModel().install_hooks(repo, repo_type=repo_type) | |
855 |
|
851 | |||
856 | log.debug('Created repo %s with %s backend', |
|
852 | log.debug('Created repo %s with %s backend', | |
857 | safe_unicode(repo_name), safe_unicode(repo_type)) |
|
853 | safe_unicode(repo_name), safe_unicode(repo_type)) | |
858 | return repo |
|
854 | return repo | |
859 |
|
855 | |||
860 | def _rename_filesystem_repo(self, old, new): |
|
856 | def _rename_filesystem_repo(self, old, new): | |
861 | """ |
|
857 | """ | |
862 | renames repository on filesystem |
|
858 | renames repository on filesystem | |
863 |
|
859 | |||
864 | :param old: old name |
|
860 | :param old: old name | |
865 | :param new: new name |
|
861 | :param new: new name | |
866 | """ |
|
862 | """ | |
867 | log.info('renaming repo from %s to %s', old, new) |
|
863 | log.info('renaming repo from %s to %s', old, new) | |
868 |
|
864 | |||
869 | old_path = os.path.join(self.repos_path, old) |
|
865 | old_path = os.path.join(self.repos_path, old) | |
870 | new_path = os.path.join(self.repos_path, new) |
|
866 | new_path = os.path.join(self.repos_path, new) | |
871 | if os.path.isdir(new_path): |
|
867 | if os.path.isdir(new_path): | |
872 | raise Exception( |
|
868 | raise Exception( | |
873 | 'Was trying to rename to already existing dir %s' % new_path |
|
869 | 'Was trying to rename to already existing dir %s' % new_path | |
874 | ) |
|
870 | ) | |
875 | shutil.move(old_path, new_path) |
|
871 | shutil.move(old_path, new_path) | |
876 |
|
872 | |||
877 | def _delete_filesystem_repo(self, repo): |
|
873 | def _delete_filesystem_repo(self, repo): | |
878 | """ |
|
874 | """ | |
879 | removes repo from filesystem, the removal is acctually made by |
|
875 | removes repo from filesystem, the removal is acctually made by | |
880 | added rm__ prefix into dir, and rename internat .hg/.git dirs so this |
|
876 | added rm__ prefix into dir, and rename internat .hg/.git dirs so this | |
881 | repository is no longer valid for rhodecode, can be undeleted later on |
|
877 | repository is no longer valid for rhodecode, can be undeleted later on | |
882 | by reverting the renames on this repository |
|
878 | by reverting the renames on this repository | |
883 |
|
879 | |||
884 | :param repo: repo object |
|
880 | :param repo: repo object | |
885 | """ |
|
881 | """ | |
886 | rm_path = os.path.join(self.repos_path, repo.repo_name) |
|
882 | rm_path = os.path.join(self.repos_path, repo.repo_name) | |
887 | repo_group = repo.group |
|
883 | repo_group = repo.group | |
888 | log.info("Removing repository %s", rm_path) |
|
884 | log.info("Removing repository %s", rm_path) | |
889 | # disable hg/git internal that it doesn't get detected as repo |
|
885 | # disable hg/git internal that it doesn't get detected as repo | |
890 | alias = repo.repo_type |
|
886 | alias = repo.repo_type | |
891 |
|
887 | |||
892 | config = make_db_config(clear_session=False) |
|
888 | config = make_db_config(clear_session=False) | |
893 | config.set('extensions', 'largefiles', '') |
|
889 | config.set('extensions', 'largefiles', '') | |
894 | bare = getattr(repo.scm_instance(config=config), 'bare', False) |
|
890 | bare = getattr(repo.scm_instance(config=config), 'bare', False) | |
895 |
|
891 | |||
896 | # skip this for bare git repos |
|
892 | # skip this for bare git repos | |
897 | if not bare: |
|
893 | if not bare: | |
898 | # disable VCS repo |
|
894 | # disable VCS repo | |
899 | vcs_path = os.path.join(rm_path, '.%s' % alias) |
|
895 | vcs_path = os.path.join(rm_path, '.%s' % alias) | |
900 | if os.path.exists(vcs_path): |
|
896 | if os.path.exists(vcs_path): | |
901 | shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias)) |
|
897 | shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias)) | |
902 |
|
898 | |||
903 | _now = datetime.datetime.now() |
|
899 | _now = datetime.datetime.now() | |
904 | _ms = str(_now.microsecond).rjust(6, '0') |
|
900 | _ms = str(_now.microsecond).rjust(6, '0') | |
905 | _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms), |
|
901 | _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms), | |
906 | repo.just_name) |
|
902 | repo.just_name) | |
907 | if repo_group: |
|
903 | if repo_group: | |
908 | # if repository is in group, prefix the removal path with the group |
|
904 | # if repository is in group, prefix the removal path with the group | |
909 | args = repo_group.full_path_splitted + [_d] |
|
905 | args = repo_group.full_path_splitted + [_d] | |
910 | _d = os.path.join(*args) |
|
906 | _d = os.path.join(*args) | |
911 |
|
907 | |||
912 | if os.path.isdir(rm_path): |
|
908 | if os.path.isdir(rm_path): | |
913 | shutil.move(rm_path, os.path.join(self.repos_path, _d)) |
|
909 | shutil.move(rm_path, os.path.join(self.repos_path, _d)) | |
914 |
|
910 | |||
915 |
|
911 | |||
916 | class ReadmeFinder: |
|
912 | class ReadmeFinder: | |
917 | """ |
|
913 | """ | |
918 | Utility which knows how to find a readme for a specific commit. |
|
914 | Utility which knows how to find a readme for a specific commit. | |
919 |
|
915 | |||
920 | The main idea is that this is a configurable algorithm. When creating an |
|
916 | The main idea is that this is a configurable algorithm. When creating an | |
921 | instance you can define parameters, currently only the `default_renderer`. |
|
917 | instance you can define parameters, currently only the `default_renderer`. | |
922 | Based on this configuration the method :meth:`search` behaves slightly |
|
918 | Based on this configuration the method :meth:`search` behaves slightly | |
923 | different. |
|
919 | different. | |
924 | """ |
|
920 | """ | |
925 |
|
921 | |||
926 | readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE) |
|
922 | readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE) | |
927 | path_re = re.compile(r'^docs?', re.IGNORECASE) |
|
923 | path_re = re.compile(r'^docs?', re.IGNORECASE) | |
928 |
|
924 | |||
929 | default_priorities = { |
|
925 | default_priorities = { | |
930 | None: 0, |
|
926 | None: 0, | |
931 | '.text': 2, |
|
927 | '.text': 2, | |
932 | '.txt': 3, |
|
928 | '.txt': 3, | |
933 | '.rst': 1, |
|
929 | '.rst': 1, | |
934 | '.rest': 2, |
|
930 | '.rest': 2, | |
935 | '.md': 1, |
|
931 | '.md': 1, | |
936 | '.mkdn': 2, |
|
932 | '.mkdn': 2, | |
937 | '.mdown': 3, |
|
933 | '.mdown': 3, | |
938 | '.markdown': 4, |
|
934 | '.markdown': 4, | |
939 | } |
|
935 | } | |
940 |
|
936 | |||
941 | path_priority = { |
|
937 | path_priority = { | |
942 | 'doc': 0, |
|
938 | 'doc': 0, | |
943 | 'docs': 1, |
|
939 | 'docs': 1, | |
944 | } |
|
940 | } | |
945 |
|
941 | |||
946 | FALLBACK_PRIORITY = 99 |
|
942 | FALLBACK_PRIORITY = 99 | |
947 |
|
943 | |||
948 | RENDERER_TO_EXTENSION = { |
|
944 | RENDERER_TO_EXTENSION = { | |
949 | 'rst': ['.rst', '.rest'], |
|
945 | 'rst': ['.rst', '.rest'], | |
950 | 'markdown': ['.md', 'mkdn', '.mdown', '.markdown'], |
|
946 | 'markdown': ['.md', 'mkdn', '.mdown', '.markdown'], | |
951 | } |
|
947 | } | |
952 |
|
948 | |||
953 | def __init__(self, default_renderer=None): |
|
949 | def __init__(self, default_renderer=None): | |
954 | self._default_renderer = default_renderer |
|
950 | self._default_renderer = default_renderer | |
955 | self._renderer_extensions = self.RENDERER_TO_EXTENSION.get( |
|
951 | self._renderer_extensions = self.RENDERER_TO_EXTENSION.get( | |
956 | default_renderer, []) |
|
952 | default_renderer, []) | |
957 |
|
953 | |||
958 | def search(self, commit, path='/'): |
|
954 | def search(self, commit, path='/'): | |
959 | """ |
|
955 | """ | |
960 | Find a readme in the given `commit`. |
|
956 | Find a readme in the given `commit`. | |
961 | """ |
|
957 | """ | |
962 | nodes = commit.get_nodes(path) |
|
958 | nodes = commit.get_nodes(path) | |
963 | matches = self._match_readmes(nodes) |
|
959 | matches = self._match_readmes(nodes) | |
964 | matches = self._sort_according_to_priority(matches) |
|
960 | matches = self._sort_according_to_priority(matches) | |
965 | if matches: |
|
961 | if matches: | |
966 | return matches[0].node |
|
962 | return matches[0].node | |
967 |
|
963 | |||
968 | paths = self._match_paths(nodes) |
|
964 | paths = self._match_paths(nodes) | |
969 | paths = self._sort_paths_according_to_priority(paths) |
|
965 | paths = self._sort_paths_according_to_priority(paths) | |
970 | for path in paths: |
|
966 | for path in paths: | |
971 | match = self.search(commit, path=path) |
|
967 | match = self.search(commit, path=path) | |
972 | if match: |
|
968 | if match: | |
973 | return match |
|
969 | return match | |
974 |
|
970 | |||
975 | return None |
|
971 | return None | |
976 |
|
972 | |||
977 | def _match_readmes(self, nodes): |
|
973 | def _match_readmes(self, nodes): | |
978 | for node in nodes: |
|
974 | for node in nodes: | |
979 | if not node.is_file(): |
|
975 | if not node.is_file(): | |
980 | continue |
|
976 | continue | |
981 | path = node.path.rsplit('/', 1)[-1] |
|
977 | path = node.path.rsplit('/', 1)[-1] | |
982 | match = self.readme_re.match(path) |
|
978 | match = self.readme_re.match(path) | |
983 | if match: |
|
979 | if match: | |
984 | extension = match.group(1) |
|
980 | extension = match.group(1) | |
985 | yield ReadmeMatch(node, match, self._priority(extension)) |
|
981 | yield ReadmeMatch(node, match, self._priority(extension)) | |
986 |
|
982 | |||
987 | def _match_paths(self, nodes): |
|
983 | def _match_paths(self, nodes): | |
988 | for node in nodes: |
|
984 | for node in nodes: | |
989 | if not node.is_dir(): |
|
985 | if not node.is_dir(): | |
990 | continue |
|
986 | continue | |
991 | match = self.path_re.match(node.path) |
|
987 | match = self.path_re.match(node.path) | |
992 | if match: |
|
988 | if match: | |
993 | yield node.path |
|
989 | yield node.path | |
994 |
|
990 | |||
995 | def _priority(self, extension): |
|
991 | def _priority(self, extension): | |
996 | renderer_priority = ( |
|
992 | renderer_priority = ( | |
997 | 0 if extension in self._renderer_extensions else 1) |
|
993 | 0 if extension in self._renderer_extensions else 1) | |
998 | extension_priority = self.default_priorities.get( |
|
994 | extension_priority = self.default_priorities.get( | |
999 | extension, self.FALLBACK_PRIORITY) |
|
995 | extension, self.FALLBACK_PRIORITY) | |
1000 | return (renderer_priority, extension_priority) |
|
996 | return (renderer_priority, extension_priority) | |
1001 |
|
997 | |||
1002 | def _sort_according_to_priority(self, matches): |
|
998 | def _sort_according_to_priority(self, matches): | |
1003 |
|
999 | |||
1004 | def priority_and_path(match): |
|
1000 | def priority_and_path(match): | |
1005 | return (match.priority, match.path) |
|
1001 | return (match.priority, match.path) | |
1006 |
|
1002 | |||
1007 | return sorted(matches, key=priority_and_path) |
|
1003 | return sorted(matches, key=priority_and_path) | |
1008 |
|
1004 | |||
1009 | def _sort_paths_according_to_priority(self, paths): |
|
1005 | def _sort_paths_according_to_priority(self, paths): | |
1010 |
|
1006 | |||
1011 | def priority_and_path(path): |
|
1007 | def priority_and_path(path): | |
1012 | return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path) |
|
1008 | return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path) | |
1013 |
|
1009 | |||
1014 | return sorted(paths, key=priority_and_path) |
|
1010 | return sorted(paths, key=priority_and_path) | |
1015 |
|
1011 | |||
1016 |
|
1012 | |||
1017 | class ReadmeMatch: |
|
1013 | class ReadmeMatch: | |
1018 |
|
1014 | |||
1019 | def __init__(self, node, match, priority): |
|
1015 | def __init__(self, node, match, priority): | |
1020 | self.node = node |
|
1016 | self.node = node | |
1021 | self._match = match |
|
1017 | self._match = match | |
1022 | self.priority = priority |
|
1018 | self.priority = priority | |
1023 |
|
1019 | |||
1024 | @property |
|
1020 | @property | |
1025 | def path(self): |
|
1021 | def path(self): | |
1026 | return self.node.path |
|
1022 | return self.node.path | |
1027 |
|
1023 | |||
1028 | def __repr__(self): |
|
1024 | def __repr__(self): | |
1029 | return '<ReadmeMatch {} priority={}'.format(self.path, self.priority) |
|
1025 | return '<ReadmeMatch {} priority={}'.format(self.path, self.priority) |
@@ -1,101 +1,101 b'' | |||||
1 | // Global keyboard bindings |
|
1 | // Global keyboard bindings | |
2 |
|
2 | |||
3 | function setRCMouseBindings(repoName, repoLandingRev) { |
|
3 | function setRCMouseBindings(repoName, repoLandingRev) { | |
4 |
|
4 | |||
5 | /** custom callback for supressing mousetrap from firing */ |
|
5 | /** custom callback for supressing mousetrap from firing */ | |
6 | Mousetrap.stopCallback = function(e, element) { |
|
6 | Mousetrap.stopCallback = function(e, element) { | |
7 | // if the element has the class "mousetrap" then no need to stop |
|
7 | // if the element has the class "mousetrap" then no need to stop | |
8 | if ((' ' + element.className + ' ').indexOf(' mousetrap ') > -1) { |
|
8 | if ((' ' + element.className + ' ').indexOf(' mousetrap ') > -1) { | |
9 | return false; |
|
9 | return false; | |
10 | } |
|
10 | } | |
11 |
|
11 | |||
12 | // stop for input, select, and textarea |
|
12 | // stop for input, select, and textarea | |
13 | return element.tagName == 'INPUT' || element.tagName == 'SELECT' || element.tagName == 'TEXTAREA' || element.isContentEditable; |
|
13 | return element.tagName == 'INPUT' || element.tagName == 'SELECT' || element.tagName == 'TEXTAREA' || element.isContentEditable; | |
14 | }; |
|
14 | }; | |
15 |
|
15 | |||
16 | // general help "?" |
|
16 | // general help "?" | |
17 | Mousetrap.bind(['?'], function(e) { |
|
17 | Mousetrap.bind(['?'], function(e) { | |
18 | $('#help_kb').modal({}); |
|
18 | $('#help_kb').modal({}); | |
19 | }); |
|
19 | }); | |
20 |
|
20 | |||
21 | // / open the quick filter |
|
21 | // / open the quick filter | |
22 | Mousetrap.bind(['/'], function(e) { |
|
22 | Mousetrap.bind(['/'], function(e) { | |
23 | $('#repo_switcher').select2('open'); |
|
23 | $('#repo_switcher').select2('open'); | |
24 |
|
24 | |||
25 | // return false to prevent default browser behavior |
|
25 | // return false to prevent default browser behavior | |
26 | // and stop event from bubbling |
|
26 | // and stop event from bubbling | |
27 | return false; |
|
27 | return false; | |
28 | }); |
|
28 | }); | |
29 |
|
29 | |||
30 | // ctrl/command+b, show the the main bar |
|
30 | // ctrl/command+b, show the the main bar | |
31 | Mousetrap.bind(['command+b', 'ctrl+b'], function(e) { |
|
31 | Mousetrap.bind(['command+b', 'ctrl+b'], function(e) { | |
32 | var $headerInner = $('#header-inner'), |
|
32 | var $headerInner = $('#header-inner'), | |
33 | $content = $('#content'); |
|
33 | $content = $('#content'); | |
34 | if ($headerInner.hasClass('hover') && $content.hasClass('hover')) { |
|
34 | if ($headerInner.hasClass('hover') && $content.hasClass('hover')) { | |
35 | $headerInner.removeClass('hover'); |
|
35 | $headerInner.removeClass('hover'); | |
36 | $content.removeClass('hover'); |
|
36 | $content.removeClass('hover'); | |
37 | } else { |
|
37 | } else { | |
38 | $headerInner.addClass('hover'); |
|
38 | $headerInner.addClass('hover'); | |
39 | $content.addClass('hover'); |
|
39 | $content.addClass('hover'); | |
40 | } |
|
40 | } | |
41 | return false; |
|
41 | return false; | |
42 | }); |
|
42 | }); | |
43 |
|
43 | |||
44 | // general nav g + action |
|
44 | // general nav g + action | |
45 | Mousetrap.bind(['g h'], function(e) { |
|
45 | Mousetrap.bind(['g h'], function(e) { | |
46 | window.location = pyroutes.url('home'); |
|
46 | window.location = pyroutes.url('home'); | |
47 | }); |
|
47 | }); | |
48 | Mousetrap.bind(['g g'], function(e) { |
|
48 | Mousetrap.bind(['g g'], function(e) { | |
49 | window.location = pyroutes.url('gists_show', {'private': 1}); |
|
49 | window.location = pyroutes.url('gists_show', {'private': 1}); | |
50 | }); |
|
50 | }); | |
51 | Mousetrap.bind(['g G'], function(e) { |
|
51 | Mousetrap.bind(['g G'], function(e) { | |
52 | window.location = pyroutes.url('gists_show', {'public': 1}); |
|
52 | window.location = pyroutes.url('gists_show', {'public': 1}); | |
53 | }); |
|
53 | }); | |
54 | Mousetrap.bind(['n g'], function(e) { |
|
54 | Mousetrap.bind(['n g'], function(e) { | |
55 | window.location = pyroutes.url('gists_new'); |
|
55 | window.location = pyroutes.url('gists_new'); | |
56 | }); |
|
56 | }); | |
57 | Mousetrap.bind(['n r'], function(e) { |
|
57 | Mousetrap.bind(['n r'], function(e) { | |
58 |
window.location = pyroutes.url(' |
|
58 | window.location = pyroutes.url('repo_new'); | |
59 | }); |
|
59 | }); | |
60 |
|
60 | |||
61 | if (repoName && repoName != '') { |
|
61 | if (repoName && repoName != '') { | |
62 | // nav in repo context |
|
62 | // nav in repo context | |
63 | Mousetrap.bind(['g s'], function(e) { |
|
63 | Mousetrap.bind(['g s'], function(e) { | |
64 | window.location = pyroutes.url( |
|
64 | window.location = pyroutes.url( | |
65 | 'repo_summary', {'repo_name': repoName}); |
|
65 | 'repo_summary', {'repo_name': repoName}); | |
66 | }); |
|
66 | }); | |
67 | Mousetrap.bind(['g c'], function(e) { |
|
67 | Mousetrap.bind(['g c'], function(e) { | |
68 | window.location = pyroutes.url( |
|
68 | window.location = pyroutes.url( | |
69 | 'repo_changelog', {'repo_name': repoName}); |
|
69 | 'repo_changelog', {'repo_name': repoName}); | |
70 | }); |
|
70 | }); | |
71 | Mousetrap.bind(['g F'], function(e) { |
|
71 | Mousetrap.bind(['g F'], function(e) { | |
72 | window.location = pyroutes.url( |
|
72 | window.location = pyroutes.url( | |
73 | 'repo_files', |
|
73 | 'repo_files', | |
74 | { |
|
74 | { | |
75 | 'repo_name': repoName, |
|
75 | 'repo_name': repoName, | |
76 | 'commit_id': repoLandingRev, |
|
76 | 'commit_id': repoLandingRev, | |
77 | 'f_path': '', |
|
77 | 'f_path': '', | |
78 | 'search': '1' |
|
78 | 'search': '1' | |
79 | }); |
|
79 | }); | |
80 | }); |
|
80 | }); | |
81 | Mousetrap.bind(['g f'], function(e) { |
|
81 | Mousetrap.bind(['g f'], function(e) { | |
82 | window.location = pyroutes.url( |
|
82 | window.location = pyroutes.url( | |
83 | 'repo_files', |
|
83 | 'repo_files', | |
84 | { |
|
84 | { | |
85 | 'repo_name': repoName, |
|
85 | 'repo_name': repoName, | |
86 | 'commit_id': repoLandingRev, |
|
86 | 'commit_id': repoLandingRev, | |
87 | 'f_path': '' |
|
87 | 'f_path': '' | |
88 | }); |
|
88 | }); | |
89 | }); |
|
89 | }); | |
90 | Mousetrap.bind(['g o'], function(e) { |
|
90 | Mousetrap.bind(['g o'], function(e) { | |
91 | window.location = pyroutes.url( |
|
91 | window.location = pyroutes.url( | |
92 | 'edit_repo', {'repo_name': repoName}); |
|
92 | 'edit_repo', {'repo_name': repoName}); | |
93 | }); |
|
93 | }); | |
94 | Mousetrap.bind(['g O'], function(e) { |
|
94 | Mousetrap.bind(['g O'], function(e) { | |
95 | window.location = pyroutes.url( |
|
95 | window.location = pyroutes.url( | |
96 | 'edit_repo_perms', {'repo_name': repoName}); |
|
96 | 'edit_repo_perms', {'repo_name': repoName}); | |
97 | }); |
|
97 | }); | |
98 | } |
|
98 | } | |
99 | } |
|
99 | } | |
100 |
|
100 | |||
101 | setRCMouseBindings(templateContext.repo_name, templateContext.repo_landing_commit); |
|
101 | setRCMouseBindings(templateContext.repo_name, templateContext.repo_landing_commit); |
@@ -1,234 +1,251 b'' | |||||
1 |
|
1 | |||
2 | /****************************************************************************** |
|
2 | /****************************************************************************** | |
3 | * * |
|
3 | * * | |
4 | * DO NOT CHANGE THIS FILE MANUALLY * |
|
4 | * DO NOT CHANGE THIS FILE MANUALLY * | |
5 | * * |
|
5 | * * | |
6 | * * |
|
6 | * * | |
7 | * This file is automatically generated when the app starts up with * |
|
7 | * This file is automatically generated when the app starts up with * | |
8 | * generate_js_files = true * |
|
8 | * generate_js_files = true * | |
9 | * * |
|
9 | * * | |
10 | * To add a route here pass jsroute=True to the route definition in the app * |
|
10 | * To add a route here pass jsroute=True to the route definition in the app * | |
11 | * * |
|
11 | * * | |
12 | ******************************************************************************/ |
|
12 | ******************************************************************************/ | |
13 | function registerRCRoutes() { |
|
13 | function registerRCRoutes() { | |
14 | // routes registration |
|
14 | // routes registration | |
15 | pyroutes.register('new_repo', '/_admin/create_repository', []); |
|
|||
16 | pyroutes.register('edit_user', '/_admin/users/%(user_id)s/edit', ['user_id']); |
|
15 | pyroutes.register('edit_user', '/_admin/users/%(user_id)s/edit', ['user_id']); | |
17 | pyroutes.register('favicon', '/favicon.ico', []); |
|
16 | pyroutes.register('favicon', '/favicon.ico', []); | |
18 | pyroutes.register('robots', '/robots.txt', []); |
|
17 | pyroutes.register('robots', '/robots.txt', []); | |
19 | pyroutes.register('auth_home', '/_admin/auth*traverse', []); |
|
18 | pyroutes.register('auth_home', '/_admin/auth*traverse', []); | |
20 | pyroutes.register('global_integrations_new', '/_admin/integrations/new', []); |
|
19 | pyroutes.register('global_integrations_new', '/_admin/integrations/new', []); | |
21 | pyroutes.register('global_integrations_home', '/_admin/integrations', []); |
|
20 | pyroutes.register('global_integrations_home', '/_admin/integrations', []); | |
22 | pyroutes.register('global_integrations_list', '/_admin/integrations/%(integration)s', ['integration']); |
|
21 | pyroutes.register('global_integrations_list', '/_admin/integrations/%(integration)s', ['integration']); | |
23 | pyroutes.register('global_integrations_create', '/_admin/integrations/%(integration)s/new', ['integration']); |
|
22 | pyroutes.register('global_integrations_create', '/_admin/integrations/%(integration)s/new', ['integration']); | |
24 | pyroutes.register('global_integrations_edit', '/_admin/integrations/%(integration)s/%(integration_id)s', ['integration', 'integration_id']); |
|
23 | pyroutes.register('global_integrations_edit', '/_admin/integrations/%(integration)s/%(integration_id)s', ['integration', 'integration_id']); | |
25 | pyroutes.register('repo_group_integrations_home', '/%(repo_group_name)s/settings/integrations', ['repo_group_name']); |
|
24 | pyroutes.register('repo_group_integrations_home', '/%(repo_group_name)s/settings/integrations', ['repo_group_name']); | |
26 | pyroutes.register('repo_group_integrations_new', '/%(repo_group_name)s/settings/integrations/new', ['repo_group_name']); |
|
25 | pyroutes.register('repo_group_integrations_new', '/%(repo_group_name)s/settings/integrations/new', ['repo_group_name']); | |
27 | pyroutes.register('repo_group_integrations_list', '/%(repo_group_name)s/settings/integrations/%(integration)s', ['repo_group_name', 'integration']); |
|
26 | pyroutes.register('repo_group_integrations_list', '/%(repo_group_name)s/settings/integrations/%(integration)s', ['repo_group_name', 'integration']); | |
28 | pyroutes.register('repo_group_integrations_create', '/%(repo_group_name)s/settings/integrations/%(integration)s/new', ['repo_group_name', 'integration']); |
|
27 | pyroutes.register('repo_group_integrations_create', '/%(repo_group_name)s/settings/integrations/%(integration)s/new', ['repo_group_name', 'integration']); | |
29 | pyroutes.register('repo_group_integrations_edit', '/%(repo_group_name)s/settings/integrations/%(integration)s/%(integration_id)s', ['repo_group_name', 'integration', 'integration_id']); |
|
28 | pyroutes.register('repo_group_integrations_edit', '/%(repo_group_name)s/settings/integrations/%(integration)s/%(integration_id)s', ['repo_group_name', 'integration', 'integration_id']); | |
30 | pyroutes.register('repo_integrations_home', '/%(repo_name)s/settings/integrations', ['repo_name']); |
|
29 | pyroutes.register('repo_integrations_home', '/%(repo_name)s/settings/integrations', ['repo_name']); | |
31 | pyroutes.register('repo_integrations_new', '/%(repo_name)s/settings/integrations/new', ['repo_name']); |
|
30 | pyroutes.register('repo_integrations_new', '/%(repo_name)s/settings/integrations/new', ['repo_name']); | |
32 | pyroutes.register('repo_integrations_list', '/%(repo_name)s/settings/integrations/%(integration)s', ['repo_name', 'integration']); |
|
31 | pyroutes.register('repo_integrations_list', '/%(repo_name)s/settings/integrations/%(integration)s', ['repo_name', 'integration']); | |
33 | pyroutes.register('repo_integrations_create', '/%(repo_name)s/settings/integrations/%(integration)s/new', ['repo_name', 'integration']); |
|
32 | pyroutes.register('repo_integrations_create', '/%(repo_name)s/settings/integrations/%(integration)s/new', ['repo_name', 'integration']); | |
34 | pyroutes.register('repo_integrations_edit', '/%(repo_name)s/settings/integrations/%(integration)s/%(integration_id)s', ['repo_name', 'integration', 'integration_id']); |
|
33 | pyroutes.register('repo_integrations_edit', '/%(repo_name)s/settings/integrations/%(integration)s/%(integration_id)s', ['repo_name', 'integration', 'integration_id']); | |
35 | pyroutes.register('ops_ping', '/_admin/ops/ping', []); |
|
34 | pyroutes.register('ops_ping', '/_admin/ops/ping', []); | |
36 | pyroutes.register('ops_error_test', '/_admin/ops/error', []); |
|
35 | pyroutes.register('ops_error_test', '/_admin/ops/error', []); | |
37 | pyroutes.register('ops_redirect_test', '/_admin/ops/redirect', []); |
|
36 | pyroutes.register('ops_redirect_test', '/_admin/ops/redirect', []); | |
38 | pyroutes.register('admin_home', '/_admin', []); |
|
37 | pyroutes.register('admin_home', '/_admin', []); | |
39 | pyroutes.register('admin_audit_logs', '/_admin/audit_logs', []); |
|
38 | pyroutes.register('admin_audit_logs', '/_admin/audit_logs', []); | |
40 | pyroutes.register('pull_requests_global_0', '/_admin/pull_requests/%(pull_request_id)s', ['pull_request_id']); |
|
39 | pyroutes.register('pull_requests_global_0', '/_admin/pull_requests/%(pull_request_id)s', ['pull_request_id']); | |
41 | pyroutes.register('pull_requests_global_1', '/_admin/pull-requests/%(pull_request_id)s', ['pull_request_id']); |
|
40 | pyroutes.register('pull_requests_global_1', '/_admin/pull-requests/%(pull_request_id)s', ['pull_request_id']); | |
42 | pyroutes.register('pull_requests_global', '/_admin/pull-request/%(pull_request_id)s', ['pull_request_id']); |
|
41 | pyroutes.register('pull_requests_global', '/_admin/pull-request/%(pull_request_id)s', ['pull_request_id']); | |
43 | pyroutes.register('admin_settings_open_source', '/_admin/settings/open_source', []); |
|
42 | pyroutes.register('admin_settings_open_source', '/_admin/settings/open_source', []); | |
44 | pyroutes.register('admin_settings_vcs_svn_generate_cfg', '/_admin/settings/vcs/svn_generate_cfg', []); |
|
43 | pyroutes.register('admin_settings_vcs_svn_generate_cfg', '/_admin/settings/vcs/svn_generate_cfg', []); | |
45 | pyroutes.register('admin_settings_system', '/_admin/settings/system', []); |
|
44 | pyroutes.register('admin_settings_system', '/_admin/settings/system', []); | |
46 | pyroutes.register('admin_settings_system_update', '/_admin/settings/system/updates', []); |
|
45 | pyroutes.register('admin_settings_system_update', '/_admin/settings/system/updates', []); | |
47 | pyroutes.register('admin_settings_sessions', '/_admin/settings/sessions', []); |
|
46 | pyroutes.register('admin_settings_sessions', '/_admin/settings/sessions', []); | |
48 | pyroutes.register('admin_settings_sessions_cleanup', '/_admin/settings/sessions/cleanup', []); |
|
47 | pyroutes.register('admin_settings_sessions_cleanup', '/_admin/settings/sessions/cleanup', []); | |
49 | pyroutes.register('admin_settings_process_management', '/_admin/settings/process_management', []); |
|
48 | pyroutes.register('admin_settings_process_management', '/_admin/settings/process_management', []); | |
50 | pyroutes.register('admin_settings_process_management_signal', '/_admin/settings/process_management/signal', []); |
|
49 | pyroutes.register('admin_settings_process_management_signal', '/_admin/settings/process_management/signal', []); | |
51 | pyroutes.register('admin_permissions_application', '/_admin/permissions/application', []); |
|
50 | pyroutes.register('admin_permissions_application', '/_admin/permissions/application', []); | |
52 | pyroutes.register('admin_permissions_application_update', '/_admin/permissions/application/update', []); |
|
51 | pyroutes.register('admin_permissions_application_update', '/_admin/permissions/application/update', []); | |
53 | pyroutes.register('admin_permissions_global', '/_admin/permissions/global', []); |
|
52 | pyroutes.register('admin_permissions_global', '/_admin/permissions/global', []); | |
54 | pyroutes.register('admin_permissions_global_update', '/_admin/permissions/global/update', []); |
|
53 | pyroutes.register('admin_permissions_global_update', '/_admin/permissions/global/update', []); | |
55 | pyroutes.register('admin_permissions_object', '/_admin/permissions/object', []); |
|
54 | pyroutes.register('admin_permissions_object', '/_admin/permissions/object', []); | |
56 | pyroutes.register('admin_permissions_object_update', '/_admin/permissions/object/update', []); |
|
55 | pyroutes.register('admin_permissions_object_update', '/_admin/permissions/object/update', []); | |
57 | pyroutes.register('admin_permissions_ips', '/_admin/permissions/ips', []); |
|
56 | pyroutes.register('admin_permissions_ips', '/_admin/permissions/ips', []); | |
58 | pyroutes.register('admin_permissions_overview', '/_admin/permissions/overview', []); |
|
57 | pyroutes.register('admin_permissions_overview', '/_admin/permissions/overview', []); | |
59 | pyroutes.register('admin_permissions_auth_token_access', '/_admin/permissions/auth_token_access', []); |
|
58 | pyroutes.register('admin_permissions_auth_token_access', '/_admin/permissions/auth_token_access', []); | |
60 | pyroutes.register('users', '/_admin/users', []); |
|
59 | pyroutes.register('users', '/_admin/users', []); | |
61 | pyroutes.register('users_data', '/_admin/users_data', []); |
|
60 | pyroutes.register('users_data', '/_admin/users_data', []); | |
62 | pyroutes.register('edit_user_auth_tokens', '/_admin/users/%(user_id)s/edit/auth_tokens', ['user_id']); |
|
61 | pyroutes.register('edit_user_auth_tokens', '/_admin/users/%(user_id)s/edit/auth_tokens', ['user_id']); | |
63 | pyroutes.register('edit_user_auth_tokens_add', '/_admin/users/%(user_id)s/edit/auth_tokens/new', ['user_id']); |
|
62 | pyroutes.register('edit_user_auth_tokens_add', '/_admin/users/%(user_id)s/edit/auth_tokens/new', ['user_id']); | |
64 | pyroutes.register('edit_user_auth_tokens_delete', '/_admin/users/%(user_id)s/edit/auth_tokens/delete', ['user_id']); |
|
63 | pyroutes.register('edit_user_auth_tokens_delete', '/_admin/users/%(user_id)s/edit/auth_tokens/delete', ['user_id']); | |
65 | pyroutes.register('edit_user_ssh_keys', '/_admin/users/%(user_id)s/edit/ssh_keys', ['user_id']); |
|
64 | pyroutes.register('edit_user_ssh_keys', '/_admin/users/%(user_id)s/edit/ssh_keys', ['user_id']); | |
66 | pyroutes.register('edit_user_ssh_keys_generate_keypair', '/_admin/users/%(user_id)s/edit/ssh_keys/generate', ['user_id']); |
|
65 | pyroutes.register('edit_user_ssh_keys_generate_keypair', '/_admin/users/%(user_id)s/edit/ssh_keys/generate', ['user_id']); | |
67 | pyroutes.register('edit_user_ssh_keys_add', '/_admin/users/%(user_id)s/edit/ssh_keys/new', ['user_id']); |
|
66 | pyroutes.register('edit_user_ssh_keys_add', '/_admin/users/%(user_id)s/edit/ssh_keys/new', ['user_id']); | |
68 | pyroutes.register('edit_user_ssh_keys_delete', '/_admin/users/%(user_id)s/edit/ssh_keys/delete', ['user_id']); |
|
67 | pyroutes.register('edit_user_ssh_keys_delete', '/_admin/users/%(user_id)s/edit/ssh_keys/delete', ['user_id']); | |
69 | pyroutes.register('edit_user_emails', '/_admin/users/%(user_id)s/edit/emails', ['user_id']); |
|
68 | pyroutes.register('edit_user_emails', '/_admin/users/%(user_id)s/edit/emails', ['user_id']); | |
70 | pyroutes.register('edit_user_emails_add', '/_admin/users/%(user_id)s/edit/emails/new', ['user_id']); |
|
69 | pyroutes.register('edit_user_emails_add', '/_admin/users/%(user_id)s/edit/emails/new', ['user_id']); | |
71 | pyroutes.register('edit_user_emails_delete', '/_admin/users/%(user_id)s/edit/emails/delete', ['user_id']); |
|
70 | pyroutes.register('edit_user_emails_delete', '/_admin/users/%(user_id)s/edit/emails/delete', ['user_id']); | |
72 | pyroutes.register('edit_user_ips', '/_admin/users/%(user_id)s/edit/ips', ['user_id']); |
|
71 | pyroutes.register('edit_user_ips', '/_admin/users/%(user_id)s/edit/ips', ['user_id']); | |
73 | pyroutes.register('edit_user_ips_add', '/_admin/users/%(user_id)s/edit/ips/new', ['user_id']); |
|
72 | pyroutes.register('edit_user_ips_add', '/_admin/users/%(user_id)s/edit/ips/new', ['user_id']); | |
74 | pyroutes.register('edit_user_ips_delete', '/_admin/users/%(user_id)s/edit/ips/delete', ['user_id']); |
|
73 | pyroutes.register('edit_user_ips_delete', '/_admin/users/%(user_id)s/edit/ips/delete', ['user_id']); | |
75 | pyroutes.register('edit_user_perms_summary', '/_admin/users/%(user_id)s/edit/permissions_summary', ['user_id']); |
|
74 | pyroutes.register('edit_user_perms_summary', '/_admin/users/%(user_id)s/edit/permissions_summary', ['user_id']); | |
76 | pyroutes.register('edit_user_perms_summary_json', '/_admin/users/%(user_id)s/edit/permissions_summary/json', ['user_id']); |
|
75 | pyroutes.register('edit_user_perms_summary_json', '/_admin/users/%(user_id)s/edit/permissions_summary/json', ['user_id']); | |
77 | pyroutes.register('edit_user_groups_management', '/_admin/users/%(user_id)s/edit/groups_management', ['user_id']); |
|
76 | pyroutes.register('edit_user_groups_management', '/_admin/users/%(user_id)s/edit/groups_management', ['user_id']); | |
78 | pyroutes.register('edit_user_groups_management_updates', '/_admin/users/%(user_id)s/edit/edit_user_groups_management/updates', ['user_id']); |
|
77 | pyroutes.register('edit_user_groups_management_updates', '/_admin/users/%(user_id)s/edit/edit_user_groups_management/updates', ['user_id']); | |
79 | pyroutes.register('edit_user_audit_logs', '/_admin/users/%(user_id)s/edit/audit', ['user_id']); |
|
78 | pyroutes.register('edit_user_audit_logs', '/_admin/users/%(user_id)s/edit/audit', ['user_id']); | |
80 | pyroutes.register('user_groups', '/_admin/user_groups', []); |
|
79 | pyroutes.register('user_groups', '/_admin/user_groups', []); | |
81 | pyroutes.register('user_groups_data', '/_admin/user_groups_data', []); |
|
80 | pyroutes.register('user_groups_data', '/_admin/user_groups_data', []); | |
82 | pyroutes.register('user_group_members_data', '/_admin/user_groups/%(user_group_id)s/members', ['user_group_id']); |
|
81 | pyroutes.register('user_group_members_data', '/_admin/user_groups/%(user_group_id)s/members', ['user_group_id']); | |
83 | pyroutes.register('edit_user_group_perms_summary', '/_admin/user_groups/%(user_group_id)s/edit/permissions_summary', ['user_group_id']); |
|
82 | pyroutes.register('edit_user_group_perms_summary', '/_admin/user_groups/%(user_group_id)s/edit/permissions_summary', ['user_group_id']); | |
84 | pyroutes.register('edit_user_group_perms_summary_json', '/_admin/user_groups/%(user_group_id)s/edit/permissions_summary/json', ['user_group_id']); |
|
83 | pyroutes.register('edit_user_group_perms_summary_json', '/_admin/user_groups/%(user_group_id)s/edit/permissions_summary/json', ['user_group_id']); | |
|
84 | pyroutes.register('repos', '/_admin/repos', []); | |||
|
85 | pyroutes.register('repo_new', '/_admin/repos/new', []); | |||
|
86 | pyroutes.register('repo_create', '/_admin/repos/create', []); | |||
85 | pyroutes.register('channelstream_connect', '/_admin/channelstream/connect', []); |
|
87 | pyroutes.register('channelstream_connect', '/_admin/channelstream/connect', []); | |
86 | pyroutes.register('channelstream_subscribe', '/_admin/channelstream/subscribe', []); |
|
88 | pyroutes.register('channelstream_subscribe', '/_admin/channelstream/subscribe', []); | |
87 | pyroutes.register('channelstream_proxy', '/_channelstream', []); |
|
89 | pyroutes.register('channelstream_proxy', '/_channelstream', []); | |
88 | pyroutes.register('login', '/_admin/login', []); |
|
90 | pyroutes.register('login', '/_admin/login', []); | |
89 | pyroutes.register('logout', '/_admin/logout', []); |
|
91 | pyroutes.register('logout', '/_admin/logout', []); | |
90 | pyroutes.register('register', '/_admin/register', []); |
|
92 | pyroutes.register('register', '/_admin/register', []); | |
91 | pyroutes.register('reset_password', '/_admin/password_reset', []); |
|
93 | pyroutes.register('reset_password', '/_admin/password_reset', []); | |
92 | pyroutes.register('reset_password_confirmation', '/_admin/password_reset_confirmation', []); |
|
94 | pyroutes.register('reset_password_confirmation', '/_admin/password_reset_confirmation', []); | |
93 | pyroutes.register('home', '/', []); |
|
95 | pyroutes.register('home', '/', []); | |
94 | pyroutes.register('user_autocomplete_data', '/_users', []); |
|
96 | pyroutes.register('user_autocomplete_data', '/_users', []); | |
95 | pyroutes.register('user_group_autocomplete_data', '/_user_groups', []); |
|
97 | pyroutes.register('user_group_autocomplete_data', '/_user_groups', []); | |
96 | pyroutes.register('repo_list_data', '/_repos', []); |
|
98 | pyroutes.register('repo_list_data', '/_repos', []); | |
97 | pyroutes.register('goto_switcher_data', '/_goto_data', []); |
|
99 | pyroutes.register('goto_switcher_data', '/_goto_data', []); | |
98 | pyroutes.register('journal', '/_admin/journal', []); |
|
100 | pyroutes.register('journal', '/_admin/journal', []); | |
99 | pyroutes.register('journal_rss', '/_admin/journal/rss', []); |
|
101 | pyroutes.register('journal_rss', '/_admin/journal/rss', []); | |
100 | pyroutes.register('journal_atom', '/_admin/journal/atom', []); |
|
102 | pyroutes.register('journal_atom', '/_admin/journal/atom', []); | |
101 | pyroutes.register('journal_public', '/_admin/public_journal', []); |
|
103 | pyroutes.register('journal_public', '/_admin/public_journal', []); | |
102 | pyroutes.register('journal_public_atom', '/_admin/public_journal/atom', []); |
|
104 | pyroutes.register('journal_public_atom', '/_admin/public_journal/atom', []); | |
103 | pyroutes.register('journal_public_atom_old', '/_admin/public_journal_atom', []); |
|
105 | pyroutes.register('journal_public_atom_old', '/_admin/public_journal_atom', []); | |
104 | pyroutes.register('journal_public_rss', '/_admin/public_journal/rss', []); |
|
106 | pyroutes.register('journal_public_rss', '/_admin/public_journal/rss', []); | |
105 | pyroutes.register('journal_public_rss_old', '/_admin/public_journal_rss', []); |
|
107 | pyroutes.register('journal_public_rss_old', '/_admin/public_journal_rss', []); | |
106 | pyroutes.register('toggle_following', '/_admin/toggle_following', []); |
|
108 | pyroutes.register('toggle_following', '/_admin/toggle_following', []); | |
107 | pyroutes.register('repo_creating', '/%(repo_name)s/repo_creating', ['repo_name']); |
|
109 | pyroutes.register('repo_creating', '/%(repo_name)s/repo_creating', ['repo_name']); | |
108 | pyroutes.register('repo_creating_check', '/%(repo_name)s/repo_creating_check', ['repo_name']); |
|
110 | pyroutes.register('repo_creating_check', '/%(repo_name)s/repo_creating_check', ['repo_name']); | |
109 | pyroutes.register('repo_summary_explicit', '/%(repo_name)s/summary', ['repo_name']); |
|
111 | pyroutes.register('repo_summary_explicit', '/%(repo_name)s/summary', ['repo_name']); | |
110 | pyroutes.register('repo_summary_commits', '/%(repo_name)s/summary-commits', ['repo_name']); |
|
112 | pyroutes.register('repo_summary_commits', '/%(repo_name)s/summary-commits', ['repo_name']); | |
111 | pyroutes.register('repo_commit', '/%(repo_name)s/changeset/%(commit_id)s', ['repo_name', 'commit_id']); |
|
113 | pyroutes.register('repo_commit', '/%(repo_name)s/changeset/%(commit_id)s', ['repo_name', 'commit_id']); | |
112 | pyroutes.register('repo_commit_children', '/%(repo_name)s/changeset_children/%(commit_id)s', ['repo_name', 'commit_id']); |
|
114 | pyroutes.register('repo_commit_children', '/%(repo_name)s/changeset_children/%(commit_id)s', ['repo_name', 'commit_id']); | |
113 | pyroutes.register('repo_commit_parents', '/%(repo_name)s/changeset_parents/%(commit_id)s', ['repo_name', 'commit_id']); |
|
115 | pyroutes.register('repo_commit_parents', '/%(repo_name)s/changeset_parents/%(commit_id)s', ['repo_name', 'commit_id']); | |
114 | pyroutes.register('repo_commit_raw', '/%(repo_name)s/changeset-diff/%(commit_id)s', ['repo_name', 'commit_id']); |
|
116 | pyroutes.register('repo_commit_raw', '/%(repo_name)s/changeset-diff/%(commit_id)s', ['repo_name', 'commit_id']); | |
115 | pyroutes.register('repo_commit_patch', '/%(repo_name)s/changeset-patch/%(commit_id)s', ['repo_name', 'commit_id']); |
|
117 | pyroutes.register('repo_commit_patch', '/%(repo_name)s/changeset-patch/%(commit_id)s', ['repo_name', 'commit_id']); | |
116 | pyroutes.register('repo_commit_download', '/%(repo_name)s/changeset-download/%(commit_id)s', ['repo_name', 'commit_id']); |
|
118 | pyroutes.register('repo_commit_download', '/%(repo_name)s/changeset-download/%(commit_id)s', ['repo_name', 'commit_id']); | |
117 | pyroutes.register('repo_commit_data', '/%(repo_name)s/changeset-data/%(commit_id)s', ['repo_name', 'commit_id']); |
|
119 | pyroutes.register('repo_commit_data', '/%(repo_name)s/changeset-data/%(commit_id)s', ['repo_name', 'commit_id']); | |
118 | pyroutes.register('repo_commit_comment_create', '/%(repo_name)s/changeset/%(commit_id)s/comment/create', ['repo_name', 'commit_id']); |
|
120 | pyroutes.register('repo_commit_comment_create', '/%(repo_name)s/changeset/%(commit_id)s/comment/create', ['repo_name', 'commit_id']); | |
119 | pyroutes.register('repo_commit_comment_preview', '/%(repo_name)s/changeset/%(commit_id)s/comment/preview', ['repo_name', 'commit_id']); |
|
121 | pyroutes.register('repo_commit_comment_preview', '/%(repo_name)s/changeset/%(commit_id)s/comment/preview', ['repo_name', 'commit_id']); | |
120 | pyroutes.register('repo_commit_comment_delete', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_id)s/delete', ['repo_name', 'commit_id', 'comment_id']); |
|
122 | pyroutes.register('repo_commit_comment_delete', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_id)s/delete', ['repo_name', 'commit_id', 'comment_id']); | |
121 | pyroutes.register('repo_commit_raw_deprecated', '/%(repo_name)s/raw-changeset/%(commit_id)s', ['repo_name', 'commit_id']); |
|
123 | pyroutes.register('repo_commit_raw_deprecated', '/%(repo_name)s/raw-changeset/%(commit_id)s', ['repo_name', 'commit_id']); | |
122 | pyroutes.register('repo_archivefile', '/%(repo_name)s/archive/%(fname)s', ['repo_name', 'fname']); |
|
124 | pyroutes.register('repo_archivefile', '/%(repo_name)s/archive/%(fname)s', ['repo_name', 'fname']); | |
123 | pyroutes.register('repo_files_diff', '/%(repo_name)s/diff/%(f_path)s', ['repo_name', 'f_path']); |
|
125 | pyroutes.register('repo_files_diff', '/%(repo_name)s/diff/%(f_path)s', ['repo_name', 'f_path']); | |
124 | pyroutes.register('repo_files_diff_2way_redirect', '/%(repo_name)s/diff-2way/%(f_path)s', ['repo_name', 'f_path']); |
|
126 | pyroutes.register('repo_files_diff_2way_redirect', '/%(repo_name)s/diff-2way/%(f_path)s', ['repo_name', 'f_path']); | |
125 | pyroutes.register('repo_files', '/%(repo_name)s/files/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
127 | pyroutes.register('repo_files', '/%(repo_name)s/files/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
126 | pyroutes.register('repo_files:default_path', '/%(repo_name)s/files/%(commit_id)s/', ['repo_name', 'commit_id']); |
|
128 | pyroutes.register('repo_files:default_path', '/%(repo_name)s/files/%(commit_id)s/', ['repo_name', 'commit_id']); | |
127 | pyroutes.register('repo_files:default_commit', '/%(repo_name)s/files', ['repo_name']); |
|
129 | pyroutes.register('repo_files:default_commit', '/%(repo_name)s/files', ['repo_name']); | |
128 | pyroutes.register('repo_files:rendered', '/%(repo_name)s/render/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
130 | pyroutes.register('repo_files:rendered', '/%(repo_name)s/render/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
129 | pyroutes.register('repo_files:annotated', '/%(repo_name)s/annotate/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
131 | pyroutes.register('repo_files:annotated', '/%(repo_name)s/annotate/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
130 | pyroutes.register('repo_files:annotated_previous', '/%(repo_name)s/annotate-previous/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
132 | pyroutes.register('repo_files:annotated_previous', '/%(repo_name)s/annotate-previous/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
131 | pyroutes.register('repo_nodetree_full', '/%(repo_name)s/nodetree_full/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
133 | pyroutes.register('repo_nodetree_full', '/%(repo_name)s/nodetree_full/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
132 | pyroutes.register('repo_nodetree_full:default_path', '/%(repo_name)s/nodetree_full/%(commit_id)s/', ['repo_name', 'commit_id']); |
|
134 | pyroutes.register('repo_nodetree_full:default_path', '/%(repo_name)s/nodetree_full/%(commit_id)s/', ['repo_name', 'commit_id']); | |
133 | pyroutes.register('repo_files_nodelist', '/%(repo_name)s/nodelist/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
135 | pyroutes.register('repo_files_nodelist', '/%(repo_name)s/nodelist/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
134 | pyroutes.register('repo_file_raw', '/%(repo_name)s/raw/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
136 | pyroutes.register('repo_file_raw', '/%(repo_name)s/raw/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
135 | pyroutes.register('repo_file_download', '/%(repo_name)s/download/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
137 | pyroutes.register('repo_file_download', '/%(repo_name)s/download/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
136 | pyroutes.register('repo_file_download:legacy', '/%(repo_name)s/rawfile/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
138 | pyroutes.register('repo_file_download:legacy', '/%(repo_name)s/rawfile/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
137 | pyroutes.register('repo_file_history', '/%(repo_name)s/history/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
139 | pyroutes.register('repo_file_history', '/%(repo_name)s/history/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
138 | pyroutes.register('repo_file_authors', '/%(repo_name)s/authors/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
140 | pyroutes.register('repo_file_authors', '/%(repo_name)s/authors/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
139 | pyroutes.register('repo_files_remove_file', '/%(repo_name)s/remove_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
141 | pyroutes.register('repo_files_remove_file', '/%(repo_name)s/remove_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
140 | pyroutes.register('repo_files_delete_file', '/%(repo_name)s/delete_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
142 | pyroutes.register('repo_files_delete_file', '/%(repo_name)s/delete_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
141 | pyroutes.register('repo_files_edit_file', '/%(repo_name)s/edit_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
143 | pyroutes.register('repo_files_edit_file', '/%(repo_name)s/edit_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
142 | pyroutes.register('repo_files_update_file', '/%(repo_name)s/update_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
144 | pyroutes.register('repo_files_update_file', '/%(repo_name)s/update_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
143 | pyroutes.register('repo_files_add_file', '/%(repo_name)s/add_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
145 | pyroutes.register('repo_files_add_file', '/%(repo_name)s/add_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
144 | pyroutes.register('repo_files_create_file', '/%(repo_name)s/create_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
146 | pyroutes.register('repo_files_create_file', '/%(repo_name)s/create_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
145 | pyroutes.register('repo_refs_data', '/%(repo_name)s/refs-data', ['repo_name']); |
|
147 | pyroutes.register('repo_refs_data', '/%(repo_name)s/refs-data', ['repo_name']); | |
146 | pyroutes.register('repo_refs_changelog_data', '/%(repo_name)s/refs-data-changelog', ['repo_name']); |
|
148 | pyroutes.register('repo_refs_changelog_data', '/%(repo_name)s/refs-data-changelog', ['repo_name']); | |
147 | pyroutes.register('repo_stats', '/%(repo_name)s/repo_stats/%(commit_id)s', ['repo_name', 'commit_id']); |
|
149 | pyroutes.register('repo_stats', '/%(repo_name)s/repo_stats/%(commit_id)s', ['repo_name', 'commit_id']); | |
148 | pyroutes.register('repo_changelog', '/%(repo_name)s/changelog', ['repo_name']); |
|
150 | pyroutes.register('repo_changelog', '/%(repo_name)s/changelog', ['repo_name']); | |
149 | pyroutes.register('repo_changelog_file', '/%(repo_name)s/changelog/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
151 | pyroutes.register('repo_changelog_file', '/%(repo_name)s/changelog/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
150 | pyroutes.register('repo_changelog_elements', '/%(repo_name)s/changelog_elements', ['repo_name']); |
|
152 | pyroutes.register('repo_changelog_elements', '/%(repo_name)s/changelog_elements', ['repo_name']); | |
151 | pyroutes.register('repo_compare_select', '/%(repo_name)s/compare', ['repo_name']); |
|
153 | pyroutes.register('repo_compare_select', '/%(repo_name)s/compare', ['repo_name']); | |
152 | pyroutes.register('repo_compare', '/%(repo_name)s/compare/%(source_ref_type)s@%(source_ref)s...%(target_ref_type)s@%(target_ref)s', ['repo_name', 'source_ref_type', 'source_ref', 'target_ref_type', 'target_ref']); |
|
154 | pyroutes.register('repo_compare', '/%(repo_name)s/compare/%(source_ref_type)s@%(source_ref)s...%(target_ref_type)s@%(target_ref)s', ['repo_name', 'source_ref_type', 'source_ref', 'target_ref_type', 'target_ref']); | |
153 | pyroutes.register('tags_home', '/%(repo_name)s/tags', ['repo_name']); |
|
155 | pyroutes.register('tags_home', '/%(repo_name)s/tags', ['repo_name']); | |
154 | pyroutes.register('branches_home', '/%(repo_name)s/branches', ['repo_name']); |
|
156 | pyroutes.register('branches_home', '/%(repo_name)s/branches', ['repo_name']); | |
155 | pyroutes.register('bookmarks_home', '/%(repo_name)s/bookmarks', ['repo_name']); |
|
157 | pyroutes.register('bookmarks_home', '/%(repo_name)s/bookmarks', ['repo_name']); | |
156 | pyroutes.register('repo_fork_new', '/%(repo_name)s/fork', ['repo_name']); |
|
158 | pyroutes.register('repo_fork_new', '/%(repo_name)s/fork', ['repo_name']); | |
157 | pyroutes.register('repo_fork_create', '/%(repo_name)s/fork/create', ['repo_name']); |
|
159 | pyroutes.register('repo_fork_create', '/%(repo_name)s/fork/create', ['repo_name']); | |
158 | pyroutes.register('repo_forks_show_all', '/%(repo_name)s/forks', ['repo_name']); |
|
160 | pyroutes.register('repo_forks_show_all', '/%(repo_name)s/forks', ['repo_name']); | |
159 | pyroutes.register('repo_forks_data', '/%(repo_name)s/forks/data', ['repo_name']); |
|
161 | pyroutes.register('repo_forks_data', '/%(repo_name)s/forks/data', ['repo_name']); | |
160 | pyroutes.register('pullrequest_show', '/%(repo_name)s/pull-request/%(pull_request_id)s', ['repo_name', 'pull_request_id']); |
|
162 | pyroutes.register('pullrequest_show', '/%(repo_name)s/pull-request/%(pull_request_id)s', ['repo_name', 'pull_request_id']); | |
161 | pyroutes.register('pullrequest_show_all', '/%(repo_name)s/pull-request', ['repo_name']); |
|
163 | pyroutes.register('pullrequest_show_all', '/%(repo_name)s/pull-request', ['repo_name']); | |
162 | pyroutes.register('pullrequest_show_all_data', '/%(repo_name)s/pull-request-data', ['repo_name']); |
|
164 | pyroutes.register('pullrequest_show_all_data', '/%(repo_name)s/pull-request-data', ['repo_name']); | |
163 | pyroutes.register('pullrequest_repo_refs', '/%(repo_name)s/pull-request/refs/%(target_repo_name)s', ['repo_name', 'target_repo_name']); |
|
165 | pyroutes.register('pullrequest_repo_refs', '/%(repo_name)s/pull-request/refs/%(target_repo_name)s', ['repo_name', 'target_repo_name']); | |
164 | pyroutes.register('pullrequest_repo_destinations', '/%(repo_name)s/pull-request/repo-destinations', ['repo_name']); |
|
166 | pyroutes.register('pullrequest_repo_destinations', '/%(repo_name)s/pull-request/repo-destinations', ['repo_name']); | |
165 | pyroutes.register('pullrequest_new', '/%(repo_name)s/pull-request/new', ['repo_name']); |
|
167 | pyroutes.register('pullrequest_new', '/%(repo_name)s/pull-request/new', ['repo_name']); | |
166 | pyroutes.register('pullrequest_create', '/%(repo_name)s/pull-request/create', ['repo_name']); |
|
168 | pyroutes.register('pullrequest_create', '/%(repo_name)s/pull-request/create', ['repo_name']); | |
167 | pyroutes.register('pullrequest_update', '/%(repo_name)s/pull-request/%(pull_request_id)s/update', ['repo_name', 'pull_request_id']); |
|
169 | pyroutes.register('pullrequest_update', '/%(repo_name)s/pull-request/%(pull_request_id)s/update', ['repo_name', 'pull_request_id']); | |
168 | pyroutes.register('pullrequest_merge', '/%(repo_name)s/pull-request/%(pull_request_id)s/merge', ['repo_name', 'pull_request_id']); |
|
170 | pyroutes.register('pullrequest_merge', '/%(repo_name)s/pull-request/%(pull_request_id)s/merge', ['repo_name', 'pull_request_id']); | |
169 | pyroutes.register('pullrequest_delete', '/%(repo_name)s/pull-request/%(pull_request_id)s/delete', ['repo_name', 'pull_request_id']); |
|
171 | pyroutes.register('pullrequest_delete', '/%(repo_name)s/pull-request/%(pull_request_id)s/delete', ['repo_name', 'pull_request_id']); | |
170 | pyroutes.register('pullrequest_comment_create', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment', ['repo_name', 'pull_request_id']); |
|
172 | pyroutes.register('pullrequest_comment_create', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment', ['repo_name', 'pull_request_id']); | |
171 | pyroutes.register('pullrequest_comment_delete', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment/%(comment_id)s/delete', ['repo_name', 'pull_request_id', 'comment_id']); |
|
173 | pyroutes.register('pullrequest_comment_delete', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment/%(comment_id)s/delete', ['repo_name', 'pull_request_id', 'comment_id']); | |
172 | pyroutes.register('edit_repo', '/%(repo_name)s/settings', ['repo_name']); |
|
174 | pyroutes.register('edit_repo', '/%(repo_name)s/settings', ['repo_name']); | |
173 | pyroutes.register('edit_repo_advanced', '/%(repo_name)s/settings/advanced', ['repo_name']); |
|
175 | pyroutes.register('edit_repo_advanced', '/%(repo_name)s/settings/advanced', ['repo_name']); | |
174 | pyroutes.register('edit_repo_advanced_delete', '/%(repo_name)s/settings/advanced/delete', ['repo_name']); |
|
176 | pyroutes.register('edit_repo_advanced_delete', '/%(repo_name)s/settings/advanced/delete', ['repo_name']); | |
175 | pyroutes.register('edit_repo_advanced_locking', '/%(repo_name)s/settings/advanced/locking', ['repo_name']); |
|
177 | pyroutes.register('edit_repo_advanced_locking', '/%(repo_name)s/settings/advanced/locking', ['repo_name']); | |
176 | pyroutes.register('edit_repo_advanced_journal', '/%(repo_name)s/settings/advanced/journal', ['repo_name']); |
|
178 | pyroutes.register('edit_repo_advanced_journal', '/%(repo_name)s/settings/advanced/journal', ['repo_name']); | |
177 | pyroutes.register('edit_repo_advanced_fork', '/%(repo_name)s/settings/advanced/fork', ['repo_name']); |
|
179 | pyroutes.register('edit_repo_advanced_fork', '/%(repo_name)s/settings/advanced/fork', ['repo_name']); | |
178 | pyroutes.register('edit_repo_caches', '/%(repo_name)s/settings/caches', ['repo_name']); |
|
180 | pyroutes.register('edit_repo_caches', '/%(repo_name)s/settings/caches', ['repo_name']); | |
179 | pyroutes.register('edit_repo_perms', '/%(repo_name)s/settings/permissions', ['repo_name']); |
|
181 | pyroutes.register('edit_repo_perms', '/%(repo_name)s/settings/permissions', ['repo_name']); | |
|
182 | pyroutes.register('edit_repo_maintenance', '/%(repo_name)s/settings/maintenance', ['repo_name']); | |||
|
183 | pyroutes.register('edit_repo_maintenance_execute', '/%(repo_name)s/settings/maintenance/execute', ['repo_name']); | |||
|
184 | pyroutes.register('edit_repo_fields', '/%(repo_name)s/settings/fields', ['repo_name']); | |||
|
185 | pyroutes.register('edit_repo_fields_create', '/%(repo_name)s/settings/fields/create', ['repo_name']); | |||
|
186 | pyroutes.register('edit_repo_fields_delete', '/%(repo_name)s/settings/fields/%(field_id)s/delete', ['repo_name', 'field_id']); | |||
|
187 | pyroutes.register('repo_edit_toggle_locking', '/%(repo_name)s/settings/toggle_locking', ['repo_name']); | |||
|
188 | pyroutes.register('edit_repo_remote', '/%(repo_name)s/settings/remote', ['repo_name']); | |||
|
189 | pyroutes.register('edit_repo_remote_pull', '/%(repo_name)s/settings/remote/pull', ['repo_name']); | |||
|
190 | pyroutes.register('edit_repo_statistics', '/%(repo_name)s/settings/statistics', ['repo_name']); | |||
|
191 | pyroutes.register('edit_repo_statistics_reset', '/%(repo_name)s/settings/statistics/update', ['repo_name']); | |||
|
192 | pyroutes.register('edit_repo_issuetracker', '/%(repo_name)s/settings/issue_trackers', ['repo_name']); | |||
|
193 | pyroutes.register('edit_repo_issuetracker_test', '/%(repo_name)s/settings/issue_trackers/test', ['repo_name']); | |||
|
194 | pyroutes.register('edit_repo_issuetracker_delete', '/%(repo_name)s/settings/issue_trackers/delete', ['repo_name']); | |||
|
195 | pyroutes.register('edit_repo_issuetracker_update', '/%(repo_name)s/settings/issue_trackers/update', ['repo_name']); | |||
|
196 | pyroutes.register('edit_repo_vcs', '/%(repo_name)s/settings/vcs', ['repo_name']); | |||
|
197 | pyroutes.register('edit_repo_vcs_update', '/%(repo_name)s/settings/vcs/update', ['repo_name']); | |||
|
198 | pyroutes.register('edit_repo_vcs_svn_pattern_delete', '/%(repo_name)s/settings/vcs/svn_pattern/delete', ['repo_name']); | |||
180 | pyroutes.register('repo_reviewers', '/%(repo_name)s/settings/review/rules', ['repo_name']); |
|
199 | pyroutes.register('repo_reviewers', '/%(repo_name)s/settings/review/rules', ['repo_name']); | |
181 | pyroutes.register('repo_default_reviewers_data', '/%(repo_name)s/settings/review/default-reviewers', ['repo_name']); |
|
200 | pyroutes.register('repo_default_reviewers_data', '/%(repo_name)s/settings/review/default-reviewers', ['repo_name']); | |
182 |
pyroutes.register(' |
|
201 | pyroutes.register('edit_repo_strip', '/%(repo_name)s/settings/strip', ['repo_name']); | |
183 | pyroutes.register('repo_maintenance_execute', '/%(repo_name)s/settings/maintenance/execute', ['repo_name']); |
|
|||
184 | pyroutes.register('strip', '/%(repo_name)s/settings/strip', ['repo_name']); |
|
|||
185 | pyroutes.register('strip_check', '/%(repo_name)s/settings/strip_check', ['repo_name']); |
|
202 | pyroutes.register('strip_check', '/%(repo_name)s/settings/strip_check', ['repo_name']); | |
186 | pyroutes.register('strip_execute', '/%(repo_name)s/settings/strip_execute', ['repo_name']); |
|
203 | pyroutes.register('strip_execute', '/%(repo_name)s/settings/strip_execute', ['repo_name']); | |
187 | pyroutes.register('rss_feed_home', '/%(repo_name)s/feed/rss', ['repo_name']); |
|
204 | pyroutes.register('rss_feed_home', '/%(repo_name)s/feed/rss', ['repo_name']); | |
188 | pyroutes.register('atom_feed_home', '/%(repo_name)s/feed/atom', ['repo_name']); |
|
205 | pyroutes.register('atom_feed_home', '/%(repo_name)s/feed/atom', ['repo_name']); | |
189 | pyroutes.register('repo_summary', '/%(repo_name)s', ['repo_name']); |
|
206 | pyroutes.register('repo_summary', '/%(repo_name)s', ['repo_name']); | |
190 | pyroutes.register('repo_summary_slash', '/%(repo_name)s/', ['repo_name']); |
|
207 | pyroutes.register('repo_summary_slash', '/%(repo_name)s/', ['repo_name']); | |
191 | pyroutes.register('repo_group_home', '/%(repo_group_name)s', ['repo_group_name']); |
|
208 | pyroutes.register('repo_group_home', '/%(repo_group_name)s', ['repo_group_name']); | |
192 | pyroutes.register('repo_group_home_slash', '/%(repo_group_name)s/', ['repo_group_name']); |
|
209 | pyroutes.register('repo_group_home_slash', '/%(repo_group_name)s/', ['repo_group_name']); | |
193 | pyroutes.register('search', '/_admin/search', []); |
|
210 | pyroutes.register('search', '/_admin/search', []); | |
194 | pyroutes.register('search_repo', '/%(repo_name)s/search', ['repo_name']); |
|
211 | pyroutes.register('search_repo', '/%(repo_name)s/search', ['repo_name']); | |
195 | pyroutes.register('user_profile', '/_profiles/%(username)s', ['username']); |
|
212 | pyroutes.register('user_profile', '/_profiles/%(username)s', ['username']); | |
196 | pyroutes.register('my_account_profile', '/_admin/my_account/profile', []); |
|
213 | pyroutes.register('my_account_profile', '/_admin/my_account/profile', []); | |
197 | pyroutes.register('my_account_edit', '/_admin/my_account/edit', []); |
|
214 | pyroutes.register('my_account_edit', '/_admin/my_account/edit', []); | |
198 | pyroutes.register('my_account_update', '/_admin/my_account/update', []); |
|
215 | pyroutes.register('my_account_update', '/_admin/my_account/update', []); | |
199 | pyroutes.register('my_account_password', '/_admin/my_account/password', []); |
|
216 | pyroutes.register('my_account_password', '/_admin/my_account/password', []); | |
200 | pyroutes.register('my_account_password_update', '/_admin/my_account/password/update', []); |
|
217 | pyroutes.register('my_account_password_update', '/_admin/my_account/password/update', []); | |
201 | pyroutes.register('my_account_auth_tokens', '/_admin/my_account/auth_tokens', []); |
|
218 | pyroutes.register('my_account_auth_tokens', '/_admin/my_account/auth_tokens', []); | |
202 | pyroutes.register('my_account_auth_tokens_add', '/_admin/my_account/auth_tokens/new', []); |
|
219 | pyroutes.register('my_account_auth_tokens_add', '/_admin/my_account/auth_tokens/new', []); | |
203 | pyroutes.register('my_account_auth_tokens_delete', '/_admin/my_account/auth_tokens/delete', []); |
|
220 | pyroutes.register('my_account_auth_tokens_delete', '/_admin/my_account/auth_tokens/delete', []); | |
204 | pyroutes.register('my_account_emails', '/_admin/my_account/emails', []); |
|
221 | pyroutes.register('my_account_emails', '/_admin/my_account/emails', []); | |
205 | pyroutes.register('my_account_emails_add', '/_admin/my_account/emails/new', []); |
|
222 | pyroutes.register('my_account_emails_add', '/_admin/my_account/emails/new', []); | |
206 | pyroutes.register('my_account_emails_delete', '/_admin/my_account/emails/delete', []); |
|
223 | pyroutes.register('my_account_emails_delete', '/_admin/my_account/emails/delete', []); | |
207 | pyroutes.register('my_account_repos', '/_admin/my_account/repos', []); |
|
224 | pyroutes.register('my_account_repos', '/_admin/my_account/repos', []); | |
208 | pyroutes.register('my_account_watched', '/_admin/my_account/watched', []); |
|
225 | pyroutes.register('my_account_watched', '/_admin/my_account/watched', []); | |
209 | pyroutes.register('my_account_perms', '/_admin/my_account/perms', []); |
|
226 | pyroutes.register('my_account_perms', '/_admin/my_account/perms', []); | |
210 | pyroutes.register('my_account_notifications', '/_admin/my_account/notifications', []); |
|
227 | pyroutes.register('my_account_notifications', '/_admin/my_account/notifications', []); | |
211 | pyroutes.register('my_account_notifications_toggle_visibility', '/_admin/my_account/toggle_visibility', []); |
|
228 | pyroutes.register('my_account_notifications_toggle_visibility', '/_admin/my_account/toggle_visibility', []); | |
212 | pyroutes.register('my_account_pullrequests', '/_admin/my_account/pull_requests', []); |
|
229 | pyroutes.register('my_account_pullrequests', '/_admin/my_account/pull_requests', []); | |
213 | pyroutes.register('my_account_pullrequests_data', '/_admin/my_account/pull_requests/data', []); |
|
230 | pyroutes.register('my_account_pullrequests_data', '/_admin/my_account/pull_requests/data', []); | |
214 | pyroutes.register('notifications_show_all', '/_admin/notifications', []); |
|
231 | pyroutes.register('notifications_show_all', '/_admin/notifications', []); | |
215 | pyroutes.register('notifications_mark_all_read', '/_admin/notifications/mark_all_read', []); |
|
232 | pyroutes.register('notifications_mark_all_read', '/_admin/notifications/mark_all_read', []); | |
216 | pyroutes.register('notifications_show', '/_admin/notifications/%(notification_id)s', ['notification_id']); |
|
233 | pyroutes.register('notifications_show', '/_admin/notifications/%(notification_id)s', ['notification_id']); | |
217 | pyroutes.register('notifications_update', '/_admin/notifications/%(notification_id)s/update', ['notification_id']); |
|
234 | pyroutes.register('notifications_update', '/_admin/notifications/%(notification_id)s/update', ['notification_id']); | |
218 | pyroutes.register('notifications_delete', '/_admin/notifications/%(notification_id)s/delete', ['notification_id']); |
|
235 | pyroutes.register('notifications_delete', '/_admin/notifications/%(notification_id)s/delete', ['notification_id']); | |
219 | pyroutes.register('my_account_notifications_test_channelstream', '/_admin/my_account/test_channelstream', []); |
|
236 | pyroutes.register('my_account_notifications_test_channelstream', '/_admin/my_account/test_channelstream', []); | |
220 | pyroutes.register('gists_show', '/_admin/gists', []); |
|
237 | pyroutes.register('gists_show', '/_admin/gists', []); | |
221 | pyroutes.register('gists_new', '/_admin/gists/new', []); |
|
238 | pyroutes.register('gists_new', '/_admin/gists/new', []); | |
222 | pyroutes.register('gists_create', '/_admin/gists/create', []); |
|
239 | pyroutes.register('gists_create', '/_admin/gists/create', []); | |
223 | pyroutes.register('gist_show', '/_admin/gists/%(gist_id)s', ['gist_id']); |
|
240 | pyroutes.register('gist_show', '/_admin/gists/%(gist_id)s', ['gist_id']); | |
224 | pyroutes.register('gist_delete', '/_admin/gists/%(gist_id)s/delete', ['gist_id']); |
|
241 | pyroutes.register('gist_delete', '/_admin/gists/%(gist_id)s/delete', ['gist_id']); | |
225 | pyroutes.register('gist_edit', '/_admin/gists/%(gist_id)s/edit', ['gist_id']); |
|
242 | pyroutes.register('gist_edit', '/_admin/gists/%(gist_id)s/edit', ['gist_id']); | |
226 | pyroutes.register('gist_edit_check_revision', '/_admin/gists/%(gist_id)s/edit/check_revision', ['gist_id']); |
|
243 | pyroutes.register('gist_edit_check_revision', '/_admin/gists/%(gist_id)s/edit/check_revision', ['gist_id']); | |
227 | pyroutes.register('gist_update', '/_admin/gists/%(gist_id)s/update', ['gist_id']); |
|
244 | pyroutes.register('gist_update', '/_admin/gists/%(gist_id)s/update', ['gist_id']); | |
228 | pyroutes.register('gist_show_rev', '/_admin/gists/%(gist_id)s/%(revision)s', ['gist_id', 'revision']); |
|
245 | pyroutes.register('gist_show_rev', '/_admin/gists/%(gist_id)s/%(revision)s', ['gist_id', 'revision']); | |
229 | pyroutes.register('gist_show_formatted', '/_admin/gists/%(gist_id)s/%(revision)s/%(format)s', ['gist_id', 'revision', 'format']); |
|
246 | pyroutes.register('gist_show_formatted', '/_admin/gists/%(gist_id)s/%(revision)s/%(format)s', ['gist_id', 'revision', 'format']); | |
230 | pyroutes.register('gist_show_formatted_path', '/_admin/gists/%(gist_id)s/%(revision)s/%(format)s/%(f_path)s', ['gist_id', 'revision', 'format', 'f_path']); |
|
247 | pyroutes.register('gist_show_formatted_path', '/_admin/gists/%(gist_id)s/%(revision)s/%(format)s/%(f_path)s', ['gist_id', 'revision', 'format', 'f_path']); | |
231 | pyroutes.register('debug_style_home', '/_admin/debug_style', []); |
|
248 | pyroutes.register('debug_style_home', '/_admin/debug_style', []); | |
232 | pyroutes.register('debug_style_template', '/_admin/debug_style/t/%(t_path)s', ['t_path']); |
|
249 | pyroutes.register('debug_style_template', '/_admin/debug_style/t/%(t_path)s', ['t_path']); | |
233 | pyroutes.register('apiv2', '/_admin/api', []); |
|
250 | pyroutes.register('apiv2', '/_admin/api', []); | |
234 | } |
|
251 | } |
@@ -1,37 +1,37 b'' | |||||
1 | ## -*- coding: utf-8 -*- |
|
1 | ## -*- coding: utf-8 -*- | |
2 | <%inherit file="/base/base.mako"/> |
|
2 | <%inherit file="/base/base.mako"/> | |
3 |
|
3 | |||
4 | <%def name="title()"> |
|
4 | <%def name="title()"> | |
5 | ${_('Add repository')} |
|
5 | ${_('Add repository')} | |
6 | %if c.rhodecode_name: |
|
6 | %if c.rhodecode_name: | |
7 | · ${h.branding(c.rhodecode_name)} |
|
7 | · ${h.branding(c.rhodecode_name)} | |
8 | %endif |
|
8 | %endif | |
9 | </%def> |
|
9 | </%def> | |
10 |
|
10 | |||
11 | <%def name="breadcrumbs_links()"> |
|
11 | <%def name="breadcrumbs_links()"> | |
12 | %if c.rhodecode_user.is_admin: |
|
12 | %if c.rhodecode_user.is_admin: | |
13 | ${h.link_to(_('Admin'),h.route_path('admin_home'))} |
|
13 | ${h.link_to(_('Admin'), h.route_path('admin_home'))} | |
14 | » |
|
14 | » | |
15 |
${h.link_to(_('Repositories'),h. |
|
15 | ${h.link_to(_('Repositories'), h.route_path('repos'))} | |
16 | %else: |
|
16 | %else: | |
17 | ${_('Admin')} |
|
17 | ${_('Admin')} | |
18 | » |
|
18 | » | |
19 | ${_('Repositories')} |
|
19 | ${_('Repositories')} | |
20 | %endif |
|
20 | %endif | |
21 | » |
|
21 | » | |
22 | ${_('Add Repository')} |
|
22 | ${_('Add Repository')} | |
23 | </%def> |
|
23 | </%def> | |
24 |
|
24 | |||
25 | <%def name="menu_bar_nav()"> |
|
25 | <%def name="menu_bar_nav()"> | |
26 | ${self.menu_items(active='admin')} |
|
26 | ${self.menu_items(active='admin')} | |
27 | </%def> |
|
27 | </%def> | |
28 |
|
28 | |||
29 | <%def name="main()"> |
|
29 | <%def name="main()"> | |
30 | <div class="box"> |
|
30 | <div class="box"> | |
31 | <!-- box / title --> |
|
31 | <!-- box / title --> | |
32 | <div class="title"> |
|
32 | <div class="title"> | |
33 | ${self.breadcrumbs()} |
|
33 | ${self.breadcrumbs()} | |
34 | </div> |
|
34 | </div> | |
35 | <%include file="repo_add_base.mako"/> |
|
35 | <%include file="repo_add_base.mako"/> | |
36 | </div> |
|
36 | </div> | |
37 | </%def> |
|
37 | </%def> |
@@ -1,159 +1,159 b'' | |||||
1 | ## -*- coding: utf-8 -*- |
|
1 | ## -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | ${h.secure_form(h.url('repos'))} |
|
3 | ${h.secure_form(h.route_path('repo_create'), method='POST', request=request)} | |
4 | <div class="form"> |
|
4 | <div class="form"> | |
5 | <!-- fields --> |
|
5 | <!-- fields --> | |
6 | <div class="fields"> |
|
6 | <div class="fields"> | |
7 | <div class="field"> |
|
7 | <div class="field"> | |
8 | <div class="label"> |
|
8 | <div class="label"> | |
9 | <label for="repo_name">${_('Name')}:</label> |
|
9 | <label for="repo_name">${_('Name')}:</label> | |
10 | </div> |
|
10 | </div> | |
11 | <div class="input"> |
|
11 | <div class="input"> | |
12 | ${h.text('repo_name', class_="medium")} |
|
12 | ${h.text('repo_name', class_="medium")} | |
13 | <div class="info-block"> |
|
13 | <div class="info-block"> | |
14 | <a id="remote_clone_toggle" href="#"><i class="icon-download-alt"></i> ${_('Import Existing Repository ?')}</a> |
|
14 | <a id="remote_clone_toggle" href="#"><i class="icon-download-alt"></i> ${_('Import Existing Repository ?')}</a> | |
15 | </div> |
|
15 | </div> | |
16 | %if not c.rhodecode_user.is_admin: |
|
16 | %if not c.rhodecode_user.is_admin: | |
17 | ${h.hidden('user_created',True)} |
|
17 | ${h.hidden('user_created',True)} | |
18 | %endif |
|
18 | %endif | |
19 | </div> |
|
19 | </div> | |
20 | </div> |
|
20 | </div> | |
21 | <div id="remote_clone" class="field" style="display: none;"> |
|
21 | <div id="remote_clone" class="field" style="display: none;"> | |
22 | <div class="label"> |
|
22 | <div class="label"> | |
23 | <label for="clone_uri">${_('Clone from')}:</label> |
|
23 | <label for="clone_uri">${_('Clone from')}:</label> | |
24 | </div> |
|
24 | </div> | |
25 | <div class="input"> |
|
25 | <div class="input"> | |
26 | ${h.text('clone_uri', class_="medium")} |
|
26 | ${h.text('clone_uri', class_="medium")} | |
27 | <span class="help-block"> |
|
27 | <span class="help-block"> | |
28 | <pre> |
|
28 | <pre> | |
29 | - The repository must be accessible over http:// or https:// |
|
29 | - The repository must be accessible over http:// or https:// | |
30 | - For Git projects it's recommended appending .git to the end of clone url. |
|
30 | - For Git projects it's recommended appending .git to the end of clone url. | |
31 | - Make sure to select proper repository type from the below selector before importing it. |
|
31 | - Make sure to select proper repository type from the below selector before importing it. | |
32 | - If your HTTP[S] repository is not publicly accessible, |
|
32 | - If your HTTP[S] repository is not publicly accessible, | |
33 | add authentication information to the URL: https://username:password@server.company.com/repo-name. |
|
33 | add authentication information to the URL: https://username:password@server.company.com/repo-name. | |
34 | - The Git LFS/Mercurial Largefiles objects will not be imported. |
|
34 | - The Git LFS/Mercurial Largefiles objects will not be imported. | |
35 | - For very large repositories, it's recommended to manually copy them into the |
|
35 | - For very large repositories, it's recommended to manually copy them into the | |
36 | RhodeCode <a href="${h.url('admin_settings_vcs', anchor='vcs-storage-options')}">storage location</a> and run <a href="${h.url('admin_settings_mapping')}">Remap and Rescan</a>. |
|
36 | RhodeCode <a href="${h.url('admin_settings_vcs', anchor='vcs-storage-options')}">storage location</a> and run <a href="${h.url('admin_settings_mapping')}">Remap and Rescan</a>. | |
37 | </pre> |
|
37 | </pre> | |
38 | </span> |
|
38 | </span> | |
39 | </div> |
|
39 | </div> | |
40 | </div> |
|
40 | </div> | |
41 | <div class="field"> |
|
41 | <div class="field"> | |
42 | <div class="label"> |
|
42 | <div class="label"> | |
43 | <label for="repo_description">${_('Description')}:</label> |
|
43 | <label for="repo_description">${_('Description')}:</label> | |
44 | </div> |
|
44 | </div> | |
45 | <div class="textarea editor"> |
|
45 | <div class="textarea editor"> | |
46 | ${h.textarea('repo_description')} |
|
46 | ${h.textarea('repo_description')} | |
47 | <span class="help-block">${_('Keep it short and to the point. Use a README file for longer descriptions.')}</span> |
|
47 | <span class="help-block">${_('Keep it short and to the point. Use a README file for longer descriptions.')}</span> | |
48 | </div> |
|
48 | </div> | |
49 | </div> |
|
49 | </div> | |
50 | <div class="field"> |
|
50 | <div class="field"> | |
51 | <div class="label"> |
|
51 | <div class="label"> | |
52 | <label for="repo_group">${_('Repository Group')}:</label> |
|
52 | <label for="repo_group">${_('Repository Group')}:</label> | |
53 | </div> |
|
53 | </div> | |
54 | <div class="select"> |
|
54 | <div class="select"> | |
55 | ${h.select('repo_group',request.GET.get('parent_group'),c.repo_groups,class_="medium")} |
|
55 | ${h.select('repo_group',request.GET.get('parent_group'),c.repo_groups,class_="medium")} | |
56 | % if c.personal_repo_group: |
|
56 | % if c.personal_repo_group: | |
57 | <a class="btn" href="#" id="select_my_group" data-personal-group-id="${c.personal_repo_group.group_id}"> |
|
57 | <a class="btn" href="#" id="select_my_group" data-personal-group-id="${c.personal_repo_group.group_id}"> | |
58 | ${_('Select my personal group (%(repo_group_name)s)') % {'repo_group_name': c.personal_repo_group.group_name}} |
|
58 | ${_('Select my personal group (%(repo_group_name)s)') % {'repo_group_name': c.personal_repo_group.group_name}} | |
59 | </a> |
|
59 | </a> | |
60 | % endif |
|
60 | % endif | |
61 | <span class="help-block">${_('Optionally select a group to put this repository into.')}</span> |
|
61 | <span class="help-block">${_('Optionally select a group to put this repository into.')}</span> | |
62 | </div> |
|
62 | </div> | |
63 | </div> |
|
63 | </div> | |
64 | <div id="copy_perms" class="field"> |
|
64 | <div id="copy_perms" class="field"> | |
65 | <div class="label label-checkbox"> |
|
65 | <div class="label label-checkbox"> | |
66 | <label for="repo_copy_permissions">${_('Copy Parent Group Permissions')}:</label> |
|
66 | <label for="repo_copy_permissions">${_('Copy Parent Group Permissions')}:</label> | |
67 | </div> |
|
67 | </div> | |
68 | <div class="checkboxes"> |
|
68 | <div class="checkboxes"> | |
69 | ${h.checkbox('repo_copy_permissions', value="True", checked="checked")} |
|
69 | ${h.checkbox('repo_copy_permissions', value="True", checked="checked")} | |
70 | <span class="help-block">${_('Copy permission set from the parent repository group.')}</span> |
|
70 | <span class="help-block">${_('Copy permission set from the parent repository group.')}</span> | |
71 | </div> |
|
71 | </div> | |
72 | </div> |
|
72 | </div> | |
73 | <div class="field"> |
|
73 | <div class="field"> | |
74 | <div class="label"> |
|
74 | <div class="label"> | |
75 | <label for="repo_type">${_('Type')}:</label> |
|
75 | <label for="repo_type">${_('Type')}:</label> | |
76 | </div> |
|
76 | </div> | |
77 | <div class="select"> |
|
77 | <div class="select"> | |
78 | ${h.select('repo_type','hg',c.backends)} |
|
78 | ${h.select('repo_type','hg',c.backends)} | |
79 | <span class="help-block">${_('Set the type of repository to create.')}</span> |
|
79 | <span class="help-block">${_('Set the type of repository to create.')}</span> | |
80 | </div> |
|
80 | </div> | |
81 | </div> |
|
81 | </div> | |
82 | <div class="field"> |
|
82 | <div class="field"> | |
83 | <div class="label"> |
|
83 | <div class="label"> | |
84 | <label for="repo_landing_rev">${_('Landing commit')}:</label> |
|
84 | <label for="repo_landing_rev">${_('Landing commit')}:</label> | |
85 | </div> |
|
85 | </div> | |
86 | <div class="select"> |
|
86 | <div class="select"> | |
87 | ${h.select('repo_landing_rev','',c.landing_revs,class_="medium")} |
|
87 | ${h.select('repo_landing_rev','',c.landing_revs,class_="medium")} | |
88 | <span class="help-block">${_('The default commit for file pages, downloads, full text search index, and README generation.')}</span> |
|
88 | <span class="help-block">${_('The default commit for file pages, downloads, full text search index, and README generation.')}</span> | |
89 | </div> |
|
89 | </div> | |
90 | </div> |
|
90 | </div> | |
91 | <div class="field"> |
|
91 | <div class="field"> | |
92 | <div class="label label-checkbox"> |
|
92 | <div class="label label-checkbox"> | |
93 | <label for="repo_private">${_('Private Repository')}:</label> |
|
93 | <label for="repo_private">${_('Private Repository')}:</label> | |
94 | </div> |
|
94 | </div> | |
95 | <div class="checkboxes"> |
|
95 | <div class="checkboxes"> | |
96 | ${h.checkbox('repo_private',value="True")} |
|
96 | ${h.checkbox('repo_private',value="True")} | |
97 | <span class="help-block">${_('Private repositories are only visible to people explicitly added as collaborators.')}</span> |
|
97 | <span class="help-block">${_('Private repositories are only visible to people explicitly added as collaborators.')}</span> | |
98 | </div> |
|
98 | </div> | |
99 | </div> |
|
99 | </div> | |
100 | <div class="buttons"> |
|
100 | <div class="buttons"> | |
101 | ${h.submit('save',_('Save'),class_="btn")} |
|
101 | ${h.submit('save',_('Save'),class_="btn")} | |
102 | </div> |
|
102 | </div> | |
103 | </div> |
|
103 | </div> | |
104 | </div> |
|
104 | </div> | |
105 | <script> |
|
105 | <script> | |
106 | $(document).ready(function(){ |
|
106 | $(document).ready(function(){ | |
107 | var setCopyPermsOption = function(group_val){ |
|
107 | var setCopyPermsOption = function(group_val){ | |
108 | if(group_val != "-1"){ |
|
108 | if(group_val != "-1"){ | |
109 | $('#copy_perms').show() |
|
109 | $('#copy_perms').show() | |
110 | } |
|
110 | } | |
111 | else{ |
|
111 | else{ | |
112 | $('#copy_perms').hide(); |
|
112 | $('#copy_perms').hide(); | |
113 | } |
|
113 | } | |
114 | }; |
|
114 | }; | |
115 |
|
115 | |||
116 | $('#remote_clone_toggle').on('click', function(e){ |
|
116 | $('#remote_clone_toggle').on('click', function(e){ | |
117 | $('#remote_clone').show(); |
|
117 | $('#remote_clone').show(); | |
118 | e.preventDefault(); |
|
118 | e.preventDefault(); | |
119 | }); |
|
119 | }); | |
120 |
|
120 | |||
121 | if($('#remote_clone input').hasClass('error')){ |
|
121 | if($('#remote_clone input').hasClass('error')){ | |
122 | $('#remote_clone').show(); |
|
122 | $('#remote_clone').show(); | |
123 | } |
|
123 | } | |
124 | if($('#remote_clone input').val()){ |
|
124 | if($('#remote_clone input').val()){ | |
125 | $('#remote_clone').show(); |
|
125 | $('#remote_clone').show(); | |
126 | } |
|
126 | } | |
127 |
|
127 | |||
128 | $("#repo_group").select2({ |
|
128 | $("#repo_group").select2({ | |
129 | 'containerCssClass': "drop-menu", |
|
129 | 'containerCssClass': "drop-menu", | |
130 | 'dropdownCssClass': "drop-menu-dropdown", |
|
130 | 'dropdownCssClass': "drop-menu-dropdown", | |
131 | 'dropdownAutoWidth': true, |
|
131 | 'dropdownAutoWidth': true, | |
132 | 'width': "resolve" |
|
132 | 'width': "resolve" | |
133 | }); |
|
133 | }); | |
134 |
|
134 | |||
135 | setCopyPermsOption($('#repo_group').val()); |
|
135 | setCopyPermsOption($('#repo_group').val()); | |
136 | $("#repo_group").on("change", function(e) { |
|
136 | $("#repo_group").on("change", function(e) { | |
137 | setCopyPermsOption(e.val) |
|
137 | setCopyPermsOption(e.val) | |
138 | }); |
|
138 | }); | |
139 |
|
139 | |||
140 | $("#repo_type").select2({ |
|
140 | $("#repo_type").select2({ | |
141 | 'containerCssClass': "drop-menu", |
|
141 | 'containerCssClass': "drop-menu", | |
142 | 'dropdownCssClass': "drop-menu-dropdown", |
|
142 | 'dropdownCssClass': "drop-menu-dropdown", | |
143 | 'minimumResultsForSearch': -1, |
|
143 | 'minimumResultsForSearch': -1, | |
144 | }); |
|
144 | }); | |
145 | $("#repo_landing_rev").select2({ |
|
145 | $("#repo_landing_rev").select2({ | |
146 | 'containerCssClass': "drop-menu", |
|
146 | 'containerCssClass': "drop-menu", | |
147 | 'dropdownCssClass': "drop-menu-dropdown", |
|
147 | 'dropdownCssClass': "drop-menu-dropdown", | |
148 | 'minimumResultsForSearch': -1, |
|
148 | 'minimumResultsForSearch': -1, | |
149 | }); |
|
149 | }); | |
150 | $('#repo_name').focus(); |
|
150 | $('#repo_name').focus(); | |
151 |
|
151 | |||
152 | $('#select_my_group').on('click', function(e){ |
|
152 | $('#select_my_group').on('click', function(e){ | |
153 | e.preventDefault(); |
|
153 | e.preventDefault(); | |
154 | $("#repo_group").val($(this).data('personalGroupId')).trigger("change"); |
|
154 | $("#repo_group").val($(this).data('personalGroupId')).trigger("change"); | |
155 | }) |
|
155 | }) | |
156 |
|
156 | |||
157 | }) |
|
157 | }) | |
158 | </script> |
|
158 | </script> | |
159 | ${h.end_form()} |
|
159 | ${h.end_form()} |
@@ -1,99 +1,99 b'' | |||||
1 | ## -*- coding: utf-8 -*- |
|
1 | ## -*- coding: utf-8 -*- | |
2 | ## |
|
2 | ## | |
3 | ## See also repo_settings.html |
|
3 | ## See also repo_settings.html | |
4 | ## |
|
4 | ## | |
5 | <%inherit file="/base/base.mako"/> |
|
5 | <%inherit file="/base/base.mako"/> | |
6 |
|
6 | |||
7 | <%def name="title()"> |
|
7 | <%def name="title()"> | |
8 | ${_('%s repository settings') % c.repo_info.repo_name} |
|
8 | ${_('%s repository settings') % c.repo_info.repo_name} | |
9 | %if c.rhodecode_name: |
|
9 | %if c.rhodecode_name: | |
10 | · ${h.branding(c.rhodecode_name)} |
|
10 | · ${h.branding(c.rhodecode_name)} | |
11 | %endif |
|
11 | %endif | |
12 | </%def> |
|
12 | </%def> | |
13 |
|
13 | |||
14 | <%def name="breadcrumbs_links()"> |
|
14 | <%def name="breadcrumbs_links()"> | |
15 | ${_('Settings')} |
|
15 | ${_('Settings')} | |
16 | </%def> |
|
16 | </%def> | |
17 |
|
17 | |||
18 | <%def name="menu_bar_nav()"> |
|
18 | <%def name="menu_bar_nav()"> | |
19 | ${self.menu_items(active='repositories')} |
|
19 | ${self.menu_items(active='repositories')} | |
20 | </%def> |
|
20 | </%def> | |
21 |
|
21 | |||
22 | <%def name="menu_bar_subnav()"> |
|
22 | <%def name="menu_bar_subnav()"> | |
23 | ${self.repo_menu(active='options')} |
|
23 | ${self.repo_menu(active='options')} | |
24 | </%def> |
|
24 | </%def> | |
25 |
|
25 | |||
26 | <%def name="main_content()"> |
|
26 | <%def name="main_content()"> | |
27 | % if hasattr(c, 'repo_edit_template'): |
|
27 | % if hasattr(c, 'repo_edit_template'): | |
28 | <%include file="${c.repo_edit_template}"/> |
|
28 | <%include file="${c.repo_edit_template}"/> | |
29 | % else: |
|
29 | % else: | |
30 | <%include file="/admin/repos/repo_edit_${c.active}.mako"/> |
|
30 | <%include file="/admin/repos/repo_edit_${c.active}.mako"/> | |
31 | % endif |
|
31 | % endif | |
32 | </%def> |
|
32 | </%def> | |
33 |
|
33 | |||
34 |
|
34 | |||
35 | <%def name="main()"> |
|
35 | <%def name="main()"> | |
36 | <div class="box"> |
|
36 | <div class="box"> | |
37 | <div class="title"> |
|
37 | <div class="title"> | |
38 | ${self.repo_page_title(c.rhodecode_db_repo)} |
|
38 | ${self.repo_page_title(c.rhodecode_db_repo)} | |
39 | ${self.breadcrumbs()} |
|
39 | ${self.breadcrumbs()} | |
40 | </div> |
|
40 | </div> | |
41 |
|
41 | |||
42 | <div class="sidebar-col-wrapper scw-small"> |
|
42 | <div class="sidebar-col-wrapper scw-small"> | |
43 | <div class="sidebar"> |
|
43 | <div class="sidebar"> | |
44 | <ul class="nav nav-pills nav-stacked"> |
|
44 | <ul class="nav nav-pills nav-stacked"> | |
45 | <li class="${'active' if c.active=='settings' else ''}"> |
|
45 | <li class="${'active' if c.active=='settings' else ''}"> | |
46 | <a href="${h.route_path('edit_repo', repo_name=c.repo_name)}">${_('Settings')}</a> |
|
46 | <a href="${h.route_path('edit_repo', repo_name=c.repo_name)}">${_('Settings')}</a> | |
47 | </li> |
|
47 | </li> | |
48 | <li class="${'active' if c.active=='permissions' else ''}"> |
|
48 | <li class="${'active' if c.active=='permissions' else ''}"> | |
49 | <a href="${h.route_path('edit_repo_perms', repo_name=c.repo_name)}">${_('Permissions')}</a> |
|
49 | <a href="${h.route_path('edit_repo_perms', repo_name=c.repo_name)}">${_('Permissions')}</a> | |
50 | </li> |
|
50 | </li> | |
51 | <li class="${'active' if c.active=='advanced' else ''}"> |
|
51 | <li class="${'active' if c.active=='advanced' else ''}"> | |
52 | <a href="${h.route_path('edit_repo_advanced', repo_name=c.repo_name)}">${_('Advanced')}</a> |
|
52 | <a href="${h.route_path('edit_repo_advanced', repo_name=c.repo_name)}">${_('Advanced')}</a> | |
53 | </li> |
|
53 | </li> | |
54 | <li class="${'active' if c.active=='vcs' else ''}"> |
|
54 | <li class="${'active' if c.active=='vcs' else ''}"> | |
55 |
<a href="${h. |
|
55 | <a href="${h.route_path('edit_repo_vcs', repo_name=c.repo_name)}">${_('VCS')}</a> | |
56 | </li> |
|
56 | </li> | |
57 | <li class="${'active' if c.active=='fields' else ''}"> |
|
57 | <li class="${'active' if c.active=='fields' else ''}"> | |
58 |
<a href="${h. |
|
58 | <a href="${h.route_path('edit_repo_fields', repo_name=c.repo_name)}">${_('Extra Fields')}</a> | |
59 | </li> |
|
59 | </li> | |
60 | <li class="${'active' if c.active=='issuetracker' else ''}"> |
|
60 | <li class="${'active' if c.active=='issuetracker' else ''}"> | |
61 |
<a href="${h. |
|
61 | <a href="${h.route_path('edit_repo_issuetracker', repo_name=c.repo_name)}">${_('Issue Tracker')}</a> | |
62 | </li> |
|
62 | </li> | |
63 | <li class="${'active' if c.active=='caches' else ''}"> |
|
63 | <li class="${'active' if c.active=='caches' else ''}"> | |
64 | <a href="${h.route_path('edit_repo_caches', repo_name=c.repo_name)}">${_('Caches')}</a> |
|
64 | <a href="${h.route_path('edit_repo_caches', repo_name=c.repo_name)}">${_('Caches')}</a> | |
65 | </li> |
|
65 | </li> | |
66 | %if c.repo_info.repo_type != 'svn': |
|
66 | %if c.repo_info.repo_type != 'svn': | |
67 | <li class="${'active' if c.active=='remote' else ''}"> |
|
67 | <li class="${'active' if c.active=='remote' else ''}"> | |
68 |
<a href="${h. |
|
68 | <a href="${h.route_path('edit_repo_remote', repo_name=c.repo_name)}">${_('Remote')}</a> | |
69 | </li> |
|
69 | </li> | |
70 | %endif |
|
70 | %endif | |
71 | <li class="${'active' if c.active=='statistics' else ''}"> |
|
71 | <li class="${'active' if c.active=='statistics' else ''}"> | |
72 |
<a href="${h. |
|
72 | <a href="${h.route_path('edit_repo_statistics', repo_name=c.repo_name)}">${_('Statistics')}</a> | |
73 | </li> |
|
73 | </li> | |
74 | <li class="${'active' if c.active=='integrations' else ''}"> |
|
74 | <li class="${'active' if c.active=='integrations' else ''}"> | |
75 | <a href="${h.route_path('repo_integrations_home', repo_name=c.repo_name)}">${_('Integrations')}</a> |
|
75 | <a href="${h.route_path('repo_integrations_home', repo_name=c.repo_name)}">${_('Integrations')}</a> | |
76 | </li> |
|
76 | </li> | |
77 | %if c.repo_info.repo_type != 'svn': |
|
77 | %if c.repo_info.repo_type != 'svn': | |
78 | <li class="${'active' if c.active=='reviewers' else ''}"> |
|
78 | <li class="${'active' if c.active=='reviewers' else ''}"> | |
79 | <a href="${h.route_path('repo_reviewers', repo_name=c.repo_name)}">${_('Reviewer Rules')}</a> |
|
79 | <a href="${h.route_path('repo_reviewers', repo_name=c.repo_name)}">${_('Reviewer Rules')}</a> | |
80 | </li> |
|
80 | </li> | |
81 | %endif |
|
81 | %endif | |
82 | <li class="${'active' if c.active=='maintenance' else ''}"> |
|
82 | <li class="${'active' if c.active=='maintenance' else ''}"> | |
83 | <a href="${h.route_path('repo_maintenance', repo_name=c.repo_name)}">${_('Maintenance')}</a> |
|
83 | <a href="${h.route_path('edit_repo_maintenance', repo_name=c.repo_name)}">${_('Maintenance')}</a> | |
84 | </li> |
|
84 | </li> | |
85 | <li class="${'active' if c.active=='strip' else ''}"> |
|
85 | <li class="${'active' if c.active=='strip' else ''}"> | |
86 | <a href="${h.route_path('strip', repo_name=c.repo_name)}">${_('Strip')}</a> |
|
86 | <a href="${h.route_path('edit_repo_strip', repo_name=c.repo_name)}">${_('Strip')}</a> | |
87 | </li> |
|
87 | </li> | |
88 |
|
88 | |||
89 | </ul> |
|
89 | </ul> | |
90 | </div> |
|
90 | </div> | |
91 |
|
91 | |||
92 | <div class="main-content-full-width"> |
|
92 | <div class="main-content-full-width"> | |
93 | ${self.main_content()} |
|
93 | ${self.main_content()} | |
94 | </div> |
|
94 | </div> | |
95 |
|
95 | |||
96 | </div> |
|
96 | </div> | |
97 | </div> |
|
97 | </div> | |
98 |
|
98 | |||
99 | </%def> No newline at end of file |
|
99 | </%def> |
@@ -1,79 +1,79 b'' | |||||
1 | <div class="panel panel-default"> |
|
1 | <div class="panel panel-default"> | |
2 | <div class="panel-heading"> |
|
2 | <div class="panel-heading"> | |
3 | <h3 class="panel-title">${_('Custom extra fields for this repository')}</h3> |
|
3 | <h3 class="panel-title">${_('Custom extra fields for this repository')}</h3> | |
4 | </div> |
|
4 | </div> | |
5 | <div class="panel-body"> |
|
5 | <div class="panel-body"> | |
6 | %if c.visual.repository_fields: |
|
6 | %if c.visual.repository_fields: | |
7 | %if c.repo_fields: |
|
7 | %if c.repo_fields: | |
8 | <div class="emails_wrap"> |
|
8 | <div class="emails_wrap"> | |
9 | <table class="rctable edit_fields"> |
|
9 | <table class="rctable edit_fields"> | |
10 | <th>${_('Label')}</th> |
|
10 | <th>${_('Label')}</th> | |
11 | <th>${_('Key')}</th> |
|
11 | <th>${_('Key')}</th> | |
12 | <th>${_('Type')}</th> |
|
12 | <th>${_('Type')}</th> | |
13 | <th>${_('Action')}</th> |
|
13 | <th>${_('Action')}</th> | |
14 |
|
14 | |||
15 | %for field in c.repo_fields: |
|
15 | %for field in c.repo_fields: | |
16 | <tr> |
|
16 | <tr> | |
17 | <td class="td-tags">${field.field_label}</td> |
|
17 | <td class="td-tags">${field.field_label}</td> | |
18 | <td class="td-hash">${field.field_key}</td> |
|
18 | <td class="td-hash">${field.field_key}</td> | |
19 | <td class="td-type">${field.field_type}</td> |
|
19 | <td class="td-type">${field.field_type}</td> | |
20 | <td class="td-action"> |
|
20 | <td class="td-action"> | |
21 |
${h.secure_form(h. |
|
21 | ${h.secure_form(h.route_path('edit_repo_fields_delete', repo_name=c.repo_info.repo_name, field_id=field.repo_field_id), method='POST', request=request)} | |
22 | ${h.hidden('del_repo_field',field.repo_field_id)} |
|
22 | ${h.hidden('del_repo_field',field.repo_field_id)} | |
23 | <button class="btn btn-link btn-danger" type="submit" |
|
23 | <button class="btn btn-link btn-danger" type="submit" | |
24 | onclick="return confirm('${_('Confirm to delete this field: %s') % field.field_key}');"> |
|
24 | onclick="return confirm('${_('Confirm to delete this field: %s') % field.field_key}');"> | |
25 | ${_('Delete')} |
|
25 | ${_('Delete')} | |
26 | </button> |
|
26 | </button> | |
27 | ${h.end_form()} |
|
27 | ${h.end_form()} | |
28 | </td> |
|
28 | </td> | |
29 | </tr> |
|
29 | </tr> | |
30 | %endfor |
|
30 | %endfor | |
31 | </table> |
|
31 | </table> | |
32 | </div> |
|
32 | </div> | |
33 | %endif |
|
33 | %endif | |
34 |
${h.secure_form(h. |
|
34 | ${h.secure_form(h.route_path('edit_repo_fields_create', repo_name=c.repo_name), method='POST', request=request)} | |
35 | <div class="form"> |
|
35 | <div class="form"> | |
36 | <!-- fields --> |
|
36 | <!-- fields --> | |
37 | <div class="fields"> |
|
37 | <div class="fields"> | |
38 | <div class="field"> |
|
38 | <div class="field"> | |
39 | <div class="label"> |
|
39 | <div class="label"> | |
40 | <label for="new_field_key">${_('New Field Key')}:</label> |
|
40 | <label for="new_field_key">${_('New Field Key')}:</label> | |
41 | </div> |
|
41 | </div> | |
42 | <div class="input"> |
|
42 | <div class="input"> | |
43 | ${h.text('new_field_key', class_='medium')} |
|
43 | ${h.text('new_field_key', class_='medium')} | |
44 | </div> |
|
44 | </div> | |
45 | </div> |
|
45 | </div> | |
46 | <div class="field"> |
|
46 | <div class="field"> | |
47 | <div class="label"> |
|
47 | <div class="label"> | |
48 | <label for="new_field_label">${_('New Field Label')}:</label> |
|
48 | <label for="new_field_label">${_('New Field Label')}:</label> | |
49 | </div> |
|
49 | </div> | |
50 | <div class="input"> |
|
50 | <div class="input"> | |
51 | ${h.text('new_field_label', class_='medium', placeholder=_('Enter short label'))} |
|
51 | ${h.text('new_field_label', class_='medium', placeholder=_('Enter short label'))} | |
52 | </div> |
|
52 | </div> | |
53 | </div> |
|
53 | </div> | |
54 |
|
54 | |||
55 | <div class="field"> |
|
55 | <div class="field"> | |
56 | <div class="label"> |
|
56 | <div class="label"> | |
57 | <label for="new_field_desc">${_('New Field Description')}:</label> |
|
57 | <label for="new_field_desc">${_('New Field Description')}:</label> | |
58 | </div> |
|
58 | </div> | |
59 | <div class="input"> |
|
59 | <div class="input"> | |
60 | ${h.text('new_field_desc', class_='medium', placeholder=_('Enter a full description for the field'))} |
|
60 | ${h.text('new_field_desc', class_='medium', placeholder=_('Enter a full description for the field'))} | |
61 | </div> |
|
61 | </div> | |
62 | </div> |
|
62 | </div> | |
63 |
|
63 | |||
64 | <div class="buttons"> |
|
64 | <div class="buttons"> | |
65 | ${h.submit('save',_('Add'),class_="btn")} |
|
65 | ${h.submit('save',_('Add'),class_="btn")} | |
66 | ${h.reset('reset',_('Reset'),class_="btn")} |
|
66 | ${h.reset('reset',_('Reset'),class_="btn")} | |
67 | </div> |
|
67 | </div> | |
68 | </div> |
|
68 | </div> | |
69 | </div> |
|
69 | </div> | |
70 | ${h.end_form()} |
|
70 | ${h.end_form()} | |
71 | %else: |
|
71 | %else: | |
72 | <h2> |
|
72 | <h2> | |
73 | ${_('Extra fields are disabled. You can enable them from the Admin/Settings/Visual page.')} |
|
73 | ${_('Extra fields are disabled. You can enable them from the Admin/Settings/Visual page.')} | |
74 | </h2> |
|
74 | </h2> | |
75 | %endif |
|
75 | %endif | |
76 | </div> |
|
76 | </div> | |
77 | </div> |
|
77 | </div> | |
78 |
|
78 | |||
79 |
|
79 |
@@ -1,109 +1,109 b'' | |||||
1 | <%namespace name="its" file="/base/issue_tracker_settings.mako"/> |
|
1 | <%namespace name="its" file="/base/issue_tracker_settings.mako"/> | |
2 |
|
2 | |||
3 | <div id="repo_issue_tracker" class="${'inherited' if c.settings_model.inherit_global_settings else ''}"> |
|
3 | <div id="repo_issue_tracker" class="${'inherited' if c.settings_model.inherit_global_settings else ''}"> | |
4 |
${h.secure_form(h. |
|
4 | ${h.secure_form(h.route_path('edit_repo_issuetracker_update', repo_name=c.repo_name), id="inherit-form", method='POST', request=request)} | |
5 | <div class="panel panel-default panel-body"> |
|
5 | <div class="panel panel-default panel-body"> | |
6 | <div class="fields"> |
|
6 | <div class="fields"> | |
7 | <div class="field"> |
|
7 | <div class="field"> | |
8 | <div class="label label-checkbox"> |
|
8 | <div class="label label-checkbox"> | |
9 | <label for="inherit_default_permissions">${_('Inherit from global settings')}:</label> |
|
9 | <label for="inherit_default_permissions">${_('Inherit from global settings')}:</label> | |
10 | </div> |
|
10 | </div> | |
11 | <div class="checkboxes"> |
|
11 | <div class="checkboxes"> | |
12 | ${h.checkbox('inherit_global_issuetracker', value='inherited', checked=c.settings_model.inherit_global_settings)} |
|
12 | ${h.checkbox('inherit_global_issuetracker', value='inherited', checked=c.settings_model.inherit_global_settings)} | |
13 | <span class="help-block"> |
|
13 | <span class="help-block"> | |
14 | ${h.literal(_('Select to inherit global patterns for issue tracker.'))} |
|
14 | ${h.literal(_('Select to inherit global patterns for issue tracker.'))} | |
15 | </span> |
|
15 | </span> | |
16 | </div> |
|
16 | </div> | |
17 | </div> |
|
17 | </div> | |
18 | </div> |
|
18 | </div> | |
19 | </div> |
|
19 | </div> | |
20 |
|
20 | |||
21 | <div id="inherit_overlay"> |
|
21 | <div id="inherit_overlay"> | |
22 | <div class="panel panel-default"> |
|
22 | <div class="panel panel-default"> | |
23 | <div class="panel-heading"> |
|
23 | <div class="panel-heading"> | |
24 | <h3 class="panel-title">${_('Inherited Issue Tracker Patterns')}</h3> |
|
24 | <h3 class="panel-title">${_('Inherited Issue Tracker Patterns')}</h3> | |
25 | </div> |
|
25 | </div> | |
26 | <div class="panel-body"> |
|
26 | <div class="panel-body"> | |
27 | <table class="rctable issuetracker readonly"> |
|
27 | <table class="rctable issuetracker readonly"> | |
28 | <tr> |
|
28 | <tr> | |
29 | <th>${_('Description')}</th> |
|
29 | <th>${_('Description')}</th> | |
30 | <th>${_('Pattern')}</th> |
|
30 | <th>${_('Pattern')}</th> | |
31 | <th>${_('Url')}</th> |
|
31 | <th>${_('Url')}</th> | |
32 | <th>${_('Prefix')}</th> |
|
32 | <th>${_('Prefix')}</th> | |
33 | <th ></th> |
|
33 | <th ></th> | |
34 | </tr> |
|
34 | </tr> | |
35 | %for uid, entry in c.global_patterns.items(): |
|
35 | %for uid, entry in c.global_patterns.items(): | |
36 | <tr id="${uid}"> |
|
36 | <tr id="${uid}"> | |
37 | <td class="td-description issuetracker_desc"> |
|
37 | <td class="td-description issuetracker_desc"> | |
38 | <span class="entry"> |
|
38 | <span class="entry"> | |
39 | ${entry.desc} |
|
39 | ${entry.desc} | |
40 | </span> |
|
40 | </span> | |
41 | </td> |
|
41 | </td> | |
42 | <td class="td-regex issuetracker_pat"> |
|
42 | <td class="td-regex issuetracker_pat"> | |
43 | <span class="entry"> |
|
43 | <span class="entry"> | |
44 | ${entry.pat} |
|
44 | ${entry.pat} | |
45 | </span> |
|
45 | </span> | |
46 | </td> |
|
46 | </td> | |
47 | <td class="td-url issuetracker_url"> |
|
47 | <td class="td-url issuetracker_url"> | |
48 | <span class="entry"> |
|
48 | <span class="entry"> | |
49 | ${entry.url} |
|
49 | ${entry.url} | |
50 | </span> |
|
50 | </span> | |
51 | </td> |
|
51 | </td> | |
52 | <td class="td-prefix issuetracker_pref"> |
|
52 | <td class="td-prefix issuetracker_pref"> | |
53 | <span class="entry"> |
|
53 | <span class="entry"> | |
54 | ${entry.pref} |
|
54 | ${entry.pref} | |
55 | </span> |
|
55 | </span> | |
56 | </td> |
|
56 | </td> | |
57 | <td class="td-action"> |
|
57 | <td class="td-action"> | |
58 | </td> |
|
58 | </td> | |
59 | </tr> |
|
59 | </tr> | |
60 | %endfor |
|
60 | %endfor | |
61 |
|
61 | |||
62 | </table> |
|
62 | </table> | |
63 | </div> |
|
63 | </div> | |
64 | </div> |
|
64 | </div> | |
65 | </div> |
|
65 | </div> | |
66 |
|
66 | |||
67 | <div id="custom_overlay"> |
|
67 | <div id="custom_overlay"> | |
68 | <div class="panel panel-default"> |
|
68 | <div class="panel panel-default"> | |
69 | <div class="panel-heading"> |
|
69 | <div class="panel-heading"> | |
70 | <h3 class="panel-title">${_('Issue Tracker / Wiki Patterns')}</h3> |
|
70 | <h3 class="panel-title">${_('Issue Tracker / Wiki Patterns')}</h3> | |
71 | </div> |
|
71 | </div> | |
72 | <div class="panel-body"> |
|
72 | <div class="panel-body"> | |
73 | ${its.issue_tracker_settings_table( |
|
73 | ${its.issue_tracker_settings_table( | |
74 | patterns=c.repo_patterns.items(), |
|
74 | patterns=c.repo_patterns.items(), | |
75 |
form_url=h. |
|
75 | form_url=h.route_path('edit_repo_issuetracker', repo_name=c.repo_info.repo_name), | |
76 |
delete_url=h. |
|
76 | delete_url=h.route_path('edit_repo_issuetracker_delete', repo_name=c.repo_info.repo_name) | |
77 | )} |
|
77 | )} | |
78 | <div class="buttons"> |
|
78 | <div class="buttons"> | |
79 | <button type="submit" class="btn btn-primary save-inheritance" id="save">${_('Save')}</button> |
|
79 | <button type="submit" class="btn btn-primary save-inheritance" id="save">${_('Save')}</button> | |
80 | <button type="reset" class="btn reset-inheritance">${_('Reset')}</button> |
|
80 | <button type="reset" class="btn reset-inheritance">${_('Reset')}</button> | |
81 | </div> |
|
81 | </div> | |
82 | </div> |
|
82 | </div> | |
83 | </div> |
|
83 | </div> | |
84 | </div> |
|
84 | </div> | |
85 |
|
85 | |||
86 |
|
86 | |||
87 | ${h.end_form()} |
|
87 | ${h.end_form()} | |
88 |
|
88 | |||
89 | <div class="panel panel-default"> |
|
89 | <div class="panel panel-default"> | |
90 | <div class="panel-heading"> |
|
90 | <div class="panel-heading"> | |
91 | <h3 class="panel-title">${_('Test Patterns')}</h3> |
|
91 | <h3 class="panel-title">${_('Test Patterns')}</h3> | |
92 | </div> |
|
92 | </div> | |
93 | <div class="panel-body"> |
|
93 | <div class="panel-body"> | |
94 | ${its.issue_tracker_new_row()} |
|
94 | ${its.issue_tracker_new_row()} | |
95 |
${its.issue_tracker_settings_test(test_url=h. |
|
95 | ${its.issue_tracker_settings_test(test_url=h.route_path('edit_repo_issuetracker_test', repo_name=c.repo_info.repo_name))} | |
96 | </div> |
|
96 | </div> | |
97 | </div> |
|
97 | </div> | |
98 |
|
98 | |||
99 | </div> |
|
99 | </div> | |
100 |
|
100 | |||
101 | <script> |
|
101 | <script> | |
102 | $('#inherit_global_issuetracker').on('change', function(e){ |
|
102 | $('#inherit_global_issuetracker').on('change', function(e){ | |
103 | $('#repo_issue_tracker').toggleClass('inherited',this.checked); |
|
103 | $('#repo_issue_tracker').toggleClass('inherited',this.checked); | |
104 | }); |
|
104 | }); | |
105 |
|
105 | |||
106 | $('.reset-inheritance').on('click', function(e){ |
|
106 | $('.reset-inheritance').on('click', function(e){ | |
107 | $('#inherit_global_issuetracker').prop('checked', false).change(); |
|
107 | $('#inherit_global_issuetracker').prop('checked', false).change(); | |
108 | }); |
|
108 | }); | |
109 | </script> |
|
109 | </script> |
@@ -1,40 +1,40 b'' | |||||
1 | <div class="panel panel-default"> |
|
1 | <div class="panel panel-default"> | |
2 | <div class="panel-heading"> |
|
2 | <div class="panel-heading"> | |
3 | <h3 class="panel-title">${_('Remote url')}</h3> |
|
3 | <h3 class="panel-title">${_('Remote url')}</h3> | |
4 | </div> |
|
4 | </div> | |
5 | <div class="panel-body"> |
|
5 | <div class="panel-body"> | |
6 |
|
6 | |||
7 | <h4>${_('Manually pull changes from external repository.')}</h4> |
|
7 | <h4>${_('Manually pull changes from external repository.')}</h4> | |
8 |
|
8 | |||
9 | %if c.repo_info.clone_uri: |
|
9 | %if c.repo_info.clone_uri: | |
10 |
|
10 | |||
11 | ${_('Remote mirror url')}: |
|
11 | ${_('Remote mirror url')}: | |
12 | <a href="${c.repo_info.clone_uri}">${c.repo_info.clone_uri_hidden}</a> |
|
12 | <a href="${c.repo_info.clone_uri}">${c.repo_info.clone_uri_hidden}</a> | |
13 |
|
13 | |||
14 | <p> |
|
14 | <p> | |
15 | ${_('Pull can be automated by such api call. Can be called periodically in crontab etc.')} |
|
15 | ${_('Pull can be automated by such api call. Can be called periodically in crontab etc.')} | |
16 | <br/> |
|
16 | <br/> | |
17 | <code> |
|
17 | <code> | |
18 | ${h.api_call_example(method='pull', args={"repoid": c.repo_info.repo_name})} |
|
18 | ${h.api_call_example(method='pull', args={"repoid": c.repo_info.repo_name})} | |
19 | </code> |
|
19 | </code> | |
20 | </p> |
|
20 | </p> | |
21 |
|
21 | |||
22 |
${h.secure_form(h. |
|
22 | ${h.secure_form(h.route_path('edit_repo_remote_pull', repo_name=c.repo_name), method='POST', request=request)} | |
23 | <div class="form"> |
|
23 | <div class="form"> | |
24 | <div class="fields"> |
|
24 | <div class="fields"> | |
25 | ${h.submit('remote_pull_%s' % c.repo_info.repo_name,_('Pull changes from remote location'),class_="btn btn-small",onclick="return confirm('"+_('Confirm to pull changes from remote side')+"');")} |
|
25 | ${h.submit('remote_pull_%s' % c.repo_info.repo_name,_('Pull changes from remote location'),class_="btn btn-small",onclick="return confirm('"+_('Confirm to pull changes from remote side')+"');")} | |
26 | </div> |
|
26 | </div> | |
27 | </div> |
|
27 | </div> | |
28 | ${h.end_form()} |
|
28 | ${h.end_form()} | |
29 | %else: |
|
29 | %else: | |
30 |
|
30 | |||
31 | ${_('This repository does not have any remote mirror url set.')} |
|
31 | ${_('This repository does not have any remote mirror url set.')} | |
32 | <a href="${h.route_path('edit_repo', repo_name=c.repo_info.repo_name)}">${_('Set remote url.')}</a> |
|
32 | <a href="${h.route_path('edit_repo', repo_name=c.repo_info.repo_name)}">${_('Set remote url.')}</a> | |
33 | <br/> |
|
33 | <br/> | |
34 | <br/> |
|
34 | <br/> | |
35 | <button class="btn disabled" type="submit" disabled="disabled"> |
|
35 | <button class="btn disabled" type="submit" disabled="disabled"> | |
36 | ${_('Pull changes from remote location')} |
|
36 | ${_('Pull changes from remote location')} | |
37 | </button> |
|
37 | </button> | |
38 | %endif |
|
38 | %endif | |
39 | </div> |
|
39 | </div> | |
40 | </div> |
|
40 | </div> |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
General Comments 0
You need to be logged in to leave comments.
Login now