Show More
@@ -0,0 +1,128 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | ||
|
3 | # Copyright (C) 2016-2016 RhodeCode GmbH | |
|
4 | # | |
|
5 | # This program is free software: you can redistribute it and/or modify | |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
|
7 | # (only), as published by the Free Software Foundation. | |
|
8 | # | |
|
9 | # This program is distributed in the hope that it will be useful, | |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
|
12 | # GNU General Public License for more details. | |
|
13 | # | |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
|
16 | # | |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
|
20 | ||
|
21 | import colander | |
|
22 | import pytest | |
|
23 | ||
|
24 | from rhodecode.model.validation_schema import types | |
|
25 | from rhodecode.model.validation_schema.schemas import repo_schema | |
|
26 | ||
|
27 | ||
|
28 | class TestRepoSchema(object): | |
|
29 | ||
|
30 | #TODO: | |
|
31 | # test nested groups | |
|
32 | ||
|
33 | @pytest.mark.parametrize('given, expected', [ | |
|
34 | ('my repo', 'my-repo'), | |
|
35 | (' hello world mike ', 'hello-world-mike'), | |
|
36 | ||
|
37 | ('//group1/group2//', 'group1/group2'), | |
|
38 | ('//group1///group2//', 'group1/group2'), | |
|
39 | ('///group1/group2///group3', 'group1/group2/group3'), | |
|
40 | ('word g1/group2///group3', 'word-g1/group2/group3'), | |
|
41 | ||
|
42 | ('grou p1/gro;,,##up2//.../group3', 'grou-p1/group2/group3'), | |
|
43 | ||
|
44 | ('group,,,/,,,/1/2/3', 'group/1/2/3'), | |
|
45 | ('grou[]p1/gro;up2///gro up3', 'group1/group2/gro-up3'), | |
|
46 | (u'grou[]p1/gro;up2///gro up3/Δ Δ', u'group1/group2/gro-up3/Δ Δ'), | |
|
47 | ]) | |
|
48 | def test_deserialize_repo_name(self, app, user_admin, given, expected): | |
|
49 | ||
|
50 | schema = repo_schema.RepoSchema().bind() | |
|
51 | assert expected == schema.get('repo_name').deserialize(given) | |
|
52 | ||
|
53 | def test_deserialize(self, app, user_admin): | |
|
54 | schema = repo_schema.RepoSchema().bind( | |
|
55 | repo_type_options=['hg'], | |
|
56 | user=user_admin | |
|
57 | ) | |
|
58 | ||
|
59 | schema_data = schema.deserialize(dict( | |
|
60 | repo_name='dupa', | |
|
61 | repo_type='hg', | |
|
62 | repo_owner=user_admin.username | |
|
63 | )) | |
|
64 | ||
|
65 | assert schema_data['repo_name'] == 'dupa' | |
|
66 | assert schema_data['repo_group'] == { | |
|
67 | 'repo_group_id': None, | |
|
68 | 'repo_group_name': types.RootLocation, | |
|
69 | 'repo_name_without_group': 'dupa'} | |
|
70 | ||
|
71 | @pytest.mark.parametrize('given, err_key, expected_exc', [ | |
|
72 | ('xxx/dupa','repo_group', 'Repository group `xxx` does not exist'), | |
|
73 | ('', 'repo_name', 'Name must start with a letter or number. Got ``'), | |
|
74 | ]) | |
|
75 | def test_deserialize_with_bad_group_name( | |
|
76 | self, app, user_admin, given, err_key, expected_exc): | |
|
77 | ||
|
78 | schema = repo_schema.RepoSchema().bind( | |
|
79 | repo_type_options=['hg'], | |
|
80 | user=user_admin | |
|
81 | ) | |
|
82 | ||
|
83 | with pytest.raises(colander.Invalid) as excinfo: | |
|
84 | schema.deserialize(dict( | |
|
85 | repo_name=given, | |
|
86 | repo_type='hg', | |
|
87 | repo_owner=user_admin.username | |
|
88 | )) | |
|
89 | ||
|
90 | assert excinfo.value.asdict()[err_key] == expected_exc | |
|
91 | ||
|
92 | def test_deserialize_with_group_name(self, app, user_admin, test_repo_group): | |
|
93 | schema = repo_schema.RepoSchema().bind( | |
|
94 | repo_type_options=['hg'], | |
|
95 | user=user_admin | |
|
96 | ) | |
|
97 | ||
|
98 | full_name = test_repo_group.group_name + '/dupa' | |
|
99 | schema_data = schema.deserialize(dict( | |
|
100 | repo_name=full_name, | |
|
101 | repo_type='hg', | |
|
102 | repo_owner=user_admin.username | |
|
103 | )) | |
|
104 | ||
|
105 | assert schema_data['repo_name'] == full_name | |
|
106 | assert schema_data['repo_group'] == { | |
|
107 | 'repo_group_id': test_repo_group.group_id, | |
|
108 | 'repo_group_name': test_repo_group.group_name, | |
|
109 | 'repo_name_without_group': 'dupa'} | |
|
110 | ||
|
111 | def test_deserialize_with_group_name_regular_user_no_perms( | |
|
112 | self, app, user_regular, test_repo_group): | |
|
113 | schema = repo_schema.RepoSchema().bind( | |
|
114 | repo_type_options=['hg'], | |
|
115 | user=user_regular | |
|
116 | ) | |
|
117 | ||
|
118 | full_name = test_repo_group.group_name + '/dupa' | |
|
119 | with pytest.raises(colander.Invalid) as excinfo: | |
|
120 | schema.deserialize(dict( | |
|
121 | repo_name=full_name, | |
|
122 | repo_type='hg', | |
|
123 | repo_owner=user_regular.username | |
|
124 | )) | |
|
125 | ||
|
126 | expected = 'Repository group `{}` does not exist'.format( | |
|
127 | test_repo_group.group_name) | |
|
128 | assert excinfo.value.asdict()['repo_group'] == expected |
@@ -1,270 +1,350 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2016 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import json |
|
22 | 22 | |
|
23 | 23 | import mock |
|
24 | 24 | import pytest |
|
25 | 25 | |
|
26 | from rhodecode.lib.utils2 import safe_unicode | |
|
26 | 27 | from rhodecode.lib.vcs import settings |
|
28 | from rhodecode.model.meta import Session | |
|
27 | 29 | from rhodecode.model.repo import RepoModel |
|
30 | from rhodecode.model.user import UserModel | |
|
28 | 31 | from rhodecode.tests import TEST_USER_ADMIN_LOGIN |
|
29 | 32 | from rhodecode.api.tests.utils import ( |
|
30 | 33 | build_data, api_call, assert_ok, assert_error, crash) |
|
31 | 34 | from rhodecode.tests.fixture import Fixture |
|
32 | 35 | |
|
33 | 36 | |
|
34 | 37 | fixture = Fixture() |
|
35 | 38 | |
|
36 | 39 | |
|
37 | 40 | @pytest.mark.usefixtures("testuser_api", "app") |
|
38 | 41 | class TestCreateRepo(object): |
|
39 | def test_api_create_repo(self, backend): | |
|
40 | repo_name = 'api-repo-1' | |
|
42 | ||
|
43 | @pytest.mark.parametrize('given, expected_name, expected_exc', [ | |
|
44 | ('api repo-1', 'api-repo-1', False), | |
|
45 | ('api-repo 1-Δ Δ', 'api-repo-1-Δ Δ', False), | |
|
46 | (u'unicode-Δ Δ', u'unicode-Δ Δ', False), | |
|
47 | ('some repo v1.2', 'some-repo-v1.2', False), | |
|
48 | ('v2.0', 'v2.0', False), | |
|
49 | ]) | |
|
50 | def test_api_create_repo(self, backend, given, expected_name, expected_exc): | |
|
51 | ||
|
41 | 52 | id_, params = build_data( |
|
42 | 53 | self.apikey, |
|
43 | 54 | 'create_repo', |
|
44 |
repo_name= |
|
|
55 | repo_name=given, | |
|
45 | 56 | owner=TEST_USER_ADMIN_LOGIN, |
|
46 | 57 | repo_type=backend.alias, |
|
47 | 58 | ) |
|
48 | 59 | response = api_call(self.app, params) |
|
49 | 60 | |
|
50 | repo = RepoModel().get_by_repo_name(repo_name) | |
|
51 | ||
|
52 | assert repo is not None | |
|
53 | 61 | ret = { |
|
54 |
'msg': 'Created new repository `%s`' % ( |
|
|
62 | 'msg': 'Created new repository `%s`' % (expected_name,), | |
|
55 | 63 | 'success': True, |
|
56 | 64 | 'task': None, |
|
57 | 65 | } |
|
58 | 66 | expected = ret |
|
59 | 67 | assert_ok(id_, expected, given=response.body) |
|
60 | 68 | |
|
61 | id_, params = build_data(self.apikey, 'get_repo', repoid=repo_name) | |
|
69 | repo = RepoModel().get_by_repo_name(safe_unicode(expected_name)) | |
|
70 | assert repo is not None | |
|
71 | ||
|
72 | id_, params = build_data(self.apikey, 'get_repo', repoid=expected_name) | |
|
62 | 73 | response = api_call(self.app, params) |
|
63 | 74 | body = json.loads(response.body) |
|
64 | 75 | |
|
65 | 76 | assert body['result']['enable_downloads'] is False |
|
66 | 77 | assert body['result']['enable_locking'] is False |
|
67 | 78 | assert body['result']['enable_statistics'] is False |
|
68 | 79 | |
|
69 |
fixture.destroy_repo( |
|
|
80 | fixture.destroy_repo(safe_unicode(expected_name)) | |
|
70 | 81 | |
|
71 | 82 | def test_api_create_restricted_repo_type(self, backend): |
|
72 | 83 | repo_name = 'api-repo-type-{0}'.format(backend.alias) |
|
73 | 84 | id_, params = build_data( |
|
74 | 85 | self.apikey, |
|
75 | 86 | 'create_repo', |
|
76 | 87 | repo_name=repo_name, |
|
77 | 88 | owner=TEST_USER_ADMIN_LOGIN, |
|
78 | 89 | repo_type=backend.alias, |
|
79 | 90 | ) |
|
80 | 91 | git_backend = settings.BACKENDS['git'] |
|
81 | 92 | with mock.patch( |
|
82 | 93 | 'rhodecode.lib.vcs.settings.BACKENDS', {'git': git_backend}): |
|
83 | 94 | response = api_call(self.app, params) |
|
84 | 95 | |
|
85 | 96 | repo = RepoModel().get_by_repo_name(repo_name) |
|
86 | 97 | |
|
87 | 98 | if backend.alias == 'git': |
|
88 | 99 | assert repo is not None |
|
89 | 100 | expected = { |
|
90 | 101 | 'msg': 'Created new repository `{0}`'.format(repo_name,), |
|
91 | 102 | 'success': True, |
|
92 | 103 | 'task': None, |
|
93 | 104 | } |
|
94 | 105 | assert_ok(id_, expected, given=response.body) |
|
95 | 106 | else: |
|
96 | 107 | assert repo is None |
|
97 | 108 | |
|
98 | 109 | fixture.destroy_repo(repo_name) |
|
99 | 110 | |
|
100 | 111 | def test_api_create_repo_with_booleans(self, backend): |
|
101 | 112 | repo_name = 'api-repo-2' |
|
102 | 113 | id_, params = build_data( |
|
103 | 114 | self.apikey, |
|
104 | 115 | 'create_repo', |
|
105 | 116 | repo_name=repo_name, |
|
106 | 117 | owner=TEST_USER_ADMIN_LOGIN, |
|
107 | 118 | repo_type=backend.alias, |
|
108 | 119 | enable_statistics=True, |
|
109 | 120 | enable_locking=True, |
|
110 | 121 | enable_downloads=True |
|
111 | 122 | ) |
|
112 | 123 | response = api_call(self.app, params) |
|
113 | 124 | |
|
114 | 125 | repo = RepoModel().get_by_repo_name(repo_name) |
|
115 | 126 | |
|
116 | 127 | assert repo is not None |
|
117 | 128 | ret = { |
|
118 | 129 | 'msg': 'Created new repository `%s`' % (repo_name,), |
|
119 | 130 | 'success': True, |
|
120 | 131 | 'task': None, |
|
121 | 132 | } |
|
122 | 133 | expected = ret |
|
123 | 134 | assert_ok(id_, expected, given=response.body) |
|
124 | 135 | |
|
125 | 136 | id_, params = build_data(self.apikey, 'get_repo', repoid=repo_name) |
|
126 | 137 | response = api_call(self.app, params) |
|
127 | 138 | body = json.loads(response.body) |
|
128 | 139 | |
|
129 | 140 | assert body['result']['enable_downloads'] is True |
|
130 | 141 | assert body['result']['enable_locking'] is True |
|
131 | 142 | assert body['result']['enable_statistics'] is True |
|
132 | 143 | |
|
133 | 144 | fixture.destroy_repo(repo_name) |
|
134 | 145 | |
|
135 | 146 | def test_api_create_repo_in_group(self, backend): |
|
136 | 147 | repo_group_name = 'my_gr' |
|
137 | 148 | # create the parent |
|
138 | 149 | fixture.create_repo_group(repo_group_name) |
|
139 | 150 | |
|
140 | 151 | repo_name = '%s/api-repo-gr' % (repo_group_name,) |
|
141 | 152 | id_, params = build_data( |
|
142 | 153 | self.apikey, 'create_repo', |
|
143 | 154 | repo_name=repo_name, |
|
144 | 155 | owner=TEST_USER_ADMIN_LOGIN, |
|
145 | 156 | repo_type=backend.alias,) |
|
146 | 157 | response = api_call(self.app, params) |
|
147 | 158 | repo = RepoModel().get_by_repo_name(repo_name) |
|
148 | 159 | assert repo is not None |
|
149 | 160 | assert repo.group is not None |
|
150 | 161 | |
|
151 | 162 | ret = { |
|
152 | 163 | 'msg': 'Created new repository `%s`' % (repo_name,), |
|
153 | 164 | 'success': True, |
|
154 | 165 | 'task': None, |
|
155 | 166 | } |
|
156 | 167 | expected = ret |
|
157 | 168 | assert_ok(id_, expected, given=response.body) |
|
158 | 169 | fixture.destroy_repo(repo_name) |
|
159 | 170 | fixture.destroy_repo_group(repo_group_name) |
|
160 | 171 | |
|
172 | def test_create_repo_in_group_that_doesnt_exist(self, backend, user_util): | |
|
173 | repo_group_name = 'fake_group' | |
|
174 | ||
|
175 | repo_name = '%s/api-repo-gr' % (repo_group_name,) | |
|
176 | id_, params = build_data( | |
|
177 | self.apikey, 'create_repo', | |
|
178 | repo_name=repo_name, | |
|
179 | owner=TEST_USER_ADMIN_LOGIN, | |
|
180 | repo_type=backend.alias,) | |
|
181 | response = api_call(self.app, params) | |
|
182 | ||
|
183 | expected = {'repo_group': 'Repository group `{}` does not exist'.format( | |
|
184 | repo_group_name)} | |
|
185 | assert_error(id_, expected, given=response.body) | |
|
186 | ||
|
161 | 187 | def test_api_create_repo_unknown_owner(self, backend): |
|
162 | 188 | repo_name = 'api-repo-2' |
|
163 | 189 | owner = 'i-dont-exist' |
|
164 | 190 | id_, params = build_data( |
|
165 | 191 | self.apikey, 'create_repo', |
|
166 | 192 | repo_name=repo_name, |
|
167 | 193 | owner=owner, |
|
168 | 194 | repo_type=backend.alias) |
|
169 | 195 | response = api_call(self.app, params) |
|
170 | 196 | expected = 'user `%s` does not exist' % (owner,) |
|
171 | 197 | assert_error(id_, expected, given=response.body) |
|
172 | 198 | |
|
173 | 199 | def test_api_create_repo_dont_specify_owner(self, backend): |
|
174 | 200 | repo_name = 'api-repo-3' |
|
175 | 201 | id_, params = build_data( |
|
176 | 202 | self.apikey, 'create_repo', |
|
177 | 203 | repo_name=repo_name, |
|
178 | 204 | repo_type=backend.alias) |
|
179 | 205 | response = api_call(self.app, params) |
|
180 | 206 | |
|
181 | 207 | repo = RepoModel().get_by_repo_name(repo_name) |
|
182 | 208 | assert repo is not None |
|
183 | 209 | ret = { |
|
184 | 210 | 'msg': 'Created new repository `%s`' % (repo_name,), |
|
185 | 211 | 'success': True, |
|
186 | 212 | 'task': None, |
|
187 | 213 | } |
|
188 | 214 | expected = ret |
|
189 | 215 | assert_ok(id_, expected, given=response.body) |
|
190 | 216 | fixture.destroy_repo(repo_name) |
|
191 | 217 | |
|
192 | 218 | def test_api_create_repo_by_non_admin(self, backend): |
|
193 | 219 | repo_name = 'api-repo-4' |
|
194 | 220 | id_, params = build_data( |
|
195 | 221 | self.apikey_regular, 'create_repo', |
|
196 | 222 | repo_name=repo_name, |
|
197 | 223 | repo_type=backend.alias) |
|
198 | 224 | response = api_call(self.app, params) |
|
199 | 225 | |
|
200 | 226 | repo = RepoModel().get_by_repo_name(repo_name) |
|
201 | 227 | assert repo is not None |
|
202 | 228 | ret = { |
|
203 | 229 | 'msg': 'Created new repository `%s`' % (repo_name,), |
|
204 | 230 | 'success': True, |
|
205 | 231 | 'task': None, |
|
206 | 232 | } |
|
207 | 233 | expected = ret |
|
208 | 234 | assert_ok(id_, expected, given=response.body) |
|
209 | 235 | fixture.destroy_repo(repo_name) |
|
210 | 236 | |
|
211 | 237 | def test_api_create_repo_by_non_admin_specify_owner(self, backend): |
|
212 | 238 | repo_name = 'api-repo-5' |
|
213 | 239 | owner = 'i-dont-exist' |
|
214 | 240 | id_, params = build_data( |
|
215 | 241 | self.apikey_regular, 'create_repo', |
|
216 | 242 | repo_name=repo_name, |
|
217 | 243 | repo_type=backend.alias, |
|
218 | 244 | owner=owner) |
|
219 | 245 | response = api_call(self.app, params) |
|
220 | 246 | |
|
221 | expected = 'Only RhodeCode admin can specify `owner` param' | |
|
247 | expected = 'Only RhodeCode super-admin can specify `owner` param' | |
|
222 | 248 | assert_error(id_, expected, given=response.body) |
|
223 | 249 | fixture.destroy_repo(repo_name) |
|
224 | 250 | |
|
251 | def test_api_create_repo_by_non_admin_no_parent_group_perms(self, backend): | |
|
252 | repo_group_name = 'no-access' | |
|
253 | fixture.create_repo_group(repo_group_name) | |
|
254 | repo_name = 'no-access/api-repo' | |
|
255 | ||
|
256 | id_, params = build_data( | |
|
257 | self.apikey_regular, 'create_repo', | |
|
258 | repo_name=repo_name, | |
|
259 | repo_type=backend.alias) | |
|
260 | response = api_call(self.app, params) | |
|
261 | ||
|
262 | expected = {'repo_group': 'Repository group `{}` does not exist'.format( | |
|
263 | repo_group_name)} | |
|
264 | assert_error(id_, expected, given=response.body) | |
|
265 | fixture.destroy_repo_group(repo_group_name) | |
|
266 | fixture.destroy_repo(repo_name) | |
|
267 | ||
|
268 | def test_api_create_repo_non_admin_no_permission_to_create_to_root_level( | |
|
269 | self, backend, user_util): | |
|
270 | ||
|
271 | regular_user = user_util.create_user() | |
|
272 | regular_user_api_key = regular_user.api_key | |
|
273 | ||
|
274 | usr = UserModel().get_by_username(regular_user.username) | |
|
275 | usr.inherit_default_permissions = False | |
|
276 | Session().add(usr) | |
|
277 | ||
|
278 | repo_name = backend.new_repo_name() | |
|
279 | id_, params = build_data( | |
|
280 | regular_user_api_key, 'create_repo', | |
|
281 | repo_name=repo_name, | |
|
282 | repo_type=backend.alias) | |
|
283 | response = api_call(self.app, params) | |
|
284 | expected = { | |
|
285 | "repo_name": "You do not have the permission to " | |
|
286 | "store repositories in the root location."} | |
|
287 | assert_error(id_, expected, given=response.body) | |
|
288 | ||
|
225 | 289 | def test_api_create_repo_exists(self, backend): |
|
226 | 290 | repo_name = backend.repo_name |
|
227 | 291 | id_, params = build_data( |
|
228 | 292 | self.apikey, 'create_repo', |
|
229 | 293 | repo_name=repo_name, |
|
230 | 294 | owner=TEST_USER_ADMIN_LOGIN, |
|
231 | 295 | repo_type=backend.alias,) |
|
232 | 296 | response = api_call(self.app, params) |
|
233 | expected = "repo `%s` already exist" % (repo_name,) | |
|
297 | expected = { | |
|
298 | 'unique_repo_name': 'Repository with name `{}` already exists'.format( | |
|
299 | repo_name)} | |
|
234 | 300 | assert_error(id_, expected, given=response.body) |
|
235 | 301 | |
|
236 | 302 | @mock.patch.object(RepoModel, 'create', crash) |
|
237 | 303 | def test_api_create_repo_exception_occurred(self, backend): |
|
238 | 304 | repo_name = 'api-repo-6' |
|
239 | 305 | id_, params = build_data( |
|
240 | 306 | self.apikey, 'create_repo', |
|
241 | 307 | repo_name=repo_name, |
|
242 | 308 | owner=TEST_USER_ADMIN_LOGIN, |
|
243 | 309 | repo_type=backend.alias,) |
|
244 | 310 | response = api_call(self.app, params) |
|
245 | 311 | expected = 'failed to create repository `%s`' % (repo_name,) |
|
246 | 312 | assert_error(id_, expected, given=response.body) |
|
247 | 313 | |
|
248 | def test_create_repo_with_extra_slashes_in_name(self, backend, user_util): | |
|
249 | existing_repo_group = user_util.create_repo_group() | |
|
250 | dirty_repo_name = '//{}/repo_name//'.format( | |
|
251 | existing_repo_group.group_name) | |
|
252 | cleaned_repo_name = '{}/repo_name'.format( | |
|
253 | existing_repo_group.group_name) | |
|
314 | @pytest.mark.parametrize('parent_group, dirty_name, expected_name', [ | |
|
315 | (None, 'foo bar x', 'foo-bar-x'), | |
|
316 | ('foo', '/foo//bar x', 'foo/bar-x'), | |
|
317 | ('foo-bar', 'foo-bar //bar x', 'foo-bar/bar-x'), | |
|
318 | ]) | |
|
319 | def test_create_repo_with_extra_slashes_in_name( | |
|
320 | self, backend, parent_group, dirty_name, expected_name): | |
|
321 | ||
|
322 | if parent_group: | |
|
323 | gr = fixture.create_repo_group(parent_group) | |
|
324 | assert gr.group_name == parent_group | |
|
254 | 325 | |
|
255 | 326 | id_, params = build_data( |
|
256 | 327 | self.apikey, 'create_repo', |
|
257 |
repo_name=dirty_ |
|
|
328 | repo_name=dirty_name, | |
|
258 | 329 | repo_type=backend.alias, |
|
259 | 330 | owner=TEST_USER_ADMIN_LOGIN,) |
|
260 | 331 | response = api_call(self.app, params) |
|
261 | repo = RepoModel().get_by_repo_name(cleaned_repo_name) | |
|
332 | expected ={ | |
|
333 | "msg": "Created new repository `{}`".format(expected_name), | |
|
334 | "task": None, | |
|
335 | "success": True | |
|
336 | } | |
|
337 | assert_ok(id_, expected, response.body) | |
|
338 | ||
|
339 | repo = RepoModel().get_by_repo_name(expected_name) | |
|
262 | 340 | assert repo is not None |
|
263 | 341 | |
|
264 | 342 | expected = { |
|
265 |
'msg': 'Created new repository `%s`' % ( |
|
|
343 | 'msg': 'Created new repository `%s`' % (expected_name,), | |
|
266 | 344 | 'success': True, |
|
267 | 345 | 'task': None, |
|
268 | 346 | } |
|
269 | 347 | assert_ok(id_, expected, given=response.body) |
|
270 |
fixture.destroy_repo( |
|
|
348 | fixture.destroy_repo(expected_name) | |
|
349 | if parent_group: | |
|
350 | fixture.destroy_repo_group(parent_group) |
@@ -1,224 +1,279 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2016 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | |
|
22 | 22 | import mock |
|
23 | 23 | import pytest |
|
24 | 24 | |
|
25 | 25 | from rhodecode.model.meta import Session |
|
26 | 26 | from rhodecode.model.repo import RepoModel |
|
27 | from rhodecode.model.repo_group import RepoGroupModel | |
|
27 | 28 | from rhodecode.model.user import UserModel |
|
28 | 29 | from rhodecode.tests import TEST_USER_ADMIN_LOGIN |
|
29 | 30 | from rhodecode.api.tests.utils import ( |
|
30 | 31 | build_data, api_call, assert_error, assert_ok, crash) |
|
31 | 32 | from rhodecode.tests.fixture import Fixture |
|
32 | 33 | |
|
33 | 34 | |
|
34 | 35 | fixture = Fixture() |
|
35 | 36 | |
|
36 | 37 | |
|
37 | 38 | @pytest.mark.usefixtures("testuser_api", "app") |
|
38 | 39 | class TestApiForkRepo(object): |
|
39 | 40 | def test_api_fork_repo(self, backend): |
|
40 | 41 | source_name = backend['minimal'].repo_name |
|
41 | 42 | fork_name = backend.new_repo_name() |
|
42 | 43 | |
|
43 | 44 | id_, params = build_data( |
|
44 | 45 | self.apikey, 'fork_repo', |
|
45 | 46 | repoid=source_name, |
|
46 | 47 | fork_name=fork_name, |
|
47 | 48 | owner=TEST_USER_ADMIN_LOGIN) |
|
48 | 49 | response = api_call(self.app, params) |
|
49 | 50 | |
|
50 | 51 | expected = { |
|
51 | 52 | 'msg': 'Created fork of `%s` as `%s`' % (source_name, fork_name), |
|
52 | 53 | 'success': True, |
|
53 | 54 | 'task': None, |
|
54 | 55 | } |
|
55 | 56 | try: |
|
56 | 57 | assert_ok(id_, expected, given=response.body) |
|
57 | 58 | finally: |
|
58 | 59 | fixture.destroy_repo(fork_name) |
|
59 | 60 | |
|
60 | 61 | def test_api_fork_repo_into_group(self, backend, user_util): |
|
61 | 62 | source_name = backend['minimal'].repo_name |
|
62 | 63 | repo_group = user_util.create_repo_group() |
|
63 | 64 | fork_name = '%s/api-repo-fork' % repo_group.group_name |
|
64 | 65 | id_, params = build_data( |
|
65 | 66 | self.apikey, 'fork_repo', |
|
66 | 67 | repoid=source_name, |
|
67 | 68 | fork_name=fork_name, |
|
68 | 69 | owner=TEST_USER_ADMIN_LOGIN) |
|
69 | 70 | response = api_call(self.app, params) |
|
70 | 71 | |
|
71 | 72 | ret = { |
|
72 | 73 | 'msg': 'Created fork of `%s` as `%s`' % (source_name, fork_name), |
|
73 | 74 | 'success': True, |
|
74 | 75 | 'task': None, |
|
75 | 76 | } |
|
76 | 77 | expected = ret |
|
77 | 78 | try: |
|
78 | 79 | assert_ok(id_, expected, given=response.body) |
|
79 | 80 | finally: |
|
80 | 81 | fixture.destroy_repo(fork_name) |
|
81 | 82 | |
|
82 | 83 | def test_api_fork_repo_non_admin(self, backend): |
|
83 | 84 | source_name = backend['minimal'].repo_name |
|
84 | 85 | fork_name = backend.new_repo_name() |
|
85 | 86 | |
|
86 | 87 | id_, params = build_data( |
|
87 | 88 | self.apikey_regular, 'fork_repo', |
|
88 | 89 | repoid=source_name, |
|
89 | 90 | fork_name=fork_name) |
|
90 | 91 | response = api_call(self.app, params) |
|
91 | 92 | |
|
92 | 93 | expected = { |
|
93 | 94 | 'msg': 'Created fork of `%s` as `%s`' % (source_name, fork_name), |
|
94 | 95 | 'success': True, |
|
95 | 96 | 'task': None, |
|
96 | 97 | } |
|
97 | 98 | try: |
|
98 | 99 | assert_ok(id_, expected, given=response.body) |
|
99 | 100 | finally: |
|
100 | 101 | fixture.destroy_repo(fork_name) |
|
101 | 102 | |
|
103 | def test_api_fork_repo_non_admin_into_group_no_permission(self, backend, user_util): | |
|
104 | source_name = backend['minimal'].repo_name | |
|
105 | repo_group = user_util.create_repo_group() | |
|
106 | repo_group_name = repo_group.group_name | |
|
107 | fork_name = '%s/api-repo-fork' % repo_group_name | |
|
108 | ||
|
109 | id_, params = build_data( | |
|
110 | self.apikey_regular, 'fork_repo', | |
|
111 | repoid=source_name, | |
|
112 | fork_name=fork_name) | |
|
113 | response = api_call(self.app, params) | |
|
114 | ||
|
115 | expected = { | |
|
116 | 'repo_group': 'Repository group `{}` does not exist'.format( | |
|
117 | repo_group_name)} | |
|
118 | try: | |
|
119 | assert_error(id_, expected, given=response.body) | |
|
120 | finally: | |
|
121 | fixture.destroy_repo(fork_name) | |
|
122 | ||
|
102 | 123 | def test_api_fork_repo_non_admin_into_group(self, backend, user_util): |
|
103 | 124 | source_name = backend['minimal'].repo_name |
|
104 | 125 | repo_group = user_util.create_repo_group() |
|
105 | 126 | fork_name = '%s/api-repo-fork' % repo_group.group_name |
|
106 | 127 | |
|
128 | RepoGroupModel().grant_user_permission( | |
|
129 | repo_group, self.TEST_USER_LOGIN, 'group.admin') | |
|
130 | Session().commit() | |
|
131 | ||
|
107 | 132 | id_, params = build_data( |
|
108 | 133 | self.apikey_regular, 'fork_repo', |
|
109 | 134 | repoid=source_name, |
|
110 | 135 | fork_name=fork_name) |
|
111 | 136 | response = api_call(self.app, params) |
|
112 | 137 | |
|
113 | 138 | expected = { |
|
114 | 139 | 'msg': 'Created fork of `%s` as `%s`' % (source_name, fork_name), |
|
115 | 140 | 'success': True, |
|
116 | 141 | 'task': None, |
|
117 | 142 | } |
|
118 | 143 | try: |
|
119 | 144 | assert_ok(id_, expected, given=response.body) |
|
120 | 145 | finally: |
|
121 | 146 | fixture.destroy_repo(fork_name) |
|
122 | 147 | |
|
123 | 148 | def test_api_fork_repo_non_admin_specify_owner(self, backend): |
|
124 | 149 | source_name = backend['minimal'].repo_name |
|
125 | 150 | fork_name = backend.new_repo_name() |
|
126 | 151 | id_, params = build_data( |
|
127 | 152 | self.apikey_regular, 'fork_repo', |
|
128 | 153 | repoid=source_name, |
|
129 | 154 | fork_name=fork_name, |
|
130 | 155 | owner=TEST_USER_ADMIN_LOGIN) |
|
131 | 156 | response = api_call(self.app, params) |
|
132 | expected = 'Only RhodeCode admin can specify `owner` param' | |
|
157 | expected = 'Only RhodeCode super-admin can specify `owner` param' | |
|
133 | 158 | assert_error(id_, expected, given=response.body) |
|
134 | 159 | |
|
135 |
def test_api_fork_repo_non_admin_no_permission_ |
|
|
160 | def test_api_fork_repo_non_admin_no_permission_of_source_repo( | |
|
161 | self, backend): | |
|
136 | 162 | source_name = backend['minimal'].repo_name |
|
137 | 163 | RepoModel().grant_user_permission(repo=source_name, |
|
138 | 164 | user=self.TEST_USER_LOGIN, |
|
139 | 165 | perm='repository.none') |
|
140 | 166 | fork_name = backend.new_repo_name() |
|
141 | 167 | id_, params = build_data( |
|
142 | 168 | self.apikey_regular, 'fork_repo', |
|
143 | 169 | repoid=backend.repo_name, |
|
144 | 170 | fork_name=fork_name) |
|
145 | 171 | response = api_call(self.app, params) |
|
146 | 172 | expected = 'repository `%s` does not exist' % (backend.repo_name) |
|
147 | 173 | assert_error(id_, expected, given=response.body) |
|
148 | 174 | |
|
149 | 175 | def test_api_fork_repo_non_admin_no_permission_to_fork_to_root_level( |
|
150 | self, backend): | |
|
176 | self, backend, user_util): | |
|
177 | ||
|
178 | regular_user = user_util.create_user() | |
|
179 | regular_user_api_key = regular_user.api_key | |
|
180 | usr = UserModel().get_by_username(regular_user.username) | |
|
181 | usr.inherit_default_permissions = False | |
|
182 | Session().add(usr) | |
|
183 | UserModel().grant_perm(regular_user.username, 'hg.fork.repository') | |
|
184 | ||
|
151 | 185 | source_name = backend['minimal'].repo_name |
|
186 | fork_name = backend.new_repo_name() | |
|
187 | id_, params = build_data( | |
|
188 | regular_user_api_key, 'fork_repo', | |
|
189 | repoid=source_name, | |
|
190 | fork_name=fork_name) | |
|
191 | response = api_call(self.app, params) | |
|
192 | expected = { | |
|
193 | "repo_name": "You do not have the permission to " | |
|
194 | "store repositories in the root location."} | |
|
195 | assert_error(id_, expected, given=response.body) | |
|
152 | 196 | |
|
153 | usr = UserModel().get_by_username(self.TEST_USER_LOGIN) | |
|
197 | def test_api_fork_repo_non_admin_no_permission_to_fork( | |
|
198 | self, backend, user_util): | |
|
199 | ||
|
200 | regular_user = user_util.create_user() | |
|
201 | regular_user_api_key = regular_user.api_key | |
|
202 | usr = UserModel().get_by_username(regular_user.username) | |
|
154 | 203 | usr.inherit_default_permissions = False |
|
155 | 204 | Session().add(usr) |
|
156 | 205 | |
|
206 | source_name = backend['minimal'].repo_name | |
|
157 | 207 | fork_name = backend.new_repo_name() |
|
158 | 208 | id_, params = build_data( |
|
159 |
|
|
|
209 | regular_user_api_key, 'fork_repo', | |
|
160 | 210 | repoid=source_name, |
|
161 | 211 | fork_name=fork_name) |
|
162 | 212 | response = api_call(self.app, params) |
|
213 | ||
|
163 | 214 | expected = "Access was denied to this resource." |
|
164 | 215 | assert_error(id_, expected, given=response.body) |
|
165 | 216 | |
|
166 | 217 | def test_api_fork_repo_unknown_owner(self, backend): |
|
167 | 218 | source_name = backend['minimal'].repo_name |
|
168 | 219 | fork_name = backend.new_repo_name() |
|
169 | 220 | owner = 'i-dont-exist' |
|
170 | 221 | id_, params = build_data( |
|
171 | 222 | self.apikey, 'fork_repo', |
|
172 | 223 | repoid=source_name, |
|
173 | 224 | fork_name=fork_name, |
|
174 | 225 | owner=owner) |
|
175 | 226 | response = api_call(self.app, params) |
|
176 | 227 | expected = 'user `%s` does not exist' % (owner,) |
|
177 | 228 | assert_error(id_, expected, given=response.body) |
|
178 | 229 | |
|
179 | 230 | def test_api_fork_repo_fork_exists(self, backend): |
|
180 | 231 | source_name = backend['minimal'].repo_name |
|
181 | 232 | fork_name = backend.new_repo_name() |
|
182 | 233 | fork_repo = fixture.create_fork(source_name, fork_name) |
|
183 | 234 | |
|
184 | 235 | id_, params = build_data( |
|
185 | 236 | self.apikey, 'fork_repo', |
|
186 | 237 | repoid=source_name, |
|
187 | 238 | fork_name=fork_name, |
|
188 | 239 | owner=TEST_USER_ADMIN_LOGIN) |
|
189 | 240 | response = api_call(self.app, params) |
|
190 | 241 | |
|
191 | 242 | try: |
|
192 | expected = "fork `%s` already exist" % (fork_name,) | |
|
243 | expected = { | |
|
244 | 'unique_repo_name': 'Repository with name `{}` already exists'.format( | |
|
245 | fork_name)} | |
|
193 | 246 | assert_error(id_, expected, given=response.body) |
|
194 | 247 | finally: |
|
195 | 248 | fixture.destroy_repo(fork_repo.repo_name) |
|
196 | 249 | |
|
197 | 250 | def test_api_fork_repo_repo_exists(self, backend): |
|
198 | 251 | source_name = backend['minimal'].repo_name |
|
199 | 252 | fork_name = source_name |
|
200 | 253 | |
|
201 | 254 | id_, params = build_data( |
|
202 | 255 | self.apikey, 'fork_repo', |
|
203 | 256 | repoid=source_name, |
|
204 | 257 | fork_name=fork_name, |
|
205 | 258 | owner=TEST_USER_ADMIN_LOGIN) |
|
206 | 259 | response = api_call(self.app, params) |
|
207 | 260 | |
|
208 | expected = "repo `%s` already exist" % (fork_name,) | |
|
261 | expected = { | |
|
262 | 'unique_repo_name': 'Repository with name `{}` already exists'.format( | |
|
263 | fork_name)} | |
|
209 | 264 | assert_error(id_, expected, given=response.body) |
|
210 | 265 | |
|
211 | 266 | @mock.patch.object(RepoModel, 'create_fork', crash) |
|
212 | 267 | def test_api_fork_repo_exception_occurred(self, backend): |
|
213 | 268 | source_name = backend['minimal'].repo_name |
|
214 | 269 | fork_name = backend.new_repo_name() |
|
215 | 270 | id_, params = build_data( |
|
216 | 271 | self.apikey, 'fork_repo', |
|
217 | 272 | repoid=source_name, |
|
218 | 273 | fork_name=fork_name, |
|
219 | 274 | owner=TEST_USER_ADMIN_LOGIN) |
|
220 | 275 | response = api_call(self.app, params) |
|
221 | 276 | |
|
222 | 277 | expected = 'failed to fork repository `%s` as `%s`' % (source_name, |
|
223 | 278 | fork_name) |
|
224 | 279 | assert_error(id_, expected, given=response.body) |
@@ -1,163 +1,189 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2016 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import mock |
|
22 | 22 | import pytest |
|
23 | 23 | |
|
24 | 24 | from rhodecode.model.repo import RepoModel |
|
25 | 25 | from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN |
|
26 | 26 | from rhodecode.api.tests.utils import ( |
|
27 | 27 | build_data, api_call, assert_error, assert_ok, crash, jsonify) |
|
28 | 28 | from rhodecode.tests.fixture import Fixture |
|
29 | 29 | |
|
30 | 30 | |
|
31 | 31 | fixture = Fixture() |
|
32 | 32 | |
|
33 | 33 | UPDATE_REPO_NAME = 'api_update_me' |
|
34 | 34 | |
|
35 | class SAME_AS_UPDATES(object): """ Constant used for tests below """ | |
|
35 | ||
|
36 | class SAME_AS_UPDATES(object): | |
|
37 | """ Constant used for tests below """ | |
|
38 | ||
|
36 | 39 | |
|
37 | 40 | @pytest.mark.usefixtures("testuser_api", "app") |
|
38 | 41 | class TestApiUpdateRepo(object): |
|
39 | 42 | |
|
40 | 43 | @pytest.mark.parametrize("updates, expected", [ |
|
41 |
({'owner': TEST_USER_REGULAR_LOGIN}, |
|
|
42 | ({'description': 'new description'}, SAME_AS_UPDATES), | |
|
43 | ({'clone_uri': 'http://foo.com/repo'}, SAME_AS_UPDATES), | |
|
44 | ({'clone_uri': None}, {'clone_uri': ''}), | |
|
45 | ({'clone_uri': ''}, {'clone_uri': ''}), | |
|
46 | ({'landing_rev': 'branch:master'}, {'landing_rev': ['branch','master']}), | |
|
47 | ({'enable_statistics': True}, SAME_AS_UPDATES), | |
|
48 |
|
|
|
49 | ({'enable_downloads': True}, SAME_AS_UPDATES), | |
|
50 |
({'n |
|
|
44 | ({'owner': TEST_USER_REGULAR_LOGIN}, | |
|
45 | SAME_AS_UPDATES), | |
|
46 | ||
|
47 | ({'description': 'new description'}, | |
|
48 | SAME_AS_UPDATES), | |
|
49 | ||
|
50 | ({'clone_uri': 'http://foo.com/repo'}, | |
|
51 | SAME_AS_UPDATES), | |
|
52 | ||
|
53 | ({'clone_uri': None}, | |
|
54 | {'clone_uri': ''}), | |
|
55 | ||
|
56 | ({'clone_uri': ''}, | |
|
57 | {'clone_uri': ''}), | |
|
58 | ||
|
59 | ({'landing_rev': 'rev:tip'}, | |
|
60 | {'landing_rev': ['rev', 'tip']}), | |
|
61 | ||
|
62 | ({'enable_statistics': True}, | |
|
63 | SAME_AS_UPDATES), | |
|
64 | ||
|
65 | ({'enable_locking': True}, | |
|
66 | SAME_AS_UPDATES), | |
|
67 | ||
|
68 | ({'enable_downloads': True}, | |
|
69 | SAME_AS_UPDATES), | |
|
70 | ||
|
71 | ({'repo_name': 'new_repo_name'}, | |
|
72 | { | |
|
51 | 73 | 'repo_name': 'new_repo_name', |
|
52 |
'url': 'http://test.example.com:80/new_repo_name' |
|
|
74 | 'url': 'http://test.example.com:80/new_repo_name' | |
|
53 | 75 | }), |
|
54 | ({'group': 'test_group_for_update'}, { | |
|
55 |
|
|
|
56 | 'url': 'http://test.example.com:80/test_group_for_update/%s' % UPDATE_REPO_NAME | |
|
76 | ||
|
77 | ({'repo_name': 'test_group_for_update/{}'.format(UPDATE_REPO_NAME), | |
|
78 | '_group': 'test_group_for_update'}, | |
|
79 | { | |
|
80 | 'repo_name': 'test_group_for_update/{}'.format(UPDATE_REPO_NAME), | |
|
81 | 'url': 'http://test.example.com:80/test_group_for_update/{}'.format(UPDATE_REPO_NAME) | |
|
57 | 82 | }), |
|
58 | 83 | ]) |
|
59 | 84 | def test_api_update_repo(self, updates, expected, backend): |
|
60 | 85 | repo_name = UPDATE_REPO_NAME |
|
61 | 86 | repo = fixture.create_repo(repo_name, repo_type=backend.alias) |
|
62 | if updates.get('group'): | |
|
63 | fixture.create_repo_group(updates['group']) | |
|
87 | if updates.get('_group'): | |
|
88 | fixture.create_repo_group(updates['_group']) | |
|
64 | 89 | |
|
65 | 90 | expected_api_data = repo.get_api_data(include_secrets=True) |
|
66 | 91 | if expected is SAME_AS_UPDATES: |
|
67 | 92 | expected_api_data.update(updates) |
|
68 | 93 | else: |
|
69 | 94 | expected_api_data.update(expected) |
|
70 | 95 | |
|
71 | ||
|
72 | 96 | id_, params = build_data( |
|
73 | 97 | self.apikey, 'update_repo', repoid=repo_name, **updates) |
|
74 | 98 | response = api_call(self.app, params) |
|
75 | 99 | |
|
76 | if updates.get('name'): | |
|
77 | repo_name = updates['name'] | |
|
78 | if updates.get('group'): | |
|
79 | repo_name = '/'.join([updates['group'], repo_name]) | |
|
100 | if updates.get('repo_name'): | |
|
101 | repo_name = updates['repo_name'] | |
|
80 | 102 | |
|
81 | 103 | try: |
|
82 | 104 | expected = { |
|
83 | 105 | 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo_name), |
|
84 | 106 | 'repository': jsonify(expected_api_data) |
|
85 | 107 | } |
|
86 | 108 | assert_ok(id_, expected, given=response.body) |
|
87 | 109 | finally: |
|
88 | 110 | fixture.destroy_repo(repo_name) |
|
89 | if updates.get('group'): | |
|
90 | fixture.destroy_repo_group(updates['group']) | |
|
111 | if updates.get('_group'): | |
|
112 | fixture.destroy_repo_group(updates['_group']) | |
|
91 | 113 | |
|
92 | 114 | def test_api_update_repo_fork_of_field(self, backend): |
|
93 | 115 | master_repo = backend.create_repo() |
|
94 | 116 | repo = backend.create_repo() |
|
95 | 117 | updates = { |
|
96 | 118 | 'fork_of': master_repo.repo_name |
|
97 | 119 | } |
|
98 | 120 | expected_api_data = repo.get_api_data(include_secrets=True) |
|
99 | 121 | expected_api_data.update(updates) |
|
100 | 122 | |
|
101 | 123 | id_, params = build_data( |
|
102 | 124 | self.apikey, 'update_repo', repoid=repo.repo_name, **updates) |
|
103 | 125 | response = api_call(self.app, params) |
|
104 | 126 | expected = { |
|
105 | 127 | 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo.repo_name), |
|
106 | 128 | 'repository': jsonify(expected_api_data) |
|
107 | 129 | } |
|
108 | 130 | assert_ok(id_, expected, given=response.body) |
|
109 | 131 | result = response.json['result']['repository'] |
|
110 | 132 | assert result['fork_of'] == master_repo.repo_name |
|
111 | 133 | |
|
112 | 134 | def test_api_update_repo_fork_of_not_found(self, backend): |
|
113 | 135 | master_repo_name = 'fake-parent-repo' |
|
114 | 136 | repo = backend.create_repo() |
|
115 | 137 | updates = { |
|
116 | 138 | 'fork_of': master_repo_name |
|
117 | 139 | } |
|
118 | 140 | id_, params = build_data( |
|
119 | 141 | self.apikey, 'update_repo', repoid=repo.repo_name, **updates) |
|
120 | 142 | response = api_call(self.app, params) |
|
121 | expected = 'repository `{}` does not exist'.format(master_repo_name) | |
|
143 | expected = { | |
|
144 | 'repo_fork_of': 'Fork with id `{}` does not exists'.format( | |
|
145 | master_repo_name)} | |
|
122 | 146 | assert_error(id_, expected, given=response.body) |
|
123 | 147 | |
|
124 | 148 | def test_api_update_repo_with_repo_group_not_existing(self): |
|
125 | 149 | repo_name = 'admin_owned' |
|
150 | fake_repo_group = 'test_group_for_update' | |
|
126 | 151 | fixture.create_repo(repo_name) |
|
127 | updates = {'group': 'test_group_for_update'} | |
|
152 | updates = {'repo_name': '{}/{}'.format(fake_repo_group, repo_name)} | |
|
128 | 153 | id_, params = build_data( |
|
129 | 154 | self.apikey, 'update_repo', repoid=repo_name, **updates) |
|
130 | 155 | response = api_call(self.app, params) |
|
131 | 156 | try: |
|
132 | expected = 'repository group `%s` does not exist' % ( | |
|
133 | updates['group'],) | |
|
157 | expected = { | |
|
158 | 'repo_group': 'Repository group `{}` does not exist'.format(fake_repo_group) | |
|
159 | } | |
|
134 | 160 | assert_error(id_, expected, given=response.body) |
|
135 | 161 | finally: |
|
136 | 162 | fixture.destroy_repo(repo_name) |
|
137 | 163 | |
|
138 | 164 | def test_api_update_repo_regular_user_not_allowed(self): |
|
139 | 165 | repo_name = 'admin_owned' |
|
140 | 166 | fixture.create_repo(repo_name) |
|
141 | 167 | updates = {'active': False} |
|
142 | 168 | id_, params = build_data( |
|
143 | 169 | self.apikey_regular, 'update_repo', repoid=repo_name, **updates) |
|
144 | 170 | response = api_call(self.app, params) |
|
145 | 171 | try: |
|
146 | 172 | expected = 'repository `%s` does not exist' % (repo_name,) |
|
147 | 173 | assert_error(id_, expected, given=response.body) |
|
148 | 174 | finally: |
|
149 | 175 | fixture.destroy_repo(repo_name) |
|
150 | 176 | |
|
151 | 177 | @mock.patch.object(RepoModel, 'update', crash) |
|
152 | 178 | def test_api_update_repo_exception_occurred(self, backend): |
|
153 | 179 | repo_name = UPDATE_REPO_NAME |
|
154 | 180 | fixture.create_repo(repo_name, repo_type=backend.alias) |
|
155 | 181 | id_, params = build_data( |
|
156 | 182 | self.apikey, 'update_repo', repoid=repo_name, |
|
157 | 183 | owner=TEST_USER_ADMIN_LOGIN,) |
|
158 | 184 | response = api_call(self.app, params) |
|
159 | 185 | try: |
|
160 | 186 | expected = 'failed to update repo `%s`' % (repo_name,) |
|
161 | 187 | assert_error(id_, expected, given=response.body) |
|
162 | 188 | finally: |
|
163 | 189 | fixture.destroy_repo(repo_name) |
@@ -1,1888 +1,1918 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2011-2016 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import logging |
|
22 | 22 | import time |
|
23 | 23 | |
|
24 |
import co |
|
|
25 | ||
|
26 | from rhodecode import BACKENDS | |
|
27 | from rhodecode.api import jsonrpc_method, JSONRPCError, JSONRPCForbidden, json | |
|
24 | import rhodecode | |
|
25 | from rhodecode.api import ( | |
|
26 | jsonrpc_method, JSONRPCError, JSONRPCForbidden, JSONRPCValidationError) | |
|
28 | 27 | from rhodecode.api.utils import ( |
|
29 | 28 | has_superadmin_permission, Optional, OAttr, get_repo_or_error, |
|
30 |
get_user_group_or_error, get_user_or_error, |
|
|
31 | get_perm_or_error, store_update, get_repo_group_or_error, parse_args, | |
|
32 | get_origin, build_commit_data) | |
|
33 | from rhodecode.lib.auth import ( | |
|
34 | HasPermissionAnyApi, HasRepoGroupPermissionAnyApi, | |
|
35 | HasUserGroupPermissionAnyApi) | |
|
29 | get_user_group_or_error, get_user_or_error, validate_repo_permissions, | |
|
30 | get_perm_or_error, parse_args, get_origin, build_commit_data, | |
|
31 | validate_set_owner_permissions) | |
|
32 | from rhodecode.lib.auth import HasPermissionAnyApi, HasUserGroupPermissionAnyApi | |
|
36 | 33 | from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError |
|
37 | from rhodecode.lib.utils import map_groups | |
|
38 | 34 | from rhodecode.lib.utils2 import str2bool, time_to_datetime |
|
35 | from rhodecode.lib.ext_json import json | |
|
39 | 36 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
40 | 37 | from rhodecode.model.comment import ChangesetCommentsModel |
|
41 | 38 | from rhodecode.model.db import ( |
|
42 | 39 | Session, ChangesetStatus, RepositoryField, Repository) |
|
43 | 40 | from rhodecode.model.repo import RepoModel |
|
44 | from rhodecode.model.repo_group import RepoGroupModel | |
|
45 | 41 | from rhodecode.model.scm import ScmModel, RepoList |
|
46 | 42 | from rhodecode.model.settings import SettingsModel, VcsSettingsModel |
|
43 | from rhodecode.model import validation_schema | |
|
47 | 44 | from rhodecode.model.validation_schema.schemas import repo_schema |
|
48 | 45 | |
|
49 | 46 | log = logging.getLogger(__name__) |
|
50 | 47 | |
|
51 | 48 | |
|
52 | 49 | @jsonrpc_method() |
|
53 | 50 | def get_repo(request, apiuser, repoid, cache=Optional(True)): |
|
54 | 51 | """ |
|
55 | 52 | Gets an existing repository by its name or repository_id. |
|
56 | 53 | |
|
57 | 54 | The members section so the output returns users groups or users |
|
58 | 55 | associated with that repository. |
|
59 | 56 | |
|
60 | 57 | This command can only be run using an |authtoken| with admin rights, |
|
61 | 58 | or users with at least read rights to the |repo|. |
|
62 | 59 | |
|
63 | 60 | :param apiuser: This is filled automatically from the |authtoken|. |
|
64 | 61 | :type apiuser: AuthUser |
|
65 | 62 | :param repoid: The repository name or repository id. |
|
66 | 63 | :type repoid: str or int |
|
67 | 64 | :param cache: use the cached value for last changeset |
|
68 | 65 | :type: cache: Optional(bool) |
|
69 | 66 | |
|
70 | 67 | Example output: |
|
71 | 68 | |
|
72 | 69 | .. code-block:: bash |
|
73 | 70 | |
|
74 | 71 | { |
|
75 | 72 | "error": null, |
|
76 | 73 | "id": <repo_id>, |
|
77 | 74 | "result": { |
|
78 | 75 | "clone_uri": null, |
|
79 | 76 | "created_on": "timestamp", |
|
80 | 77 | "description": "repo description", |
|
81 | 78 | "enable_downloads": false, |
|
82 | 79 | "enable_locking": false, |
|
83 | 80 | "enable_statistics": false, |
|
84 | 81 | "followers": [ |
|
85 | 82 | { |
|
86 | 83 | "active": true, |
|
87 | 84 | "admin": false, |
|
88 | 85 | "api_key": "****************************************", |
|
89 | 86 | "api_keys": [ |
|
90 | 87 | "****************************************" |
|
91 | 88 | ], |
|
92 | 89 | "email": "user@example.com", |
|
93 | 90 | "emails": [ |
|
94 | 91 | "user@example.com" |
|
95 | 92 | ], |
|
96 | 93 | "extern_name": "rhodecode", |
|
97 | 94 | "extern_type": "rhodecode", |
|
98 | 95 | "firstname": "username", |
|
99 | 96 | "ip_addresses": [], |
|
100 | 97 | "language": null, |
|
101 | 98 | "last_login": "2015-09-16T17:16:35.854", |
|
102 | 99 | "lastname": "surname", |
|
103 | 100 | "user_id": <user_id>, |
|
104 | 101 | "username": "name" |
|
105 | 102 | } |
|
106 | 103 | ], |
|
107 | 104 | "fork_of": "parent-repo", |
|
108 | 105 | "landing_rev": [ |
|
109 | 106 | "rev", |
|
110 | 107 | "tip" |
|
111 | 108 | ], |
|
112 | 109 | "last_changeset": { |
|
113 | 110 | "author": "User <user@example.com>", |
|
114 | 111 | "branch": "default", |
|
115 | 112 | "date": "timestamp", |
|
116 | 113 | "message": "last commit message", |
|
117 | 114 | "parents": [ |
|
118 | 115 | { |
|
119 | 116 | "raw_id": "commit-id" |
|
120 | 117 | } |
|
121 | 118 | ], |
|
122 | 119 | "raw_id": "commit-id", |
|
123 | 120 | "revision": <revision number>, |
|
124 | 121 | "short_id": "short id" |
|
125 | 122 | }, |
|
126 | 123 | "lock_reason": null, |
|
127 | 124 | "locked_by": null, |
|
128 | 125 | "locked_date": null, |
|
129 | 126 | "members": [ |
|
130 | 127 | { |
|
131 | 128 | "name": "super-admin-name", |
|
132 | 129 | "origin": "super-admin", |
|
133 | 130 | "permission": "repository.admin", |
|
134 | 131 | "type": "user" |
|
135 | 132 | }, |
|
136 | 133 | { |
|
137 | 134 | "name": "owner-name", |
|
138 | 135 | "origin": "owner", |
|
139 | 136 | "permission": "repository.admin", |
|
140 | 137 | "type": "user" |
|
141 | 138 | }, |
|
142 | 139 | { |
|
143 | 140 | "name": "user-group-name", |
|
144 | 141 | "origin": "permission", |
|
145 | 142 | "permission": "repository.write", |
|
146 | 143 | "type": "user_group" |
|
147 | 144 | } |
|
148 | 145 | ], |
|
149 | 146 | "owner": "owner-name", |
|
150 | 147 | "permissions": [ |
|
151 | 148 | { |
|
152 | 149 | "name": "super-admin-name", |
|
153 | 150 | "origin": "super-admin", |
|
154 | 151 | "permission": "repository.admin", |
|
155 | 152 | "type": "user" |
|
156 | 153 | }, |
|
157 | 154 | { |
|
158 | 155 | "name": "owner-name", |
|
159 | 156 | "origin": "owner", |
|
160 | 157 | "permission": "repository.admin", |
|
161 | 158 | "type": "user" |
|
162 | 159 | }, |
|
163 | 160 | { |
|
164 | 161 | "name": "user-group-name", |
|
165 | 162 | "origin": "permission", |
|
166 | 163 | "permission": "repository.write", |
|
167 | 164 | "type": "user_group" |
|
168 | 165 | } |
|
169 | 166 | ], |
|
170 | 167 | "private": true, |
|
171 | 168 | "repo_id": 676, |
|
172 | 169 | "repo_name": "user-group/repo-name", |
|
173 | 170 | "repo_type": "hg" |
|
174 | 171 | } |
|
175 | 172 | } |
|
176 | 173 | """ |
|
177 | 174 | |
|
178 | 175 | repo = get_repo_or_error(repoid) |
|
179 | 176 | cache = Optional.extract(cache) |
|
177 | ||
|
180 | 178 | include_secrets = False |
|
181 | 179 | if has_superadmin_permission(apiuser): |
|
182 | 180 | include_secrets = True |
|
183 | 181 | else: |
|
184 | 182 | # check if we have at least read permission for this repo ! |
|
185 | 183 | _perms = ( |
|
186 | 184 | 'repository.admin', 'repository.write', 'repository.read',) |
|
187 |
|
|
|
185 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
|
188 | 186 | |
|
189 | 187 | permissions = [] |
|
190 | 188 | for _user in repo.permissions(): |
|
191 | 189 | user_data = { |
|
192 | 190 | 'name': _user.username, |
|
193 | 191 | 'permission': _user.permission, |
|
194 | 192 | 'origin': get_origin(_user), |
|
195 | 193 | 'type': "user", |
|
196 | 194 | } |
|
197 | 195 | permissions.append(user_data) |
|
198 | 196 | |
|
199 | 197 | for _user_group in repo.permission_user_groups(): |
|
200 | 198 | user_group_data = { |
|
201 | 199 | 'name': _user_group.users_group_name, |
|
202 | 200 | 'permission': _user_group.permission, |
|
203 | 201 | 'origin': get_origin(_user_group), |
|
204 | 202 | 'type': "user_group", |
|
205 | 203 | } |
|
206 | 204 | permissions.append(user_group_data) |
|
207 | 205 | |
|
208 | 206 | following_users = [ |
|
209 | 207 | user.user.get_api_data(include_secrets=include_secrets) |
|
210 | 208 | for user in repo.followers] |
|
211 | 209 | |
|
212 | 210 | if not cache: |
|
213 | 211 | repo.update_commit_cache() |
|
214 | 212 | data = repo.get_api_data(include_secrets=include_secrets) |
|
215 | 213 | data['members'] = permissions # TODO: this should be deprecated soon |
|
216 | 214 | data['permissions'] = permissions |
|
217 | 215 | data['followers'] = following_users |
|
218 | 216 | return data |
|
219 | 217 | |
|
220 | 218 | |
|
221 | 219 | @jsonrpc_method() |
|
222 | 220 | def get_repos(request, apiuser): |
|
223 | 221 | """ |
|
224 | 222 | Lists all existing repositories. |
|
225 | 223 | |
|
226 | 224 | This command can only be run using an |authtoken| with admin rights, |
|
227 | 225 | or users with at least read rights to |repos|. |
|
228 | 226 | |
|
229 | 227 | :param apiuser: This is filled automatically from the |authtoken|. |
|
230 | 228 | :type apiuser: AuthUser |
|
231 | 229 | |
|
232 | 230 | Example output: |
|
233 | 231 | |
|
234 | 232 | .. code-block:: bash |
|
235 | 233 | |
|
236 | 234 | id : <id_given_in_input> |
|
237 | 235 | result: [ |
|
238 | 236 | { |
|
239 | 237 | "repo_id" : "<repo_id>", |
|
240 | 238 | "repo_name" : "<reponame>" |
|
241 | 239 | "repo_type" : "<repo_type>", |
|
242 | 240 | "clone_uri" : "<clone_uri>", |
|
243 | 241 | "private": : "<bool>", |
|
244 | 242 | "created_on" : "<datetimecreated>", |
|
245 | 243 | "description" : "<description>", |
|
246 | 244 | "landing_rev": "<landing_rev>", |
|
247 | 245 | "owner": "<repo_owner>", |
|
248 | 246 | "fork_of": "<name_of_fork_parent>", |
|
249 | 247 | "enable_downloads": "<bool>", |
|
250 | 248 | "enable_locking": "<bool>", |
|
251 | 249 | "enable_statistics": "<bool>", |
|
252 | 250 | }, |
|
253 | 251 | ... |
|
254 | 252 | ] |
|
255 | 253 | error: null |
|
256 | 254 | """ |
|
257 | 255 | |
|
258 | 256 | include_secrets = has_superadmin_permission(apiuser) |
|
259 | 257 | _perms = ('repository.read', 'repository.write', 'repository.admin',) |
|
260 | 258 | extras = {'user': apiuser} |
|
261 | 259 | |
|
262 | 260 | repo_list = RepoList( |
|
263 | 261 | RepoModel().get_all(), perm_set=_perms, extra_kwargs=extras) |
|
264 | 262 | return [repo.get_api_data(include_secrets=include_secrets) |
|
265 | 263 | for repo in repo_list] |
|
266 | 264 | |
|
267 | 265 | |
|
268 | 266 | @jsonrpc_method() |
|
269 | 267 | def get_repo_changeset(request, apiuser, repoid, revision, |
|
270 | 268 | details=Optional('basic')): |
|
271 | 269 | """ |
|
272 | 270 | Returns information about a changeset. |
|
273 | 271 | |
|
274 | 272 | Additionally parameters define the amount of details returned by |
|
275 | 273 | this function. |
|
276 | 274 | |
|
277 | 275 | This command can only be run using an |authtoken| with admin rights, |
|
278 | 276 | or users with at least read rights to the |repo|. |
|
279 | 277 | |
|
280 | 278 | :param apiuser: This is filled automatically from the |authtoken|. |
|
281 | 279 | :type apiuser: AuthUser |
|
282 | 280 | :param repoid: The repository name or repository id |
|
283 | 281 | :type repoid: str or int |
|
284 | 282 | :param revision: revision for which listing should be done |
|
285 | 283 | :type revision: str |
|
286 | 284 | :param details: details can be 'basic|extended|full' full gives diff |
|
287 | 285 | info details like the diff itself, and number of changed files etc. |
|
288 | 286 | :type details: Optional(str) |
|
289 | 287 | |
|
290 | 288 | """ |
|
291 | 289 | repo = get_repo_or_error(repoid) |
|
292 | 290 | if not has_superadmin_permission(apiuser): |
|
293 | 291 | _perms = ( |
|
294 | 292 | 'repository.admin', 'repository.write', 'repository.read',) |
|
295 |
|
|
|
293 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
|
296 | 294 | |
|
297 | 295 | changes_details = Optional.extract(details) |
|
298 | 296 | _changes_details_types = ['basic', 'extended', 'full'] |
|
299 | 297 | if changes_details not in _changes_details_types: |
|
300 | 298 | raise JSONRPCError( |
|
301 | 299 | 'ret_type must be one of %s' % ( |
|
302 | 300 | ','.join(_changes_details_types))) |
|
303 | 301 | |
|
304 | 302 | pre_load = ['author', 'branch', 'date', 'message', 'parents', |
|
305 | 303 | 'status', '_commit', '_file_paths'] |
|
306 | 304 | |
|
307 | 305 | try: |
|
308 | 306 | cs = repo.get_commit(commit_id=revision, pre_load=pre_load) |
|
309 | 307 | except TypeError as e: |
|
310 | 308 | raise JSONRPCError(e.message) |
|
311 | 309 | _cs_json = cs.__json__() |
|
312 | 310 | _cs_json['diff'] = build_commit_data(cs, changes_details) |
|
313 | 311 | if changes_details == 'full': |
|
314 | 312 | _cs_json['refs'] = { |
|
315 | 313 | 'branches': [cs.branch], |
|
316 | 314 | 'bookmarks': getattr(cs, 'bookmarks', []), |
|
317 | 315 | 'tags': cs.tags |
|
318 | 316 | } |
|
319 | 317 | return _cs_json |
|
320 | 318 | |
|
321 | 319 | |
|
322 | 320 | @jsonrpc_method() |
|
323 | 321 | def get_repo_changesets(request, apiuser, repoid, start_rev, limit, |
|
324 | 322 | details=Optional('basic')): |
|
325 | 323 | """ |
|
326 | 324 | Returns a set of commits limited by the number starting |
|
327 | 325 | from the `start_rev` option. |
|
328 | 326 | |
|
329 | 327 | Additional parameters define the amount of details returned by this |
|
330 | 328 | function. |
|
331 | 329 | |
|
332 | 330 | This command can only be run using an |authtoken| with admin rights, |
|
333 | 331 | or users with at least read rights to |repos|. |
|
334 | 332 | |
|
335 | 333 | :param apiuser: This is filled automatically from the |authtoken|. |
|
336 | 334 | :type apiuser: AuthUser |
|
337 | 335 | :param repoid: The repository name or repository ID. |
|
338 | 336 | :type repoid: str or int |
|
339 | 337 | :param start_rev: The starting revision from where to get changesets. |
|
340 | 338 | :type start_rev: str |
|
341 | 339 | :param limit: Limit the number of commits to this amount |
|
342 | 340 | :type limit: str or int |
|
343 | 341 | :param details: Set the level of detail returned. Valid option are: |
|
344 | 342 | ``basic``, ``extended`` and ``full``. |
|
345 | 343 | :type details: Optional(str) |
|
346 | 344 | |
|
347 | 345 | .. note:: |
|
348 | 346 | |
|
349 | 347 | Setting the parameter `details` to the value ``full`` is extensive |
|
350 | 348 | and returns details like the diff itself, and the number |
|
351 | 349 | of changed files. |
|
352 | 350 | |
|
353 | 351 | """ |
|
354 | 352 | repo = get_repo_or_error(repoid) |
|
355 | 353 | if not has_superadmin_permission(apiuser): |
|
356 | 354 | _perms = ( |
|
357 | 355 | 'repository.admin', 'repository.write', 'repository.read',) |
|
358 |
|
|
|
356 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
|
359 | 357 | |
|
360 | 358 | changes_details = Optional.extract(details) |
|
361 | 359 | _changes_details_types = ['basic', 'extended', 'full'] |
|
362 | 360 | if changes_details not in _changes_details_types: |
|
363 | 361 | raise JSONRPCError( |
|
364 | 362 | 'ret_type must be one of %s' % ( |
|
365 | 363 | ','.join(_changes_details_types))) |
|
366 | 364 | |
|
367 | 365 | limit = int(limit) |
|
368 | 366 | pre_load = ['author', 'branch', 'date', 'message', 'parents', |
|
369 | 367 | 'status', '_commit', '_file_paths'] |
|
370 | 368 | |
|
371 | 369 | vcs_repo = repo.scm_instance() |
|
372 | 370 | # SVN needs a special case to distinguish its index and commit id |
|
373 | 371 | if vcs_repo and vcs_repo.alias == 'svn' and (start_rev == '0'): |
|
374 | 372 | start_rev = vcs_repo.commit_ids[0] |
|
375 | 373 | |
|
376 | 374 | try: |
|
377 | 375 | commits = vcs_repo.get_commits( |
|
378 | 376 | start_id=start_rev, pre_load=pre_load) |
|
379 | 377 | except TypeError as e: |
|
380 | 378 | raise JSONRPCError(e.message) |
|
381 | 379 | except Exception: |
|
382 | 380 | log.exception('Fetching of commits failed') |
|
383 | 381 | raise JSONRPCError('Error occurred during commit fetching') |
|
384 | 382 | |
|
385 | 383 | ret = [] |
|
386 | 384 | for cnt, commit in enumerate(commits): |
|
387 | 385 | if cnt >= limit != -1: |
|
388 | 386 | break |
|
389 | 387 | _cs_json = commit.__json__() |
|
390 | 388 | _cs_json['diff'] = build_commit_data(commit, changes_details) |
|
391 | 389 | if changes_details == 'full': |
|
392 | 390 | _cs_json['refs'] = { |
|
393 | 391 | 'branches': [commit.branch], |
|
394 | 392 | 'bookmarks': getattr(commit, 'bookmarks', []), |
|
395 | 393 | 'tags': commit.tags |
|
396 | 394 | } |
|
397 | 395 | ret.append(_cs_json) |
|
398 | 396 | return ret |
|
399 | 397 | |
|
400 | 398 | |
|
401 | 399 | @jsonrpc_method() |
|
402 | 400 | def get_repo_nodes(request, apiuser, repoid, revision, root_path, |
|
403 | 401 | ret_type=Optional('all'), details=Optional('basic'), |
|
404 | 402 | max_file_bytes=Optional(None)): |
|
405 | 403 | """ |
|
406 | 404 | Returns a list of nodes and children in a flat list for a given |
|
407 | 405 | path at given revision. |
|
408 | 406 | |
|
409 | 407 | It's possible to specify ret_type to show only `files` or `dirs`. |
|
410 | 408 | |
|
411 | 409 | This command can only be run using an |authtoken| with admin rights, |
|
412 | 410 | or users with at least read rights to |repos|. |
|
413 | 411 | |
|
414 | 412 | :param apiuser: This is filled automatically from the |authtoken|. |
|
415 | 413 | :type apiuser: AuthUser |
|
416 | 414 | :param repoid: The repository name or repository ID. |
|
417 | 415 | :type repoid: str or int |
|
418 | 416 | :param revision: The revision for which listing should be done. |
|
419 | 417 | :type revision: str |
|
420 | 418 | :param root_path: The path from which to start displaying. |
|
421 | 419 | :type root_path: str |
|
422 | 420 | :param ret_type: Set the return type. Valid options are |
|
423 | 421 | ``all`` (default), ``files`` and ``dirs``. |
|
424 | 422 | :type ret_type: Optional(str) |
|
425 | 423 | :param details: Returns extended information about nodes, such as |
|
426 | 424 | md5, binary, and or content. The valid options are ``basic`` and |
|
427 | 425 | ``full``. |
|
428 | 426 | :type details: Optional(str) |
|
429 | 427 | :param max_file_bytes: Only return file content under this file size bytes |
|
430 | 428 | :type details: Optional(int) |
|
431 | 429 | |
|
432 | 430 | Example output: |
|
433 | 431 | |
|
434 | 432 | .. code-block:: bash |
|
435 | 433 | |
|
436 | 434 | id : <id_given_in_input> |
|
437 | 435 | result: [ |
|
438 | 436 | { |
|
439 | 437 | "name" : "<name>" |
|
440 | 438 | "type" : "<type>", |
|
441 | 439 | "binary": "<true|false>" (only in extended mode) |
|
442 | 440 | "md5" : "<md5 of file content>" (only in extended mode) |
|
443 | 441 | }, |
|
444 | 442 | ... |
|
445 | 443 | ] |
|
446 | 444 | error: null |
|
447 | 445 | """ |
|
448 | 446 | |
|
449 | 447 | repo = get_repo_or_error(repoid) |
|
450 | 448 | if not has_superadmin_permission(apiuser): |
|
451 | 449 | _perms = ( |
|
452 | 450 | 'repository.admin', 'repository.write', 'repository.read',) |
|
453 |
|
|
|
451 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
|
454 | 452 | |
|
455 | 453 | ret_type = Optional.extract(ret_type) |
|
456 | 454 | details = Optional.extract(details) |
|
457 | 455 | _extended_types = ['basic', 'full'] |
|
458 | 456 | if details not in _extended_types: |
|
459 | 457 | raise JSONRPCError( |
|
460 | 458 | 'ret_type must be one of %s' % (','.join(_extended_types))) |
|
461 | 459 | extended_info = False |
|
462 | 460 | content = False |
|
463 | 461 | if details == 'basic': |
|
464 | 462 | extended_info = True |
|
465 | 463 | |
|
466 | 464 | if details == 'full': |
|
467 | 465 | extended_info = content = True |
|
468 | 466 | |
|
469 | 467 | _map = {} |
|
470 | 468 | try: |
|
471 | 469 | # check if repo is not empty by any chance, skip quicker if it is. |
|
472 | 470 | _scm = repo.scm_instance() |
|
473 | 471 | if _scm.is_empty(): |
|
474 | 472 | return [] |
|
475 | 473 | |
|
476 | 474 | _d, _f = ScmModel().get_nodes( |
|
477 | 475 | repo, revision, root_path, flat=False, |
|
478 | 476 | extended_info=extended_info, content=content, |
|
479 | 477 | max_file_bytes=max_file_bytes) |
|
480 | 478 | _map = { |
|
481 | 479 | 'all': _d + _f, |
|
482 | 480 | 'files': _f, |
|
483 | 481 | 'dirs': _d, |
|
484 | 482 | } |
|
485 | 483 | return _map[ret_type] |
|
486 | 484 | except KeyError: |
|
487 | 485 | raise JSONRPCError( |
|
488 | 486 | 'ret_type must be one of %s' % (','.join(sorted(_map.keys())))) |
|
489 | 487 | except Exception: |
|
490 | 488 | log.exception("Exception occurred while trying to get repo nodes") |
|
491 | 489 | raise JSONRPCError( |
|
492 | 490 | 'failed to get repo: `%s` nodes' % repo.repo_name |
|
493 | 491 | ) |
|
494 | 492 | |
|
495 | 493 | |
|
496 | 494 | @jsonrpc_method() |
|
497 | 495 | def get_repo_refs(request, apiuser, repoid): |
|
498 | 496 | """ |
|
499 | 497 | Returns a dictionary of current references. It returns |
|
500 | 498 | bookmarks, branches, closed_branches, and tags for given repository |
|
501 | 499 | |
|
502 | 500 | It's possible to specify ret_type to show only `files` or `dirs`. |
|
503 | 501 | |
|
504 | 502 | This command can only be run using an |authtoken| with admin rights, |
|
505 | 503 | or users with at least read rights to |repos|. |
|
506 | 504 | |
|
507 | 505 | :param apiuser: This is filled automatically from the |authtoken|. |
|
508 | 506 | :type apiuser: AuthUser |
|
509 | 507 | :param repoid: The repository name or repository ID. |
|
510 | 508 | :type repoid: str or int |
|
511 | 509 | |
|
512 | 510 | Example output: |
|
513 | 511 | |
|
514 | 512 | .. code-block:: bash |
|
515 | 513 | |
|
516 | 514 | id : <id_given_in_input> |
|
517 | 515 | result: [ |
|
518 | 516 | TODO... |
|
519 | 517 | ] |
|
520 | 518 | error: null |
|
521 | 519 | """ |
|
522 | 520 | |
|
523 | 521 | repo = get_repo_or_error(repoid) |
|
524 | 522 | if not has_superadmin_permission(apiuser): |
|
525 | 523 | _perms = ('repository.admin', 'repository.write', 'repository.read',) |
|
526 |
|
|
|
524 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
|
527 | 525 | |
|
528 | 526 | try: |
|
529 | 527 | # check if repo is not empty by any chance, skip quicker if it is. |
|
530 | 528 | vcs_instance = repo.scm_instance() |
|
531 | 529 | refs = vcs_instance.refs() |
|
532 | 530 | return refs |
|
533 | 531 | except Exception: |
|
534 | 532 | log.exception("Exception occurred while trying to get repo refs") |
|
535 | 533 | raise JSONRPCError( |
|
536 | 534 | 'failed to get repo: `%s` references' % repo.repo_name |
|
537 | 535 | ) |
|
538 | 536 | |
|
539 | 537 | |
|
540 | 538 | @jsonrpc_method() |
|
541 | def create_repo(request, apiuser, repo_name, repo_type, | |
|
542 | owner=Optional(OAttr('apiuser')), description=Optional(''), | |
|
543 | private=Optional(False), clone_uri=Optional(None), | |
|
539 | def create_repo( | |
|
540 | request, apiuser, repo_name, repo_type, | |
|
541 | owner=Optional(OAttr('apiuser')), | |
|
542 | description=Optional(''), | |
|
543 | private=Optional(False), | |
|
544 | clone_uri=Optional(None), | |
|
544 | 545 |
|
|
545 | 546 |
|
|
546 | 547 |
|
|
547 | 548 |
|
|
548 | 549 |
|
|
549 | 550 | """ |
|
550 | 551 | Creates a repository. |
|
551 | 552 | |
|
552 |
* If the repository name contains "/", |
|
|
553 | groups will be created. | |
|
553 | * If the repository name contains "/", repository will be created inside | |
|
554 | a repository group or nested repository groups | |
|
554 | 555 | |
|
555 |
For example "foo/bar/ |
|
|
556 | (with "foo" as parent). It will also create the "baz" repository | |
|
557 | with "bar" as |repo| group. | |
|
556 | For example "foo/bar/repo1" will create |repo| called "repo1" inside | |
|
557 | group "foo/bar". You have to have permissions to access and write to | |
|
558 | the last repository group ("bar" in this example) | |
|
558 | 559 | |
|
559 | 560 | This command can only be run using an |authtoken| with at least |
|
560 | write permissions to the |repo|. | |
|
561 | permissions to create repositories, or write permissions to | |
|
562 | parent repository groups. | |
|
561 | 563 | |
|
562 | 564 | :param apiuser: This is filled automatically from the |authtoken|. |
|
563 | 565 | :type apiuser: AuthUser |
|
564 | 566 | :param repo_name: Set the repository name. |
|
565 | 567 | :type repo_name: str |
|
566 | 568 | :param repo_type: Set the repository type; 'hg','git', or 'svn'. |
|
567 | 569 | :type repo_type: str |
|
568 | 570 | :param owner: user_id or username |
|
569 | 571 | :type owner: Optional(str) |
|
570 | 572 | :param description: Set the repository description. |
|
571 | 573 | :type description: Optional(str) |
|
572 | :param private: | |
|
574 | :param private: set repository as private | |
|
573 | 575 | :type private: bool |
|
574 | :param clone_uri: | |
|
576 | :param clone_uri: set clone_uri | |
|
575 | 577 | :type clone_uri: str |
|
576 | 578 | :param landing_rev: <rev_type>:<rev> |
|
577 | 579 | :type landing_rev: str |
|
578 | 580 | :param enable_locking: |
|
579 | 581 | :type enable_locking: bool |
|
580 | 582 | :param enable_downloads: |
|
581 | 583 | :type enable_downloads: bool |
|
582 | 584 | :param enable_statistics: |
|
583 | 585 | :type enable_statistics: bool |
|
584 | 586 | :param copy_permissions: Copy permission from group in which the |
|
585 | 587 | repository is being created. |
|
586 | 588 | :type copy_permissions: bool |
|
587 | 589 | |
|
588 | 590 | |
|
589 | 591 | Example output: |
|
590 | 592 | |
|
591 | 593 | .. code-block:: bash |
|
592 | 594 | |
|
593 | 595 | id : <id_given_in_input> |
|
594 | 596 | result: { |
|
595 | 597 | "msg": "Created new repository `<reponame>`", |
|
596 | 598 | "success": true, |
|
597 | 599 | "task": "<celery task id or None if done sync>" |
|
598 | 600 | } |
|
599 | 601 | error: null |
|
600 | 602 | |
|
601 | 603 | |
|
602 | 604 | Example error output: |
|
603 | 605 | |
|
604 | 606 | .. code-block:: bash |
|
605 | 607 | |
|
606 | 608 | id : <id_given_in_input> |
|
607 | 609 | result : null |
|
608 | 610 | error : { |
|
609 | 611 | 'failed to create repository `<repo_name>`' |
|
610 | 612 | } |
|
611 | 613 | |
|
612 | 614 | """ |
|
613 | schema = repo_schema.RepoSchema() | |
|
614 | try: | |
|
615 | data = schema.deserialize({ | |
|
616 | 'repo_name': repo_name | |
|
617 | }) | |
|
618 | except colander.Invalid as e: | |
|
619 | raise JSONRPCError("Validation failed: %s" % (e.asdict(),)) | |
|
620 | repo_name = data['repo_name'] | |
|
621 | 615 | |
|
622 | (repo_name_cleaned, | |
|
623 | parent_group_name) = RepoGroupModel()._get_group_name_and_parent( | |
|
624 | repo_name) | |
|
625 | ||
|
626 | if not HasPermissionAnyApi( | |
|
627 | 'hg.admin', 'hg.create.repository')(user=apiuser): | |
|
628 | # check if we have admin permission for this repo group if given ! | |
|
629 | ||
|
630 | if parent_group_name: | |
|
631 | repogroupid = parent_group_name | |
|
632 | repo_group = get_repo_group_or_error(parent_group_name) | |
|
616 | owner = validate_set_owner_permissions(apiuser, owner) | |
|
633 | 617 | |
|
634 | _perms = ('group.admin',) | |
|
635 | if not HasRepoGroupPermissionAnyApi(*_perms)( | |
|
636 | user=apiuser, group_name=repo_group.group_name): | |
|
637 | raise JSONRPCError( | |
|
638 | 'repository group `%s` does not exist' % ( | |
|
639 | repogroupid,)) | |
|
640 | else: | |
|
641 | raise JSONRPCForbidden() | |
|
642 | ||
|
643 | if not has_superadmin_permission(apiuser): | |
|
644 | if not isinstance(owner, Optional): | |
|
645 | # forbid setting owner for non-admins | |
|
646 | raise JSONRPCError( | |
|
647 | 'Only RhodeCode admin can specify `owner` param') | |
|
648 | ||
|
649 | if isinstance(owner, Optional): | |
|
650 | owner = apiuser.user_id | |
|
651 | ||
|
652 | owner = get_user_or_error(owner) | |
|
653 | ||
|
654 | if RepoModel().get_by_repo_name(repo_name): | |
|
655 | raise JSONRPCError("repo `%s` already exist" % repo_name) | |
|
618 | description = Optional.extract(description) | |
|
619 | copy_permissions = Optional.extract(copy_permissions) | |
|
620 | clone_uri = Optional.extract(clone_uri) | |
|
621 | landing_commit_ref = Optional.extract(landing_rev) | |
|
656 | 622 | |
|
657 | 623 | defs = SettingsModel().get_default_repo_settings(strip_prefix=True) |
|
658 | 624 | if isinstance(private, Optional): |
|
659 | 625 | private = defs.get('repo_private') or Optional.extract(private) |
|
660 | 626 | if isinstance(repo_type, Optional): |
|
661 | 627 | repo_type = defs.get('repo_type') |
|
662 | 628 | if isinstance(enable_statistics, Optional): |
|
663 | 629 | enable_statistics = defs.get('repo_enable_statistics') |
|
664 | 630 | if isinstance(enable_locking, Optional): |
|
665 | 631 | enable_locking = defs.get('repo_enable_locking') |
|
666 | 632 | if isinstance(enable_downloads, Optional): |
|
667 | 633 | enable_downloads = defs.get('repo_enable_downloads') |
|
668 | 634 | |
|
669 | clone_uri = Optional.extract(clone_uri) | |
|
670 | description = Optional.extract(description) | |
|
671 | landing_rev = Optional.extract(landing_rev) | |
|
672 | copy_permissions = Optional.extract(copy_permissions) | |
|
635 | schema = repo_schema.RepoSchema().bind( | |
|
636 | repo_type_options=rhodecode.BACKENDS.keys(), | |
|
637 | # user caller | |
|
638 | user=apiuser) | |
|
673 | 639 | |
|
674 | 640 | try: |
|
675 | # create structure of groups and return the last group | |
|
676 |
|
|
|
641 | schema_data = schema.deserialize(dict( | |
|
642 | repo_name=repo_name, | |
|
643 | repo_type=repo_type, | |
|
644 | repo_owner=owner.username, | |
|
645 | repo_description=description, | |
|
646 | repo_landing_commit_ref=landing_commit_ref, | |
|
647 | repo_clone_uri=clone_uri, | |
|
648 | repo_private=private, | |
|
649 | repo_copy_permissions=copy_permissions, | |
|
650 | repo_enable_statistics=enable_statistics, | |
|
651 | repo_enable_downloads=enable_downloads, | |
|
652 | repo_enable_locking=enable_locking)) | |
|
653 | except validation_schema.Invalid as err: | |
|
654 | raise JSONRPCValidationError(colander_exc=err) | |
|
655 | ||
|
656 | try: | |
|
677 | 657 | data = { |
|
678 | 'repo_name': repo_name_cleaned, | |
|
679 | 'repo_name_full': repo_name, | |
|
680 | 'repo_type': repo_type, | |
|
681 | 'repo_description': description, | |
|
682 | 658 | 'owner': owner, |
|
683 | 'repo_private': private, | |
|
684 | 'clone_uri': clone_uri, | |
|
685 |
'repo_group': repo_group |
|
|
686 | 'repo_landing_rev': landing_rev, | |
|
687 | 'enable_statistics': enable_statistics, | |
|
688 | 'enable_locking': enable_locking, | |
|
689 | 'enable_downloads': enable_downloads, | |
|
690 | 'repo_copy_permissions': copy_permissions, | |
|
659 | 'repo_name': schema_data['repo_group']['repo_name_without_group'], | |
|
660 | 'repo_name_full': schema_data['repo_name'], | |
|
661 | 'repo_group': schema_data['repo_group']['repo_group_id'], | |
|
662 | 'repo_type': schema_data['repo_type'], | |
|
663 | 'repo_description': schema_data['repo_description'], | |
|
664 | 'repo_private': schema_data['repo_private'], | |
|
665 | 'clone_uri': schema_data['repo_clone_uri'], | |
|
666 | 'repo_landing_rev': schema_data['repo_landing_commit_ref'], | |
|
667 | 'enable_statistics': schema_data['repo_enable_statistics'], | |
|
668 | 'enable_locking': schema_data['repo_enable_locking'], | |
|
669 | 'enable_downloads': schema_data['repo_enable_downloads'], | |
|
670 | 'repo_copy_permissions': schema_data['repo_copy_permissions'], | |
|
691 | 671 | } |
|
692 | 672 | |
|
693 | if repo_type not in BACKENDS.keys(): | |
|
694 | raise Exception("Invalid backend type %s" % repo_type) | |
|
695 | 673 | task = RepoModel().create(form_data=data, cur_user=owner) |
|
696 | 674 | from celery.result import BaseAsyncResult |
|
697 | 675 | task_id = None |
|
698 | 676 | if isinstance(task, BaseAsyncResult): |
|
699 | 677 | task_id = task.task_id |
|
700 | 678 | # no commit, it's done in RepoModel, or async via celery |
|
701 | 679 | return { |
|
702 | 'msg': "Created new repository `%s`" % (repo_name,), | |
|
680 | 'msg': "Created new repository `%s`" % (schema_data['repo_name'],), | |
|
703 | 681 | 'success': True, # cannot return the repo data here since fork |
|
704 |
# can |
|
|
682 | # can be done async | |
|
705 | 683 | 'task': task_id |
|
706 | 684 | } |
|
707 | 685 | except Exception: |
|
708 | 686 | log.exception( |
|
709 | 687 | u"Exception while trying to create the repository %s", |
|
710 | repo_name) | |
|
688 | schema_data['repo_name']) | |
|
711 | 689 | raise JSONRPCError( |
|
712 | 'failed to create repository `%s`' % (repo_name,)) | |
|
690 | 'failed to create repository `%s`' % (schema_data['repo_name'],)) | |
|
713 | 691 | |
|
714 | 692 | |
|
715 | 693 | @jsonrpc_method() |
|
716 | 694 | def add_field_to_repo(request, apiuser, repoid, key, label=Optional(''), |
|
717 | 695 | description=Optional('')): |
|
718 | 696 | """ |
|
719 | 697 | Adds an extra field to a repository. |
|
720 | 698 | |
|
721 | 699 | This command can only be run using an |authtoken| with at least |
|
722 | 700 | write permissions to the |repo|. |
|
723 | 701 | |
|
724 | 702 | :param apiuser: This is filled automatically from the |authtoken|. |
|
725 | 703 | :type apiuser: AuthUser |
|
726 | 704 | :param repoid: Set the repository name or repository id. |
|
727 | 705 | :type repoid: str or int |
|
728 | 706 | :param key: Create a unique field key for this repository. |
|
729 | 707 | :type key: str |
|
730 | 708 | :param label: |
|
731 | 709 | :type label: Optional(str) |
|
732 | 710 | :param description: |
|
733 | 711 | :type description: Optional(str) |
|
734 | 712 | """ |
|
735 | 713 | repo = get_repo_or_error(repoid) |
|
736 | 714 | if not has_superadmin_permission(apiuser): |
|
737 | 715 | _perms = ('repository.admin',) |
|
738 |
|
|
|
716 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
|
739 | 717 | |
|
740 | 718 | label = Optional.extract(label) or key |
|
741 | 719 | description = Optional.extract(description) |
|
742 | 720 | |
|
743 | 721 | field = RepositoryField.get_by_key_name(key, repo) |
|
744 | 722 | if field: |
|
745 | 723 | raise JSONRPCError('Field with key ' |
|
746 | 724 | '`%s` exists for repo `%s`' % (key, repoid)) |
|
747 | 725 | |
|
748 | 726 | try: |
|
749 | 727 | RepoModel().add_repo_field(repo, key, field_label=label, |
|
750 | 728 | field_desc=description) |
|
751 | 729 | Session().commit() |
|
752 | 730 | return { |
|
753 | 731 | 'msg': "Added new repository field `%s`" % (key,), |
|
754 | 732 | 'success': True, |
|
755 | 733 | } |
|
756 | 734 | except Exception: |
|
757 | 735 | log.exception("Exception occurred while trying to add field to repo") |
|
758 | 736 | raise JSONRPCError( |
|
759 | 737 | 'failed to create new field for repository `%s`' % (repoid,)) |
|
760 | 738 | |
|
761 | 739 | |
|
762 | 740 | @jsonrpc_method() |
|
763 | 741 | def remove_field_from_repo(request, apiuser, repoid, key): |
|
764 | 742 | """ |
|
765 | 743 | Removes an extra field from a repository. |
|
766 | 744 | |
|
767 | 745 | This command can only be run using an |authtoken| with at least |
|
768 | 746 | write permissions to the |repo|. |
|
769 | 747 | |
|
770 | 748 | :param apiuser: This is filled automatically from the |authtoken|. |
|
771 | 749 | :type apiuser: AuthUser |
|
772 | 750 | :param repoid: Set the repository name or repository ID. |
|
773 | 751 | :type repoid: str or int |
|
774 | 752 | :param key: Set the unique field key for this repository. |
|
775 | 753 | :type key: str |
|
776 | 754 | """ |
|
777 | 755 | |
|
778 | 756 | repo = get_repo_or_error(repoid) |
|
779 | 757 | if not has_superadmin_permission(apiuser): |
|
780 | 758 | _perms = ('repository.admin',) |
|
781 |
|
|
|
759 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
|
782 | 760 | |
|
783 | 761 | field = RepositoryField.get_by_key_name(key, repo) |
|
784 | 762 | if not field: |
|
785 | 763 | raise JSONRPCError('Field with key `%s` does not ' |
|
786 | 764 | 'exists for repo `%s`' % (key, repoid)) |
|
787 | 765 | |
|
788 | 766 | try: |
|
789 | 767 | RepoModel().delete_repo_field(repo, field_key=key) |
|
790 | 768 | Session().commit() |
|
791 | 769 | return { |
|
792 | 770 | 'msg': "Deleted repository field `%s`" % (key,), |
|
793 | 771 | 'success': True, |
|
794 | 772 | } |
|
795 | 773 | except Exception: |
|
796 | 774 | log.exception( |
|
797 | 775 | "Exception occurred while trying to delete field from repo") |
|
798 | 776 | raise JSONRPCError( |
|
799 | 777 | 'failed to delete field for repository `%s`' % (repoid,)) |
|
800 | 778 | |
|
801 | 779 | |
|
802 | 780 | @jsonrpc_method() |
|
803 | def update_repo(request, apiuser, repoid, name=Optional(None), | |
|
804 | owner=Optional(OAttr('apiuser')), | |
|
805 | group=Optional(None), | |
|
806 | fork_of=Optional(None), | |
|
807 |
|
|
|
808 | clone_uri=Optional(None), landing_rev=Optional('rev:tip'), | |
|
781 | def update_repo( | |
|
782 | request, apiuser, repoid, repo_name=Optional(None), | |
|
783 | owner=Optional(OAttr('apiuser')), description=Optional(''), | |
|
784 | private=Optional(False), clone_uri=Optional(None), | |
|
785 | landing_rev=Optional('rev:tip'), fork_of=Optional(None), | |
|
809 | 786 |
|
|
810 | 787 |
|
|
811 |
|
|
|
812 | fields=Optional('')): | |
|
788 | enable_downloads=Optional(False), fields=Optional('')): | |
|
813 | 789 | """ |
|
814 | 790 | Updates a repository with the given information. |
|
815 | 791 | |
|
816 | 792 | This command can only be run using an |authtoken| with at least |
|
817 |
|
|
|
793 | admin permissions to the |repo|. | |
|
794 | ||
|
795 | * If the repository name contains "/", repository will be updated | |
|
796 | accordingly with a repository group or nested repository groups | |
|
797 | ||
|
798 | For example repoid=repo-test name="foo/bar/repo-test" will update |repo| | |
|
799 | called "repo-test" and place it inside group "foo/bar". | |
|
800 | You have to have permissions to access and write to the last repository | |
|
801 | group ("bar" in this example) | |
|
818 | 802 | |
|
819 | 803 | :param apiuser: This is filled automatically from the |authtoken|. |
|
820 | 804 | :type apiuser: AuthUser |
|
821 | 805 | :param repoid: repository name or repository ID. |
|
822 | 806 | :type repoid: str or int |
|
823 |
:param name: Update the |repo| name |
|
|
824 | :type name: str | |
|
807 | :param repo_name: Update the |repo| name, including the | |
|
808 | repository group it's in. | |
|
809 | :type repo_name: str | |
|
825 | 810 | :param owner: Set the |repo| owner. |
|
826 | 811 | :type owner: str |
|
827 |
:param |
|
|
828 | :type group: str | |
|
829 | :param fork_of: Set the master |repo| name. | |
|
812 | :param fork_of: Set the |repo| as fork of another |repo|. | |
|
830 | 813 | :type fork_of: str |
|
831 | 814 | :param description: Update the |repo| description. |
|
832 | 815 | :type description: str |
|
833 | 816 | :param private: Set the |repo| as private. (True | False) |
|
834 | 817 | :type private: bool |
|
835 | 818 | :param clone_uri: Update the |repo| clone URI. |
|
836 | 819 | :type clone_uri: str |
|
837 | :param landing_rev: Set the |repo| landing revision. Default is | |
|
838 | ``tip``. | |
|
820 | :param landing_rev: Set the |repo| landing revision. Default is ``rev:tip``. | |
|
839 | 821 | :type landing_rev: str |
|
840 | :param enable_statistics: Enable statistics on the |repo|, | |
|
841 | (True | False). | |
|
822 | :param enable_statistics: Enable statistics on the |repo|, (True | False). | |
|
842 | 823 | :type enable_statistics: bool |
|
843 | 824 | :param enable_locking: Enable |repo| locking. |
|
844 | 825 | :type enable_locking: bool |
|
845 | :param enable_downloads: Enable downloads from the |repo|, | |
|
846 | (True | False). | |
|
826 | :param enable_downloads: Enable downloads from the |repo|, (True | False). | |
|
847 | 827 | :type enable_downloads: bool |
|
848 | 828 | :param fields: Add extra fields to the |repo|. Use the following |
|
849 | 829 | example format: ``field_key=field_val,field_key2=fieldval2``. |
|
850 | 830 | Escape ', ' with \, |
|
851 | 831 | :type fields: str |
|
852 | 832 | """ |
|
833 | ||
|
853 | 834 | repo = get_repo_or_error(repoid) |
|
835 | ||
|
854 | 836 | include_secrets = False |
|
855 | if has_superadmin_permission(apiuser): | |
|
837 | if not has_superadmin_permission(apiuser): | |
|
838 | validate_repo_permissions(apiuser, repoid, repo, ('repository.admin',)) | |
|
839 | else: | |
|
856 | 840 | include_secrets = True |
|
857 | else: | |
|
858 | _perms = ('repository.admin',) | |
|
859 | has_repo_permissions(apiuser, repoid, repo, _perms) | |
|
841 | ||
|
842 | updates = dict( | |
|
843 | repo_name=repo_name | |
|
844 | if not isinstance(repo_name, Optional) else repo.repo_name, | |
|
845 | ||
|
846 | fork_id=fork_of | |
|
847 | if not isinstance(fork_of, Optional) else repo.fork.repo_name if repo.fork else None, | |
|
848 | ||
|
849 | user=owner | |
|
850 | if not isinstance(owner, Optional) else repo.user.username, | |
|
860 | 851 | |
|
861 | updates = { | |
|
862 | # update function requires this. | |
|
863 | 'repo_name': repo.just_name | |
|
864 | } | |
|
865 | repo_group = group | |
|
866 | if not isinstance(repo_group, Optional): | |
|
867 | repo_group = get_repo_group_or_error(repo_group) | |
|
868 | repo_group = repo_group.group_id | |
|
852 | repo_description=description | |
|
853 | if not isinstance(description, Optional) else repo.description, | |
|
854 | ||
|
855 | repo_private=private | |
|
856 | if not isinstance(private, Optional) else repo.private, | |
|
857 | ||
|
858 | clone_uri=clone_uri | |
|
859 | if not isinstance(clone_uri, Optional) else repo.clone_uri, | |
|
860 | ||
|
861 | repo_landing_rev=landing_rev | |
|
862 | if not isinstance(landing_rev, Optional) else repo._landing_revision, | |
|
863 | ||
|
864 | repo_enable_statistics=enable_statistics | |
|
865 | if not isinstance(enable_statistics, Optional) else repo.enable_statistics, | |
|
866 | ||
|
867 | repo_enable_locking=enable_locking | |
|
868 | if not isinstance(enable_locking, Optional) else repo.enable_locking, | |
|
869 | ||
|
870 | repo_enable_downloads=enable_downloads | |
|
871 | if not isinstance(enable_downloads, Optional) else repo.enable_downloads) | |
|
872 | ||
|
873 | ref_choices, _labels = ScmModel().get_repo_landing_revs(repo=repo) | |
|
869 | 874 | |
|
870 | repo_fork_of = fork_of | |
|
871 | if not isinstance(repo_fork_of, Optional): | |
|
872 | repo_fork_of = get_repo_or_error(repo_fork_of) | |
|
873 | repo_fork_of = repo_fork_of.repo_id | |
|
874 | ||
|
875 | schema = repo_schema.RepoSchema().bind( | |
|
876 | repo_type_options=rhodecode.BACKENDS.keys(), | |
|
877 | repo_ref_options=ref_choices, | |
|
878 | # user caller | |
|
879 | user=apiuser, | |
|
880 | old_values=repo.get_api_data()) | |
|
875 | 881 | try: |
|
876 | store_update(updates, name, 'repo_name') | |
|
877 | store_update(updates, repo_group, 'repo_group') | |
|
878 | store_update(updates, repo_fork_of, 'fork_id') | |
|
879 | store_update(updates, owner, 'user') | |
|
880 | store_update(updates, description, 'repo_description') | |
|
881 | store_update(updates, private, 'repo_private') | |
|
882 | store_update(updates, clone_uri, 'clone_uri') | |
|
883 | store_update(updates, landing_rev, 'repo_landing_rev') | |
|
884 | store_update(updates, enable_statistics, 'repo_enable_statistics') | |
|
885 | store_update(updates, enable_locking, 'repo_enable_locking') | |
|
886 | store_update(updates, enable_downloads, 'repo_enable_downloads') | |
|
882 | schema_data = schema.deserialize(dict( | |
|
883 | # we save old value, users cannot change type | |
|
884 | repo_type=repo.repo_type, | |
|
885 | ||
|
886 | repo_name=updates['repo_name'], | |
|
887 | repo_owner=updates['user'], | |
|
888 | repo_description=updates['repo_description'], | |
|
889 | repo_clone_uri=updates['clone_uri'], | |
|
890 | repo_fork_of=updates['fork_id'], | |
|
891 | repo_private=updates['repo_private'], | |
|
892 | repo_landing_commit_ref=updates['repo_landing_rev'], | |
|
893 | repo_enable_statistics=updates['repo_enable_statistics'], | |
|
894 | repo_enable_downloads=updates['repo_enable_downloads'], | |
|
895 | repo_enable_locking=updates['repo_enable_locking'])) | |
|
896 | except validation_schema.Invalid as err: | |
|
897 | raise JSONRPCValidationError(colander_exc=err) | |
|
898 | ||
|
899 | # save validated data back into the updates dict | |
|
900 | validated_updates = dict( | |
|
901 | repo_name=schema_data['repo_group']['repo_name_without_group'], | |
|
902 | repo_group=schema_data['repo_group']['repo_group_id'], | |
|
903 | ||
|
904 | user=schema_data['repo_owner'], | |
|
905 | repo_description=schema_data['repo_description'], | |
|
906 | repo_private=schema_data['repo_private'], | |
|
907 | clone_uri=schema_data['repo_clone_uri'], | |
|
908 | repo_landing_rev=schema_data['repo_landing_commit_ref'], | |
|
909 | repo_enable_statistics=schema_data['repo_enable_statistics'], | |
|
910 | repo_enable_locking=schema_data['repo_enable_locking'], | |
|
911 | repo_enable_downloads=schema_data['repo_enable_downloads'], | |
|
912 | ) | |
|
913 | ||
|
914 | if schema_data['repo_fork_of']: | |
|
915 | fork_repo = get_repo_or_error(schema_data['repo_fork_of']) | |
|
916 | validated_updates['fork_id'] = fork_repo.repo_id | |
|
887 | 917 | |
|
888 | 918 |
|
|
889 | 919 |
|
|
890 | 920 |
|
|
891 |
|
|
|
921 | validated_updates.update(fields) | |
|
892 | 922 | |
|
893 | RepoModel().update(repo, **updates) | |
|
923 | try: | |
|
924 | RepoModel().update(repo, **validated_updates) | |
|
894 | 925 | Session().commit() |
|
895 | 926 | return { |
|
896 | 'msg': 'updated repo ID:%s %s' % ( | |
|
897 | repo.repo_id, repo.repo_name), | |
|
898 | 'repository': repo.get_api_data( | |
|
899 | include_secrets=include_secrets) | |
|
927 | 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo.repo_name), | |
|
928 | 'repository': repo.get_api_data(include_secrets=include_secrets) | |
|
900 | 929 | } |
|
901 | 930 | except Exception: |
|
902 | 931 | log.exception( |
|
903 | 932 | u"Exception while trying to update the repository %s", |
|
904 | 933 | repoid) |
|
905 | 934 | raise JSONRPCError('failed to update repo `%s`' % repoid) |
|
906 | 935 | |
|
907 | 936 | |
|
908 | 937 | @jsonrpc_method() |
|
909 | 938 | def fork_repo(request, apiuser, repoid, fork_name, |
|
910 | 939 | owner=Optional(OAttr('apiuser')), |
|
911 |
description=Optional(''), |
|
|
912 |
private=Optional(False), |
|
|
940 | description=Optional(''), | |
|
941 | private=Optional(False), | |
|
942 | clone_uri=Optional(None), | |
|
943 | landing_rev=Optional('rev:tip'), | |
|
944 | copy_permissions=Optional(False)): | |
|
913 | 945 | """ |
|
914 | 946 | Creates a fork of the specified |repo|. |
|
915 | 947 | |
|
916 | * If using |RCE| with Celery this will immediately return a success | |
|
917 | message, even though the fork will be created asynchronously. | |
|
948 | * If the fork_name contains "/", fork will be created inside | |
|
949 | a repository group or nested repository groups | |
|
918 | 950 | |
|
919 | This command can only be run using an |authtoken| with fork | |
|
920 | permissions on the |repo|. | |
|
951 | For example "foo/bar/fork-repo" will create fork called "fork-repo" | |
|
952 | inside group "foo/bar". You have to have permissions to access and | |
|
953 | write to the last repository group ("bar" in this example) | |
|
954 | ||
|
955 | This command can only be run using an |authtoken| with minimum | |
|
956 | read permissions of the forked repo, create fork permissions for an user. | |
|
921 | 957 | |
|
922 | 958 | :param apiuser: This is filled automatically from the |authtoken|. |
|
923 | 959 | :type apiuser: AuthUser |
|
924 | 960 | :param repoid: Set repository name or repository ID. |
|
925 | 961 | :type repoid: str or int |
|
926 | :param fork_name: Set the fork name. | |
|
962 | :param fork_name: Set the fork name, including it's repository group membership. | |
|
927 | 963 | :type fork_name: str |
|
928 | 964 | :param owner: Set the fork owner. |
|
929 | 965 | :type owner: str |
|
930 | :param description: Set the fork descripton. | |
|
966 | :param description: Set the fork description. | |
|
931 | 967 | :type description: str |
|
932 | 968 | :param copy_permissions: Copy permissions from parent |repo|. The |
|
933 | 969 | default is False. |
|
934 | 970 | :type copy_permissions: bool |
|
935 | 971 | :param private: Make the fork private. The default is False. |
|
936 | 972 | :type private: bool |
|
937 | 973 | :param landing_rev: Set the landing revision. The default is tip. |
|
938 | 974 | |
|
939 | 975 | Example output: |
|
940 | 976 | |
|
941 | 977 | .. code-block:: bash |
|
942 | 978 | |
|
943 | 979 | id : <id_for_response> |
|
944 | 980 | api_key : "<api_key>" |
|
945 | 981 | args: { |
|
946 | 982 | "repoid" : "<reponame or repo_id>", |
|
947 | 983 | "fork_name": "<forkname>", |
|
948 | 984 | "owner": "<username or user_id = Optional(=apiuser)>", |
|
949 | 985 | "description": "<description>", |
|
950 | 986 | "copy_permissions": "<bool>", |
|
951 | 987 | "private": "<bool>", |
|
952 | 988 | "landing_rev": "<landing_rev>" |
|
953 | 989 | } |
|
954 | 990 | |
|
955 | 991 | Example error output: |
|
956 | 992 | |
|
957 | 993 | .. code-block:: bash |
|
958 | 994 | |
|
959 | 995 | id : <id_given_in_input> |
|
960 | 996 | result: { |
|
961 | 997 | "msg": "Created fork of `<reponame>` as `<forkname>`", |
|
962 | 998 | "success": true, |
|
963 | 999 | "task": "<celery task id or None if done sync>" |
|
964 | 1000 | } |
|
965 | 1001 | error: null |
|
966 | 1002 | |
|
967 | 1003 | """ |
|
968 | if not has_superadmin_permission(apiuser): | |
|
969 | if not HasPermissionAnyApi('hg.fork.repository')(user=apiuser): | |
|
970 | raise JSONRPCForbidden() | |
|
971 | 1004 | |
|
972 | 1005 | repo = get_repo_or_error(repoid) |
|
973 | 1006 | repo_name = repo.repo_name |
|
974 | 1007 | |
|
975 | (fork_name_cleaned, | |
|
976 | parent_group_name) = RepoGroupModel()._get_group_name_and_parent( | |
|
977 | fork_name) | |
|
978 | ||
|
979 | 1008 | if not has_superadmin_permission(apiuser): |
|
980 | 1009 | # check if we have at least read permission for |
|
981 | 1010 | # this repo that we fork ! |
|
982 | 1011 | _perms = ( |
|
983 | 1012 | 'repository.admin', 'repository.write', 'repository.read') |
|
984 |
|
|
|
1013 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
|
985 | 1014 | |
|
986 | if not isinstance(owner, Optional): | |
|
987 | # forbid setting owner for non super admins | |
|
988 | raise JSONRPCError( | |
|
989 | 'Only RhodeCode admin can specify `owner` param' | |
|
990 | ) | |
|
991 | # check if we have a create.repo permission if not maybe the parent | |
|
992 | # group permission | |
|
993 | if not HasPermissionAnyApi('hg.create.repository')(user=apiuser): | |
|
994 | if parent_group_name: | |
|
995 | repogroupid = parent_group_name | |
|
996 | repo_group = get_repo_group_or_error(parent_group_name) | |
|
997 | ||
|
998 | _perms = ('group.admin',) | |
|
999 | if not HasRepoGroupPermissionAnyApi(*_perms)( | |
|
1000 | user=apiuser, group_name=repo_group.group_name): | |
|
1001 | raise JSONRPCError( | |
|
1002 | 'repository group `%s` does not exist' % ( | |
|
1003 | repogroupid,)) | |
|
1004 | else: | |
|
1015 | # check if the regular user has at least fork permissions as well | |
|
1016 | if not HasPermissionAnyApi('hg.fork.repository')(user=apiuser): | |
|
1005 | 1017 |
|
|
1006 | 1018 | |
|
1007 | _repo = RepoModel().get_by_repo_name(fork_name) | |
|
1008 | if _repo: | |
|
1009 | type_ = 'fork' if _repo.fork else 'repo' | |
|
1010 | raise JSONRPCError("%s `%s` already exist" % (type_, fork_name)) | |
|
1019 | # check if user can set owner parameter | |
|
1020 | owner = validate_set_owner_permissions(apiuser, owner) | |
|
1011 | 1021 | |
|
1012 | if isinstance(owner, Optional): | |
|
1013 | owner = apiuser.user_id | |
|
1022 | description = Optional.extract(description) | |
|
1023 | copy_permissions = Optional.extract(copy_permissions) | |
|
1024 | clone_uri = Optional.extract(clone_uri) | |
|
1025 | landing_commit_ref = Optional.extract(landing_rev) | |
|
1026 | private = Optional.extract(private) | |
|
1014 | 1027 | |
|
1015 | owner = get_user_or_error(owner) | |
|
1028 | schema = repo_schema.RepoSchema().bind( | |
|
1029 | repo_type_options=rhodecode.BACKENDS.keys(), | |
|
1030 | # user caller | |
|
1031 | user=apiuser) | |
|
1016 | 1032 | |
|
1017 | 1033 | try: |
|
1018 | # create structure of groups and return the last group | |
|
1019 |
|
|
|
1020 | form_data = { | |
|
1021 | 'repo_name': fork_name_cleaned, | |
|
1022 | 'repo_name_full': fork_name, | |
|
1023 | 'repo_group': repo_group.group_id if repo_group else None, | |
|
1024 | 'repo_type': repo.repo_type, | |
|
1025 | 'description': Optional.extract(description), | |
|
1026 | 'private': Optional.extract(private), | |
|
1027 | 'copy_permissions': Optional.extract(copy_permissions), | |
|
1028 | 'landing_rev': Optional.extract(landing_rev), | |
|
1034 | schema_data = schema.deserialize(dict( | |
|
1035 | repo_name=fork_name, | |
|
1036 | repo_type=repo.repo_type, | |
|
1037 | repo_owner=owner.username, | |
|
1038 | repo_description=description, | |
|
1039 | repo_landing_commit_ref=landing_commit_ref, | |
|
1040 | repo_clone_uri=clone_uri, | |
|
1041 | repo_private=private, | |
|
1042 | repo_copy_permissions=copy_permissions)) | |
|
1043 | except validation_schema.Invalid as err: | |
|
1044 | raise JSONRPCValidationError(colander_exc=err) | |
|
1045 | ||
|
1046 | try: | |
|
1047 | data = { | |
|
1029 | 1048 | 'fork_parent_id': repo.repo_id, |
|
1049 | ||
|
1050 | 'repo_name': schema_data['repo_group']['repo_name_without_group'], | |
|
1051 | 'repo_name_full': schema_data['repo_name'], | |
|
1052 | 'repo_group': schema_data['repo_group']['repo_group_id'], | |
|
1053 | 'repo_type': schema_data['repo_type'], | |
|
1054 | 'description': schema_data['repo_description'], | |
|
1055 | 'private': schema_data['repo_private'], | |
|
1056 | 'copy_permissions': schema_data['repo_copy_permissions'], | |
|
1057 | 'landing_rev': schema_data['repo_landing_commit_ref'], | |
|
1030 | 1058 | } |
|
1031 | 1059 | |
|
1032 |
task = RepoModel().create_fork( |
|
|
1060 | task = RepoModel().create_fork(data, cur_user=owner) | |
|
1033 | 1061 | # no commit, it's done in RepoModel, or async via celery |
|
1034 | 1062 | from celery.result import BaseAsyncResult |
|
1035 | 1063 | task_id = None |
|
1036 | 1064 | if isinstance(task, BaseAsyncResult): |
|
1037 | 1065 | task_id = task.task_id |
|
1038 | 1066 | return { |
|
1039 | 1067 | 'msg': 'Created fork of `%s` as `%s`' % ( |
|
1040 |
repo.repo_name, |
|
|
1068 | repo.repo_name, schema_data['repo_name']), | |
|
1041 | 1069 | 'success': True, # cannot return the repo data here since fork |
|
1042 | 1070 | # can be done async |
|
1043 | 1071 | 'task': task_id |
|
1044 | 1072 | } |
|
1045 | 1073 | except Exception: |
|
1046 | log.exception("Exception occurred while trying to fork a repo") | |
|
1074 | log.exception( | |
|
1075 | u"Exception while trying to create fork %s", | |
|
1076 | schema_data['repo_name']) | |
|
1047 | 1077 | raise JSONRPCError( |
|
1048 | 1078 | 'failed to fork repository `%s` as `%s`' % ( |
|
1049 |
repo_name, |
|
|
1079 | repo_name, schema_data['repo_name'])) | |
|
1050 | 1080 | |
|
1051 | 1081 | |
|
1052 | 1082 | @jsonrpc_method() |
|
1053 | 1083 | def delete_repo(request, apiuser, repoid, forks=Optional('')): |
|
1054 | 1084 | """ |
|
1055 | 1085 | Deletes a repository. |
|
1056 | 1086 | |
|
1057 | 1087 | * When the `forks` parameter is set it's possible to detach or delete |
|
1058 | 1088 | forks of deleted repository. |
|
1059 | 1089 | |
|
1060 | 1090 | This command can only be run using an |authtoken| with admin |
|
1061 | 1091 | permissions on the |repo|. |
|
1062 | 1092 | |
|
1063 | 1093 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1064 | 1094 | :type apiuser: AuthUser |
|
1065 | 1095 | :param repoid: Set the repository name or repository ID. |
|
1066 | 1096 | :type repoid: str or int |
|
1067 | 1097 | :param forks: Set to `detach` or `delete` forks from the |repo|. |
|
1068 | 1098 | :type forks: Optional(str) |
|
1069 | 1099 | |
|
1070 | 1100 | Example error output: |
|
1071 | 1101 | |
|
1072 | 1102 | .. code-block:: bash |
|
1073 | 1103 | |
|
1074 | 1104 | id : <id_given_in_input> |
|
1075 | 1105 | result: { |
|
1076 | 1106 | "msg": "Deleted repository `<reponame>`", |
|
1077 | 1107 | "success": true |
|
1078 | 1108 | } |
|
1079 | 1109 | error: null |
|
1080 | 1110 | """ |
|
1081 | 1111 | |
|
1082 | 1112 | repo = get_repo_or_error(repoid) |
|
1083 | 1113 | if not has_superadmin_permission(apiuser): |
|
1084 | 1114 | _perms = ('repository.admin',) |
|
1085 |
|
|
|
1115 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
|
1086 | 1116 | |
|
1087 | 1117 | try: |
|
1088 | 1118 | handle_forks = Optional.extract(forks) |
|
1089 | 1119 | _forks_msg = '' |
|
1090 | 1120 | _forks = [f for f in repo.forks] |
|
1091 | 1121 | if handle_forks == 'detach': |
|
1092 | 1122 | _forks_msg = ' ' + 'Detached %s forks' % len(_forks) |
|
1093 | 1123 | elif handle_forks == 'delete': |
|
1094 | 1124 | _forks_msg = ' ' + 'Deleted %s forks' % len(_forks) |
|
1095 | 1125 | elif _forks: |
|
1096 | 1126 | raise JSONRPCError( |
|
1097 | 1127 | 'Cannot delete `%s` it still contains attached forks' % |
|
1098 | 1128 | (repo.repo_name,) |
|
1099 | 1129 | ) |
|
1100 | 1130 | |
|
1101 | 1131 | RepoModel().delete(repo, forks=forks) |
|
1102 | 1132 | Session().commit() |
|
1103 | 1133 | return { |
|
1104 | 1134 | 'msg': 'Deleted repository `%s`%s' % ( |
|
1105 | 1135 | repo.repo_name, _forks_msg), |
|
1106 | 1136 | 'success': True |
|
1107 | 1137 | } |
|
1108 | 1138 | except Exception: |
|
1109 | 1139 | log.exception("Exception occurred while trying to delete repo") |
|
1110 | 1140 | raise JSONRPCError( |
|
1111 | 1141 | 'failed to delete repository `%s`' % (repo.repo_name,) |
|
1112 | 1142 | ) |
|
1113 | 1143 | |
|
1114 | 1144 | |
|
1115 | 1145 | #TODO: marcink, change name ? |
|
1116 | 1146 | @jsonrpc_method() |
|
1117 | 1147 | def invalidate_cache(request, apiuser, repoid, delete_keys=Optional(False)): |
|
1118 | 1148 | """ |
|
1119 | 1149 | Invalidates the cache for the specified repository. |
|
1120 | 1150 | |
|
1121 | 1151 | This command can only be run using an |authtoken| with admin rights to |
|
1122 | 1152 | the specified repository. |
|
1123 | 1153 | |
|
1124 | 1154 | This command takes the following options: |
|
1125 | 1155 | |
|
1126 | 1156 | :param apiuser: This is filled automatically from |authtoken|. |
|
1127 | 1157 | :type apiuser: AuthUser |
|
1128 | 1158 | :param repoid: Sets the repository name or repository ID. |
|
1129 | 1159 | :type repoid: str or int |
|
1130 | 1160 | :param delete_keys: This deletes the invalidated keys instead of |
|
1131 | 1161 | just flagging them. |
|
1132 | 1162 | :type delete_keys: Optional(``True`` | ``False``) |
|
1133 | 1163 | |
|
1134 | 1164 | Example output: |
|
1135 | 1165 | |
|
1136 | 1166 | .. code-block:: bash |
|
1137 | 1167 | |
|
1138 | 1168 | id : <id_given_in_input> |
|
1139 | 1169 | result : { |
|
1140 | 1170 | 'msg': Cache for repository `<repository name>` was invalidated, |
|
1141 | 1171 | 'repository': <repository name> |
|
1142 | 1172 | } |
|
1143 | 1173 | error : null |
|
1144 | 1174 | |
|
1145 | 1175 | Example error output: |
|
1146 | 1176 | |
|
1147 | 1177 | .. code-block:: bash |
|
1148 | 1178 | |
|
1149 | 1179 | id : <id_given_in_input> |
|
1150 | 1180 | result : null |
|
1151 | 1181 | error : { |
|
1152 | 1182 | 'Error occurred during cache invalidation action' |
|
1153 | 1183 | } |
|
1154 | 1184 | |
|
1155 | 1185 | """ |
|
1156 | 1186 | |
|
1157 | 1187 | repo = get_repo_or_error(repoid) |
|
1158 | 1188 | if not has_superadmin_permission(apiuser): |
|
1159 | 1189 | _perms = ('repository.admin', 'repository.write',) |
|
1160 |
|
|
|
1190 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
|
1161 | 1191 | |
|
1162 | 1192 | delete = Optional.extract(delete_keys) |
|
1163 | 1193 | try: |
|
1164 | 1194 | ScmModel().mark_for_invalidation(repo.repo_name, delete=delete) |
|
1165 | 1195 | return { |
|
1166 | 1196 | 'msg': 'Cache for repository `%s` was invalidated' % (repoid,), |
|
1167 | 1197 | 'repository': repo.repo_name |
|
1168 | 1198 | } |
|
1169 | 1199 | except Exception: |
|
1170 | 1200 | log.exception( |
|
1171 | 1201 | "Exception occurred while trying to invalidate repo cache") |
|
1172 | 1202 | raise JSONRPCError( |
|
1173 | 1203 | 'Error occurred during cache invalidation action' |
|
1174 | 1204 | ) |
|
1175 | 1205 | |
|
1176 | 1206 | |
|
1177 | 1207 | #TODO: marcink, change name ? |
|
1178 | 1208 | @jsonrpc_method() |
|
1179 | 1209 | def lock(request, apiuser, repoid, locked=Optional(None), |
|
1180 | 1210 | userid=Optional(OAttr('apiuser'))): |
|
1181 | 1211 | """ |
|
1182 | 1212 | Sets the lock state of the specified |repo| by the given user. |
|
1183 | 1213 | From more information, see :ref:`repo-locking`. |
|
1184 | 1214 | |
|
1185 | 1215 | * If the ``userid`` option is not set, the repository is locked to the |
|
1186 | 1216 | user who called the method. |
|
1187 | 1217 | * If the ``locked`` parameter is not set, the current lock state of the |
|
1188 | 1218 | repository is displayed. |
|
1189 | 1219 | |
|
1190 | 1220 | This command can only be run using an |authtoken| with admin rights to |
|
1191 | 1221 | the specified repository. |
|
1192 | 1222 | |
|
1193 | 1223 | This command takes the following options: |
|
1194 | 1224 | |
|
1195 | 1225 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1196 | 1226 | :type apiuser: AuthUser |
|
1197 | 1227 | :param repoid: Sets the repository name or repository ID. |
|
1198 | 1228 | :type repoid: str or int |
|
1199 | 1229 | :param locked: Sets the lock state. |
|
1200 | 1230 | :type locked: Optional(``True`` | ``False``) |
|
1201 | 1231 | :param userid: Set the repository lock to this user. |
|
1202 | 1232 | :type userid: Optional(str or int) |
|
1203 | 1233 | |
|
1204 | 1234 | Example error output: |
|
1205 | 1235 | |
|
1206 | 1236 | .. code-block:: bash |
|
1207 | 1237 | |
|
1208 | 1238 | id : <id_given_in_input> |
|
1209 | 1239 | result : { |
|
1210 | 1240 | 'repo': '<reponame>', |
|
1211 | 1241 | 'locked': <bool: lock state>, |
|
1212 | 1242 | 'locked_since': <int: lock timestamp>, |
|
1213 | 1243 | 'locked_by': <username of person who made the lock>, |
|
1214 | 1244 | 'lock_reason': <str: reason for locking>, |
|
1215 | 1245 | 'lock_state_changed': <bool: True if lock state has been changed in this request>, |
|
1216 | 1246 | 'msg': 'Repo `<reponame>` locked by `<username>` on <timestamp>.' |
|
1217 | 1247 | or |
|
1218 | 1248 | 'msg': 'Repo `<repository name>` not locked.' |
|
1219 | 1249 | or |
|
1220 | 1250 | 'msg': 'User `<user name>` set lock state for repo `<repository name>` to `<new lock state>`' |
|
1221 | 1251 | } |
|
1222 | 1252 | error : null |
|
1223 | 1253 | |
|
1224 | 1254 | Example error output: |
|
1225 | 1255 | |
|
1226 | 1256 | .. code-block:: bash |
|
1227 | 1257 | |
|
1228 | 1258 | id : <id_given_in_input> |
|
1229 | 1259 | result : null |
|
1230 | 1260 | error : { |
|
1231 | 1261 | 'Error occurred locking repository `<reponame>`' |
|
1232 | 1262 | } |
|
1233 | 1263 | """ |
|
1234 | 1264 | |
|
1235 | 1265 | repo = get_repo_or_error(repoid) |
|
1236 | 1266 | if not has_superadmin_permission(apiuser): |
|
1237 | 1267 | # check if we have at least write permission for this repo ! |
|
1238 | 1268 | _perms = ('repository.admin', 'repository.write',) |
|
1239 |
|
|
|
1269 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
|
1240 | 1270 | |
|
1241 | 1271 | # make sure normal user does not pass someone else userid, |
|
1242 | 1272 | # he is not allowed to do that |
|
1243 | 1273 | if not isinstance(userid, Optional) and userid != apiuser.user_id: |
|
1244 | 1274 | raise JSONRPCError('userid is not the same as your user') |
|
1245 | 1275 | |
|
1246 | 1276 | if isinstance(userid, Optional): |
|
1247 | 1277 | userid = apiuser.user_id |
|
1248 | 1278 | |
|
1249 | 1279 | user = get_user_or_error(userid) |
|
1250 | 1280 | |
|
1251 | 1281 | if isinstance(locked, Optional): |
|
1252 | 1282 | lockobj = repo.locked |
|
1253 | 1283 | |
|
1254 | 1284 | if lockobj[0] is None: |
|
1255 | 1285 | _d = { |
|
1256 | 1286 | 'repo': repo.repo_name, |
|
1257 | 1287 | 'locked': False, |
|
1258 | 1288 | 'locked_since': None, |
|
1259 | 1289 | 'locked_by': None, |
|
1260 | 1290 | 'lock_reason': None, |
|
1261 | 1291 | 'lock_state_changed': False, |
|
1262 | 1292 | 'msg': 'Repo `%s` not locked.' % repo.repo_name |
|
1263 | 1293 | } |
|
1264 | 1294 | return _d |
|
1265 | 1295 | else: |
|
1266 | 1296 | _user_id, _time, _reason = lockobj |
|
1267 | 1297 | lock_user = get_user_or_error(userid) |
|
1268 | 1298 | _d = { |
|
1269 | 1299 | 'repo': repo.repo_name, |
|
1270 | 1300 | 'locked': True, |
|
1271 | 1301 | 'locked_since': _time, |
|
1272 | 1302 | 'locked_by': lock_user.username, |
|
1273 | 1303 | 'lock_reason': _reason, |
|
1274 | 1304 | 'lock_state_changed': False, |
|
1275 | 1305 | 'msg': ('Repo `%s` locked by `%s` on `%s`.' |
|
1276 | 1306 | % (repo.repo_name, lock_user.username, |
|
1277 | 1307 | json.dumps(time_to_datetime(_time)))) |
|
1278 | 1308 | } |
|
1279 | 1309 | return _d |
|
1280 | 1310 | |
|
1281 | 1311 | # force locked state through a flag |
|
1282 | 1312 | else: |
|
1283 | 1313 | locked = str2bool(locked) |
|
1284 | 1314 | lock_reason = Repository.LOCK_API |
|
1285 | 1315 | try: |
|
1286 | 1316 | if locked: |
|
1287 | 1317 | lock_time = time.time() |
|
1288 | 1318 | Repository.lock(repo, user.user_id, lock_time, lock_reason) |
|
1289 | 1319 | else: |
|
1290 | 1320 | lock_time = None |
|
1291 | 1321 | Repository.unlock(repo) |
|
1292 | 1322 | _d = { |
|
1293 | 1323 | 'repo': repo.repo_name, |
|
1294 | 1324 | 'locked': locked, |
|
1295 | 1325 | 'locked_since': lock_time, |
|
1296 | 1326 | 'locked_by': user.username, |
|
1297 | 1327 | 'lock_reason': lock_reason, |
|
1298 | 1328 | 'lock_state_changed': True, |
|
1299 | 1329 | 'msg': ('User `%s` set lock state for repo `%s` to `%s`' |
|
1300 | 1330 | % (user.username, repo.repo_name, locked)) |
|
1301 | 1331 | } |
|
1302 | 1332 | return _d |
|
1303 | 1333 | except Exception: |
|
1304 | 1334 | log.exception( |
|
1305 | 1335 | "Exception occurred while trying to lock repository") |
|
1306 | 1336 | raise JSONRPCError( |
|
1307 | 1337 | 'Error occurred locking repository `%s`' % repo.repo_name |
|
1308 | 1338 | ) |
|
1309 | 1339 | |
|
1310 | 1340 | |
|
1311 | 1341 | @jsonrpc_method() |
|
1312 | 1342 | def comment_commit( |
|
1313 | 1343 | request, apiuser, repoid, commit_id, message, |
|
1314 | 1344 | userid=Optional(OAttr('apiuser')), status=Optional(None)): |
|
1315 | 1345 | """ |
|
1316 | 1346 | Set a commit comment, and optionally change the status of the commit. |
|
1317 | 1347 | |
|
1318 | 1348 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1319 | 1349 | :type apiuser: AuthUser |
|
1320 | 1350 | :param repoid: Set the repository name or repository ID. |
|
1321 | 1351 | :type repoid: str or int |
|
1322 | 1352 | :param commit_id: Specify the commit_id for which to set a comment. |
|
1323 | 1353 | :type commit_id: str |
|
1324 | 1354 | :param message: The comment text. |
|
1325 | 1355 | :type message: str |
|
1326 | 1356 | :param userid: Set the user name of the comment creator. |
|
1327 | 1357 | :type userid: Optional(str or int) |
|
1328 | 1358 | :param status: status, one of 'not_reviewed', 'approved', 'rejected', |
|
1329 | 1359 | 'under_review' |
|
1330 | 1360 | :type status: str |
|
1331 | 1361 | |
|
1332 | 1362 | Example error output: |
|
1333 | 1363 | |
|
1334 | 1364 | .. code-block:: json |
|
1335 | 1365 | |
|
1336 | 1366 | { |
|
1337 | 1367 | "id" : <id_given_in_input>, |
|
1338 | 1368 | "result" : { |
|
1339 | 1369 | "msg": "Commented on commit `<commit_id>` for repository `<repoid>`", |
|
1340 | 1370 | "status_change": null or <status>, |
|
1341 | 1371 | "success": true |
|
1342 | 1372 | }, |
|
1343 | 1373 | "error" : null |
|
1344 | 1374 | } |
|
1345 | 1375 | |
|
1346 | 1376 | """ |
|
1347 | 1377 | repo = get_repo_or_error(repoid) |
|
1348 | 1378 | if not has_superadmin_permission(apiuser): |
|
1349 | 1379 | _perms = ('repository.read', 'repository.write', 'repository.admin') |
|
1350 |
|
|
|
1380 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
|
1351 | 1381 | |
|
1352 | 1382 | if isinstance(userid, Optional): |
|
1353 | 1383 | userid = apiuser.user_id |
|
1354 | 1384 | |
|
1355 | 1385 | user = get_user_or_error(userid) |
|
1356 | 1386 | status = Optional.extract(status) |
|
1357 | 1387 | |
|
1358 | 1388 | allowed_statuses = [x[0] for x in ChangesetStatus.STATUSES] |
|
1359 | 1389 | if status and status not in allowed_statuses: |
|
1360 | 1390 | raise JSONRPCError('Bad status, must be on ' |
|
1361 | 1391 | 'of %s got %s' % (allowed_statuses, status,)) |
|
1362 | 1392 | |
|
1363 | 1393 | try: |
|
1364 | 1394 | rc_config = SettingsModel().get_all_settings() |
|
1365 | 1395 | renderer = rc_config.get('rhodecode_markup_renderer', 'rst') |
|
1366 | 1396 | status_change_label = ChangesetStatus.get_status_lbl(status) |
|
1367 | 1397 | comm = ChangesetCommentsModel().create( |
|
1368 | 1398 | message, repo, user, revision=commit_id, |
|
1369 | 1399 | status_change=status_change_label, |
|
1370 | 1400 | status_change_type=status, |
|
1371 | 1401 | renderer=renderer) |
|
1372 | 1402 | if status: |
|
1373 | 1403 | # also do a status change |
|
1374 | 1404 | try: |
|
1375 | 1405 | ChangesetStatusModel().set_status( |
|
1376 | 1406 | repo, status, user, comm, revision=commit_id, |
|
1377 | 1407 | dont_allow_on_closed_pull_request=True |
|
1378 | 1408 | ) |
|
1379 | 1409 | except StatusChangeOnClosedPullRequestError: |
|
1380 | 1410 | log.exception( |
|
1381 | 1411 | "Exception occurred while trying to change repo commit status") |
|
1382 | 1412 | msg = ('Changing status on a changeset associated with ' |
|
1383 | 1413 | 'a closed pull request is not allowed') |
|
1384 | 1414 | raise JSONRPCError(msg) |
|
1385 | 1415 | |
|
1386 | 1416 | Session().commit() |
|
1387 | 1417 | return { |
|
1388 | 1418 | 'msg': ( |
|
1389 | 1419 | 'Commented on commit `%s` for repository `%s`' % ( |
|
1390 | 1420 | comm.revision, repo.repo_name)), |
|
1391 | 1421 | 'status_change': status, |
|
1392 | 1422 | 'success': True, |
|
1393 | 1423 | } |
|
1394 | 1424 | except JSONRPCError: |
|
1395 | 1425 | # catch any inside errors, and re-raise them to prevent from |
|
1396 | 1426 | # below global catch to silence them |
|
1397 | 1427 | raise |
|
1398 | 1428 | except Exception: |
|
1399 | 1429 | log.exception("Exception occurred while trying to comment on commit") |
|
1400 | 1430 | raise JSONRPCError( |
|
1401 | 1431 | 'failed to set comment on repository `%s`' % (repo.repo_name,) |
|
1402 | 1432 | ) |
|
1403 | 1433 | |
|
1404 | 1434 | |
|
1405 | 1435 | @jsonrpc_method() |
|
1406 | 1436 | def grant_user_permission(request, apiuser, repoid, userid, perm): |
|
1407 | 1437 | """ |
|
1408 | 1438 | Grant permissions for the specified user on the given repository, |
|
1409 | 1439 | or update existing permissions if found. |
|
1410 | 1440 | |
|
1411 | 1441 | This command can only be run using an |authtoken| with admin |
|
1412 | 1442 | permissions on the |repo|. |
|
1413 | 1443 | |
|
1414 | 1444 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1415 | 1445 | :type apiuser: AuthUser |
|
1416 | 1446 | :param repoid: Set the repository name or repository ID. |
|
1417 | 1447 | :type repoid: str or int |
|
1418 | 1448 | :param userid: Set the user name. |
|
1419 | 1449 | :type userid: str |
|
1420 | 1450 | :param perm: Set the user permissions, using the following format |
|
1421 | 1451 | ``(repository.(none|read|write|admin))`` |
|
1422 | 1452 | :type perm: str |
|
1423 | 1453 | |
|
1424 | 1454 | Example output: |
|
1425 | 1455 | |
|
1426 | 1456 | .. code-block:: bash |
|
1427 | 1457 | |
|
1428 | 1458 | id : <id_given_in_input> |
|
1429 | 1459 | result: { |
|
1430 | 1460 | "msg" : "Granted perm: `<perm>` for user: `<username>` in repo: `<reponame>`", |
|
1431 | 1461 | "success": true |
|
1432 | 1462 | } |
|
1433 | 1463 | error: null |
|
1434 | 1464 | """ |
|
1435 | 1465 | |
|
1436 | 1466 | repo = get_repo_or_error(repoid) |
|
1437 | 1467 | user = get_user_or_error(userid) |
|
1438 | 1468 | perm = get_perm_or_error(perm) |
|
1439 | 1469 | if not has_superadmin_permission(apiuser): |
|
1440 | 1470 | _perms = ('repository.admin',) |
|
1441 |
|
|
|
1471 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
|
1442 | 1472 | |
|
1443 | 1473 | try: |
|
1444 | 1474 | |
|
1445 | 1475 | RepoModel().grant_user_permission(repo=repo, user=user, perm=perm) |
|
1446 | 1476 | |
|
1447 | 1477 | Session().commit() |
|
1448 | 1478 | return { |
|
1449 | 1479 | 'msg': 'Granted perm: `%s` for user: `%s` in repo: `%s`' % ( |
|
1450 | 1480 | perm.permission_name, user.username, repo.repo_name |
|
1451 | 1481 | ), |
|
1452 | 1482 | 'success': True |
|
1453 | 1483 | } |
|
1454 | 1484 | except Exception: |
|
1455 | 1485 | log.exception( |
|
1456 | 1486 | "Exception occurred while trying edit permissions for repo") |
|
1457 | 1487 | raise JSONRPCError( |
|
1458 | 1488 | 'failed to edit permission for user: `%s` in repo: `%s`' % ( |
|
1459 | 1489 | userid, repoid |
|
1460 | 1490 | ) |
|
1461 | 1491 | ) |
|
1462 | 1492 | |
|
1463 | 1493 | |
|
1464 | 1494 | @jsonrpc_method() |
|
1465 | 1495 | def revoke_user_permission(request, apiuser, repoid, userid): |
|
1466 | 1496 | """ |
|
1467 | 1497 | Revoke permission for a user on the specified repository. |
|
1468 | 1498 | |
|
1469 | 1499 | This command can only be run using an |authtoken| with admin |
|
1470 | 1500 | permissions on the |repo|. |
|
1471 | 1501 | |
|
1472 | 1502 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1473 | 1503 | :type apiuser: AuthUser |
|
1474 | 1504 | :param repoid: Set the repository name or repository ID. |
|
1475 | 1505 | :type repoid: str or int |
|
1476 | 1506 | :param userid: Set the user name of revoked user. |
|
1477 | 1507 | :type userid: str or int |
|
1478 | 1508 | |
|
1479 | 1509 | Example error output: |
|
1480 | 1510 | |
|
1481 | 1511 | .. code-block:: bash |
|
1482 | 1512 | |
|
1483 | 1513 | id : <id_given_in_input> |
|
1484 | 1514 | result: { |
|
1485 | 1515 | "msg" : "Revoked perm for user: `<username>` in repo: `<reponame>`", |
|
1486 | 1516 | "success": true |
|
1487 | 1517 | } |
|
1488 | 1518 | error: null |
|
1489 | 1519 | """ |
|
1490 | 1520 | |
|
1491 | 1521 | repo = get_repo_or_error(repoid) |
|
1492 | 1522 | user = get_user_or_error(userid) |
|
1493 | 1523 | if not has_superadmin_permission(apiuser): |
|
1494 | 1524 | _perms = ('repository.admin',) |
|
1495 |
|
|
|
1525 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
|
1496 | 1526 | |
|
1497 | 1527 | try: |
|
1498 | 1528 | RepoModel().revoke_user_permission(repo=repo, user=user) |
|
1499 | 1529 | Session().commit() |
|
1500 | 1530 | return { |
|
1501 | 1531 | 'msg': 'Revoked perm for user: `%s` in repo: `%s`' % ( |
|
1502 | 1532 | user.username, repo.repo_name |
|
1503 | 1533 | ), |
|
1504 | 1534 | 'success': True |
|
1505 | 1535 | } |
|
1506 | 1536 | except Exception: |
|
1507 | 1537 | log.exception( |
|
1508 | 1538 | "Exception occurred while trying revoke permissions to repo") |
|
1509 | 1539 | raise JSONRPCError( |
|
1510 | 1540 | 'failed to edit permission for user: `%s` in repo: `%s`' % ( |
|
1511 | 1541 | userid, repoid |
|
1512 | 1542 | ) |
|
1513 | 1543 | ) |
|
1514 | 1544 | |
|
1515 | 1545 | |
|
1516 | 1546 | @jsonrpc_method() |
|
1517 | 1547 | def grant_user_group_permission(request, apiuser, repoid, usergroupid, perm): |
|
1518 | 1548 | """ |
|
1519 | 1549 | Grant permission for a user group on the specified repository, |
|
1520 | 1550 | or update existing permissions. |
|
1521 | 1551 | |
|
1522 | 1552 | This command can only be run using an |authtoken| with admin |
|
1523 | 1553 | permissions on the |repo|. |
|
1524 | 1554 | |
|
1525 | 1555 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1526 | 1556 | :type apiuser: AuthUser |
|
1527 | 1557 | :param repoid: Set the repository name or repository ID. |
|
1528 | 1558 | :type repoid: str or int |
|
1529 | 1559 | :param usergroupid: Specify the ID of the user group. |
|
1530 | 1560 | :type usergroupid: str or int |
|
1531 | 1561 | :param perm: Set the user group permissions using the following |
|
1532 | 1562 | format: (repository.(none|read|write|admin)) |
|
1533 | 1563 | :type perm: str |
|
1534 | 1564 | |
|
1535 | 1565 | Example output: |
|
1536 | 1566 | |
|
1537 | 1567 | .. code-block:: bash |
|
1538 | 1568 | |
|
1539 | 1569 | id : <id_given_in_input> |
|
1540 | 1570 | result : { |
|
1541 | 1571 | "msg" : "Granted perm: `<perm>` for group: `<usersgroupname>` in repo: `<reponame>`", |
|
1542 | 1572 | "success": true |
|
1543 | 1573 | |
|
1544 | 1574 | } |
|
1545 | 1575 | error : null |
|
1546 | 1576 | |
|
1547 | 1577 | Example error output: |
|
1548 | 1578 | |
|
1549 | 1579 | .. code-block:: bash |
|
1550 | 1580 | |
|
1551 | 1581 | id : <id_given_in_input> |
|
1552 | 1582 | result : null |
|
1553 | 1583 | error : { |
|
1554 | 1584 | "failed to edit permission for user group: `<usergroup>` in repo `<repo>`' |
|
1555 | 1585 | } |
|
1556 | 1586 | |
|
1557 | 1587 | """ |
|
1558 | 1588 | |
|
1559 | 1589 | repo = get_repo_or_error(repoid) |
|
1560 | 1590 | perm = get_perm_or_error(perm) |
|
1561 | 1591 | if not has_superadmin_permission(apiuser): |
|
1562 | 1592 | _perms = ('repository.admin',) |
|
1563 |
|
|
|
1593 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
|
1564 | 1594 | |
|
1565 | 1595 | user_group = get_user_group_or_error(usergroupid) |
|
1566 | 1596 | if not has_superadmin_permission(apiuser): |
|
1567 | 1597 | # check if we have at least read permission for this user group ! |
|
1568 | 1598 | _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',) |
|
1569 | 1599 | if not HasUserGroupPermissionAnyApi(*_perms)( |
|
1570 | 1600 | user=apiuser, user_group_name=user_group.users_group_name): |
|
1571 | 1601 | raise JSONRPCError( |
|
1572 | 1602 | 'user group `%s` does not exist' % (usergroupid,)) |
|
1573 | 1603 | |
|
1574 | 1604 | try: |
|
1575 | 1605 | RepoModel().grant_user_group_permission( |
|
1576 | 1606 | repo=repo, group_name=user_group, perm=perm) |
|
1577 | 1607 | |
|
1578 | 1608 | Session().commit() |
|
1579 | 1609 | return { |
|
1580 | 1610 | 'msg': 'Granted perm: `%s` for user group: `%s` in ' |
|
1581 | 1611 | 'repo: `%s`' % ( |
|
1582 | 1612 | perm.permission_name, user_group.users_group_name, |
|
1583 | 1613 | repo.repo_name |
|
1584 | 1614 | ), |
|
1585 | 1615 | 'success': True |
|
1586 | 1616 | } |
|
1587 | 1617 | except Exception: |
|
1588 | 1618 | log.exception( |
|
1589 | 1619 | "Exception occurred while trying change permission on repo") |
|
1590 | 1620 | raise JSONRPCError( |
|
1591 | 1621 | 'failed to edit permission for user group: `%s` in ' |
|
1592 | 1622 | 'repo: `%s`' % ( |
|
1593 | 1623 | usergroupid, repo.repo_name |
|
1594 | 1624 | ) |
|
1595 | 1625 | ) |
|
1596 | 1626 | |
|
1597 | 1627 | |
|
1598 | 1628 | @jsonrpc_method() |
|
1599 | 1629 | def revoke_user_group_permission(request, apiuser, repoid, usergroupid): |
|
1600 | 1630 | """ |
|
1601 | 1631 | Revoke the permissions of a user group on a given repository. |
|
1602 | 1632 | |
|
1603 | 1633 | This command can only be run using an |authtoken| with admin |
|
1604 | 1634 | permissions on the |repo|. |
|
1605 | 1635 | |
|
1606 | 1636 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1607 | 1637 | :type apiuser: AuthUser |
|
1608 | 1638 | :param repoid: Set the repository name or repository ID. |
|
1609 | 1639 | :type repoid: str or int |
|
1610 | 1640 | :param usergroupid: Specify the user group ID. |
|
1611 | 1641 | :type usergroupid: str or int |
|
1612 | 1642 | |
|
1613 | 1643 | Example output: |
|
1614 | 1644 | |
|
1615 | 1645 | .. code-block:: bash |
|
1616 | 1646 | |
|
1617 | 1647 | id : <id_given_in_input> |
|
1618 | 1648 | result: { |
|
1619 | 1649 | "msg" : "Revoked perm for group: `<usersgroupname>` in repo: `<reponame>`", |
|
1620 | 1650 | "success": true |
|
1621 | 1651 | } |
|
1622 | 1652 | error: null |
|
1623 | 1653 | """ |
|
1624 | 1654 | |
|
1625 | 1655 | repo = get_repo_or_error(repoid) |
|
1626 | 1656 | if not has_superadmin_permission(apiuser): |
|
1627 | 1657 | _perms = ('repository.admin',) |
|
1628 |
|
|
|
1658 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
|
1629 | 1659 | |
|
1630 | 1660 | user_group = get_user_group_or_error(usergroupid) |
|
1631 | 1661 | if not has_superadmin_permission(apiuser): |
|
1632 | 1662 | # check if we have at least read permission for this user group ! |
|
1633 | 1663 | _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',) |
|
1634 | 1664 | if not HasUserGroupPermissionAnyApi(*_perms)( |
|
1635 | 1665 | user=apiuser, user_group_name=user_group.users_group_name): |
|
1636 | 1666 | raise JSONRPCError( |
|
1637 | 1667 | 'user group `%s` does not exist' % (usergroupid,)) |
|
1638 | 1668 | |
|
1639 | 1669 | try: |
|
1640 | 1670 | RepoModel().revoke_user_group_permission( |
|
1641 | 1671 | repo=repo, group_name=user_group) |
|
1642 | 1672 | |
|
1643 | 1673 | Session().commit() |
|
1644 | 1674 | return { |
|
1645 | 1675 | 'msg': 'Revoked perm for user group: `%s` in repo: `%s`' % ( |
|
1646 | 1676 | user_group.users_group_name, repo.repo_name |
|
1647 | 1677 | ), |
|
1648 | 1678 | 'success': True |
|
1649 | 1679 | } |
|
1650 | 1680 | except Exception: |
|
1651 | 1681 | log.exception("Exception occurred while trying revoke " |
|
1652 | 1682 | "user group permission on repo") |
|
1653 | 1683 | raise JSONRPCError( |
|
1654 | 1684 | 'failed to edit permission for user group: `%s` in ' |
|
1655 | 1685 | 'repo: `%s`' % ( |
|
1656 | 1686 | user_group.users_group_name, repo.repo_name |
|
1657 | 1687 | ) |
|
1658 | 1688 | ) |
|
1659 | 1689 | |
|
1660 | 1690 | |
|
1661 | 1691 | @jsonrpc_method() |
|
1662 | 1692 | def pull(request, apiuser, repoid): |
|
1663 | 1693 | """ |
|
1664 | 1694 | Triggers a pull on the given repository from a remote location. You |
|
1665 | 1695 | can use this to keep remote repositories up-to-date. |
|
1666 | 1696 | |
|
1667 | 1697 | This command can only be run using an |authtoken| with admin |
|
1668 | 1698 | rights to the specified repository. For more information, |
|
1669 | 1699 | see :ref:`config-token-ref`. |
|
1670 | 1700 | |
|
1671 | 1701 | This command takes the following options: |
|
1672 | 1702 | |
|
1673 | 1703 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1674 | 1704 | :type apiuser: AuthUser |
|
1675 | 1705 | :param repoid: The repository name or repository ID. |
|
1676 | 1706 | :type repoid: str or int |
|
1677 | 1707 | |
|
1678 | 1708 | Example output: |
|
1679 | 1709 | |
|
1680 | 1710 | .. code-block:: bash |
|
1681 | 1711 | |
|
1682 | 1712 | id : <id_given_in_input> |
|
1683 | 1713 | result : { |
|
1684 | 1714 | "msg": "Pulled from `<repository name>`" |
|
1685 | 1715 | "repository": "<repository name>" |
|
1686 | 1716 | } |
|
1687 | 1717 | error : null |
|
1688 | 1718 | |
|
1689 | 1719 | Example error output: |
|
1690 | 1720 | |
|
1691 | 1721 | .. code-block:: bash |
|
1692 | 1722 | |
|
1693 | 1723 | id : <id_given_in_input> |
|
1694 | 1724 | result : null |
|
1695 | 1725 | error : { |
|
1696 | 1726 | "Unable to pull changes from `<reponame>`" |
|
1697 | 1727 | } |
|
1698 | 1728 | |
|
1699 | 1729 | """ |
|
1700 | 1730 | |
|
1701 | 1731 | repo = get_repo_or_error(repoid) |
|
1702 | 1732 | if not has_superadmin_permission(apiuser): |
|
1703 | 1733 | _perms = ('repository.admin',) |
|
1704 |
|
|
|
1734 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
|
1705 | 1735 | |
|
1706 | 1736 | try: |
|
1707 | 1737 | ScmModel().pull_changes(repo.repo_name, apiuser.username) |
|
1708 | 1738 | return { |
|
1709 | 1739 | 'msg': 'Pulled from `%s`' % repo.repo_name, |
|
1710 | 1740 | 'repository': repo.repo_name |
|
1711 | 1741 | } |
|
1712 | 1742 | except Exception: |
|
1713 | 1743 | log.exception("Exception occurred while trying to " |
|
1714 | 1744 | "pull changes from remote location") |
|
1715 | 1745 | raise JSONRPCError( |
|
1716 | 1746 | 'Unable to pull changes from `%s`' % repo.repo_name |
|
1717 | 1747 | ) |
|
1718 | 1748 | |
|
1719 | 1749 | |
|
@jsonrpc_method()
def strip(request, apiuser, repoid, revision, branch):
    """
    Strips the given revision from the specified repository.

    * This will remove the revision and all of its descendants.

    This command can only be run using an |authtoken| with admin rights to
    the specified repository.

    This command takes the following options:

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int
    :param revision: The revision you wish to strip.
    :type revision: str
    :param branch: The branch from which to strip the revision.
    :type branch: str

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : {
            "msg": "'Stripped commit <commit_hash> from repo `<repository name>`'"
            "repository": "<repository name>"
        }
        error : null

    Example error output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : null
        error : {
            "Unable to strip commit <commit_hash> from repo `<repository name>`"
        }

    """

    repo = get_repo_or_error(repoid)
    # Super-admins skip the per-repository check; everyone else needs
    # repository.admin on this specific repo.
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    try:
        ScmModel().strip(repo, revision, branch)
        return {
            'msg': 'Stripped commit %s from repo `%s`' % (
                revision, repo.repo_name),
            'repository': repo.repo_name
        }
    except Exception:
        # Any backend failure is collapsed into a generic JSON-RPC error;
        # the full traceback goes to the server log only.
        log.exception("Exception while trying to strip")
        raise JSONRPCError(
            'Unable to strip commit %s from repo `%s`' % (
                revision, repo.repo_name)
        )
|
1782 | 1812 | |
|
1783 | 1813 | |
|
@jsonrpc_method()
def get_repo_settings(request, apiuser, repoid, key=Optional(None)):
    """
    Returns all settings for a repository. If key is given it only returns the
    setting identified by the key or null.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository id.
    :type repoid: str or int
    :param key: Key of the setting to return.
    :type: key: Optional(str)

    Example output:

    .. code-block:: bash

        {
          "error": null,
          "id": 237,
          "result": {
            "extensions_largefiles": true,
            "hooks_changegroup_push_logger": true,
            "hooks_changegroup_repo_size": false,
            "hooks_outgoing_pull_logger": true,
            "phases_publish": "True",
            "rhodecode_hg_use_rebase_for_merging": true,
            "rhodecode_pr_merge_enabled": true,
            "rhodecode_use_outdated_comments": true
          }
        }
    """

    # Restrict access to this api method to admins only.
    if not has_superadmin_permission(apiuser):
        raise JSONRPCForbidden()

    try:
        repo = get_repo_or_error(repoid)
        settings_model = VcsSettingsModel(repo=repo)
        # Repo-level settings override global ones where both define a key.
        settings = settings_model.get_global_settings()
        settings.update(settings_model.get_repo_settings())

        # If only a single setting is requested fetch it from all settings.
        key = Optional.extract(key)
        if key is not None:
            settings = settings.get(key, None)
    except Exception:
        msg = 'Failed to fetch settings for repository `{}`'.format(repoid)
        log.exception(msg)
        raise JSONRPCError(msg)

    return settings
|
1837 | 1867 | |
|
1838 | 1868 | |
|
@jsonrpc_method()
def set_repo_settings(request, apiuser, repoid, settings):
    """
    Update repository settings. Returns true on success.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository id.
    :type repoid: str or int
    :param settings: The new settings for the repository.
    :type: settings: dict

    Example output:

    .. code-block:: bash

        {
          "error": null,
          "id": 237,
          "result": true
        }
    """
    # Restrict access to this api method to admins only.
    if not has_superadmin_permission(apiuser):
        raise JSONRPCForbidden()

    # `isinstance` rather than `type(...) is dict`: a decoded JSON payload
    # may legitimately arrive as a dict subclass, and those are valid here.
    if not isinstance(settings, dict):
        raise JSONRPCError('Settings have to be a JSON Object.')

    try:
        settings_model = VcsSettingsModel(repo=repoid)

        # Merge global, repo and incoming settings; later updates win, so the
        # caller-provided `settings` take precedence.
        new_settings = settings_model.get_global_settings()
        new_settings.update(settings_model.get_repo_settings())
        new_settings.update(settings)

        # Update the settings.
        inherit_global_settings = new_settings.get(
            'inherit_global_settings', False)
        settings_model.create_or_update_repo_settings(
            new_settings, inherit_global_settings=inherit_global_settings)
        Session().commit()
    except Exception:
        msg = 'Failed to update settings for repository `{}`'.format(repoid)
        log.exception(msg)
        raise JSONRPCError(msg)

    # Indicate success.
    return True
@@ -1,1057 +1,1057 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2016 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Repository model for rhodecode |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import logging |
|
26 | 26 | import os |
|
27 | 27 | import re |
|
28 | 28 | import shutil |
|
29 | 29 | import time |
|
30 | 30 | import traceback |
|
31 | 31 | from datetime import datetime |
|
32 | 32 | |
|
33 | 33 | from sqlalchemy.sql import func |
|
34 | 34 | from sqlalchemy.sql.expression import true, or_ |
|
35 | 35 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
36 | 36 | |
|
37 | 37 | from rhodecode import events |
|
38 | 38 | from rhodecode.lib import helpers as h |
|
39 | 39 | from rhodecode.lib.auth import HasUserGroupPermissionAny |
|
40 | 40 | from rhodecode.lib.caching_query import FromCache |
|
41 | 41 | from rhodecode.lib.exceptions import AttachedForksError |
|
42 | 42 | from rhodecode.lib.hooks_base import log_delete_repository |
|
43 | 43 | from rhodecode.lib.markup_renderer import MarkupRenderer |
|
44 | 44 | from rhodecode.lib.utils import make_db_config |
|
45 | 45 | from rhodecode.lib.utils2 import ( |
|
46 | 46 | safe_str, safe_unicode, remove_prefix, obfuscate_url_pw, |
|
47 | 47 | get_current_rhodecode_user, safe_int, datetime_to_time, action_logger_generic) |
|
48 | 48 | from rhodecode.lib.vcs.backends import get_backend |
|
49 | 49 | from rhodecode.lib.vcs.exceptions import NodeDoesNotExistError |
|
50 | 50 | from rhodecode.model import BaseModel |
|
51 | 51 | from rhodecode.model.db import ( |
|
52 | 52 | Repository, UserRepoToPerm, UserGroupRepoToPerm, UserRepoGroupToPerm, |
|
53 | 53 | UserGroupRepoGroupToPerm, User, Permission, Statistics, UserGroup, |
|
54 | 54 | RepoGroup, RepositoryField) |
|
55 | 55 | from rhodecode.model.scm import UserGroupList |
|
56 | 56 | from rhodecode.model.settings import VcsSettingsModel |
|
57 | 57 | |
|
58 | 58 | |
|
59 | 59 | log = logging.getLogger(__name__) |
|
60 | 60 | |
|
61 | 61 | |
|
62 | 62 | class RepoModel(BaseModel): |
|
63 | 63 | |
|
64 | 64 | cls = Repository |
|
65 | 65 | |
|
def _get_user_group(self, users_group):
    """Resolve *users_group* (instance, id or group name) to a UserGroup."""
    return self._get_instance(
        UserGroup, users_group, callback=UserGroup.get_by_group_name)
69 | 69 | |
|
def _get_repo_group(self, repo_group):
    """Resolve *repo_group* (instance, id or group name) to a RepoGroup."""
    return self._get_instance(
        RepoGroup, repo_group, callback=RepoGroup.get_by_group_name)
73 | 73 | |
|
def _create_default_perms(self, repository, private):
    """Build (unsaved) the default-user permission row for *repository*.

    Starts from 'repository.read' and lets an explicit repository-level
    permission of the default user override it; private repos always get
    'repository.none'.
    """
    default_user = User.get_default_user()

    fallback = 'repository.read'
    for user_perm in default_user.user_perms:
        name = user_perm.permission.permission_name
        if name.startswith('repository.'):
            fallback = name
            break

    perm_key = 'repository.none' if private else fallback

    repo_to_perm = UserRepoToPerm()
    repo_to_perm.permission = Permission.get_by_key(perm_key)
    repo_to_perm.repository = repository
    repo_to_perm.user_id = default_user.user_id
    return repo_to_perm
91 | 91 | return repo_to_perm |
|
92 | 92 | |
|
@LazyProperty
def repos_path(self):
    """
    Gets the repositories root path from database
    """
    return VcsSettingsModel(sa=self.sa).get_repos_location()
100 | 100 | |
|
101 | 101 | def get(self, repo_id, cache=False): |
|
102 | 102 | repo = self.sa.query(Repository) \ |
|
103 | 103 | .filter(Repository.repo_id == repo_id) |
|
104 | 104 | |
|
105 | 105 | if cache: |
|
106 | 106 | repo = repo.options(FromCache("sql_cache_short", |
|
107 | 107 | "get_repo_%s" % repo_id)) |
|
108 | 108 | return repo.scalar() |
|
109 | 109 | |
|
110 | 110 | def get_repo(self, repository): |
|
111 | 111 | return self._get_repo(repository) |
|
112 | 112 | |
|
113 | 113 | def get_by_repo_name(self, repo_name, cache=False): |
|
114 | 114 | repo = self.sa.query(Repository) \ |
|
115 | 115 | .filter(Repository.repo_name == repo_name) |
|
116 | 116 | |
|
117 | 117 | if cache: |
|
118 | 118 | repo = repo.options(FromCache("sql_cache_short", |
|
119 | 119 | "get_repo_%s" % repo_name)) |
|
120 | 120 | return repo.scalar() |
|
121 | 121 | |
|
122 | 122 | def _extract_id_from_repo_name(self, repo_name): |
|
123 | 123 | if repo_name.startswith('/'): |
|
124 | 124 | repo_name = repo_name.lstrip('/') |
|
125 | 125 | by_id_match = re.match(r'^_(\d{1,})', repo_name) |
|
126 | 126 | if by_id_match: |
|
127 | 127 | return by_id_match.groups()[0] |
|
128 | 128 | |
|
129 | 129 | def get_repo_by_id(self, repo_name): |
|
130 | 130 | """ |
|
131 | 131 | Extracts repo_name by id from special urls. |
|
132 | 132 | Example url is _11/repo_name |
|
133 | 133 | |
|
134 | 134 | :param repo_name: |
|
135 | 135 | :return: repo object if matched else None |
|
136 | 136 | """ |
|
137 | 137 | try: |
|
138 | 138 | _repo_id = self._extract_id_from_repo_name(repo_name) |
|
139 | 139 | if _repo_id: |
|
140 | 140 | return self.get(_repo_id) |
|
141 | 141 | except Exception: |
|
142 | 142 | log.exception('Failed to extract repo_name from URL') |
|
143 | 143 | |
|
144 | 144 | return None |
|
145 | 145 | |
|
146 | 146 | def get_url(self, repo): |
|
147 | 147 | return h.url('summary_home', repo_name=safe_str(repo.repo_name), |
|
148 | 148 | qualified=True) |
|
149 | 149 | |
|
150 | 150 | def get_users(self, name_contains=None, limit=20, only_active=True): |
|
151 | 151 | |
|
152 | 152 | # TODO: mikhail: move this method to the UserModel. |
|
153 | 153 | query = self.sa.query(User) |
|
154 | 154 | if only_active: |
|
155 | 155 | query = query.filter(User.active == true()) |
|
156 | 156 | |
|
157 | 157 | if name_contains: |
|
158 | 158 | ilike_expression = u'%{}%'.format(safe_unicode(name_contains)) |
|
159 | 159 | query = query.filter( |
|
160 | 160 | or_( |
|
161 | 161 | User.name.ilike(ilike_expression), |
|
162 | 162 | User.lastname.ilike(ilike_expression), |
|
163 | 163 | User.username.ilike(ilike_expression) |
|
164 | 164 | ) |
|
165 | 165 | ) |
|
166 | 166 | query = query.limit(limit) |
|
167 | 167 | users = query.all() |
|
168 | 168 | |
|
169 | 169 | _users = [ |
|
170 | 170 | { |
|
171 | 171 | 'id': user.user_id, |
|
172 | 172 | 'first_name': user.name, |
|
173 | 173 | 'last_name': user.lastname, |
|
174 | 174 | 'username': user.username, |
|
175 | 175 | 'email': user.email, |
|
176 | 176 | 'icon_link': h.gravatar_url(user.email, 30), |
|
177 | 177 | 'value_display': h.person(user), |
|
178 | 178 | 'value': user.username, |
|
179 | 179 | 'value_type': 'user', |
|
180 | 180 | 'active': user.active, |
|
181 | 181 | } |
|
182 | 182 | for user in users |
|
183 | 183 | ] |
|
184 | 184 | return _users |
|
185 | 185 | |
|
186 | 186 | def get_user_groups(self, name_contains=None, limit=20, only_active=True): |
|
187 | 187 | # TODO: mikhail: move this method to the UserGroupModel. |
|
188 | 188 | query = self.sa.query(UserGroup) |
|
189 | 189 | if only_active: |
|
190 | 190 | query = query.filter(UserGroup.users_group_active == true()) |
|
191 | 191 | |
|
192 | 192 | if name_contains: |
|
193 | 193 | ilike_expression = u'%{}%'.format(safe_unicode(name_contains)) |
|
194 | 194 | query = query.filter( |
|
195 | 195 | UserGroup.users_group_name.ilike(ilike_expression))\ |
|
196 | 196 | .order_by(func.length(UserGroup.users_group_name))\ |
|
197 | 197 | .order_by(UserGroup.users_group_name) |
|
198 | 198 | |
|
199 | 199 | query = query.limit(limit) |
|
200 | 200 | user_groups = query.all() |
|
201 | 201 | perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin'] |
|
202 | 202 | user_groups = UserGroupList(user_groups, perm_set=perm_set) |
|
203 | 203 | |
|
204 | 204 | _groups = [ |
|
205 | 205 | { |
|
206 | 206 | 'id': group.users_group_id, |
|
207 | 207 | # TODO: marcink figure out a way to generate the url for the |
|
208 | 208 | # icon |
|
209 | 209 | 'icon_link': '', |
|
210 | 210 | 'value_display': 'Group: %s (%d members)' % ( |
|
211 | 211 | group.users_group_name, len(group.members),), |
|
212 | 212 | 'value': group.users_group_name, |
|
213 | 213 | 'value_type': 'user_group', |
|
214 | 214 | 'active': group.users_group_active, |
|
215 | 215 | } |
|
216 | 216 | for group in user_groups |
|
217 | 217 | ] |
|
218 | 218 | return _groups |
|
219 | 219 | |
|
220 | 220 | @classmethod |
|
221 | 221 | def update_repoinfo(cls, repositories=None): |
|
222 | 222 | if not repositories: |
|
223 | 223 | repositories = Repository.getAll() |
|
224 | 224 | for repo in repositories: |
|
225 | 225 | repo.update_commit_cache() |
|
226 | 226 | |
|
227 | 227 | def get_repos_as_dict(self, repo_list=None, admin=False, |
|
228 | 228 | super_user_actions=False): |
|
229 | 229 | |
|
230 | 230 | from rhodecode.lib.utils import PartialRenderer |
|
231 | 231 | _render = PartialRenderer('data_table/_dt_elements.html') |
|
232 | 232 | c = _render.c |
|
233 | 233 | |
|
234 | 234 | def quick_menu(repo_name): |
|
235 | 235 | return _render('quick_menu', repo_name) |
|
236 | 236 | |
|
237 | 237 | def repo_lnk(name, rtype, rstate, private, fork_of): |
|
238 | 238 | return _render('repo_name', name, rtype, rstate, private, fork_of, |
|
239 | 239 | short_name=not admin, admin=False) |
|
240 | 240 | |
|
241 | 241 | def last_change(last_change): |
|
242 | 242 | return _render("last_change", last_change) |
|
243 | 243 | |
|
244 | 244 | def rss_lnk(repo_name): |
|
245 | 245 | return _render("rss", repo_name) |
|
246 | 246 | |
|
247 | 247 | def atom_lnk(repo_name): |
|
248 | 248 | return _render("atom", repo_name) |
|
249 | 249 | |
|
250 | 250 | def last_rev(repo_name, cs_cache): |
|
251 | 251 | return _render('revision', repo_name, cs_cache.get('revision'), |
|
252 | 252 | cs_cache.get('raw_id'), cs_cache.get('author'), |
|
253 | 253 | cs_cache.get('message')) |
|
254 | 254 | |
|
255 | 255 | def desc(desc): |
|
256 | 256 | if c.visual.stylify_metatags: |
|
257 | 257 | desc = h.urlify_text(h.escaped_stylize(desc)) |
|
258 | 258 | else: |
|
259 | 259 | desc = h.urlify_text(h.html_escape(desc)) |
|
260 | 260 | |
|
261 | 261 | return _render('repo_desc', desc) |
|
262 | 262 | |
|
263 | 263 | def state(repo_state): |
|
264 | 264 | return _render("repo_state", repo_state) |
|
265 | 265 | |
|
266 | 266 | def repo_actions(repo_name): |
|
267 | 267 | return _render('repo_actions', repo_name, super_user_actions) |
|
268 | 268 | |
|
269 | 269 | def user_profile(username): |
|
270 | 270 | return _render('user_profile', username) |
|
271 | 271 | |
|
272 | 272 | repos_data = [] |
|
273 | 273 | for repo in repo_list: |
|
274 | 274 | cs_cache = repo.changeset_cache |
|
275 | 275 | row = { |
|
276 | 276 | "menu": quick_menu(repo.repo_name), |
|
277 | 277 | |
|
278 | 278 | "name": repo_lnk(repo.repo_name, repo.repo_type, |
|
279 | 279 | repo.repo_state, repo.private, repo.fork), |
|
280 | 280 | "name_raw": repo.repo_name.lower(), |
|
281 | 281 | |
|
282 | 282 | "last_change": last_change(repo.last_db_change), |
|
283 | 283 | "last_change_raw": datetime_to_time(repo.last_db_change), |
|
284 | 284 | |
|
285 | 285 | "last_changeset": last_rev(repo.repo_name, cs_cache), |
|
286 | 286 | "last_changeset_raw": cs_cache.get('revision'), |
|
287 | 287 | |
|
288 | 288 | "desc": desc(repo.description), |
|
289 | 289 | "owner": user_profile(repo.user.username), |
|
290 | 290 | |
|
291 | 291 | "state": state(repo.repo_state), |
|
292 | 292 | "rss": rss_lnk(repo.repo_name), |
|
293 | 293 | |
|
294 | 294 | "atom": atom_lnk(repo.repo_name), |
|
295 | 295 | } |
|
296 | 296 | if admin: |
|
297 | 297 | row.update({ |
|
298 | 298 | "action": repo_actions(repo.repo_name), |
|
299 | 299 | }) |
|
300 | 300 | repos_data.append(row) |
|
301 | 301 | |
|
302 | 302 | return repos_data |
|
303 | 303 | |
|
304 | 304 | def _get_defaults(self, repo_name): |
|
305 | 305 | """ |
|
306 | 306 | Gets information about repository, and returns a dict for |
|
307 | 307 | usage in forms |
|
308 | 308 | |
|
309 | 309 | :param repo_name: |
|
310 | 310 | """ |
|
311 | 311 | |
|
312 | 312 | repo_info = Repository.get_by_repo_name(repo_name) |
|
313 | 313 | |
|
314 | 314 | if repo_info is None: |
|
315 | 315 | return None |
|
316 | 316 | |
|
317 | 317 | defaults = repo_info.get_dict() |
|
318 | 318 | defaults['repo_name'] = repo_info.just_name |
|
319 | 319 | |
|
320 | 320 | groups = repo_info.groups_with_parents |
|
321 | 321 | parent_group = groups[-1] if groups else None |
|
322 | 322 | |
|
323 | 323 | # we use -1 as this is how in HTML, we mark an empty group |
|
324 | 324 | defaults['repo_group'] = getattr(parent_group, 'group_id', -1) |
|
325 | 325 | |
|
326 | 326 | keys_to_process = ( |
|
327 | 327 | {'k': 'repo_type', 'strip': False}, |
|
328 | 328 | {'k': 'repo_enable_downloads', 'strip': True}, |
|
329 | 329 | {'k': 'repo_description', 'strip': True}, |
|
330 | 330 | {'k': 'repo_enable_locking', 'strip': True}, |
|
331 | 331 | {'k': 'repo_landing_rev', 'strip': True}, |
|
332 | 332 | {'k': 'clone_uri', 'strip': False}, |
|
333 | 333 | {'k': 'repo_private', 'strip': True}, |
|
334 | 334 | {'k': 'repo_enable_statistics', 'strip': True} |
|
335 | 335 | ) |
|
336 | 336 | |
|
337 | 337 | for item in keys_to_process: |
|
338 | 338 | attr = item['k'] |
|
339 | 339 | if item['strip']: |
|
340 | 340 | attr = remove_prefix(item['k'], 'repo_') |
|
341 | 341 | |
|
342 | 342 | val = defaults[attr] |
|
343 | 343 | if item['k'] == 'repo_landing_rev': |
|
344 | 344 | val = ':'.join(defaults[attr]) |
|
345 | 345 | defaults[item['k']] = val |
|
346 | 346 | if item['k'] == 'clone_uri': |
|
347 | 347 | defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden |
|
348 | 348 | |
|
349 | 349 | # fill owner |
|
350 | 350 | if repo_info.user: |
|
351 | 351 | defaults.update({'user': repo_info.user.username}) |
|
352 | 352 | else: |
|
353 | 353 | replacement_user = User.get_first_super_admin().username |
|
354 | 354 | defaults.update({'user': replacement_user}) |
|
355 | 355 | |
|
356 | 356 | # fill repository users |
|
357 | 357 | for p in repo_info.repo_to_perm: |
|
358 | 358 | defaults.update({'u_perm_%s' % p.user.user_id: |
|
359 | 359 | p.permission.permission_name}) |
|
360 | 360 | |
|
361 | 361 | # fill repository groups |
|
362 | 362 | for p in repo_info.users_group_to_perm: |
|
363 | 363 | defaults.update({'g_perm_%s' % p.users_group.users_group_id: |
|
364 | 364 | p.permission.permission_name}) |
|
365 | 365 | |
|
366 | 366 | return defaults |
|
367 | 367 | |
|
368 | 368 | def update(self, repo, **kwargs): |
|
369 | 369 | try: |
|
370 | 370 | cur_repo = self._get_repo(repo) |
|
371 | 371 | source_repo_name = cur_repo.repo_name |
|
372 | 372 | if 'user' in kwargs: |
|
373 | 373 | cur_repo.user = User.get_by_username(kwargs['user']) |
|
374 | 374 | |
|
375 | 375 | if 'repo_group' in kwargs: |
|
376 | 376 | cur_repo.group = RepoGroup.get(kwargs['repo_group']) |
|
377 | 377 | log.debug('Updating repo %s with params:%s', cur_repo, kwargs) |
|
378 | 378 | |
|
379 | 379 | update_keys = [ |
|
380 | (1, 'repo_enable_downloads'), | |
|
381 | 380 | (1, 'repo_description'), |
|
382 | (1, 'repo_enable_locking'), | |
|
383 | 381 | (1, 'repo_landing_rev'), |
|
384 | 382 | (1, 'repo_private'), |
|
383 | (1, 'repo_enable_downloads'), | |
|
384 | (1, 'repo_enable_locking'), | |
|
385 | 385 | (1, 'repo_enable_statistics'), |
|
386 | 386 | (0, 'clone_uri'), |
|
387 | 387 | (0, 'fork_id') |
|
388 | 388 | ] |
|
389 | 389 | for strip, k in update_keys: |
|
390 | 390 | if k in kwargs: |
|
391 | 391 | val = kwargs[k] |
|
392 | 392 | if strip: |
|
393 | 393 | k = remove_prefix(k, 'repo_') |
|
394 | 394 | if k == 'clone_uri': |
|
395 | 395 | from rhodecode.model.validators import Missing |
|
396 | 396 | _change = kwargs.get('clone_uri_change') |
|
397 | 397 | if _change in [Missing, 'OLD']: |
|
398 | 398 | # we don't change the value, so use original one |
|
399 | 399 | val = cur_repo.clone_uri |
|
400 | 400 | |
|
401 | 401 | setattr(cur_repo, k, val) |
|
402 | 402 | |
|
403 | 403 | new_name = cur_repo.get_new_name(kwargs['repo_name']) |
|
404 | 404 | cur_repo.repo_name = new_name |
|
405 | 405 | |
|
406 | 406 | # if private flag is set, reset default permission to NONE |
|
407 | 407 | if kwargs.get('repo_private'): |
|
408 | 408 | EMPTY_PERM = 'repository.none' |
|
409 | 409 | RepoModel().grant_user_permission( |
|
410 | 410 | repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM |
|
411 | 411 | ) |
|
412 | 412 | |
|
413 | 413 | # handle extra fields |
|
414 | 414 | for field in filter(lambda k: k.startswith(RepositoryField.PREFIX), |
|
415 | 415 | kwargs): |
|
416 | 416 | k = RepositoryField.un_prefix_key(field) |
|
417 | 417 | ex_field = RepositoryField.get_by_key_name( |
|
418 | 418 | key=k, repo=cur_repo) |
|
419 | 419 | if ex_field: |
|
420 | 420 | ex_field.field_value = kwargs[field] |
|
421 | 421 | self.sa.add(ex_field) |
|
422 | 422 | self.sa.add(cur_repo) |
|
423 | 423 | |
|
424 | 424 | if source_repo_name != new_name: |
|
425 | 425 | # rename repository |
|
426 | 426 | self._rename_filesystem_repo( |
|
427 | 427 | old=source_repo_name, new=new_name) |
|
428 | 428 | |
|
429 | 429 | return cur_repo |
|
430 | 430 | except Exception: |
|
431 | 431 | log.error(traceback.format_exc()) |
|
432 | 432 | raise |
|
433 | 433 | |
|
434 | 434 | def _create_repo(self, repo_name, repo_type, description, owner, |
|
435 | 435 | private=False, clone_uri=None, repo_group=None, |
|
436 | 436 | landing_rev='rev:tip', fork_of=None, |
|
437 | 437 | copy_fork_permissions=False, enable_statistics=False, |
|
438 | 438 | enable_locking=False, enable_downloads=False, |
|
439 | 439 | copy_group_permissions=False, |
|
440 | 440 | state=Repository.STATE_PENDING): |
|
441 | 441 | """ |
|
442 | 442 | Create repository inside database with PENDING state, this should be |
|
443 | 443 | only executed by create() repo. With exception of importing existing |
|
444 | 444 | repos |
|
445 | 445 | """ |
|
446 | 446 | from rhodecode.model.scm import ScmModel |
|
447 | 447 | |
|
448 | 448 | owner = self._get_user(owner) |
|
449 | 449 | fork_of = self._get_repo(fork_of) |
|
450 | 450 | repo_group = self._get_repo_group(safe_int(repo_group)) |
|
451 | 451 | |
|
452 | 452 | try: |
|
453 | 453 | repo_name = safe_unicode(repo_name) |
|
454 | 454 | description = safe_unicode(description) |
|
455 | 455 | # repo name is just a name of repository |
|
456 | 456 | # while repo_name_full is a full qualified name that is combined |
|
457 | 457 | # with name and path of group |
|
458 | 458 | repo_name_full = repo_name |
|
459 | 459 | repo_name = repo_name.split(Repository.NAME_SEP)[-1] |
|
460 | 460 | |
|
461 | 461 | new_repo = Repository() |
|
462 | 462 | new_repo.repo_state = state |
|
463 | 463 | new_repo.enable_statistics = False |
|
464 | 464 | new_repo.repo_name = repo_name_full |
|
465 | 465 | new_repo.repo_type = repo_type |
|
466 | 466 | new_repo.user = owner |
|
467 | 467 | new_repo.group = repo_group |
|
468 | 468 | new_repo.description = description or repo_name |
|
469 | 469 | new_repo.private = private |
|
470 | 470 | new_repo.clone_uri = clone_uri |
|
471 | 471 | new_repo.landing_rev = landing_rev |
|
472 | 472 | |
|
473 | 473 | new_repo.enable_statistics = enable_statistics |
|
474 | 474 | new_repo.enable_locking = enable_locking |
|
475 | 475 | new_repo.enable_downloads = enable_downloads |
|
476 | 476 | |
|
477 | 477 | if repo_group: |
|
478 | 478 | new_repo.enable_locking = repo_group.enable_locking |
|
479 | 479 | |
|
480 | 480 | if fork_of: |
|
481 | 481 | parent_repo = fork_of |
|
482 | 482 | new_repo.fork = parent_repo |
|
483 | 483 | |
|
484 | 484 | events.trigger(events.RepoPreCreateEvent(new_repo)) |
|
485 | 485 | |
|
486 | 486 | self.sa.add(new_repo) |
|
487 | 487 | |
|
488 | 488 | EMPTY_PERM = 'repository.none' |
|
489 | 489 | if fork_of and copy_fork_permissions: |
|
490 | 490 | repo = fork_of |
|
491 | 491 | user_perms = UserRepoToPerm.query() \ |
|
492 | 492 | .filter(UserRepoToPerm.repository == repo).all() |
|
493 | 493 | group_perms = UserGroupRepoToPerm.query() \ |
|
494 | 494 | .filter(UserGroupRepoToPerm.repository == repo).all() |
|
495 | 495 | |
|
496 | 496 | for perm in user_perms: |
|
497 | 497 | UserRepoToPerm.create( |
|
498 | 498 | perm.user, new_repo, perm.permission) |
|
499 | 499 | |
|
500 | 500 | for perm in group_perms: |
|
501 | 501 | UserGroupRepoToPerm.create( |
|
502 | 502 | perm.users_group, new_repo, perm.permission) |
|
503 | 503 | # in case we copy permissions and also set this repo to private |
|
504 | 504 | # override the default user permission to make it a private |
|
505 | 505 | # repo |
|
506 | 506 | if private: |
|
507 | 507 | RepoModel(self.sa).grant_user_permission( |
|
508 | 508 | repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM) |
|
509 | 509 | |
|
510 | 510 | elif repo_group and copy_group_permissions: |
|
511 | 511 | user_perms = UserRepoGroupToPerm.query() \ |
|
512 | 512 | .filter(UserRepoGroupToPerm.group == repo_group).all() |
|
513 | 513 | |
|
514 | 514 | group_perms = UserGroupRepoGroupToPerm.query() \ |
|
515 | 515 | .filter(UserGroupRepoGroupToPerm.group == repo_group).all() |
|
516 | 516 | |
|
517 | 517 | for perm in user_perms: |
|
518 | 518 | perm_name = perm.permission.permission_name.replace( |
|
519 | 519 | 'group.', 'repository.') |
|
520 | 520 | perm_obj = Permission.get_by_key(perm_name) |
|
521 | 521 | UserRepoToPerm.create(perm.user, new_repo, perm_obj) |
|
522 | 522 | |
|
523 | 523 | for perm in group_perms: |
|
524 | 524 | perm_name = perm.permission.permission_name.replace( |
|
525 | 525 | 'group.', 'repository.') |
|
526 | 526 | perm_obj = Permission.get_by_key(perm_name) |
|
527 | 527 | UserGroupRepoToPerm.create( |
|
528 | 528 | perm.users_group, new_repo, perm_obj) |
|
529 | 529 | |
|
530 | 530 | if private: |
|
531 | 531 | RepoModel(self.sa).grant_user_permission( |
|
532 | 532 | repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM) |
|
533 | 533 | |
|
534 | 534 | else: |
|
535 | 535 | perm_obj = self._create_default_perms(new_repo, private) |
|
536 | 536 | self.sa.add(perm_obj) |
|
537 | 537 | |
|
538 | 538 | # now automatically start following this repository as owner |
|
539 | 539 | ScmModel(self.sa).toggle_following_repo(new_repo.repo_id, |
|
540 | 540 | owner.user_id) |
|
541 | 541 | |
|
542 | 542 | # we need to flush here, in order to check if database won't |
|
543 | 543 | # throw any exceptions, create filesystem dirs at the very end |
|
544 | 544 | self.sa.flush() |
|
545 | 545 | events.trigger(events.RepoCreateEvent(new_repo)) |
|
546 | 546 | return new_repo |
|
547 | 547 | |
|
548 | 548 | except Exception: |
|
549 | 549 | log.error(traceback.format_exc()) |
|
550 | 550 | raise |
|
551 | 551 | |
|
552 | 552 | def create(self, form_data, cur_user): |
|
553 | 553 | """ |
|
554 | 554 | Create repository using celery tasks |
|
555 | 555 | |
|
556 | 556 | :param form_data: |
|
557 | 557 | :param cur_user: |
|
558 | 558 | """ |
|
559 | 559 | from rhodecode.lib.celerylib import tasks, run_task |
|
560 | 560 | return run_task(tasks.create_repo, form_data, cur_user) |
|
561 | 561 | |
|
562 | 562 | def update_permissions(self, repo, perm_additions=None, perm_updates=None, |
|
563 | 563 | perm_deletions=None, check_perms=True, |
|
564 | 564 | cur_user=None): |
|
565 | 565 | if not perm_additions: |
|
566 | 566 | perm_additions = [] |
|
567 | 567 | if not perm_updates: |
|
568 | 568 | perm_updates = [] |
|
569 | 569 | if not perm_deletions: |
|
570 | 570 | perm_deletions = [] |
|
571 | 571 | |
|
572 | 572 | req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin') |
|
573 | 573 | |
|
574 | 574 | # update permissions |
|
575 | 575 | for member_id, perm, member_type in perm_updates: |
|
576 | 576 | member_id = int(member_id) |
|
577 | 577 | if member_type == 'user': |
|
578 | 578 | # this updates also current one if found |
|
579 | 579 | self.grant_user_permission( |
|
580 | 580 | repo=repo, user=member_id, perm=perm) |
|
581 | 581 | else: # set for user group |
|
582 | 582 | # check if we have permissions to alter this usergroup |
|
583 | 583 | member_name = UserGroup.get(member_id).users_group_name |
|
584 | 584 | if not check_perms or HasUserGroupPermissionAny( |
|
585 | 585 | *req_perms)(member_name, user=cur_user): |
|
586 | 586 | self.grant_user_group_permission( |
|
587 | 587 | repo=repo, group_name=member_id, perm=perm) |
|
588 | 588 | |
|
589 | 589 | # set new permissions |
|
590 | 590 | for member_id, perm, member_type in perm_additions: |
|
591 | 591 | member_id = int(member_id) |
|
592 | 592 | if member_type == 'user': |
|
593 | 593 | self.grant_user_permission( |
|
594 | 594 | repo=repo, user=member_id, perm=perm) |
|
595 | 595 | else: # set for user group |
|
596 | 596 | # check if we have permissions to alter this usergroup |
|
597 | 597 | member_name = UserGroup.get(member_id).users_group_name |
|
598 | 598 | if not check_perms or HasUserGroupPermissionAny( |
|
599 | 599 | *req_perms)(member_name, user=cur_user): |
|
600 | 600 | self.grant_user_group_permission( |
|
601 | 601 | repo=repo, group_name=member_id, perm=perm) |
|
602 | 602 | |
|
603 | 603 | # delete permissions |
|
604 | 604 | for member_id, perm, member_type in perm_deletions: |
|
605 | 605 | member_id = int(member_id) |
|
606 | 606 | if member_type == 'user': |
|
607 | 607 | self.revoke_user_permission(repo=repo, user=member_id) |
|
608 | 608 | else: # set for user group |
|
609 | 609 | # check if we have permissions to alter this usergroup |
|
610 | 610 | member_name = UserGroup.get(member_id).users_group_name |
|
611 | 611 | if not check_perms or HasUserGroupPermissionAny( |
|
612 | 612 | *req_perms)(member_name, user=cur_user): |
|
613 | 613 | self.revoke_user_group_permission( |
|
614 | 614 | repo=repo, group_name=member_id) |
|
615 | 615 | |
|
616 | 616 | def create_fork(self, form_data, cur_user): |
|
617 | 617 | """ |
|
618 | 618 | Simple wrapper into executing celery task for fork creation |
|
619 | 619 | |
|
620 | 620 | :param form_data: |
|
621 | 621 | :param cur_user: |
|
622 | 622 | """ |
|
623 | 623 | from rhodecode.lib.celerylib import tasks, run_task |
|
624 | 624 | return run_task(tasks.create_repo_fork, form_data, cur_user) |
|
625 | 625 | |
|
    def delete(self, repo, forks=None, fs_remove=True, cur_user=None):
        """
        Delete given repository, the forks parameter defines what to do with
        attached forks. Throws AttachedForksError if the deleted repo has
        attached forks and no forks policy was given.

        :param repo: Instance of Repository, repository_id, or repository name
        :param forks: str 'delete' or 'detach'
        :param fs_remove: remove(archive) repo from filesystem
        :param cur_user: acting user recorded in the deletion journal;
            defaults to the currently authenticated user
        """
        if not cur_user:
            cur_user = getattr(get_current_rhodecode_user(), 'username', None)
        repo = self._get_repo(repo)
        if repo:
            if forks == 'detach':
                # keep the forks but break their link to the deleted parent
                for r in repo.forks:
                    r.fork = None
                    self.sa.add(r)
            elif forks == 'delete':
                # NOTE(review): the recursion does not forward `fs_remove`,
                # so forks are always archived on disk -- confirm intended
                for r in repo.forks:
                    self.delete(r, forks='delete')
            elif [f for f in repo.forks]:
                raise AttachedForksError()

            old_repo_dict = repo.get_dict()
            events.trigger(events.RepoPreDeleteEvent(repo))
            try:
                self.sa.delete(repo)
                if fs_remove:
                    self._delete_filesystem_repo(repo)
                else:
                    log.debug('skipping removal from filesystem')
                # journal the deletion together with actor and timestamp
                old_repo_dict.update({
                    'deleted_by': cur_user,
                    'deleted_on': time.time(),
                })
                log_delete_repository(**old_repo_dict)
                events.trigger(events.RepoDeleteEvent(repo))
            except Exception:
                log.error(traceback.format_exc())
                raise
|
667 | 667 | |
|
668 | 668 | def grant_user_permission(self, repo, user, perm): |
|
669 | 669 | """ |
|
670 | 670 | Grant permission for user on given repository, or update existing one |
|
671 | 671 | if found |
|
672 | 672 | |
|
673 | 673 | :param repo: Instance of Repository, repository_id, or repository name |
|
674 | 674 | :param user: Instance of User, user_id or username |
|
675 | 675 | :param perm: Instance of Permission, or permission_name |
|
676 | 676 | """ |
|
677 | 677 | user = self._get_user(user) |
|
678 | 678 | repo = self._get_repo(repo) |
|
679 | 679 | permission = self._get_perm(perm) |
|
680 | 680 | |
|
681 | 681 | # check if we have that permission already |
|
682 | 682 | obj = self.sa.query(UserRepoToPerm) \ |
|
683 | 683 | .filter(UserRepoToPerm.user == user) \ |
|
684 | 684 | .filter(UserRepoToPerm.repository == repo) \ |
|
685 | 685 | .scalar() |
|
686 | 686 | if obj is None: |
|
687 | 687 | # create new ! |
|
688 | 688 | obj = UserRepoToPerm() |
|
689 | 689 | obj.repository = repo |
|
690 | 690 | obj.user = user |
|
691 | 691 | obj.permission = permission |
|
692 | 692 | self.sa.add(obj) |
|
693 | 693 | log.debug('Granted perm %s to %s on %s', perm, user, repo) |
|
694 | 694 | action_logger_generic( |
|
695 | 695 | 'granted permission: {} to user: {} on repo: {}'.format( |
|
696 | 696 | perm, user, repo), namespace='security.repo') |
|
697 | 697 | return obj |
|
698 | 698 | |
|
699 | 699 | def revoke_user_permission(self, repo, user): |
|
700 | 700 | """ |
|
701 | 701 | Revoke permission for user on given repository |
|
702 | 702 | |
|
703 | 703 | :param repo: Instance of Repository, repository_id, or repository name |
|
704 | 704 | :param user: Instance of User, user_id or username |
|
705 | 705 | """ |
|
706 | 706 | |
|
707 | 707 | user = self._get_user(user) |
|
708 | 708 | repo = self._get_repo(repo) |
|
709 | 709 | |
|
710 | 710 | obj = self.sa.query(UserRepoToPerm) \ |
|
711 | 711 | .filter(UserRepoToPerm.repository == repo) \ |
|
712 | 712 | .filter(UserRepoToPerm.user == user) \ |
|
713 | 713 | .scalar() |
|
714 | 714 | if obj: |
|
715 | 715 | self.sa.delete(obj) |
|
716 | 716 | log.debug('Revoked perm on %s on %s', repo, user) |
|
717 | 717 | action_logger_generic( |
|
718 | 718 | 'revoked permission from user: {} on repo: {}'.format( |
|
719 | 719 | user, repo), namespace='security.repo') |
|
720 | 720 | |
|
721 | 721 | def grant_user_group_permission(self, repo, group_name, perm): |
|
722 | 722 | """ |
|
723 | 723 | Grant permission for user group on given repository, or update |
|
724 | 724 | existing one if found |
|
725 | 725 | |
|
726 | 726 | :param repo: Instance of Repository, repository_id, or repository name |
|
727 | 727 | :param group_name: Instance of UserGroup, users_group_id, |
|
728 | 728 | or user group name |
|
729 | 729 | :param perm: Instance of Permission, or permission_name |
|
730 | 730 | """ |
|
731 | 731 | repo = self._get_repo(repo) |
|
732 | 732 | group_name = self._get_user_group(group_name) |
|
733 | 733 | permission = self._get_perm(perm) |
|
734 | 734 | |
|
735 | 735 | # check if we have that permission already |
|
736 | 736 | obj = self.sa.query(UserGroupRepoToPerm) \ |
|
737 | 737 | .filter(UserGroupRepoToPerm.users_group == group_name) \ |
|
738 | 738 | .filter(UserGroupRepoToPerm.repository == repo) \ |
|
739 | 739 | .scalar() |
|
740 | 740 | |
|
741 | 741 | if obj is None: |
|
742 | 742 | # create new |
|
743 | 743 | obj = UserGroupRepoToPerm() |
|
744 | 744 | |
|
745 | 745 | obj.repository = repo |
|
746 | 746 | obj.users_group = group_name |
|
747 | 747 | obj.permission = permission |
|
748 | 748 | self.sa.add(obj) |
|
749 | 749 | log.debug('Granted perm %s to %s on %s', perm, group_name, repo) |
|
750 | 750 | action_logger_generic( |
|
751 | 751 | 'granted permission: {} to usergroup: {} on repo: {}'.format( |
|
752 | 752 | perm, group_name, repo), namespace='security.repo') |
|
753 | 753 | |
|
754 | 754 | return obj |
|
755 | 755 | |
|
756 | 756 | def revoke_user_group_permission(self, repo, group_name): |
|
757 | 757 | """ |
|
758 | 758 | Revoke permission for user group on given repository |
|
759 | 759 | |
|
760 | 760 | :param repo: Instance of Repository, repository_id, or repository name |
|
761 | 761 | :param group_name: Instance of UserGroup, users_group_id, |
|
762 | 762 | or user group name |
|
763 | 763 | """ |
|
764 | 764 | repo = self._get_repo(repo) |
|
765 | 765 | group_name = self._get_user_group(group_name) |
|
766 | 766 | |
|
767 | 767 | obj = self.sa.query(UserGroupRepoToPerm) \ |
|
768 | 768 | .filter(UserGroupRepoToPerm.repository == repo) \ |
|
769 | 769 | .filter(UserGroupRepoToPerm.users_group == group_name) \ |
|
770 | 770 | .scalar() |
|
771 | 771 | if obj: |
|
772 | 772 | self.sa.delete(obj) |
|
773 | 773 | log.debug('Revoked perm to %s on %s', repo, group_name) |
|
774 | 774 | action_logger_generic( |
|
775 | 775 | 'revoked permission from usergroup: {} on repo: {}'.format( |
|
776 | 776 | group_name, repo), namespace='security.repo') |
|
777 | 777 | |
|
778 | 778 | def delete_stats(self, repo_name): |
|
779 | 779 | """ |
|
780 | 780 | removes stats for given repo |
|
781 | 781 | |
|
782 | 782 | :param repo_name: |
|
783 | 783 | """ |
|
784 | 784 | repo = self._get_repo(repo_name) |
|
785 | 785 | try: |
|
786 | 786 | obj = self.sa.query(Statistics) \ |
|
787 | 787 | .filter(Statistics.repository == repo).scalar() |
|
788 | 788 | if obj: |
|
789 | 789 | self.sa.delete(obj) |
|
790 | 790 | except Exception: |
|
791 | 791 | log.error(traceback.format_exc()) |
|
792 | 792 | raise |
|
793 | 793 | |
|
794 | 794 | def add_repo_field(self, repo_name, field_key, field_label, field_value='', |
|
795 | 795 | field_type='str', field_desc=''): |
|
796 | 796 | |
|
797 | 797 | repo = self._get_repo(repo_name) |
|
798 | 798 | |
|
799 | 799 | new_field = RepositoryField() |
|
800 | 800 | new_field.repository = repo |
|
801 | 801 | new_field.field_key = field_key |
|
802 | 802 | new_field.field_type = field_type # python type |
|
803 | 803 | new_field.field_value = field_value |
|
804 | 804 | new_field.field_desc = field_desc |
|
805 | 805 | new_field.field_label = field_label |
|
806 | 806 | self.sa.add(new_field) |
|
807 | 807 | return new_field |
|
808 | 808 | |
|
809 | 809 | def delete_repo_field(self, repo_name, field_key): |
|
810 | 810 | repo = self._get_repo(repo_name) |
|
811 | 811 | field = RepositoryField.get_by_key_name(field_key, repo) |
|
812 | 812 | if field: |
|
813 | 813 | self.sa.delete(field) |
|
814 | 814 | |
|
815 | 815 | def _create_filesystem_repo(self, repo_name, repo_type, repo_group, |
|
816 | 816 | clone_uri=None, repo_store_location=None, |
|
817 | 817 | use_global_config=False): |
|
818 | 818 | """ |
|
819 | 819 | makes repository on filesystem. It's group aware means it'll create |
|
820 | 820 | a repository within a group, and alter the paths accordingly of |
|
821 | 821 | group location |
|
822 | 822 | |
|
823 | 823 | :param repo_name: |
|
824 | 824 | :param alias: |
|
825 | 825 | :param parent: |
|
826 | 826 | :param clone_uri: |
|
827 | 827 | :param repo_store_location: |
|
828 | 828 | """ |
|
829 | 829 | from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group |
|
830 | 830 | from rhodecode.model.scm import ScmModel |
|
831 | 831 | |
|
832 | 832 | if Repository.NAME_SEP in repo_name: |
|
833 | 833 | raise ValueError( |
|
834 | 834 | 'repo_name must not contain groups got `%s`' % repo_name) |
|
835 | 835 | |
|
836 | 836 | if isinstance(repo_group, RepoGroup): |
|
837 | 837 | new_parent_path = os.sep.join(repo_group.full_path_splitted) |
|
838 | 838 | else: |
|
839 | 839 | new_parent_path = repo_group or '' |
|
840 | 840 | |
|
841 | 841 | if repo_store_location: |
|
842 | 842 | _paths = [repo_store_location] |
|
843 | 843 | else: |
|
844 | 844 | _paths = [self.repos_path, new_parent_path, repo_name] |
|
845 | 845 | # we need to make it str for mercurial |
|
846 | 846 | repo_path = os.path.join(*map(lambda x: safe_str(x), _paths)) |
|
847 | 847 | |
|
848 | 848 | # check if this path is not a repository |
|
849 | 849 | if is_valid_repo(repo_path, self.repos_path): |
|
850 | 850 | raise Exception('This path %s is a valid repository' % repo_path) |
|
851 | 851 | |
|
852 | 852 | # check if this path is a group |
|
853 | 853 | if is_valid_repo_group(repo_path, self.repos_path): |
|
854 | 854 | raise Exception('This path %s is a valid group' % repo_path) |
|
855 | 855 | |
|
856 | 856 | log.info('creating repo %s in %s from url: `%s`', |
|
857 | 857 | repo_name, safe_unicode(repo_path), |
|
858 | 858 | obfuscate_url_pw(clone_uri)) |
|
859 | 859 | |
|
860 | 860 | backend = get_backend(repo_type) |
|
861 | 861 | |
|
862 | 862 | config_repo = None if use_global_config else repo_name |
|
863 | 863 | if config_repo and new_parent_path: |
|
864 | 864 | config_repo = Repository.NAME_SEP.join( |
|
865 | 865 | (new_parent_path, config_repo)) |
|
866 | 866 | config = make_db_config(clear_session=False, repo=config_repo) |
|
867 | 867 | config.set('extensions', 'largefiles', '') |
|
868 | 868 | |
|
869 | 869 | # patch and reset hooks section of UI config to not run any |
|
870 | 870 | # hooks on creating remote repo |
|
871 | 871 | config.clear_section('hooks') |
|
872 | 872 | |
|
873 | 873 | # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice |
|
874 | 874 | if repo_type == 'git': |
|
875 | 875 | repo = backend( |
|
876 | 876 | repo_path, config=config, create=True, src_url=clone_uri, |
|
877 | 877 | bare=True) |
|
878 | 878 | else: |
|
879 | 879 | repo = backend( |
|
880 | 880 | repo_path, config=config, create=True, src_url=clone_uri) |
|
881 | 881 | |
|
882 | 882 | ScmModel().install_hooks(repo, repo_type=repo_type) |
|
883 | 883 | |
|
884 | 884 | log.debug('Created repo %s with %s backend', |
|
885 | 885 | safe_unicode(repo_name), safe_unicode(repo_type)) |
|
886 | 886 | return repo |
|
887 | 887 | |
|
888 | 888 | def _rename_filesystem_repo(self, old, new): |
|
889 | 889 | """ |
|
890 | 890 | renames repository on filesystem |
|
891 | 891 | |
|
892 | 892 | :param old: old name |
|
893 | 893 | :param new: new name |
|
894 | 894 | """ |
|
895 | 895 | log.info('renaming repo from %s to %s', old, new) |
|
896 | 896 | |
|
897 | 897 | old_path = os.path.join(self.repos_path, old) |
|
898 | 898 | new_path = os.path.join(self.repos_path, new) |
|
899 | 899 | if os.path.isdir(new_path): |
|
900 | 900 | raise Exception( |
|
901 | 901 | 'Was trying to rename to already existing dir %s' % new_path |
|
902 | 902 | ) |
|
903 | 903 | shutil.move(old_path, new_path) |
|
904 | 904 | |
|
    def _delete_filesystem_repo(self, repo):
        """
        Removes repo from filesystem. The removal is actually done by adding
        a `rm__` prefix to the directory and renaming the internal .hg/.git
        dir, so the repository is no longer valid for rhodecode but can be
        undeleted later by reverting the renames.

        :param repo: repo object
        """
        rm_path = os.path.join(self.repos_path, repo.repo_name)
        repo_group = repo.group
        log.info("Removing repository %s", rm_path)
        # disable hg/git internal that it doesn't get detected as repo
        alias = repo.repo_type

        config = make_db_config(clear_session=False)
        config.set('extensions', 'largefiles', '')
        bare = getattr(repo.scm_instance(config=config), 'bare', False)

        # skip this for bare git repos
        if not bare:
            # disable VCS repo by renaming the control dir out of the way
            vcs_path = os.path.join(rm_path, '.%s' % alias)
            if os.path.exists(vcs_path):
                shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))

        _now = datetime.now()
        # zero-padded microseconds keep removal dir names unique and sortable
        _ms = str(_now.microsecond).rjust(6, '0')
        _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
                             repo.just_name)
        if repo_group:
            # if repository is in group, prefix the removal path with the group
            args = repo_group.full_path_splitted + [_d]
            _d = os.path.join(*args)

        if os.path.isdir(rm_path):
            shutil.move(rm_path, os.path.join(self.repos_path, _d))
|
942 | 942 | |
|
943 | 943 | |
|
class ReadmeFinder:
    """
    Utility which knows how to find a readme for a specific commit.

    The main idea is that this is a configurable algorithm. When creating an
    instance you can define parameters, currently only the `default_renderer`.
    Based on this configuration the method :meth:`search` behaves slightly
    different.
    """

    # matches e.g. "README", "readme.md"; group(1) captures the extension
    # including its leading dot, or None when there is no extension
    readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE)
    # matches directory names like "doc", "docs", "Documentation"
    path_re = re.compile(r'^docs?', re.IGNORECASE)

    # lower value == higher priority; None covers extension-less readmes
    default_priorities = {
        None: 0,
        '.text': 2,
        '.txt': 3,
        '.rst': 1,
        '.rest': 2,
        '.md': 1,
        '.mkdn': 2,
        '.mdown': 3,
        '.markdown': 4,
    }

    path_priority = {
        'doc': 0,
        'docs': 1,
    }

    FALLBACK_PRIORITY = 99

    RENDERER_TO_EXTENSION = {
        'rst': ['.rst', '.rest'],
        # bugfix: '.mkdn' was listed without its leading dot and could never
        # match an extension captured by `readme_re` (always dot-prefixed)
        'markdown': ['.md', '.mkdn', '.mdown', '.markdown'],
    }

    def __init__(self, default_renderer=None):
        """
        :param default_renderer: renderer name ('rst' or 'markdown') whose
            extensions get boosted priority during the search
        """
        self._default_renderer = default_renderer
        self._renderer_extensions = self.RENDERER_TO_EXTENSION.get(
            default_renderer, [])

    def search(self, commit, path='/'):
        """
        Find a readme in the given `commit`, first among the direct children
        of `path`, then recursively inside doc-like subdirectories.

        :return: the readme file node, or None if nothing matched
        """
        nodes = commit.get_nodes(path)
        matches = self._match_readmes(nodes)
        matches = self._sort_according_to_priority(matches)
        if matches:
            return matches[0].node

        paths = self._match_paths(nodes)
        paths = self._sort_paths_according_to_priority(paths)
        for path in paths:
            match = self.search(commit, path=path)
            if match:
                return match

        return None

    def _match_readmes(self, nodes):
        # yield a ReadmeMatch for every file node whose basename is a readme
        for node in nodes:
            if not node.is_file():
                continue
            path = node.path.rsplit('/', 1)[-1]
            match = self.readme_re.match(path)
            if match:
                extension = match.group(1)
                yield ReadmeMatch(node, match, self._priority(extension))

    def _match_paths(self, nodes):
        # yield paths of directory nodes that look like documentation dirs
        for node in nodes:
            if not node.is_dir():
                continue
            match = self.path_re.match(node.path)
            if match:
                yield node.path

    def _priority(self, extension):
        """Return a (renderer_priority, extension_priority) sort key."""
        renderer_priority = (
            0 if extension in self._renderer_extensions else 1)
        extension_priority = self.default_priorities.get(
            extension, self.FALLBACK_PRIORITY)
        return (renderer_priority, extension_priority)

    def _sort_according_to_priority(self, matches):
        # ties on priority are broken alphabetically by path

        def priority_and_path(match):
            return (match.priority, match.path)

        return sorted(matches, key=priority_and_path)

    def _sort_paths_according_to_priority(self, paths):
        # known doc dirs first, unknown ones last, alphabetical within ties

        def priority_and_path(path):
            return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path)

        return sorted(paths, key=priority_and_path)
|
1043 | 1043 | |
|
1044 | 1044 | |
|
class ReadmeMatch:
    """A readme candidate node paired with its computed search priority."""

    def __init__(self, node, match, priority):
        self.node = node
        self._match = match
        self.priority = priority

    @property
    def path(self):
        """Path of the matched readme node."""
        return self.node.path

    def __repr__(self):
        # bugfix: the original format string was missing the closing '>'
        return '<ReadmeMatch {} priority={}>'.format(self.path, self.priority)
@@ -1,911 +1,915 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2016 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Scm model for RhodeCode |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import os.path |
|
26 | 26 | import re |
|
27 | 27 | import sys |
|
28 | 28 | import traceback |
|
29 | 29 | import logging |
|
30 | 30 | import cStringIO |
|
31 | 31 | import pkg_resources |
|
32 | 32 | |
|
33 | 33 | from pylons.i18n.translation import _ |
|
34 | 34 | from sqlalchemy import func |
|
35 | 35 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
36 | 36 | |
|
37 | 37 | import rhodecode |
|
38 | 38 | from rhodecode.lib.vcs import get_backend |
|
39 | 39 | from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError |
|
40 | 40 | from rhodecode.lib.vcs.nodes import FileNode |
|
41 | 41 | from rhodecode.lib.vcs.backends.base import EmptyCommit |
|
42 | 42 | from rhodecode.lib import helpers as h |
|
43 | 43 | |
|
44 | 44 | from rhodecode.lib.auth import ( |
|
45 | 45 | HasRepoPermissionAny, HasRepoGroupPermissionAny, |
|
46 | 46 | HasUserGroupPermissionAny) |
|
47 | 47 | from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError |
|
48 | 48 | from rhodecode.lib import hooks_utils, caches |
|
49 | 49 | from rhodecode.lib.utils import ( |
|
50 | 50 | get_filesystem_repos, action_logger, make_db_config) |
|
51 | 51 | from rhodecode.lib.utils2 import (safe_str, safe_unicode) |
|
52 | 52 | from rhodecode.lib.system_info import get_system_info |
|
53 | 53 | from rhodecode.model import BaseModel |
|
54 | 54 | from rhodecode.model.db import ( |
|
55 | 55 | Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup, |
|
56 | 56 | PullRequest) |
|
57 | 57 | from rhodecode.model.settings import VcsSettingsModel |
|
58 | 58 | |
|
59 | 59 | log = logging.getLogger(__name__) |
|
60 | 60 | |
|
61 | 61 | |
|
class UserTemp(object):
    """Lightweight stand-in carrying only a user id, used for action logging."""

    def __init__(self, user_id):
        self.user_id = user_id

    def __repr__(self):
        cls_name = self.__class__.__name__
        return "<%s('id:%s')>" % (cls_name, self.user_id)
|
68 | 68 | |
|
69 | 69 | |
|
class RepoTemp(object):
    """Lightweight stand-in carrying only a repo id, used for action logging."""

    def __init__(self, repo_id):
        self.repo_id = repo_id

    def __repr__(self):
        cls_name = self.__class__.__name__
        return "<%s('id:%s')>" % (cls_name, self.repo_id)
|
76 | 76 | |
|
77 | 77 | |
|
class SimpleCachedRepoList(object):
    """
    Lighter version of repository iteration that skips the scm
    initialisation and relies on cached db data.
    """

    def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
        self.db_repo_list = db_repo_list
        self.repos_path = repos_path
        self.order_by = order_by
        # a leading '-' on the sort key means descending order
        self.reversed = (order_by or '').startswith('-')
        self.perm_set = perm_set or [
            'repository.read', 'repository.write', 'repository.admin']

    def __len__(self):
        return len(self.db_repo_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, len(self))

    def __iter__(self):
        for dbr in self.db_repo_list:
            # check permission at this level
            if not HasRepoPermissionAny(*self.perm_set)(
                    dbr.repo_name, 'SimpleCachedRepoList check'):
                continue
            yield {
                'name': dbr.repo_name,
                'dbrepo': dbr.get_dict(),
                'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {},
            }
|
113 | 113 | |
|
114 | 114 | |
|
115 | 115 | class _PermCheckIterator(object): |
|
116 | 116 | |
|
117 | 117 | def __init__( |
|
118 | 118 | self, obj_list, obj_attr, perm_set, perm_checker, |
|
119 | 119 | extra_kwargs=None): |
|
120 | 120 | """ |
|
121 | 121 | Creates iterator from given list of objects, additionally |
|
122 | 122 | checking permission for them from perm_set var |
|
123 | 123 | |
|
124 | 124 | :param obj_list: list of db objects |
|
125 | 125 | :param obj_attr: attribute of object to pass into perm_checker |
|
126 | 126 | :param perm_set: list of permissions to check |
|
127 | 127 | :param perm_checker: callable to check permissions against |
|
128 | 128 | """ |
|
129 | 129 | self.obj_list = obj_list |
|
130 | 130 | self.obj_attr = obj_attr |
|
131 | 131 | self.perm_set = perm_set |
|
132 | 132 | self.perm_checker = perm_checker |
|
133 | 133 | self.extra_kwargs = extra_kwargs or {} |
|
134 | 134 | |
|
135 | 135 | def __len__(self): |
|
136 | 136 | return len(self.obj_list) |
|
137 | 137 | |
|
138 | 138 | def __repr__(self): |
|
139 | 139 | return '<%s (%s)>' % (self.__class__.__name__, self.__len__()) |
|
140 | 140 | |
|
141 | 141 | def __iter__(self): |
|
142 | 142 | checker = self.perm_checker(*self.perm_set) |
|
143 | 143 | for db_obj in self.obj_list: |
|
144 | 144 | # check permission at this level |
|
145 | 145 | name = getattr(db_obj, self.obj_attr, None) |
|
146 | 146 | if not checker(name, self.__class__.__name__, **self.extra_kwargs): |
|
147 | 147 | continue |
|
148 | 148 | |
|
149 | 149 | yield db_obj |
|
150 | 150 | |
|
151 | 151 | |
|
class RepoList(_PermCheckIterator):
    """Iterator over repositories filtered by repository-level permissions."""

    def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
        perm_set = perm_set or [
            'repository.read', 'repository.write', 'repository.admin']
        super(RepoList, self).__init__(
            obj_list=db_repo_list,
            obj_attr='repo_name', perm_set=perm_set,
            perm_checker=HasRepoPermissionAny,
            extra_kwargs=extra_kwargs)
|
164 | 164 | |
|
165 | 165 | |
|
class RepoGroupList(_PermCheckIterator):
    """Iterator over repo groups filtered by group-level permissions."""

    def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
        perm_set = perm_set or ['group.read', 'group.write', 'group.admin']
        super(RepoGroupList, self).__init__(
            obj_list=db_repo_group_list,
            obj_attr='group_name', perm_set=perm_set,
            perm_checker=HasRepoGroupPermissionAny,
            extra_kwargs=extra_kwargs)
|
177 | 177 | |
|
178 | 178 | |
|
class UserGroupList(_PermCheckIterator):
    """Iterator over user groups filtered by user-group-level permissions."""

    def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
        perm_set = perm_set or [
            'usergroup.read', 'usergroup.write', 'usergroup.admin']
        super(UserGroupList, self).__init__(
            obj_list=db_user_group_list,
            obj_attr='users_group_name', perm_set=perm_set,
            perm_checker=HasUserGroupPermissionAny,
            extra_kwargs=extra_kwargs)
|
190 | 190 | |
|
191 | 191 | |
|
192 | 192 | class ScmModel(BaseModel): |
|
193 | 193 | """ |
|
194 | 194 | Generic Scm Model |
|
195 | 195 | """ |
|
196 | 196 | |
|
197 | 197 | @LazyProperty |
|
198 | 198 | def repos_path(self): |
|
199 | 199 | """ |
|
200 | 200 | Gets the repositories root path from database |
|
201 | 201 | """ |
|
202 | 202 | |
|
203 | 203 | settings_model = VcsSettingsModel(sa=self.sa) |
|
204 | 204 | return settings_model.get_repos_location() |
|
205 | 205 | |
|
    def repo_scan(self, repos_path=None):
        """
        Listing of repositories in given path. This path should not be a
        repository itself. Return a dictionary of repository objects keyed
        by normalized repository name.

        :param repos_path: path to directory containing repositories;
            defaults to the configured repository store root
        """

        if repos_path is None:
            repos_path = self.repos_path

        log.info('scanning for repositories in %s', repos_path)

        config = make_db_config()
        config.set('extensions', 'largefiles', '')
        repos = {}

        for name, path in get_filesystem_repos(repos_path, recursive=True):
            # name need to be decomposed and put back together using the /
            # since this is internal storage separator for rhodecode
            name = Repository.normalize_repo_name(name)

            try:
                if name in repos:
                    # duplicate names are a hard error; note that this
                    # propagates, since only OSError is swallowed below
                    raise RepositoryError('Duplicate repository name %s '
                                          'found in %s' % (name, path))
                elif path[0] in rhodecode.BACKENDS:
                    # path looks like a (backend_alias, location) pair
                    # based on usage here -- TODO confirm against
                    # get_filesystem_repos
                    klass = get_backend(path[0])
                    repos[name] = klass(path[1], config=config)
            except OSError:
                # skip repositories that cannot be accessed on disk
                continue
        log.debug('found %s paths with repositories', len(repos))
        return repos
|
239 | 239 | |
|
240 | 240 | def get_repos(self, all_repos=None, sort_key=None): |
|
241 | 241 | """ |
|
242 | 242 | Get all repositories from db and for each repo create it's |
|
243 | 243 | backend instance and fill that backed with information from database |
|
244 | 244 | |
|
245 | 245 | :param all_repos: list of repository names as strings |
|
246 | 246 | give specific repositories list, good for filtering |
|
247 | 247 | |
|
248 | 248 | :param sort_key: initial sorting of repositories |
|
249 | 249 | """ |
|
250 | 250 | if all_repos is None: |
|
251 | 251 | all_repos = self.sa.query(Repository)\ |
|
252 | 252 | .filter(Repository.group_id == None)\ |
|
253 | 253 | .order_by(func.lower(Repository.repo_name)).all() |
|
254 | 254 | repo_iter = SimpleCachedRepoList( |
|
255 | 255 | all_repos, repos_path=self.repos_path, order_by=sort_key) |
|
256 | 256 | return repo_iter |
|
257 | 257 | |
|
258 | 258 | def get_repo_groups(self, all_groups=None): |
|
259 | 259 | if all_groups is None: |
|
260 | 260 | all_groups = RepoGroup.query()\ |
|
261 | 261 | .filter(RepoGroup.group_parent_id == None).all() |
|
262 | 262 | return [x for x in RepoGroupList(all_groups)] |
|
263 | 263 | |
|
264 | 264 | def mark_for_invalidation(self, repo_name, delete=False): |
|
265 | 265 | """ |
|
266 | 266 | Mark caches of this repo invalid in the database. `delete` flag |
|
267 | 267 | removes the cache entries |
|
268 | 268 | |
|
269 | 269 | :param repo_name: the repo_name for which caches should be marked |
|
270 | 270 | invalid, or deleted |
|
271 | 271 | :param delete: delete the entry keys instead of setting bool |
|
272 | 272 | flag on them |
|
273 | 273 | """ |
|
274 | 274 | CacheKey.set_invalidate(repo_name, delete=delete) |
|
275 | 275 | repo = Repository.get_by_repo_name(repo_name) |
|
276 | 276 | |
|
277 | 277 | if repo: |
|
278 | 278 | config = repo._config |
|
279 | 279 | config.set('extensions', 'largefiles', '') |
|
280 | 280 | repo.update_commit_cache(config=config, cs_cache=None) |
|
281 | 281 | caches.clear_repo_caches(repo_name) |
|
282 | 282 | |
|
283 | 283 | def toggle_following_repo(self, follow_repo_id, user_id): |
|
284 | 284 | |
|
285 | 285 | f = self.sa.query(UserFollowing)\ |
|
286 | 286 | .filter(UserFollowing.follows_repo_id == follow_repo_id)\ |
|
287 | 287 | .filter(UserFollowing.user_id == user_id).scalar() |
|
288 | 288 | |
|
289 | 289 | if f is not None: |
|
290 | 290 | try: |
|
291 | 291 | self.sa.delete(f) |
|
292 | 292 | action_logger(UserTemp(user_id), |
|
293 | 293 | 'stopped_following_repo', |
|
294 | 294 | RepoTemp(follow_repo_id)) |
|
295 | 295 | return |
|
296 | 296 | except Exception: |
|
297 | 297 | log.error(traceback.format_exc()) |
|
298 | 298 | raise |
|
299 | 299 | |
|
300 | 300 | try: |
|
301 | 301 | f = UserFollowing() |
|
302 | 302 | f.user_id = user_id |
|
303 | 303 | f.follows_repo_id = follow_repo_id |
|
304 | 304 | self.sa.add(f) |
|
305 | 305 | |
|
306 | 306 | action_logger(UserTemp(user_id), |
|
307 | 307 | 'started_following_repo', |
|
308 | 308 | RepoTemp(follow_repo_id)) |
|
309 | 309 | except Exception: |
|
310 | 310 | log.error(traceback.format_exc()) |
|
311 | 311 | raise |
|
312 | 312 | |
|
313 | 313 | def toggle_following_user(self, follow_user_id, user_id): |
|
314 | 314 | f = self.sa.query(UserFollowing)\ |
|
315 | 315 | .filter(UserFollowing.follows_user_id == follow_user_id)\ |
|
316 | 316 | .filter(UserFollowing.user_id == user_id).scalar() |
|
317 | 317 | |
|
318 | 318 | if f is not None: |
|
319 | 319 | try: |
|
320 | 320 | self.sa.delete(f) |
|
321 | 321 | return |
|
322 | 322 | except Exception: |
|
323 | 323 | log.error(traceback.format_exc()) |
|
324 | 324 | raise |
|
325 | 325 | |
|
326 | 326 | try: |
|
327 | 327 | f = UserFollowing() |
|
328 | 328 | f.user_id = user_id |
|
329 | 329 | f.follows_user_id = follow_user_id |
|
330 | 330 | self.sa.add(f) |
|
331 | 331 | except Exception: |
|
332 | 332 | log.error(traceback.format_exc()) |
|
333 | 333 | raise |
|
334 | 334 | |
|
335 | 335 | def is_following_repo(self, repo_name, user_id, cache=False): |
|
336 | 336 | r = self.sa.query(Repository)\ |
|
337 | 337 | .filter(Repository.repo_name == repo_name).scalar() |
|
338 | 338 | |
|
339 | 339 | f = self.sa.query(UserFollowing)\ |
|
340 | 340 | .filter(UserFollowing.follows_repository == r)\ |
|
341 | 341 | .filter(UserFollowing.user_id == user_id).scalar() |
|
342 | 342 | |
|
343 | 343 | return f is not None |
|
344 | 344 | |
|
345 | 345 | def is_following_user(self, username, user_id, cache=False): |
|
346 | 346 | u = User.get_by_username(username) |
|
347 | 347 | |
|
348 | 348 | f = self.sa.query(UserFollowing)\ |
|
349 | 349 | .filter(UserFollowing.follows_user == u)\ |
|
350 | 350 | .filter(UserFollowing.user_id == user_id).scalar() |
|
351 | 351 | |
|
352 | 352 | return f is not None |
|
353 | 353 | |
|
354 | 354 | def get_followers(self, repo): |
|
355 | 355 | repo = self._get_repo(repo) |
|
356 | 356 | |
|
357 | 357 | return self.sa.query(UserFollowing)\ |
|
358 | 358 | .filter(UserFollowing.follows_repository == repo).count() |
|
359 | 359 | |
|
360 | 360 | def get_forks(self, repo): |
|
361 | 361 | repo = self._get_repo(repo) |
|
362 | 362 | return self.sa.query(Repository)\ |
|
363 | 363 | .filter(Repository.fork == repo).count() |
|
364 | 364 | |
|
365 | 365 | def get_pull_requests(self, repo): |
|
366 | 366 | repo = self._get_repo(repo) |
|
367 | 367 | return self.sa.query(PullRequest)\ |
|
368 | 368 | .filter(PullRequest.target_repo == repo)\ |
|
369 | 369 | .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count() |
|
370 | 370 | |
|
371 | 371 | def mark_as_fork(self, repo, fork, user): |
|
372 | 372 | repo = self._get_repo(repo) |
|
373 | 373 | fork = self._get_repo(fork) |
|
374 | 374 | if fork and repo.repo_id == fork.repo_id: |
|
375 | 375 | raise Exception("Cannot set repository as fork of itself") |
|
376 | 376 | |
|
377 | 377 | if fork and repo.repo_type != fork.repo_type: |
|
378 | 378 | raise RepositoryError( |
|
379 | 379 | "Cannot set repository as fork of repository with other type") |
|
380 | 380 | |
|
381 | 381 | repo.fork = fork |
|
382 | 382 | self.sa.add(repo) |
|
383 | 383 | return repo |
|
384 | 384 | |
|
385 | 385 | def pull_changes(self, repo, username): |
|
386 | 386 | dbrepo = self._get_repo(repo) |
|
387 | 387 | clone_uri = dbrepo.clone_uri |
|
388 | 388 | if not clone_uri: |
|
389 | 389 | raise Exception("This repository doesn't have a clone uri") |
|
390 | 390 | |
|
391 | 391 | repo = dbrepo.scm_instance(cache=False) |
|
392 | 392 | # TODO: marcink fix this an re-enable since we need common logic |
|
393 | 393 | # for hg/git remove hooks so we don't trigger them on fetching |
|
394 | 394 | # commits from remote |
|
395 | 395 | repo.config.clear_section('hooks') |
|
396 | 396 | |
|
397 | 397 | repo_name = dbrepo.repo_name |
|
398 | 398 | try: |
|
399 | 399 | # TODO: we need to make sure those operations call proper hooks ! |
|
400 | 400 | repo.pull(clone_uri) |
|
401 | 401 | |
|
402 | 402 | self.mark_for_invalidation(repo_name) |
|
403 | 403 | except Exception: |
|
404 | 404 | log.error(traceback.format_exc()) |
|
405 | 405 | raise |
|
406 | 406 | |
|
407 | 407 | def commit_change(self, repo, repo_name, commit, user, author, message, |
|
408 | 408 | content, f_path): |
|
409 | 409 | """ |
|
410 | 410 | Commits changes |
|
411 | 411 | |
|
412 | 412 | :param repo: SCM instance |
|
413 | 413 | |
|
414 | 414 | """ |
|
415 | 415 | user = self._get_user(user) |
|
416 | 416 | |
|
417 | 417 | # decoding here will force that we have proper encoded values |
|
418 | 418 | # in any other case this will throw exceptions and deny commit |
|
419 | 419 | content = safe_str(content) |
|
420 | 420 | path = safe_str(f_path) |
|
421 | 421 | # message and author needs to be unicode |
|
422 | 422 | # proper backend should then translate that into required type |
|
423 | 423 | message = safe_unicode(message) |
|
424 | 424 | author = safe_unicode(author) |
|
425 | 425 | imc = repo.in_memory_commit |
|
426 | 426 | imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path))) |
|
427 | 427 | try: |
|
428 | 428 | # TODO: handle pre-push action ! |
|
429 | 429 | tip = imc.commit( |
|
430 | 430 | message=message, author=author, parents=[commit], |
|
431 | 431 | branch=commit.branch) |
|
432 | 432 | except Exception as e: |
|
433 | 433 | log.error(traceback.format_exc()) |
|
434 | 434 | raise IMCCommitError(str(e)) |
|
435 | 435 | finally: |
|
436 | 436 | # always clear caches, if commit fails we want fresh object also |
|
437 | 437 | self.mark_for_invalidation(repo_name) |
|
438 | 438 | |
|
439 | 439 | # We trigger the post-push action |
|
440 | 440 | hooks_utils.trigger_post_push_hook( |
|
441 | 441 | username=user.username, action='push_local', repo_name=repo_name, |
|
442 | 442 | repo_alias=repo.alias, commit_ids=[tip.raw_id]) |
|
443 | 443 | return tip |
|
444 | 444 | |
|
445 | 445 | def _sanitize_path(self, f_path): |
|
446 | 446 | if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path: |
|
447 | 447 | raise NonRelativePathError('%s is not an relative path' % f_path) |
|
448 | 448 | if f_path: |
|
449 | 449 | f_path = os.path.normpath(f_path) |
|
450 | 450 | return f_path |
|
451 | 451 | |
|
452 | 452 | def get_dirnode_metadata(self, commit, dir_node): |
|
453 | 453 | if not dir_node.is_dir(): |
|
454 | 454 | return [] |
|
455 | 455 | |
|
456 | 456 | data = [] |
|
457 | 457 | for node in dir_node: |
|
458 | 458 | if not node.is_file(): |
|
459 | 459 | # we skip file-nodes |
|
460 | 460 | continue |
|
461 | 461 | |
|
462 | 462 | last_commit = node.last_commit |
|
463 | 463 | last_commit_date = last_commit.date |
|
464 | 464 | data.append({ |
|
465 | 465 | 'name': node.name, |
|
466 | 466 | 'size': h.format_byte_size_binary(node.size), |
|
467 | 467 | 'modified_at': h.format_date(last_commit_date), |
|
468 | 468 | 'modified_ts': last_commit_date.isoformat(), |
|
469 | 469 | 'revision': last_commit.revision, |
|
470 | 470 | 'short_id': last_commit.short_id, |
|
471 | 471 | 'message': h.escape(last_commit.message), |
|
472 | 472 | 'author': h.escape(last_commit.author), |
|
473 | 473 | 'user_profile': h.gravatar_with_user(last_commit.author), |
|
474 | 474 | }) |
|
475 | 475 | |
|
476 | 476 | return data |
|
477 | 477 | |
|
478 | 478 | def get_nodes(self, repo_name, commit_id, root_path='/', flat=True, |
|
479 | 479 | extended_info=False, content=False, max_file_bytes=None): |
|
480 | 480 | """ |
|
481 | 481 | recursive walk in root dir and return a set of all path in that dir |
|
482 | 482 | based on repository walk function |
|
483 | 483 | |
|
484 | 484 | :param repo_name: name of repository |
|
485 | 485 | :param commit_id: commit id for which to list nodes |
|
486 | 486 | :param root_path: root path to list |
|
487 | 487 | :param flat: return as a list, if False returns a dict with description |
|
488 | 488 | :param max_file_bytes: will not return file contents over this limit |
|
489 | 489 | |
|
490 | 490 | """ |
|
491 | 491 | _files = list() |
|
492 | 492 | _dirs = list() |
|
493 | 493 | try: |
|
494 | 494 | _repo = self._get_repo(repo_name) |
|
495 | 495 | commit = _repo.scm_instance().get_commit(commit_id=commit_id) |
|
496 | 496 | root_path = root_path.lstrip('/') |
|
497 | 497 | for __, dirs, files in commit.walk(root_path): |
|
498 | 498 | for f in files: |
|
499 | 499 | _content = None |
|
500 | 500 | _data = f.unicode_path |
|
501 | 501 | over_size_limit = (max_file_bytes is not None |
|
502 | 502 | and f.size > max_file_bytes) |
|
503 | 503 | |
|
504 | 504 | if not flat: |
|
505 | 505 | _data = { |
|
506 | 506 | "name": f.unicode_path, |
|
507 | 507 | "type": "file", |
|
508 | 508 | } |
|
509 | 509 | if extended_info: |
|
510 | 510 | _data.update({ |
|
511 | 511 | "md5": f.md5, |
|
512 | 512 | "binary": f.is_binary, |
|
513 | 513 | "size": f.size, |
|
514 | 514 | "extension": f.extension, |
|
515 | 515 | "mimetype": f.mimetype, |
|
516 | 516 | "lines": f.lines()[0] |
|
517 | 517 | }) |
|
518 | 518 | |
|
519 | 519 | if content: |
|
520 | 520 | full_content = None |
|
521 | 521 | if not f.is_binary and not over_size_limit: |
|
522 | 522 | full_content = safe_str(f.content) |
|
523 | 523 | |
|
524 | 524 | _data.update({ |
|
525 | 525 | "content": full_content, |
|
526 | 526 | }) |
|
527 | 527 | _files.append(_data) |
|
528 | 528 | for d in dirs: |
|
529 | 529 | _data = d.unicode_path |
|
530 | 530 | if not flat: |
|
531 | 531 | _data = { |
|
532 | 532 | "name": d.unicode_path, |
|
533 | 533 | "type": "dir", |
|
534 | 534 | } |
|
535 | 535 | if extended_info: |
|
536 | 536 | _data.update({ |
|
537 | 537 | "md5": None, |
|
538 | 538 | "binary": None, |
|
539 | 539 | "size": None, |
|
540 | 540 | "extension": None, |
|
541 | 541 | }) |
|
542 | 542 | if content: |
|
543 | 543 | _data.update({ |
|
544 | 544 | "content": None |
|
545 | 545 | }) |
|
546 | 546 | _dirs.append(_data) |
|
547 | 547 | except RepositoryError: |
|
548 | 548 | log.debug("Exception in get_nodes", exc_info=True) |
|
549 | 549 | raise |
|
550 | 550 | |
|
551 | 551 | return _dirs, _files |
|
552 | 552 | |
|
553 | 553 | def create_nodes(self, user, repo, message, nodes, parent_commit=None, |
|
554 | 554 | author=None, trigger_push_hook=True): |
|
555 | 555 | """ |
|
556 | 556 | Commits given multiple nodes into repo |
|
557 | 557 | |
|
558 | 558 | :param user: RhodeCode User object or user_id, the commiter |
|
559 | 559 | :param repo: RhodeCode Repository object |
|
560 | 560 | :param message: commit message |
|
561 | 561 | :param nodes: mapping {filename:{'content':content},...} |
|
562 | 562 | :param parent_commit: parent commit, can be empty than it's |
|
563 | 563 | initial commit |
|
564 | 564 | :param author: author of commit, cna be different that commiter |
|
565 | 565 | only for git |
|
566 | 566 | :param trigger_push_hook: trigger push hooks |
|
567 | 567 | |
|
568 | 568 | :returns: new commited commit |
|
569 | 569 | """ |
|
570 | 570 | |
|
571 | 571 | user = self._get_user(user) |
|
572 | 572 | scm_instance = repo.scm_instance(cache=False) |
|
573 | 573 | |
|
574 | 574 | processed_nodes = [] |
|
575 | 575 | for f_path in nodes: |
|
576 | 576 | f_path = self._sanitize_path(f_path) |
|
577 | 577 | content = nodes[f_path]['content'] |
|
578 | 578 | f_path = safe_str(f_path) |
|
579 | 579 | # decoding here will force that we have proper encoded values |
|
580 | 580 | # in any other case this will throw exceptions and deny commit |
|
581 | 581 | if isinstance(content, (basestring,)): |
|
582 | 582 | content = safe_str(content) |
|
583 | 583 | elif isinstance(content, (file, cStringIO.OutputType,)): |
|
584 | 584 | content = content.read() |
|
585 | 585 | else: |
|
586 | 586 | raise Exception('Content is of unrecognized type %s' % ( |
|
587 | 587 | type(content) |
|
588 | 588 | )) |
|
589 | 589 | processed_nodes.append((f_path, content)) |
|
590 | 590 | |
|
591 | 591 | message = safe_unicode(message) |
|
592 | 592 | commiter = user.full_contact |
|
593 | 593 | author = safe_unicode(author) if author else commiter |
|
594 | 594 | |
|
595 | 595 | imc = scm_instance.in_memory_commit |
|
596 | 596 | |
|
597 | 597 | if not parent_commit: |
|
598 | 598 | parent_commit = EmptyCommit(alias=scm_instance.alias) |
|
599 | 599 | |
|
600 | 600 | if isinstance(parent_commit, EmptyCommit): |
|
601 | 601 | # EmptyCommit means we we're editing empty repository |
|
602 | 602 | parents = None |
|
603 | 603 | else: |
|
604 | 604 | parents = [parent_commit] |
|
605 | 605 | # add multiple nodes |
|
606 | 606 | for path, content in processed_nodes: |
|
607 | 607 | imc.add(FileNode(path, content=content)) |
|
608 | 608 | # TODO: handle pre push scenario |
|
609 | 609 | tip = imc.commit(message=message, |
|
610 | 610 | author=author, |
|
611 | 611 | parents=parents, |
|
612 | 612 | branch=parent_commit.branch) |
|
613 | 613 | |
|
614 | 614 | self.mark_for_invalidation(repo.repo_name) |
|
615 | 615 | if trigger_push_hook: |
|
616 | 616 | hooks_utils.trigger_post_push_hook( |
|
617 | 617 | username=user.username, action='push_local', |
|
618 | 618 | repo_name=repo.repo_name, repo_alias=scm_instance.alias, |
|
619 | 619 | commit_ids=[tip.raw_id]) |
|
620 | 620 | return tip |
|
621 | 621 | |
|
622 | 622 | def update_nodes(self, user, repo, message, nodes, parent_commit=None, |
|
623 | 623 | author=None, trigger_push_hook=True): |
|
624 | 624 | user = self._get_user(user) |
|
625 | 625 | scm_instance = repo.scm_instance(cache=False) |
|
626 | 626 | |
|
627 | 627 | message = safe_unicode(message) |
|
628 | 628 | commiter = user.full_contact |
|
629 | 629 | author = safe_unicode(author) if author else commiter |
|
630 | 630 | |
|
631 | 631 | imc = scm_instance.in_memory_commit |
|
632 | 632 | |
|
633 | 633 | if not parent_commit: |
|
634 | 634 | parent_commit = EmptyCommit(alias=scm_instance.alias) |
|
635 | 635 | |
|
636 | 636 | if isinstance(parent_commit, EmptyCommit): |
|
637 | 637 | # EmptyCommit means we we're editing empty repository |
|
638 | 638 | parents = None |
|
639 | 639 | else: |
|
640 | 640 | parents = [parent_commit] |
|
641 | 641 | |
|
642 | 642 | # add multiple nodes |
|
643 | 643 | for _filename, data in nodes.items(): |
|
644 | 644 | # new filename, can be renamed from the old one, also sanitaze |
|
645 | 645 | # the path for any hack around relative paths like ../../ etc. |
|
646 | 646 | filename = self._sanitize_path(data['filename']) |
|
647 | 647 | old_filename = self._sanitize_path(_filename) |
|
648 | 648 | content = data['content'] |
|
649 | 649 | |
|
650 | 650 | filenode = FileNode(old_filename, content=content) |
|
651 | 651 | op = data['op'] |
|
652 | 652 | if op == 'add': |
|
653 | 653 | imc.add(filenode) |
|
654 | 654 | elif op == 'del': |
|
655 | 655 | imc.remove(filenode) |
|
656 | 656 | elif op == 'mod': |
|
657 | 657 | if filename != old_filename: |
|
658 | 658 | # TODO: handle renames more efficient, needs vcs lib |
|
659 | 659 | # changes |
|
660 | 660 | imc.remove(filenode) |
|
661 | 661 | imc.add(FileNode(filename, content=content)) |
|
662 | 662 | else: |
|
663 | 663 | imc.change(filenode) |
|
664 | 664 | |
|
665 | 665 | try: |
|
666 | 666 | # TODO: handle pre push scenario |
|
667 | 667 | # commit changes |
|
668 | 668 | tip = imc.commit(message=message, |
|
669 | 669 | author=author, |
|
670 | 670 | parents=parents, |
|
671 | 671 | branch=parent_commit.branch) |
|
672 | 672 | except NodeNotChangedError: |
|
673 | 673 | raise |
|
674 | 674 | except Exception as e: |
|
675 | 675 | log.exception("Unexpected exception during call to imc.commit") |
|
676 | 676 | raise IMCCommitError(str(e)) |
|
677 | 677 | finally: |
|
678 | 678 | # always clear caches, if commit fails we want fresh object also |
|
679 | 679 | self.mark_for_invalidation(repo.repo_name) |
|
680 | 680 | |
|
681 | 681 | if trigger_push_hook: |
|
682 | 682 | hooks_utils.trigger_post_push_hook( |
|
683 | 683 | username=user.username, action='push_local', |
|
684 | 684 | repo_name=repo.repo_name, repo_alias=scm_instance.alias, |
|
685 | 685 | commit_ids=[tip.raw_id]) |
|
686 | 686 | |
|
687 | 687 | def delete_nodes(self, user, repo, message, nodes, parent_commit=None, |
|
688 | 688 | author=None, trigger_push_hook=True): |
|
689 | 689 | """ |
|
690 | 690 | Deletes given multiple nodes into `repo` |
|
691 | 691 | |
|
692 | 692 | :param user: RhodeCode User object or user_id, the committer |
|
693 | 693 | :param repo: RhodeCode Repository object |
|
694 | 694 | :param message: commit message |
|
695 | 695 | :param nodes: mapping {filename:{'content':content},...} |
|
696 | 696 | :param parent_commit: parent commit, can be empty than it's initial |
|
697 | 697 | commit |
|
698 | 698 | :param author: author of commit, cna be different that commiter only |
|
699 | 699 | for git |
|
700 | 700 | :param trigger_push_hook: trigger push hooks |
|
701 | 701 | |
|
702 | 702 | :returns: new commit after deletion |
|
703 | 703 | """ |
|
704 | 704 | |
|
705 | 705 | user = self._get_user(user) |
|
706 | 706 | scm_instance = repo.scm_instance(cache=False) |
|
707 | 707 | |
|
708 | 708 | processed_nodes = [] |
|
709 | 709 | for f_path in nodes: |
|
710 | 710 | f_path = self._sanitize_path(f_path) |
|
711 | 711 | # content can be empty but for compatabilty it allows same dicts |
|
712 | 712 | # structure as add_nodes |
|
713 | 713 | content = nodes[f_path].get('content') |
|
714 | 714 | processed_nodes.append((f_path, content)) |
|
715 | 715 | |
|
716 | 716 | message = safe_unicode(message) |
|
717 | 717 | commiter = user.full_contact |
|
718 | 718 | author = safe_unicode(author) if author else commiter |
|
719 | 719 | |
|
720 | 720 | imc = scm_instance.in_memory_commit |
|
721 | 721 | |
|
722 | 722 | if not parent_commit: |
|
723 | 723 | parent_commit = EmptyCommit(alias=scm_instance.alias) |
|
724 | 724 | |
|
725 | 725 | if isinstance(parent_commit, EmptyCommit): |
|
726 | 726 | # EmptyCommit means we we're editing empty repository |
|
727 | 727 | parents = None |
|
728 | 728 | else: |
|
729 | 729 | parents = [parent_commit] |
|
730 | 730 | # add multiple nodes |
|
731 | 731 | for path, content in processed_nodes: |
|
732 | 732 | imc.remove(FileNode(path, content=content)) |
|
733 | 733 | |
|
734 | 734 | # TODO: handle pre push scenario |
|
735 | 735 | tip = imc.commit(message=message, |
|
736 | 736 | author=author, |
|
737 | 737 | parents=parents, |
|
738 | 738 | branch=parent_commit.branch) |
|
739 | 739 | |
|
740 | 740 | self.mark_for_invalidation(repo.repo_name) |
|
741 | 741 | if trigger_push_hook: |
|
742 | 742 | hooks_utils.trigger_post_push_hook( |
|
743 | 743 | username=user.username, action='push_local', |
|
744 | 744 | repo_name=repo.repo_name, repo_alias=scm_instance.alias, |
|
745 | 745 | commit_ids=[tip.raw_id]) |
|
746 | 746 | return tip |
|
747 | 747 | |
|
748 | 748 | def strip(self, repo, commit_id, branch): |
|
749 | 749 | scm_instance = repo.scm_instance(cache=False) |
|
750 | 750 | scm_instance.config.clear_section('hooks') |
|
751 | 751 | scm_instance.strip(commit_id, branch) |
|
752 | 752 | self.mark_for_invalidation(repo.repo_name) |
|
753 | 753 | |
|
754 | 754 | def get_unread_journal(self): |
|
755 | 755 | return self.sa.query(UserLog).count() |
|
756 | 756 | |
|
757 | 757 | def get_repo_landing_revs(self, repo=None): |
|
758 | 758 | """ |
|
759 | 759 | Generates select option with tags branches and bookmarks (for hg only) |
|
760 | 760 | grouped by type |
|
761 | 761 | |
|
762 | 762 | :param repo: |
|
763 | 763 | """ |
|
764 | 764 | |
|
765 | hist_l = [] | |
|
766 | choices = [] | |
|
767 | 765 | repo = self._get_repo(repo) |
|
768 | hist_l.append(['rev:tip', _('latest tip')]) | |
|
769 | choices.append('rev:tip') | |
|
766 | ||
|
767 | hist_l = [ | |
|
768 | ['rev:tip', _('latest tip')] | |
|
769 | ] | |
|
770 | choices = [ | |
|
771 | 'rev:tip' | |
|
772 | ] | |
|
773 | ||
|
770 | 774 | if not repo: |
|
771 | 775 | return choices, hist_l |
|
772 | 776 | |
|
773 | 777 | repo = repo.scm_instance() |
|
774 | 778 | |
|
775 | 779 | branches_group = ( |
|
776 | 780 | [(u'branch:%s' % safe_unicode(b), safe_unicode(b)) |
|
777 | 781 | for b in repo.branches], |
|
778 | 782 | _("Branches")) |
|
779 | 783 | hist_l.append(branches_group) |
|
780 | 784 | choices.extend([x[0] for x in branches_group[0]]) |
|
781 | 785 | |
|
782 | 786 | if repo.alias == 'hg': |
|
783 | 787 | bookmarks_group = ( |
|
784 | 788 | [(u'book:%s' % safe_unicode(b), safe_unicode(b)) |
|
785 | 789 | for b in repo.bookmarks], |
|
786 | 790 | _("Bookmarks")) |
|
787 | 791 | hist_l.append(bookmarks_group) |
|
788 | 792 | choices.extend([x[0] for x in bookmarks_group[0]]) |
|
789 | 793 | |
|
790 | 794 | tags_group = ( |
|
791 | 795 | [(u'tag:%s' % safe_unicode(t), safe_unicode(t)) |
|
792 | 796 | for t in repo.tags], |
|
793 | 797 | _("Tags")) |
|
794 | 798 | hist_l.append(tags_group) |
|
795 | 799 | choices.extend([x[0] for x in tags_group[0]]) |
|
796 | 800 | |
|
797 | 801 | return choices, hist_l |
|
798 | 802 | |
|
799 | 803 | def install_git_hook(self, repo, force_create=False): |
|
800 | 804 | """ |
|
801 | 805 | Creates a rhodecode hook inside a git repository |
|
802 | 806 | |
|
803 | 807 | :param repo: Instance of VCS repo |
|
804 | 808 | :param force_create: Create even if same name hook exists |
|
805 | 809 | """ |
|
806 | 810 | |
|
807 | 811 | loc = os.path.join(repo.path, 'hooks') |
|
808 | 812 | if not repo.bare: |
|
809 | 813 | loc = os.path.join(repo.path, '.git', 'hooks') |
|
810 | 814 | if not os.path.isdir(loc): |
|
811 | 815 | os.makedirs(loc, mode=0777) |
|
812 | 816 | |
|
813 | 817 | tmpl_post = pkg_resources.resource_string( |
|
814 | 818 | 'rhodecode', '/'.join( |
|
815 | 819 | ('config', 'hook_templates', 'git_post_receive.py.tmpl'))) |
|
816 | 820 | tmpl_pre = pkg_resources.resource_string( |
|
817 | 821 | 'rhodecode', '/'.join( |
|
818 | 822 | ('config', 'hook_templates', 'git_pre_receive.py.tmpl'))) |
|
819 | 823 | |
|
820 | 824 | for h_type, tmpl in [('pre', tmpl_pre), ('post', tmpl_post)]: |
|
821 | 825 | _hook_file = os.path.join(loc, '%s-receive' % h_type) |
|
822 | 826 | log.debug('Installing git hook in repo %s', repo) |
|
823 | 827 | _rhodecode_hook = _check_rhodecode_hook(_hook_file) |
|
824 | 828 | |
|
825 | 829 | if _rhodecode_hook or force_create: |
|
826 | 830 | log.debug('writing %s hook file !', h_type) |
|
827 | 831 | try: |
|
828 | 832 | with open(_hook_file, 'wb') as f: |
|
829 | 833 | tmpl = tmpl.replace('_TMPL_', rhodecode.__version__) |
|
830 | 834 | tmpl = tmpl.replace('_ENV_', sys.executable) |
|
831 | 835 | f.write(tmpl) |
|
832 | 836 | os.chmod(_hook_file, 0755) |
|
833 | 837 | except IOError: |
|
834 | 838 | log.exception('error writing hook file %s', _hook_file) |
|
835 | 839 | else: |
|
836 | 840 | log.debug('skipping writing hook file') |
|
837 | 841 | |
|
838 | 842 | def install_svn_hooks(self, repo, force_create=False): |
|
839 | 843 | """ |
|
840 | 844 | Creates rhodecode hooks inside a svn repository |
|
841 | 845 | |
|
842 | 846 | :param repo: Instance of VCS repo |
|
843 | 847 | :param force_create: Create even if same name hook exists |
|
844 | 848 | """ |
|
845 | 849 | hooks_path = os.path.join(repo.path, 'hooks') |
|
846 | 850 | if not os.path.isdir(hooks_path): |
|
847 | 851 | os.makedirs(hooks_path) |
|
848 | 852 | post_commit_tmpl = pkg_resources.resource_string( |
|
849 | 853 | 'rhodecode', '/'.join( |
|
850 | 854 | ('config', 'hook_templates', 'svn_post_commit_hook.py.tmpl'))) |
|
851 | 855 | pre_commit_template = pkg_resources.resource_string( |
|
852 | 856 | 'rhodecode', '/'.join( |
|
853 | 857 | ('config', 'hook_templates', 'svn_pre_commit_hook.py.tmpl'))) |
|
854 | 858 | templates = { |
|
855 | 859 | 'post-commit': post_commit_tmpl, |
|
856 | 860 | 'pre-commit': pre_commit_template |
|
857 | 861 | } |
|
858 | 862 | for filename in templates: |
|
859 | 863 | _hook_file = os.path.join(hooks_path, filename) |
|
860 | 864 | _rhodecode_hook = _check_rhodecode_hook(_hook_file) |
|
861 | 865 | if _rhodecode_hook or force_create: |
|
862 | 866 | log.debug('writing %s hook file !', filename) |
|
863 | 867 | template = templates[filename] |
|
864 | 868 | try: |
|
865 | 869 | with open(_hook_file, 'wb') as f: |
|
866 | 870 | template = template.replace( |
|
867 | 871 | '_TMPL_', rhodecode.__version__) |
|
868 | 872 | template = template.replace('_ENV_', sys.executable) |
|
869 | 873 | f.write(template) |
|
870 | 874 | os.chmod(_hook_file, 0755) |
|
871 | 875 | except IOError: |
|
872 | 876 | log.exception('error writing hook file %s', filename) |
|
873 | 877 | else: |
|
874 | 878 | log.debug('skipping writing hook file') |
|
875 | 879 | |
|
876 | 880 | def install_hooks(self, repo, repo_type): |
|
877 | 881 | if repo_type == 'git': |
|
878 | 882 | self.install_git_hook(repo) |
|
879 | 883 | elif repo_type == 'svn': |
|
880 | 884 | self.install_svn_hooks(repo) |
|
881 | 885 | |
|
882 | 886 | def get_server_info(self, environ=None): |
|
883 | 887 | server_info = get_system_info(environ) |
|
884 | 888 | return server_info |
|
885 | 889 | |
|
886 | 890 | |
|
887 | 891 | def _check_rhodecode_hook(hook_path): |
|
888 | 892 | """ |
|
889 | 893 | Check if the hook was created by RhodeCode |
|
890 | 894 | """ |
|
891 | 895 | if not os.path.exists(hook_path): |
|
892 | 896 | return True |
|
893 | 897 | |
|
894 | 898 | log.debug('hook exists, checking if it is from rhodecode') |
|
895 | 899 | hook_content = _read_hook(hook_path) |
|
896 | 900 | matches = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content) |
|
897 | 901 | if matches: |
|
898 | 902 | try: |
|
899 | 903 | version = matches.groups()[0] |
|
900 | 904 | log.debug('got %s, it is rhodecode', version) |
|
901 | 905 | return True |
|
902 | 906 | except Exception: |
|
903 | 907 | log.exception("Exception while reading the hook version.") |
|
904 | 908 | |
|
905 | 909 | return False |
|
906 | 910 | |
|
907 | 911 | |
|
908 | 912 | def _read_hook(hook_path): |
|
909 | 913 | with open(hook_path, 'rb') as f: |
|
910 | 914 | content = f.read() |
|
911 | 915 | return content |
@@ -1,89 +1,88 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2016 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import unicodedata |
|
22 | 22 | |
|
23 | 23 | |
|
24 | ||
|
25 | 24 | def strip_preparer(value): |
|
26 | 25 | """ |
|
27 | 26 | strips given values using .strip() function |
|
28 | 27 | """ |
|
29 | 28 | |
|
30 | 29 | if value: |
|
31 | 30 | value = value.strip() |
|
32 | 31 | return value |
|
33 | 32 | |
|
34 | 33 | |
|
35 | 34 | def slugify_preparer(value): |
|
36 | 35 | """ |
|
37 | 36 | Slugify given value to a safe representation for url/id |
|
38 | 37 | """ |
|
39 | 38 | from rhodecode.lib.utils import repo_name_slug |
|
40 | 39 | if value: |
|
41 | 40 | value = repo_name_slug(value.lower()) |
|
42 | 41 | return value |
|
43 | 42 | |
|
44 | 43 | |
|
45 | 44 | def non_ascii_strip_preparer(value): |
|
46 | 45 | """ |
|
47 | 46 | trie to replace non-ascii letters to their ascii representation |
|
48 | 47 | eg:: |
|
49 | 48 | |
|
50 | 49 | `ΕΌoΕw` converts into `zolw` |
|
51 | 50 | """ |
|
52 | 51 | if value: |
|
53 | 52 | value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore') |
|
54 | 53 | return value |
|
55 | 54 | |
|
56 | 55 | |
|
57 | 56 | def unique_list_preparer(value): |
|
58 | 57 | """ |
|
59 | 58 | Converts an list to a list with only unique values |
|
60 | 59 | """ |
|
61 | 60 | |
|
62 | 61 | def make_unique(value): |
|
63 | 62 | seen = [] |
|
64 | 63 | return [c for c in value if |
|
65 | 64 | not (c in seen or seen.append(c))] |
|
66 | 65 | |
|
67 | 66 | if isinstance(value, list): |
|
68 | 67 | ret_val = make_unique(value) |
|
69 | 68 | elif isinstance(value, set): |
|
70 | 69 | ret_val = list(value) |
|
71 | 70 | elif isinstance(value, tuple): |
|
72 | 71 | ret_val = make_unique(value) |
|
73 | 72 | elif value is None: |
|
74 | 73 | ret_val = [] |
|
75 | 74 | else: |
|
76 | 75 | ret_val = [value] |
|
77 | 76 | |
|
78 | 77 | return ret_val |
|
79 | 78 | |
|
80 | 79 | |
|
81 | 80 | def unique_list_from_str_preparer(value): |
|
82 | 81 | """ |
|
83 | 82 | Converts an list to a list with only unique values |
|
84 | 83 | """ |
|
85 | 84 | from rhodecode.lib.utils2 import aslist |
|
86 | 85 | |
|
87 | 86 | if isinstance(value, basestring): |
|
88 | 87 | value = aslist(value, ',') |
|
89 | 88 | return unique_list_preparer(value) No newline at end of file |
@@ -1,27 +1,321 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2016 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import colander |
|
22 | 22 | |
|
23 | from rhodecode.translation import _ | |
|
23 | 24 | from rhodecode.model.validation_schema import validators, preparers, types |
|
24 | 25 | |
|
26 | DEFAULT_LANDING_REF = 'rev:tip' | |
|
27 | ||
|
28 | ||
|
29 | def get_group_and_repo(repo_name): | |
|
30 | from rhodecode.model.repo_group import RepoGroupModel | |
|
31 | return RepoGroupModel()._get_group_name_and_parent( | |
|
32 | repo_name, get_object=True) | |
|
33 | ||
|
34 | ||
|
35 | @colander.deferred | |
|
36 | def deferred_repo_type_validator(node, kw): | |
|
37 | options = kw.get('repo_type_options', []) | |
|
38 | return colander.OneOf([x for x in options]) | |
|
39 | ||
|
40 | ||
|
41 | @colander.deferred | |
|
42 | def deferred_repo_owner_validator(node, kw): | |
|
43 | ||
|
44 | def repo_owner_validator(node, value): | |
|
45 | from rhodecode.model.db import User | |
|
46 | existing = User.get_by_username(value) | |
|
47 | if not existing: | |
|
48 | msg = _(u'Repo owner with id `{}` does not exists').format(value) | |
|
49 | raise colander.Invalid(node, msg) | |
|
50 | ||
|
51 | return repo_owner_validator | |
|
52 | ||
|
53 | ||
|
54 | @colander.deferred | |
|
55 | def deferred_landing_ref_validator(node, kw): | |
|
56 | options = kw.get('repo_ref_options', [DEFAULT_LANDING_REF]) | |
|
57 | return colander.OneOf([x for x in options]) | |
|
58 | ||
|
59 | ||
|
60 | @colander.deferred | |
|
61 | def deferred_fork_of_validator(node, kw): | |
|
62 | old_values = kw.get('old_values') or {} | |
|
63 | ||
|
64 | def fork_of_validator(node, value): | |
|
65 | from rhodecode.model.db import Repository, RepoGroup | |
|
66 | existing = Repository.get_by_repo_name(value) | |
|
67 | if not existing: | |
|
68 | msg = _(u'Fork with id `{}` does not exists').format(value) | |
|
69 | raise colander.Invalid(node, msg) | |
|
70 | elif old_values['repo_name'] == existing.repo_name: | |
|
71 | msg = _(u'Cannot set fork of ' | |
|
72 | u'parameter of this repository to itself').format(value) | |
|
73 | raise colander.Invalid(node, msg) | |
|
74 | ||
|
75 | return fork_of_validator | |
|
76 | ||
|
77 | ||
|
78 | @colander.deferred | |
|
79 | def deferred_can_write_to_group_validator(node, kw): | |
|
80 | request_user = kw.get('user') | |
|
81 | old_values = kw.get('old_values') or {} | |
|
82 | ||
|
83 | def can_write_to_group_validator(node, value): | |
|
84 | """ | |
|
85 | Checks if given repo path is writable by user. This includes checks if | |
|
86 | user is allowed to create repositories under root path or under | |
|
87 | repo group paths | |
|
88 | """ | |
|
89 | ||
|
90 | from rhodecode.lib.auth import ( | |
|
91 | HasPermissionAny, HasRepoGroupPermissionAny) | |
|
92 | from rhodecode.model.repo_group import RepoGroupModel | |
|
93 | ||
|
94 | messages = { | |
|
95 | 'invalid_repo_group': | |
|
96 | _(u"Repository group `{}` does not exist"), | |
|
97 | # permissions denied we expose as not existing, to prevent | |
|
98 | # resource discovery | |
|
99 | 'permission_denied': | |
|
100 | _(u"Repository group `{}` does not exist"), | |
|
101 | 'permission_denied_root': | |
|
102 | _(u"You do not have the permission to store " | |
|
103 | u"repositories in the root location.") | |
|
104 | } | |
|
105 | ||
|
106 | value = value['repo_group_name'] | |
|
107 | ||
|
108 | is_root_location = value is types.RootLocation | |
|
109 | # NOT initialized validators, we must call them | |
|
110 | can_create_repos_at_root = HasPermissionAny( | |
|
111 | 'hg.admin', 'hg.create.repository') | |
|
112 | ||
|
113 | # if values is root location, we simply need to check if we can write | |
|
114 | # to root location ! | |
|
115 | if is_root_location: | |
|
116 | if can_create_repos_at_root(user=request_user): | |
|
117 | # we can create repo group inside tool-level. No more checks | |
|
118 | # are required | |
|
119 | return | |
|
120 | else: | |
|
121 | # "fake" node name as repo_name, otherwise we oddly report | |
|
122 | # the error as if it was coming form repo_group | |
|
123 | # however repo_group is empty when using root location. | |
|
124 | node.name = 'repo_name' | |
|
125 | raise colander.Invalid(node, messages['permission_denied_root']) | |
|
126 | ||
|
127 | # parent group not exists ? throw an error | |
|
128 | repo_group = RepoGroupModel().get_by_group_name(value) | |
|
129 | if value and not repo_group: | |
|
130 | raise colander.Invalid( | |
|
131 | node, messages['invalid_repo_group'].format(value)) | |
|
132 | ||
|
133 | gr_name = repo_group.group_name | |
|
134 | ||
|
135 | # create repositories with write permission on group is set to true | |
|
136 | create_on_write = HasPermissionAny( | |
|
137 | 'hg.create.write_on_repogroup.true')(user=request_user) | |
|
138 | ||
|
139 | group_admin = HasRepoGroupPermissionAny('group.admin')( | |
|
140 | gr_name, 'can write into group validator', user=request_user) | |
|
141 | group_write = HasRepoGroupPermissionAny('group.write')( | |
|
142 | gr_name, 'can write into group validator', user=request_user) | |
|
143 | ||
|
144 | forbidden = not (group_admin or (group_write and create_on_write)) | |
|
145 | ||
|
146 | # TODO: handling of old values, and detecting no-change in path | |
|
147 | # to skip permission checks in such cases. This only needs to be | |
|
148 | # implemented if we use this schema in forms as well | |
|
149 | ||
|
150 | # gid = (old_data['repo_group'].get('group_id') | |
|
151 | # if (old_data and 'repo_group' in old_data) else None) | |
|
152 | # value_changed = gid != safe_int(value) | |
|
153 | # new = not old_data | |
|
154 | ||
|
155 | # do check if we changed the value, there's a case that someone got | |
|
156 | # revoked write permissions to a repository, he still created, we | |
|
157 | # don't need to check permission if he didn't change the value of | |
|
158 | # groups in form box | |
|
159 | # if value_changed or new: | |
|
160 | # # parent group need to be existing | |
|
161 | # TODO: ENDS HERE | |
|
162 | ||
|
163 | if repo_group and forbidden: | |
|
164 | msg = messages['permission_denied'].format(value) | |
|
165 | raise colander.Invalid(node, msg) | |
|
166 | ||
|
167 | return can_write_to_group_validator | |
|
168 | ||
|
25 | 169 | |
|
26 | class RepoSchema(colander.Schema): | |
|
27 | repo_name = colander.SchemaNode(types.GroupNameType()) | |
|
170 | @colander.deferred | |
|
171 | def deferred_unique_name_validator(node, kw): | |
|
172 | request_user = kw.get('user') | |
|
173 | old_values = kw.get('old_values') or {} | |
|
174 | ||
|
175 | def unique_name_validator(node, value): | |
|
176 | from rhodecode.model.db import Repository, RepoGroup | |
|
177 | name_changed = value != old_values.get('repo_name') | |
|
178 | ||
|
179 | existing = Repository.get_by_repo_name(value) | |
|
180 | if name_changed and existing: | |
|
181 | msg = _(u'Repository with name `{}` already exists').format(value) | |
|
182 | raise colander.Invalid(node, msg) | |
|
183 | ||
|
184 | existing_group = RepoGroup.get_by_group_name(value) | |
|
185 | if name_changed and existing_group: | |
|
186 | msg = _(u'Repository group with name `{}` already exists').format( | |
|
187 | value) | |
|
188 | raise colander.Invalid(node, msg) | |
|
189 | return unique_name_validator | |
|
190 | ||
|
191 | ||
|
192 | @colander.deferred | |
|
193 | def deferred_repo_name_validator(node, kw): | |
|
194 | return validators.valid_name_validator | |
|
195 | ||
|
196 | ||
|
197 | class GroupType(colander.Mapping): | |
|
198 | def _validate(self, node, value): | |
|
199 | try: | |
|
200 | return dict(repo_group_name=value) | |
|
201 | except Exception as e: | |
|
202 | raise colander.Invalid( | |
|
203 | node, '"${val}" is not a mapping type: ${err}'.format( | |
|
204 | val=value, err=e)) | |
|
205 | ||
|
206 | def deserialize(self, node, cstruct): | |
|
207 | if cstruct is colander.null: | |
|
208 | return cstruct | |
|
209 | ||
|
210 | appstruct = super(GroupType, self).deserialize(node, cstruct) | |
|
211 | validated_name = appstruct['repo_group_name'] | |
|
212 | ||
|
213 | # inject group based on once deserialized data | |
|
214 | (repo_name_without_group, | |
|
215 | parent_group_name, | |
|
216 | parent_group) = get_group_and_repo(validated_name) | |
|
217 | ||
|
218 | appstruct['repo_name_without_group'] = repo_name_without_group | |
|
219 | appstruct['repo_group_name'] = parent_group_name or types.RootLocation | |
|
220 | if parent_group: | |
|
221 | appstruct['repo_group_id'] = parent_group.group_id | |
|
222 | ||
|
223 | return appstruct | |
|
224 | ||
|
225 | ||
|
226 | class GroupSchema(colander.SchemaNode): | |
|
227 | schema_type = GroupType | |
|
228 | validator = deferred_can_write_to_group_validator | |
|
229 | missing = colander.null | |
|
230 | ||
|
231 | ||
|
232 | class RepoGroup(GroupSchema): | |
|
233 | repo_group_name = colander.SchemaNode( | |
|
234 | types.GroupNameType()) | |
|
235 | repo_group_id = colander.SchemaNode( | |
|
236 | colander.String(), missing=None) | |
|
237 | repo_name_without_group = colander.SchemaNode( | |
|
238 | colander.String(), missing=None) | |
|
239 | ||
|
240 | ||
|
241 | class RepoGroupAccessSchema(colander.MappingSchema): | |
|
242 | repo_group = RepoGroup() | |
|
243 | ||
|
244 | ||
|
245 | class RepoNameUniqueSchema(colander.MappingSchema): | |
|
246 | unique_repo_name = colander.SchemaNode( | |
|
247 | colander.String(), | |
|
248 | validator=deferred_unique_name_validator) | |
|
249 | ||
|
250 | ||
|
251 | class RepoSchema(colander.MappingSchema): | |
|
252 | ||
|
253 | repo_name = colander.SchemaNode( | |
|
254 | types.RepoNameType(), | |
|
255 | validator=deferred_repo_name_validator) | |
|
256 | ||
|
257 | repo_type = colander.SchemaNode( | |
|
258 | colander.String(), | |
|
259 | validator=deferred_repo_type_validator) | |
|
260 | ||
|
261 | repo_owner = colander.SchemaNode( | |
|
262 | colander.String(), | |
|
263 | validator=deferred_repo_owner_validator) | |
|
264 | ||
|
265 | repo_description = colander.SchemaNode( | |
|
266 | colander.String(), missing='') | |
|
267 | ||
|
268 | repo_landing_commit_ref = colander.SchemaNode( | |
|
269 | colander.String(), | |
|
270 | validator=deferred_landing_ref_validator, | |
|
271 | preparers=[preparers.strip_preparer], | |
|
272 | missing=DEFAULT_LANDING_REF) | |
|
273 | ||
|
274 | repo_clone_uri = colander.SchemaNode( | |
|
275 | colander.String(), | |
|
276 | validator=colander.All(colander.Length(min=1)), | |
|
277 | preparers=[preparers.strip_preparer], | |
|
278 | missing='') | |
|
279 | ||
|
280 | repo_fork_of = colander.SchemaNode( | |
|
281 | colander.String(), | |
|
282 | validator=deferred_fork_of_validator, | |
|
283 | missing=None) | |
|
284 | ||
|
285 | repo_private = colander.SchemaNode( | |
|
286 | types.StringBooleanType(), | |
|
287 | missing=False) | |
|
288 | repo_copy_permissions = colander.SchemaNode( | |
|
289 | types.StringBooleanType(), | |
|
290 | missing=False) | |
|
291 | repo_enable_statistics = colander.SchemaNode( | |
|
292 | types.StringBooleanType(), | |
|
293 | missing=False) | |
|
294 | repo_enable_downloads = colander.SchemaNode( | |
|
295 | types.StringBooleanType(), | |
|
296 | missing=False) | |
|
297 | repo_enable_locking = colander.SchemaNode( | |
|
298 | types.StringBooleanType(), | |
|
299 | missing=False) | |
|
300 | ||
|
301 | def deserialize(self, cstruct): | |
|
302 | """ | |
|
303 | Custom deserialize that allows to chain validation, and verify | |
|
304 | permissions, and as last step uniqueness | |
|
305 | """ | |
|
306 | ||
|
307 | # first pass, to validate given data | |
|
308 | appstruct = super(RepoSchema, self).deserialize(cstruct) | |
|
309 | validated_name = appstruct['repo_name'] | |
|
310 | ||
|
311 | # second pass to validate permissions to repo_group | |
|
312 | second = RepoGroupAccessSchema().bind(**self.bindings) | |
|
313 | appstruct_second = second.deserialize({'repo_group': validated_name}) | |
|
314 | # save result | |
|
315 | appstruct['repo_group'] = appstruct_second['repo_group'] | |
|
316 | ||
|
317 | # thirds to validate uniqueness | |
|
318 | third = RepoNameUniqueSchema().bind(**self.bindings) | |
|
319 | third.deserialize({'unique_repo_name': validated_name}) | |
|
320 | ||
|
321 | return appstruct |
@@ -1,44 +1,43 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2016 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | |
|
22 | 22 | import colander |
|
23 | 23 | |
|
24 | 24 | |
|
25 | 25 | class SearchParamsSchema(colander.MappingSchema): |
|
26 | 26 | search_query = colander.SchemaNode( |
|
27 | 27 | colander.String(), |
|
28 | 28 | missing='') |
|
29 | 29 | search_type = colander.SchemaNode( |
|
30 | 30 | colander.String(), |
|
31 | 31 | missing='content', |
|
32 | 32 | validator=colander.OneOf(['content', 'path', 'commit', 'repository'])) |
|
33 | 33 | search_sort = colander.SchemaNode( |
|
34 | 34 | colander.String(), |
|
35 | 35 | missing='newfirst', |
|
36 | validator=colander.OneOf( | |
|
37 | ['oldfirst', 'newfirst'])) | |
|
36 | validator=colander.OneOf(['oldfirst', 'newfirst'])) | |
|
38 | 37 | page_limit = colander.SchemaNode( |
|
39 | 38 | colander.Integer(), |
|
40 | 39 | missing=10, |
|
41 | 40 | validator=colander.Range(1, 500)) |
|
42 | 41 | requested_page = colander.SchemaNode( |
|
43 | 42 | colander.Integer(), |
|
44 | 43 | missing=1) |
@@ -1,137 +1,188 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2016 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | import re | |
|
22 | ||
|
21 | 23 | import colander |
|
24 | from rhodecode.model.validation_schema import preparers | |
|
25 | from rhodecode.model.db import User, UserGroup | |
|
26 | ||
|
27 | ||
|
28 | class _RootLocation(object): | |
|
29 | pass | |
|
30 | ||
|
31 | RootLocation = _RootLocation() | |
|
32 | ||
|
33 | ||
|
34 | def _normalize(seperator, path): | |
|
35 | ||
|
36 | if not path: | |
|
37 | return '' | |
|
38 | elif path is colander.null: | |
|
39 | return colander.null | |
|
40 | ||
|
41 | parts = path.split(seperator) | |
|
22 | 42 | |
|
23 | from rhodecode.model.db import User, UserGroup | |
|
43 | def bad_parts(value): | |
|
44 | if not value: | |
|
45 | return False | |
|
46 | if re.match(r'^[.]+$', value): | |
|
47 | return False | |
|
48 | ||
|
49 | return True | |
|
50 | ||
|
51 | def slugify(value): | |
|
52 | value = preparers.slugify_preparer(value) | |
|
53 | value = re.sub(r'[.]{2,}', '.', value) | |
|
54 | return value | |
|
55 | ||
|
56 | clean_parts = [slugify(item) for item in parts if item] | |
|
57 | path = filter(bad_parts, clean_parts) | |
|
58 | return seperator.join(path) | |
|
59 | ||
|
60 | ||
|
61 | class RepoNameType(colander.String): | |
|
62 | SEPARATOR = '/' | |
|
63 | ||
|
64 | def deserialize(self, node, cstruct): | |
|
65 | result = super(RepoNameType, self).deserialize(node, cstruct) | |
|
66 | if cstruct is colander.null: | |
|
67 | return colander.null | |
|
68 | return self._normalize(result) | |
|
69 | ||
|
70 | def _normalize(self, path): | |
|
71 | return _normalize(self.SEPARATOR, path) | |
|
24 | 72 | |
|
25 | 73 | |
|
26 | 74 | class GroupNameType(colander.String): |
|
27 | 75 | SEPARATOR = '/' |
|
28 | 76 | |
|
29 | 77 | def deserialize(self, node, cstruct): |
|
30 | result = super(GroupNameType, self).deserialize(node, cstruct) | |
|
31 | return self._replace_extra_slashes(result) | |
|
78 | if cstruct is RootLocation: | |
|
79 | return cstruct | |
|
32 | 80 | |
|
33 | def _replace_extra_slashes(self, path): | |
|
34 | path = path.split(self.SEPARATOR) | |
|
35 | path = [item for item in path if item] | |
|
36 |
return self. |
|
|
81 | result = super(GroupNameType, self).deserialize(node, cstruct) | |
|
82 | if cstruct is colander.null: | |
|
83 | return colander.null | |
|
84 | return self._normalize(result) | |
|
85 | ||
|
86 | def _normalize(self, path): | |
|
87 | return _normalize(self.SEPARATOR, path) | |
|
37 | 88 | |
|
38 | 89 | |
|
39 | 90 | class StringBooleanType(colander.String): |
|
40 | 91 | true_values = ['true', 't', 'yes', 'y', 'on', '1'] |
|
41 | 92 | false_values = ['false', 'f', 'no', 'n', 'off', '0'] |
|
42 | 93 | |
|
43 | 94 | def serialize(self, node, appstruct): |
|
44 | 95 | if appstruct is colander.null: |
|
45 | 96 | return colander.null |
|
46 | 97 | if not isinstance(appstruct, bool): |
|
47 | 98 | raise colander.Invalid(node, '%r is not a boolean' % appstruct) |
|
48 | 99 | |
|
49 | 100 | return appstruct and 'true' or 'false' |
|
50 | 101 | |
|
51 | 102 | def deserialize(self, node, cstruct): |
|
52 | 103 | if cstruct is colander.null: |
|
53 | 104 | return colander.null |
|
54 | 105 | |
|
55 | 106 | if isinstance(cstruct, bool): |
|
56 | 107 | return cstruct |
|
57 | 108 | |
|
58 | 109 | if not isinstance(cstruct, basestring): |
|
59 | 110 | raise colander.Invalid(node, '%r is not a string' % cstruct) |
|
60 | 111 | |
|
61 | 112 | value = cstruct.lower() |
|
62 | 113 | if value in self.true_values: |
|
63 | 114 | return True |
|
64 | 115 | elif value in self.false_values: |
|
65 | 116 | return False |
|
66 | 117 | else: |
|
67 | 118 | raise colander.Invalid( |
|
68 | 119 | node, '{} value cannot be translated to bool'.format(value)) |
|
69 | 120 | |
|
70 | 121 | |
|
71 | 122 | class UserOrUserGroupType(colander.SchemaType): |
|
72 | 123 | """ colander Schema type for valid rhodecode user and/or usergroup """ |
|
73 | 124 | scopes = ('user', 'usergroup') |
|
74 | 125 | |
|
75 | 126 | def __init__(self): |
|
76 | 127 | self.users = 'user' in self.scopes |
|
77 | 128 | self.usergroups = 'usergroup' in self.scopes |
|
78 | 129 | |
|
79 | 130 | def serialize(self, node, appstruct): |
|
80 | 131 | if appstruct is colander.null: |
|
81 | 132 | return colander.null |
|
82 | 133 | |
|
83 | 134 | if self.users: |
|
84 | 135 | if isinstance(appstruct, User): |
|
85 | 136 | if self.usergroups: |
|
86 | 137 | return 'user:%s' % appstruct.username |
|
87 | 138 | return appstruct.username |
|
88 | 139 | |
|
89 | 140 | if self.usergroups: |
|
90 | 141 | if isinstance(appstruct, UserGroup): |
|
91 | 142 | if self.users: |
|
92 | 143 | return 'usergroup:%s' % appstruct.users_group_name |
|
93 | 144 | return appstruct.users_group_name |
|
94 | 145 | |
|
95 | 146 | raise colander.Invalid( |
|
96 | 147 | node, '%s is not a valid %s' % (appstruct, ' or '.join(self.scopes))) |
|
97 | 148 | |
|
98 | 149 | def deserialize(self, node, cstruct): |
|
99 | 150 | if cstruct is colander.null: |
|
100 | 151 | return colander.null |
|
101 | 152 | |
|
102 | 153 | user, usergroup = None, None |
|
103 | 154 | if self.users: |
|
104 | 155 | if cstruct.startswith('user:'): |
|
105 | 156 | user = User.get_by_username(cstruct.split(':')[1]) |
|
106 | 157 | else: |
|
107 | 158 | user = User.get_by_username(cstruct) |
|
108 | 159 | |
|
109 | 160 | if self.usergroups: |
|
110 | 161 | if cstruct.startswith('usergroup:'): |
|
111 | 162 | usergroup = UserGroup.get_by_group_name(cstruct.split(':')[1]) |
|
112 | 163 | else: |
|
113 | 164 | usergroup = UserGroup.get_by_group_name(cstruct) |
|
114 | 165 | |
|
115 | 166 | if self.users and self.usergroups: |
|
116 | 167 | if user and usergroup: |
|
117 | 168 | raise colander.Invalid(node, ( |
|
118 | 169 | '%s is both a user and usergroup, specify which ' |
|
119 | 170 | 'one was wanted by prepending user: or usergroup: to the ' |
|
120 | 171 | 'name') % cstruct) |
|
121 | 172 | |
|
122 | 173 | if self.users and user: |
|
123 | 174 | return user |
|
124 | 175 | |
|
125 | 176 | if self.usergroups and usergroup: |
|
126 | 177 | return usergroup |
|
127 | 178 | |
|
128 | 179 | raise colander.Invalid( |
|
129 | 180 | node, '%s is not a valid %s' % (cstruct, ' or '.join(self.scopes))) |
|
130 | 181 | |
|
131 | 182 | |
|
132 | 183 | class UserType(UserOrUserGroupType): |
|
133 | 184 | scopes = ('user',) |
|
134 | 185 | |
|
135 | 186 | |
|
136 | 187 | class UserGroupType(UserOrUserGroupType): |
|
137 | 188 | scopes = ('usergroup',) |
@@ -1,38 +1,48 b'' | |||
|
1 | 1 | import os |
|
2 | 2 | import re |
|
3 | 3 | |
|
4 | 4 | import ipaddress |
|
5 | 5 | import colander |
|
6 | 6 | |
|
7 | 7 | from rhodecode.translation import _ |
|
8 | 8 | from rhodecode.lib.utils2 import glob2re |
|
9 | 9 | |
|
10 | 10 | |
|
11 | 11 | def ip_addr_validator(node, value): |
|
12 | 12 | try: |
|
13 | 13 | # this raises an ValueError if address is not IpV4 or IpV6 |
|
14 | 14 | ipaddress.ip_network(value, strict=False) |
|
15 | 15 | except ValueError: |
|
16 | 16 | msg = _(u'Please enter a valid IPv4 or IpV6 address') |
|
17 | 17 | raise colander.Invalid(node, msg) |
|
18 | 18 | |
|
19 | 19 | |
|
20 | 20 | class IpAddrValidator(object): |
|
21 | 21 | def __init__(self, strict=True): |
|
22 | 22 | self.strict = strict |
|
23 | 23 | |
|
24 | 24 | def __call__(self, node, value): |
|
25 | 25 | try: |
|
26 | 26 | # this raises an ValueError if address is not IpV4 or IpV6 |
|
27 | 27 | ipaddress.ip_network(value, strict=self.strict) |
|
28 | 28 | except ValueError: |
|
29 | 29 | msg = _(u'Please enter a valid IPv4 or IpV6 address') |
|
30 | 30 | raise colander.Invalid(node, msg) |
|
31 | 31 | |
|
32 | 32 | |
|
33 | 33 | def glob_validator(node, value): |
|
34 | 34 | try: |
|
35 | 35 | re.compile('^' + glob2re(value) + '$') |
|
36 | 36 | except Exception: |
|
37 | 37 | msg = _(u'Invalid glob pattern') |
|
38 | 38 | raise colander.Invalid(node, msg) |
|
39 | ||
|
40 | ||
|
41 | def valid_name_validator(node, value): | |
|
42 | from rhodecode.model.validation_schema import types | |
|
43 | if value is types.RootLocation: | |
|
44 | return | |
|
45 | ||
|
46 | msg = _('Name must start with a letter or number. Got `{}`').format(value) | |
|
47 | if not re.match(r'^[a-zA-z0-9]{1,}', value): | |
|
48 | raise colander.Invalid(node, msg) |
@@ -1,171 +1,167 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2016 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import colander |
|
22 | 22 | import pytest |
|
23 | 23 | |
|
24 | from rhodecode.model import validation_schema | |
|
25 | ||
|
26 | 24 | from rhodecode.integrations import integration_type_registry |
|
27 | 25 | from rhodecode.integrations.types.base import IntegrationTypeBase |
|
28 | 26 | from rhodecode.model.validation_schema.schemas.integration_schema import ( |
|
29 | 27 | make_integration_schema |
|
30 | 28 | ) |
|
31 | 29 | |
|
32 | 30 | |
|
33 | 31 | @pytest.mark.usefixtures('app', 'autologin_user') |
|
34 | 32 | class TestIntegrationSchema(object): |
|
35 | 33 | |
|
36 |
def test_deserialize_integration_schema_perms( |
|
|
37 | test_repo_group, | |
|
38 | StubIntegrationType): | |
|
34 | def test_deserialize_integration_schema_perms( | |
|
35 | self, backend_random, test_repo_group, StubIntegrationType): | |
|
39 | 36 | |
|
40 | 37 | repo = backend_random.repo |
|
41 | 38 | repo_group = test_repo_group |
|
42 | 39 | |
|
43 | ||
|
44 | 40 | empty_perms_dict = { |
|
45 | 41 | 'global': [], |
|
46 | 42 | 'repositories': {}, |
|
47 | 43 | 'repositories_groups': {}, |
|
48 | 44 | } |
|
49 | 45 | |
|
50 | 46 | perms_tests = [ |
|
51 | 47 | ( |
|
52 | 48 | 'repo:%s' % repo.repo_name, |
|
53 | 49 | { |
|
54 | 50 | 'child_repos_only': None, |
|
55 | 51 | 'repo_group': None, |
|
56 | 52 | 'repo': repo, |
|
57 | 53 | }, |
|
58 | 54 | [ |
|
59 | 55 | ({}, False), |
|
60 | 56 | ({'global': ['hg.admin']}, True), |
|
61 | 57 | ({'global': []}, False), |
|
62 | 58 | ({'repositories': {repo.repo_name: 'repository.admin'}}, True), |
|
63 | 59 | ({'repositories': {repo.repo_name: 'repository.read'}}, False), |
|
64 | 60 | ({'repositories': {repo.repo_name: 'repository.write'}}, False), |
|
65 | 61 | ({'repositories': {repo.repo_name: 'repository.none'}}, False), |
|
66 | 62 | ] |
|
67 | 63 | ), |
|
68 | 64 | ( |
|
69 | 65 | 'repogroup:%s' % repo_group.group_name, |
|
70 | 66 | { |
|
71 | 67 | 'repo': None, |
|
72 | 68 | 'repo_group': repo_group, |
|
73 | 69 | 'child_repos_only': True, |
|
74 | 70 | }, |
|
75 | 71 | [ |
|
76 | 72 | ({}, False), |
|
77 | 73 | ({'global': ['hg.admin']}, True), |
|
78 | 74 | ({'global': []}, False), |
|
79 | 75 | ({'repositories_groups': |
|
80 | 76 | {repo_group.group_name: 'group.admin'}}, True), |
|
81 | 77 | ({'repositories_groups': |
|
82 | 78 | {repo_group.group_name: 'group.read'}}, False), |
|
83 | 79 | ({'repositories_groups': |
|
84 | 80 | {repo_group.group_name: 'group.write'}}, False), |
|
85 | 81 | ({'repositories_groups': |
|
86 | 82 | {repo_group.group_name: 'group.none'}}, False), |
|
87 | 83 | ] |
|
88 | 84 | ), |
|
89 | 85 | ( |
|
90 | 86 | 'repogroup-recursive:%s' % repo_group.group_name, |
|
91 | 87 | { |
|
92 | 88 | 'repo': None, |
|
93 | 89 | 'repo_group': repo_group, |
|
94 | 90 | 'child_repos_only': False, |
|
95 | 91 | }, |
|
96 | 92 | [ |
|
97 | 93 | ({}, False), |
|
98 | 94 | ({'global': ['hg.admin']}, True), |
|
99 | 95 | ({'global': []}, False), |
|
100 | 96 | ({'repositories_groups': |
|
101 | 97 | {repo_group.group_name: 'group.admin'}}, True), |
|
102 | 98 | ({'repositories_groups': |
|
103 | 99 | {repo_group.group_name: 'group.read'}}, False), |
|
104 | 100 | ({'repositories_groups': |
|
105 | 101 | {repo_group.group_name: 'group.write'}}, False), |
|
106 | 102 | ({'repositories_groups': |
|
107 | 103 | {repo_group.group_name: 'group.none'}}, False), |
|
108 | 104 | ] |
|
109 | 105 | ), |
|
110 | 106 | ( |
|
111 | 107 | 'global', |
|
112 | 108 | { |
|
113 | 109 | 'repo': None, |
|
114 | 110 | 'repo_group': None, |
|
115 | 111 | 'child_repos_only': False, |
|
116 | 112 | }, [ |
|
117 | 113 | ({}, False), |
|
118 | 114 | ({'global': ['hg.admin']}, True), |
|
119 | 115 | ({'global': []}, False), |
|
120 | 116 | ] |
|
121 | 117 | ), |
|
122 | 118 | ( |
|
123 | 119 | 'root-repos', |
|
124 | 120 | { |
|
125 | 121 | 'repo': None, |
|
126 | 122 | 'repo_group': None, |
|
127 | 123 | 'child_repos_only': True, |
|
128 | 124 | }, [ |
|
129 | 125 | ({}, False), |
|
130 | 126 | ({'global': ['hg.admin']}, True), |
|
131 | 127 | ({'global': []}, False), |
|
132 | 128 | ] |
|
133 | 129 | ), |
|
134 | 130 | ] |
|
135 | 131 | |
|
136 | 132 | for scope_input, scope_output, perms_allowed in perms_tests: |
|
137 | 133 | for perms_update, allowed in perms_allowed: |
|
138 | 134 | perms = dict(empty_perms_dict, **perms_update) |
|
139 | 135 | |
|
140 | 136 | schema = make_integration_schema( |
|
141 | 137 | IntegrationType=StubIntegrationType |
|
142 | 138 | ).bind(permissions=perms) |
|
143 | 139 | |
|
144 | 140 | input_data = { |
|
145 | 141 | 'options': { |
|
146 | 142 | 'enabled': 'true', |
|
147 | 143 | 'scope': scope_input, |
|
148 | 144 | 'name': 'test integration', |
|
149 | 145 | }, |
|
150 | 146 | 'settings': { |
|
151 | 147 | 'test_string_field': 'stringy', |
|
152 | 148 | 'test_int_field': '100', |
|
153 | 149 | } |
|
154 | 150 | } |
|
155 | 151 | |
|
156 | 152 | if not allowed: |
|
157 | 153 | with pytest.raises(colander.Invalid): |
|
158 | 154 | schema.deserialize(input_data) |
|
159 | 155 | else: |
|
160 | 156 | assert schema.deserialize(input_data) == { |
|
161 | 157 | 'options': { |
|
162 | 158 | 'enabled': True, |
|
163 | 159 | 'scope': scope_output, |
|
164 | 160 | 'name': 'test integration', |
|
165 | 161 | }, |
|
166 | 162 | 'settings': { |
|
167 | 163 | 'test_string_field': 'stringy', |
|
168 | 164 | 'test_int_field': 100, |
|
169 | 165 | } |
|
170 | 166 | } |
|
171 | 167 |
@@ -1,46 +1,102 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2016 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import colander |
|
22 | 22 | import pytest |
|
23 | 23 | |
|
24 |
from rhodecode.model.validation_schema.types import |
|
|
24 | from rhodecode.model.validation_schema.types import ( | |
|
25 | GroupNameType, RepoNameType, StringBooleanType) | |
|
25 | 26 | |
|
26 | 27 | |
|
class TestGroupNameType(object):
    """Tests for path normalization done by GroupNameType/RepoNameType."""

    @pytest.mark.parametrize('given, expected', [
        ('//group1/group2//', 'group1/group2'),
        ('//group1///group2//', 'group1/group2'),
        ('group1/group2///group3', 'group1/group2/group3'),
    ])
    def test_normalize_path(self, given, expected):
        # _normalize should strip leading/trailing slashes and collapse
        # duplicated slashes without touching the path components.
        result = GroupNameType()._normalize(given)
        assert result == expected

    @pytest.mark.parametrize('given, expected', [
        ('//group1/group2//', 'group1/group2'),
        ('//group1///group2//', 'group1/group2'),
        ('group1/group2///group3', 'group1/group2/group3'),
        ('v1.2', 'v1.2'),
        ('/v1.2', 'v1.2'),
        ('.dirs', '.dirs'),
        ('..dirs', '.dirs'),
        ('./..dirs', '.dirs'),
        ('dir/;name;/;[];/sub', 'dir/name/sub'),
        (',/,/,d,,,', 'd'),
        ('/;/#/,d,,,', 'd'),
        ('long../../..name', 'long./.name'),
        ('long../..name', 'long./.name'),
        ('../', ''),
        ('\'../"../', ''),
        ('c,/,/..//./,c,,,/.d/../.........c', 'c/c/.d/.c'),
        ('c,/,/..//./,c,,,', 'c/c'),
        ('d../..d', 'd./.d'),
        ('d../../d', 'd./d'),

        # NOTE: raw strings here; the original spelled these with invalid
        # escape sequences ('\;', '\,', '\.') that Python keeps as literal
        # backslashes but flags with a DeprecationWarning. The raw-string
        # form expresses byte-identical values.
        (r'd\;\./\,\./d', 'd./d'),
        (r'd\.\./\.\./d', 'd./d'),
        (r'd\.\./\..\../d', 'd./d'),
    ])
    def test_deserialize_clean_up_name(self, given, expected):
        # Deserializing through a schema must apply the same cleanup for
        # both the group-name and repo-name types.
        class TestSchema(colander.Schema):
            field_group = colander.SchemaNode(GroupNameType())
            field_repo = colander.SchemaNode(RepoNameType())

        schema = TestSchema()
        cleaned_data = schema.deserialize({
            'field_group': given,
            'field_repo': given,
        })
        assert cleaned_data['field_group'] == expected
        assert cleaned_data['field_repo'] == expected
|
75 | ||
|
76 | ||
|
class TestStringBooleanType(object):
    """Tests for StringBooleanType conversion of truthy/falsy strings."""

    def _get_schema(self):
        # Minimal schema exposing a single StringBooleanType node.
        class Schema(colander.MappingSchema):
            bools = colander.SchemaNode(StringBooleanType())
        return Schema()

    @pytest.mark.parametrize('given, expected', [
        ('1', True),
        ('yEs', True),
        ('true', True),

        ('0', False),
        ('NO', False),
        ('FALSE', False),
    ])
    def test_convert_type(self, given, expected):
        # Conversion must be case-insensitive for the accepted keywords.
        schema = self._get_schema()
        result = schema.deserialize({'bools': given})
        assert result['bools'] == expected

    def test_try_convert_bad_type(self):
        # Any string outside the known keywords must raise colander.Invalid.
        # (Dropped the unused `result =` binding the original had here.)
        schema = self._get_schema()
        with pytest.raises(colander.Invalid):
            schema.deserialize({'bools': 'boom'})
General Comments 0
You need to be logged in to leave comments.
Login now