Show More
@@ -0,0 +1,128 b'' | |||||
|
1 | # -*- coding: utf-8 -*- | |||
|
2 | ||||
|
3 | # Copyright (C) 2016-2016 RhodeCode GmbH | |||
|
4 | # | |||
|
5 | # This program is free software: you can redistribute it and/or modify | |||
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |||
|
7 | # (only), as published by the Free Software Foundation. | |||
|
8 | # | |||
|
9 | # This program is distributed in the hope that it will be useful, | |||
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
|
12 | # GNU General Public License for more details. | |||
|
13 | # | |||
|
14 | # You should have received a copy of the GNU Affero General Public License | |||
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |||
|
16 | # | |||
|
17 | # This program is dual-licensed. If you wish to learn more about the | |||
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |||
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |||
|
20 | ||||
|
21 | import colander | |||
|
22 | import pytest | |||
|
23 | ||||
|
24 | from rhodecode.model.validation_schema import types | |||
|
25 | from rhodecode.model.validation_schema.schemas import repo_schema | |||
|
26 | ||||
|
27 | ||||
|
28 | class TestRepoSchema(object): | |||
|
29 | ||||
|
30 | #TODO: | |||
|
31 | # test nested groups | |||
|
32 | ||||
|
33 | @pytest.mark.parametrize('given, expected', [ | |||
|
34 | ('my repo', 'my-repo'), | |||
|
35 | (' hello world mike ', 'hello-world-mike'), | |||
|
36 | ||||
|
37 | ('//group1/group2//', 'group1/group2'), | |||
|
38 | ('//group1///group2//', 'group1/group2'), | |||
|
39 | ('///group1/group2///group3', 'group1/group2/group3'), | |||
|
40 | ('word g1/group2///group3', 'word-g1/group2/group3'), | |||
|
41 | ||||
|
42 | ('grou p1/gro;,,##up2//.../group3', 'grou-p1/group2/group3'), | |||
|
43 | ||||
|
44 | ('group,,,/,,,/1/2/3', 'group/1/2/3'), | |||
|
45 | ('grou[]p1/gro;up2///gro up3', 'group1/group2/gro-up3'), | |||
|
46 | (u'grou[]p1/gro;up2///gro up3/Δ Δ', u'group1/group2/gro-up3/Δ Δ'), | |||
|
47 | ]) | |||
|
48 | def test_deserialize_repo_name(self, app, user_admin, given, expected): | |||
|
49 | ||||
|
50 | schema = repo_schema.RepoSchema().bind() | |||
|
51 | assert expected == schema.get('repo_name').deserialize(given) | |||
|
52 | ||||
|
53 | def test_deserialize(self, app, user_admin): | |||
|
54 | schema = repo_schema.RepoSchema().bind( | |||
|
55 | repo_type_options=['hg'], | |||
|
56 | user=user_admin | |||
|
57 | ) | |||
|
58 | ||||
|
59 | schema_data = schema.deserialize(dict( | |||
|
60 | repo_name='dupa', | |||
|
61 | repo_type='hg', | |||
|
62 | repo_owner=user_admin.username | |||
|
63 | )) | |||
|
64 | ||||
|
65 | assert schema_data['repo_name'] == 'dupa' | |||
|
66 | assert schema_data['repo_group'] == { | |||
|
67 | 'repo_group_id': None, | |||
|
68 | 'repo_group_name': types.RootLocation, | |||
|
69 | 'repo_name_without_group': 'dupa'} | |||
|
70 | ||||
|
71 | @pytest.mark.parametrize('given, err_key, expected_exc', [ | |||
|
72 | ('xxx/dupa','repo_group', 'Repository group `xxx` does not exist'), | |||
|
73 | ('', 'repo_name', 'Name must start with a letter or number. Got ``'), | |||
|
74 | ]) | |||
|
75 | def test_deserialize_with_bad_group_name( | |||
|
76 | self, app, user_admin, given, err_key, expected_exc): | |||
|
77 | ||||
|
78 | schema = repo_schema.RepoSchema().bind( | |||
|
79 | repo_type_options=['hg'], | |||
|
80 | user=user_admin | |||
|
81 | ) | |||
|
82 | ||||
|
83 | with pytest.raises(colander.Invalid) as excinfo: | |||
|
84 | schema.deserialize(dict( | |||
|
85 | repo_name=given, | |||
|
86 | repo_type='hg', | |||
|
87 | repo_owner=user_admin.username | |||
|
88 | )) | |||
|
89 | ||||
|
90 | assert excinfo.value.asdict()[err_key] == expected_exc | |||
|
91 | ||||
|
92 | def test_deserialize_with_group_name(self, app, user_admin, test_repo_group): | |||
|
93 | schema = repo_schema.RepoSchema().bind( | |||
|
94 | repo_type_options=['hg'], | |||
|
95 | user=user_admin | |||
|
96 | ) | |||
|
97 | ||||
|
98 | full_name = test_repo_group.group_name + '/dupa' | |||
|
99 | schema_data = schema.deserialize(dict( | |||
|
100 | repo_name=full_name, | |||
|
101 | repo_type='hg', | |||
|
102 | repo_owner=user_admin.username | |||
|
103 | )) | |||
|
104 | ||||
|
105 | assert schema_data['repo_name'] == full_name | |||
|
106 | assert schema_data['repo_group'] == { | |||
|
107 | 'repo_group_id': test_repo_group.group_id, | |||
|
108 | 'repo_group_name': test_repo_group.group_name, | |||
|
109 | 'repo_name_without_group': 'dupa'} | |||
|
110 | ||||
|
111 | def test_deserialize_with_group_name_regular_user_no_perms( | |||
|
112 | self, app, user_regular, test_repo_group): | |||
|
113 | schema = repo_schema.RepoSchema().bind( | |||
|
114 | repo_type_options=['hg'], | |||
|
115 | user=user_regular | |||
|
116 | ) | |||
|
117 | ||||
|
118 | full_name = test_repo_group.group_name + '/dupa' | |||
|
119 | with pytest.raises(colander.Invalid) as excinfo: | |||
|
120 | schema.deserialize(dict( | |||
|
121 | repo_name=full_name, | |||
|
122 | repo_type='hg', | |||
|
123 | repo_owner=user_regular.username | |||
|
124 | )) | |||
|
125 | ||||
|
126 | expected = 'Repository group `{}` does not exist'.format( | |||
|
127 | test_repo_group.group_name) | |||
|
128 | assert excinfo.value.asdict()['repo_group'] == expected |
@@ -1,270 +1,350 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2016 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2016 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import json |
|
21 | import json | |
22 |
|
22 | |||
23 | import mock |
|
23 | import mock | |
24 | import pytest |
|
24 | import pytest | |
25 |
|
25 | |||
|
26 | from rhodecode.lib.utils2 import safe_unicode | |||
26 | from rhodecode.lib.vcs import settings |
|
27 | from rhodecode.lib.vcs import settings | |
|
28 | from rhodecode.model.meta import Session | |||
27 | from rhodecode.model.repo import RepoModel |
|
29 | from rhodecode.model.repo import RepoModel | |
|
30 | from rhodecode.model.user import UserModel | |||
28 | from rhodecode.tests import TEST_USER_ADMIN_LOGIN |
|
31 | from rhodecode.tests import TEST_USER_ADMIN_LOGIN | |
29 | from rhodecode.api.tests.utils import ( |
|
32 | from rhodecode.api.tests.utils import ( | |
30 | build_data, api_call, assert_ok, assert_error, crash) |
|
33 | build_data, api_call, assert_ok, assert_error, crash) | |
31 | from rhodecode.tests.fixture import Fixture |
|
34 | from rhodecode.tests.fixture import Fixture | |
32 |
|
35 | |||
33 |
|
36 | |||
34 | fixture = Fixture() |
|
37 | fixture = Fixture() | |
35 |
|
38 | |||
36 |
|
39 | |||
37 | @pytest.mark.usefixtures("testuser_api", "app") |
|
40 | @pytest.mark.usefixtures("testuser_api", "app") | |
38 | class TestCreateRepo(object): |
|
41 | class TestCreateRepo(object): | |
39 | def test_api_create_repo(self, backend): |
|
42 | ||
40 | repo_name = 'api-repo-1' |
|
43 | @pytest.mark.parametrize('given, expected_name, expected_exc', [ | |
|
44 | ('api repo-1', 'api-repo-1', False), | |||
|
45 | ('api-repo 1-Δ Δ', 'api-repo-1-Δ Δ', False), | |||
|
46 | (u'unicode-Δ Δ', u'unicode-Δ Δ', False), | |||
|
47 | ('some repo v1.2', 'some-repo-v1.2', False), | |||
|
48 | ('v2.0', 'v2.0', False), | |||
|
49 | ]) | |||
|
50 | def test_api_create_repo(self, backend, given, expected_name, expected_exc): | |||
|
51 | ||||
41 | id_, params = build_data( |
|
52 | id_, params = build_data( | |
42 | self.apikey, |
|
53 | self.apikey, | |
43 | 'create_repo', |
|
54 | 'create_repo', | |
44 |
repo_name= |
|
55 | repo_name=given, | |
45 | owner=TEST_USER_ADMIN_LOGIN, |
|
56 | owner=TEST_USER_ADMIN_LOGIN, | |
46 | repo_type=backend.alias, |
|
57 | repo_type=backend.alias, | |
47 | ) |
|
58 | ) | |
48 | response = api_call(self.app, params) |
|
59 | response = api_call(self.app, params) | |
49 |
|
60 | |||
50 | repo = RepoModel().get_by_repo_name(repo_name) |
|
|||
51 |
|
||||
52 | assert repo is not None |
|
|||
53 | ret = { |
|
61 | ret = { | |
54 |
'msg': 'Created new repository `%s`' % ( |
|
62 | 'msg': 'Created new repository `%s`' % (expected_name,), | |
55 | 'success': True, |
|
63 | 'success': True, | |
56 | 'task': None, |
|
64 | 'task': None, | |
57 | } |
|
65 | } | |
58 | expected = ret |
|
66 | expected = ret | |
59 | assert_ok(id_, expected, given=response.body) |
|
67 | assert_ok(id_, expected, given=response.body) | |
60 |
|
68 | |||
61 | id_, params = build_data(self.apikey, 'get_repo', repoid=repo_name) |
|
69 | repo = RepoModel().get_by_repo_name(safe_unicode(expected_name)) | |
|
70 | assert repo is not None | |||
|
71 | ||||
|
72 | id_, params = build_data(self.apikey, 'get_repo', repoid=expected_name) | |||
62 | response = api_call(self.app, params) |
|
73 | response = api_call(self.app, params) | |
63 | body = json.loads(response.body) |
|
74 | body = json.loads(response.body) | |
64 |
|
75 | |||
65 | assert body['result']['enable_downloads'] is False |
|
76 | assert body['result']['enable_downloads'] is False | |
66 | assert body['result']['enable_locking'] is False |
|
77 | assert body['result']['enable_locking'] is False | |
67 | assert body['result']['enable_statistics'] is False |
|
78 | assert body['result']['enable_statistics'] is False | |
68 |
|
79 | |||
69 |
fixture.destroy_repo( |
|
80 | fixture.destroy_repo(safe_unicode(expected_name)) | |
70 |
|
81 | |||
71 | def test_api_create_restricted_repo_type(self, backend): |
|
82 | def test_api_create_restricted_repo_type(self, backend): | |
72 | repo_name = 'api-repo-type-{0}'.format(backend.alias) |
|
83 | repo_name = 'api-repo-type-{0}'.format(backend.alias) | |
73 | id_, params = build_data( |
|
84 | id_, params = build_data( | |
74 | self.apikey, |
|
85 | self.apikey, | |
75 | 'create_repo', |
|
86 | 'create_repo', | |
76 | repo_name=repo_name, |
|
87 | repo_name=repo_name, | |
77 | owner=TEST_USER_ADMIN_LOGIN, |
|
88 | owner=TEST_USER_ADMIN_LOGIN, | |
78 | repo_type=backend.alias, |
|
89 | repo_type=backend.alias, | |
79 | ) |
|
90 | ) | |
80 | git_backend = settings.BACKENDS['git'] |
|
91 | git_backend = settings.BACKENDS['git'] | |
81 | with mock.patch( |
|
92 | with mock.patch( | |
82 | 'rhodecode.lib.vcs.settings.BACKENDS', {'git': git_backend}): |
|
93 | 'rhodecode.lib.vcs.settings.BACKENDS', {'git': git_backend}): | |
83 | response = api_call(self.app, params) |
|
94 | response = api_call(self.app, params) | |
84 |
|
95 | |||
85 | repo = RepoModel().get_by_repo_name(repo_name) |
|
96 | repo = RepoModel().get_by_repo_name(repo_name) | |
86 |
|
97 | |||
87 | if backend.alias == 'git': |
|
98 | if backend.alias == 'git': | |
88 | assert repo is not None |
|
99 | assert repo is not None | |
89 | expected = { |
|
100 | expected = { | |
90 | 'msg': 'Created new repository `{0}`'.format(repo_name,), |
|
101 | 'msg': 'Created new repository `{0}`'.format(repo_name,), | |
91 | 'success': True, |
|
102 | 'success': True, | |
92 | 'task': None, |
|
103 | 'task': None, | |
93 | } |
|
104 | } | |
94 | assert_ok(id_, expected, given=response.body) |
|
105 | assert_ok(id_, expected, given=response.body) | |
95 | else: |
|
106 | else: | |
96 | assert repo is None |
|
107 | assert repo is None | |
97 |
|
108 | |||
98 | fixture.destroy_repo(repo_name) |
|
109 | fixture.destroy_repo(repo_name) | |
99 |
|
110 | |||
100 | def test_api_create_repo_with_booleans(self, backend): |
|
111 | def test_api_create_repo_with_booleans(self, backend): | |
101 | repo_name = 'api-repo-2' |
|
112 | repo_name = 'api-repo-2' | |
102 | id_, params = build_data( |
|
113 | id_, params = build_data( | |
103 | self.apikey, |
|
114 | self.apikey, | |
104 | 'create_repo', |
|
115 | 'create_repo', | |
105 | repo_name=repo_name, |
|
116 | repo_name=repo_name, | |
106 | owner=TEST_USER_ADMIN_LOGIN, |
|
117 | owner=TEST_USER_ADMIN_LOGIN, | |
107 | repo_type=backend.alias, |
|
118 | repo_type=backend.alias, | |
108 | enable_statistics=True, |
|
119 | enable_statistics=True, | |
109 | enable_locking=True, |
|
120 | enable_locking=True, | |
110 | enable_downloads=True |
|
121 | enable_downloads=True | |
111 | ) |
|
122 | ) | |
112 | response = api_call(self.app, params) |
|
123 | response = api_call(self.app, params) | |
113 |
|
124 | |||
114 | repo = RepoModel().get_by_repo_name(repo_name) |
|
125 | repo = RepoModel().get_by_repo_name(repo_name) | |
115 |
|
126 | |||
116 | assert repo is not None |
|
127 | assert repo is not None | |
117 | ret = { |
|
128 | ret = { | |
118 | 'msg': 'Created new repository `%s`' % (repo_name,), |
|
129 | 'msg': 'Created new repository `%s`' % (repo_name,), | |
119 | 'success': True, |
|
130 | 'success': True, | |
120 | 'task': None, |
|
131 | 'task': None, | |
121 | } |
|
132 | } | |
122 | expected = ret |
|
133 | expected = ret | |
123 | assert_ok(id_, expected, given=response.body) |
|
134 | assert_ok(id_, expected, given=response.body) | |
124 |
|
135 | |||
125 | id_, params = build_data(self.apikey, 'get_repo', repoid=repo_name) |
|
136 | id_, params = build_data(self.apikey, 'get_repo', repoid=repo_name) | |
126 | response = api_call(self.app, params) |
|
137 | response = api_call(self.app, params) | |
127 | body = json.loads(response.body) |
|
138 | body = json.loads(response.body) | |
128 |
|
139 | |||
129 | assert body['result']['enable_downloads'] is True |
|
140 | assert body['result']['enable_downloads'] is True | |
130 | assert body['result']['enable_locking'] is True |
|
141 | assert body['result']['enable_locking'] is True | |
131 | assert body['result']['enable_statistics'] is True |
|
142 | assert body['result']['enable_statistics'] is True | |
132 |
|
143 | |||
133 | fixture.destroy_repo(repo_name) |
|
144 | fixture.destroy_repo(repo_name) | |
134 |
|
145 | |||
135 | def test_api_create_repo_in_group(self, backend): |
|
146 | def test_api_create_repo_in_group(self, backend): | |
136 | repo_group_name = 'my_gr' |
|
147 | repo_group_name = 'my_gr' | |
137 | # create the parent |
|
148 | # create the parent | |
138 | fixture.create_repo_group(repo_group_name) |
|
149 | fixture.create_repo_group(repo_group_name) | |
139 |
|
150 | |||
140 | repo_name = '%s/api-repo-gr' % (repo_group_name,) |
|
151 | repo_name = '%s/api-repo-gr' % (repo_group_name,) | |
141 | id_, params = build_data( |
|
152 | id_, params = build_data( | |
142 | self.apikey, 'create_repo', |
|
153 | self.apikey, 'create_repo', | |
143 | repo_name=repo_name, |
|
154 | repo_name=repo_name, | |
144 | owner=TEST_USER_ADMIN_LOGIN, |
|
155 | owner=TEST_USER_ADMIN_LOGIN, | |
145 | repo_type=backend.alias,) |
|
156 | repo_type=backend.alias,) | |
146 | response = api_call(self.app, params) |
|
157 | response = api_call(self.app, params) | |
147 | repo = RepoModel().get_by_repo_name(repo_name) |
|
158 | repo = RepoModel().get_by_repo_name(repo_name) | |
148 | assert repo is not None |
|
159 | assert repo is not None | |
149 | assert repo.group is not None |
|
160 | assert repo.group is not None | |
150 |
|
161 | |||
151 | ret = { |
|
162 | ret = { | |
152 | 'msg': 'Created new repository `%s`' % (repo_name,), |
|
163 | 'msg': 'Created new repository `%s`' % (repo_name,), | |
153 | 'success': True, |
|
164 | 'success': True, | |
154 | 'task': None, |
|
165 | 'task': None, | |
155 | } |
|
166 | } | |
156 | expected = ret |
|
167 | expected = ret | |
157 | assert_ok(id_, expected, given=response.body) |
|
168 | assert_ok(id_, expected, given=response.body) | |
158 | fixture.destroy_repo(repo_name) |
|
169 | fixture.destroy_repo(repo_name) | |
159 | fixture.destroy_repo_group(repo_group_name) |
|
170 | fixture.destroy_repo_group(repo_group_name) | |
160 |
|
171 | |||
|
172 | def test_create_repo_in_group_that_doesnt_exist(self, backend, user_util): | |||
|
173 | repo_group_name = 'fake_group' | |||
|
174 | ||||
|
175 | repo_name = '%s/api-repo-gr' % (repo_group_name,) | |||
|
176 | id_, params = build_data( | |||
|
177 | self.apikey, 'create_repo', | |||
|
178 | repo_name=repo_name, | |||
|
179 | owner=TEST_USER_ADMIN_LOGIN, | |||
|
180 | repo_type=backend.alias,) | |||
|
181 | response = api_call(self.app, params) | |||
|
182 | ||||
|
183 | expected = {'repo_group': 'Repository group `{}` does not exist'.format( | |||
|
184 | repo_group_name)} | |||
|
185 | assert_error(id_, expected, given=response.body) | |||
|
186 | ||||
161 | def test_api_create_repo_unknown_owner(self, backend): |
|
187 | def test_api_create_repo_unknown_owner(self, backend): | |
162 | repo_name = 'api-repo-2' |
|
188 | repo_name = 'api-repo-2' | |
163 | owner = 'i-dont-exist' |
|
189 | owner = 'i-dont-exist' | |
164 | id_, params = build_data( |
|
190 | id_, params = build_data( | |
165 | self.apikey, 'create_repo', |
|
191 | self.apikey, 'create_repo', | |
166 | repo_name=repo_name, |
|
192 | repo_name=repo_name, | |
167 | owner=owner, |
|
193 | owner=owner, | |
168 | repo_type=backend.alias) |
|
194 | repo_type=backend.alias) | |
169 | response = api_call(self.app, params) |
|
195 | response = api_call(self.app, params) | |
170 | expected = 'user `%s` does not exist' % (owner,) |
|
196 | expected = 'user `%s` does not exist' % (owner,) | |
171 | assert_error(id_, expected, given=response.body) |
|
197 | assert_error(id_, expected, given=response.body) | |
172 |
|
198 | |||
173 | def test_api_create_repo_dont_specify_owner(self, backend): |
|
199 | def test_api_create_repo_dont_specify_owner(self, backend): | |
174 | repo_name = 'api-repo-3' |
|
200 | repo_name = 'api-repo-3' | |
175 | id_, params = build_data( |
|
201 | id_, params = build_data( | |
176 | self.apikey, 'create_repo', |
|
202 | self.apikey, 'create_repo', | |
177 | repo_name=repo_name, |
|
203 | repo_name=repo_name, | |
178 | repo_type=backend.alias) |
|
204 | repo_type=backend.alias) | |
179 | response = api_call(self.app, params) |
|
205 | response = api_call(self.app, params) | |
180 |
|
206 | |||
181 | repo = RepoModel().get_by_repo_name(repo_name) |
|
207 | repo = RepoModel().get_by_repo_name(repo_name) | |
182 | assert repo is not None |
|
208 | assert repo is not None | |
183 | ret = { |
|
209 | ret = { | |
184 | 'msg': 'Created new repository `%s`' % (repo_name,), |
|
210 | 'msg': 'Created new repository `%s`' % (repo_name,), | |
185 | 'success': True, |
|
211 | 'success': True, | |
186 | 'task': None, |
|
212 | 'task': None, | |
187 | } |
|
213 | } | |
188 | expected = ret |
|
214 | expected = ret | |
189 | assert_ok(id_, expected, given=response.body) |
|
215 | assert_ok(id_, expected, given=response.body) | |
190 | fixture.destroy_repo(repo_name) |
|
216 | fixture.destroy_repo(repo_name) | |
191 |
|
217 | |||
192 | def test_api_create_repo_by_non_admin(self, backend): |
|
218 | def test_api_create_repo_by_non_admin(self, backend): | |
193 | repo_name = 'api-repo-4' |
|
219 | repo_name = 'api-repo-4' | |
194 | id_, params = build_data( |
|
220 | id_, params = build_data( | |
195 | self.apikey_regular, 'create_repo', |
|
221 | self.apikey_regular, 'create_repo', | |
196 | repo_name=repo_name, |
|
222 | repo_name=repo_name, | |
197 | repo_type=backend.alias) |
|
223 | repo_type=backend.alias) | |
198 | response = api_call(self.app, params) |
|
224 | response = api_call(self.app, params) | |
199 |
|
225 | |||
200 | repo = RepoModel().get_by_repo_name(repo_name) |
|
226 | repo = RepoModel().get_by_repo_name(repo_name) | |
201 | assert repo is not None |
|
227 | assert repo is not None | |
202 | ret = { |
|
228 | ret = { | |
203 | 'msg': 'Created new repository `%s`' % (repo_name,), |
|
229 | 'msg': 'Created new repository `%s`' % (repo_name,), | |
204 | 'success': True, |
|
230 | 'success': True, | |
205 | 'task': None, |
|
231 | 'task': None, | |
206 | } |
|
232 | } | |
207 | expected = ret |
|
233 | expected = ret | |
208 | assert_ok(id_, expected, given=response.body) |
|
234 | assert_ok(id_, expected, given=response.body) | |
209 | fixture.destroy_repo(repo_name) |
|
235 | fixture.destroy_repo(repo_name) | |
210 |
|
236 | |||
211 | def test_api_create_repo_by_non_admin_specify_owner(self, backend): |
|
237 | def test_api_create_repo_by_non_admin_specify_owner(self, backend): | |
212 | repo_name = 'api-repo-5' |
|
238 | repo_name = 'api-repo-5' | |
213 | owner = 'i-dont-exist' |
|
239 | owner = 'i-dont-exist' | |
214 | id_, params = build_data( |
|
240 | id_, params = build_data( | |
215 | self.apikey_regular, 'create_repo', |
|
241 | self.apikey_regular, 'create_repo', | |
216 | repo_name=repo_name, |
|
242 | repo_name=repo_name, | |
217 | repo_type=backend.alias, |
|
243 | repo_type=backend.alias, | |
218 | owner=owner) |
|
244 | owner=owner) | |
219 | response = api_call(self.app, params) |
|
245 | response = api_call(self.app, params) | |
220 |
|
246 | |||
221 | expected = 'Only RhodeCode admin can specify `owner` param' |
|
247 | expected = 'Only RhodeCode super-admin can specify `owner` param' | |
222 | assert_error(id_, expected, given=response.body) |
|
248 | assert_error(id_, expected, given=response.body) | |
223 | fixture.destroy_repo(repo_name) |
|
249 | fixture.destroy_repo(repo_name) | |
224 |
|
250 | |||
|
251 | def test_api_create_repo_by_non_admin_no_parent_group_perms(self, backend): | |||
|
252 | repo_group_name = 'no-access' | |||
|
253 | fixture.create_repo_group(repo_group_name) | |||
|
254 | repo_name = 'no-access/api-repo' | |||
|
255 | ||||
|
256 | id_, params = build_data( | |||
|
257 | self.apikey_regular, 'create_repo', | |||
|
258 | repo_name=repo_name, | |||
|
259 | repo_type=backend.alias) | |||
|
260 | response = api_call(self.app, params) | |||
|
261 | ||||
|
262 | expected = {'repo_group': 'Repository group `{}` does not exist'.format( | |||
|
263 | repo_group_name)} | |||
|
264 | assert_error(id_, expected, given=response.body) | |||
|
265 | fixture.destroy_repo_group(repo_group_name) | |||
|
266 | fixture.destroy_repo(repo_name) | |||
|
267 | ||||
|
268 | def test_api_create_repo_non_admin_no_permission_to_create_to_root_level( | |||
|
269 | self, backend, user_util): | |||
|
270 | ||||
|
271 | regular_user = user_util.create_user() | |||
|
272 | regular_user_api_key = regular_user.api_key | |||
|
273 | ||||
|
274 | usr = UserModel().get_by_username(regular_user.username) | |||
|
275 | usr.inherit_default_permissions = False | |||
|
276 | Session().add(usr) | |||
|
277 | ||||
|
278 | repo_name = backend.new_repo_name() | |||
|
279 | id_, params = build_data( | |||
|
280 | regular_user_api_key, 'create_repo', | |||
|
281 | repo_name=repo_name, | |||
|
282 | repo_type=backend.alias) | |||
|
283 | response = api_call(self.app, params) | |||
|
284 | expected = { | |||
|
285 | "repo_name": "You do not have the permission to " | |||
|
286 | "store repositories in the root location."} | |||
|
287 | assert_error(id_, expected, given=response.body) | |||
|
288 | ||||
225 | def test_api_create_repo_exists(self, backend): |
|
289 | def test_api_create_repo_exists(self, backend): | |
226 | repo_name = backend.repo_name |
|
290 | repo_name = backend.repo_name | |
227 | id_, params = build_data( |
|
291 | id_, params = build_data( | |
228 | self.apikey, 'create_repo', |
|
292 | self.apikey, 'create_repo', | |
229 | repo_name=repo_name, |
|
293 | repo_name=repo_name, | |
230 | owner=TEST_USER_ADMIN_LOGIN, |
|
294 | owner=TEST_USER_ADMIN_LOGIN, | |
231 | repo_type=backend.alias,) |
|
295 | repo_type=backend.alias,) | |
232 | response = api_call(self.app, params) |
|
296 | response = api_call(self.app, params) | |
233 | expected = "repo `%s` already exist" % (repo_name,) |
|
297 | expected = { | |
|
298 | 'unique_repo_name': 'Repository with name `{}` already exists'.format( | |||
|
299 | repo_name)} | |||
234 | assert_error(id_, expected, given=response.body) |
|
300 | assert_error(id_, expected, given=response.body) | |
235 |
|
301 | |||
236 | @mock.patch.object(RepoModel, 'create', crash) |
|
302 | @mock.patch.object(RepoModel, 'create', crash) | |
237 | def test_api_create_repo_exception_occurred(self, backend): |
|
303 | def test_api_create_repo_exception_occurred(self, backend): | |
238 | repo_name = 'api-repo-6' |
|
304 | repo_name = 'api-repo-6' | |
239 | id_, params = build_data( |
|
305 | id_, params = build_data( | |
240 | self.apikey, 'create_repo', |
|
306 | self.apikey, 'create_repo', | |
241 | repo_name=repo_name, |
|
307 | repo_name=repo_name, | |
242 | owner=TEST_USER_ADMIN_LOGIN, |
|
308 | owner=TEST_USER_ADMIN_LOGIN, | |
243 | repo_type=backend.alias,) |
|
309 | repo_type=backend.alias,) | |
244 | response = api_call(self.app, params) |
|
310 | response = api_call(self.app, params) | |
245 | expected = 'failed to create repository `%s`' % (repo_name,) |
|
311 | expected = 'failed to create repository `%s`' % (repo_name,) | |
246 | assert_error(id_, expected, given=response.body) |
|
312 | assert_error(id_, expected, given=response.body) | |
247 |
|
313 | |||
248 | def test_create_repo_with_extra_slashes_in_name(self, backend, user_util): |
|
314 | @pytest.mark.parametrize('parent_group, dirty_name, expected_name', [ | |
249 | existing_repo_group = user_util.create_repo_group() |
|
315 | (None, 'foo bar x', 'foo-bar-x'), | |
250 | dirty_repo_name = '//{}/repo_name//'.format( |
|
316 | ('foo', '/foo//bar x', 'foo/bar-x'), | |
251 | existing_repo_group.group_name) |
|
317 | ('foo-bar', 'foo-bar //bar x', 'foo-bar/bar-x'), | |
252 | cleaned_repo_name = '{}/repo_name'.format( |
|
318 | ]) | |
253 | existing_repo_group.group_name) |
|
319 | def test_create_repo_with_extra_slashes_in_name( | |
|
320 | self, backend, parent_group, dirty_name, expected_name): | |||
|
321 | ||||
|
322 | if parent_group: | |||
|
323 | gr = fixture.create_repo_group(parent_group) | |||
|
324 | assert gr.group_name == parent_group | |||
254 |
|
325 | |||
255 | id_, params = build_data( |
|
326 | id_, params = build_data( | |
256 | self.apikey, 'create_repo', |
|
327 | self.apikey, 'create_repo', | |
257 |
repo_name=dirty_ |
|
328 | repo_name=dirty_name, | |
258 | repo_type=backend.alias, |
|
329 | repo_type=backend.alias, | |
259 | owner=TEST_USER_ADMIN_LOGIN,) |
|
330 | owner=TEST_USER_ADMIN_LOGIN,) | |
260 | response = api_call(self.app, params) |
|
331 | response = api_call(self.app, params) | |
261 | repo = RepoModel().get_by_repo_name(cleaned_repo_name) |
|
332 | expected ={ | |
|
333 | "msg": "Created new repository `{}`".format(expected_name), | |||
|
334 | "task": None, | |||
|
335 | "success": True | |||
|
336 | } | |||
|
337 | assert_ok(id_, expected, response.body) | |||
|
338 | ||||
|
339 | repo = RepoModel().get_by_repo_name(expected_name) | |||
262 | assert repo is not None |
|
340 | assert repo is not None | |
263 |
|
341 | |||
264 | expected = { |
|
342 | expected = { | |
265 |
'msg': 'Created new repository `%s`' % ( |
|
343 | 'msg': 'Created new repository `%s`' % (expected_name,), | |
266 | 'success': True, |
|
344 | 'success': True, | |
267 | 'task': None, |
|
345 | 'task': None, | |
268 | } |
|
346 | } | |
269 | assert_ok(id_, expected, given=response.body) |
|
347 | assert_ok(id_, expected, given=response.body) | |
270 |
fixture.destroy_repo( |
|
348 | fixture.destroy_repo(expected_name) | |
|
349 | if parent_group: | |||
|
350 | fixture.destroy_repo_group(parent_group) |
@@ -1,224 +1,279 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2016 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2016 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 |
|
21 | |||
22 | import mock |
|
22 | import mock | |
23 | import pytest |
|
23 | import pytest | |
24 |
|
24 | |||
25 | from rhodecode.model.meta import Session |
|
25 | from rhodecode.model.meta import Session | |
26 | from rhodecode.model.repo import RepoModel |
|
26 | from rhodecode.model.repo import RepoModel | |
|
27 | from rhodecode.model.repo_group import RepoGroupModel | |||
27 | from rhodecode.model.user import UserModel |
|
28 | from rhodecode.model.user import UserModel | |
28 | from rhodecode.tests import TEST_USER_ADMIN_LOGIN |
|
29 | from rhodecode.tests import TEST_USER_ADMIN_LOGIN | |
29 | from rhodecode.api.tests.utils import ( |
|
30 | from rhodecode.api.tests.utils import ( | |
30 | build_data, api_call, assert_error, assert_ok, crash) |
|
31 | build_data, api_call, assert_error, assert_ok, crash) | |
31 | from rhodecode.tests.fixture import Fixture |
|
32 | from rhodecode.tests.fixture import Fixture | |
32 |
|
33 | |||
33 |
|
34 | |||
34 | fixture = Fixture() |
|
35 | fixture = Fixture() | |
35 |
|
36 | |||
36 |
|
37 | |||
37 | @pytest.mark.usefixtures("testuser_api", "app") |
|
38 | @pytest.mark.usefixtures("testuser_api", "app") | |
38 | class TestApiForkRepo(object): |
|
39 | class TestApiForkRepo(object): | |
39 | def test_api_fork_repo(self, backend): |
|
40 | def test_api_fork_repo(self, backend): | |
40 | source_name = backend['minimal'].repo_name |
|
41 | source_name = backend['minimal'].repo_name | |
41 | fork_name = backend.new_repo_name() |
|
42 | fork_name = backend.new_repo_name() | |
42 |
|
43 | |||
43 | id_, params = build_data( |
|
44 | id_, params = build_data( | |
44 | self.apikey, 'fork_repo', |
|
45 | self.apikey, 'fork_repo', | |
45 | repoid=source_name, |
|
46 | repoid=source_name, | |
46 | fork_name=fork_name, |
|
47 | fork_name=fork_name, | |
47 | owner=TEST_USER_ADMIN_LOGIN) |
|
48 | owner=TEST_USER_ADMIN_LOGIN) | |
48 | response = api_call(self.app, params) |
|
49 | response = api_call(self.app, params) | |
49 |
|
50 | |||
50 | expected = { |
|
51 | expected = { | |
51 | 'msg': 'Created fork of `%s` as `%s`' % (source_name, fork_name), |
|
52 | 'msg': 'Created fork of `%s` as `%s`' % (source_name, fork_name), | |
52 | 'success': True, |
|
53 | 'success': True, | |
53 | 'task': None, |
|
54 | 'task': None, | |
54 | } |
|
55 | } | |
55 | try: |
|
56 | try: | |
56 | assert_ok(id_, expected, given=response.body) |
|
57 | assert_ok(id_, expected, given=response.body) | |
57 | finally: |
|
58 | finally: | |
58 | fixture.destroy_repo(fork_name) |
|
59 | fixture.destroy_repo(fork_name) | |
59 |
|
60 | |||
60 | def test_api_fork_repo_into_group(self, backend, user_util): |
|
61 | def test_api_fork_repo_into_group(self, backend, user_util): | |
61 | source_name = backend['minimal'].repo_name |
|
62 | source_name = backend['minimal'].repo_name | |
62 | repo_group = user_util.create_repo_group() |
|
63 | repo_group = user_util.create_repo_group() | |
63 | fork_name = '%s/api-repo-fork' % repo_group.group_name |
|
64 | fork_name = '%s/api-repo-fork' % repo_group.group_name | |
64 | id_, params = build_data( |
|
65 | id_, params = build_data( | |
65 | self.apikey, 'fork_repo', |
|
66 | self.apikey, 'fork_repo', | |
66 | repoid=source_name, |
|
67 | repoid=source_name, | |
67 | fork_name=fork_name, |
|
68 | fork_name=fork_name, | |
68 | owner=TEST_USER_ADMIN_LOGIN) |
|
69 | owner=TEST_USER_ADMIN_LOGIN) | |
69 | response = api_call(self.app, params) |
|
70 | response = api_call(self.app, params) | |
70 |
|
71 | |||
71 | ret = { |
|
72 | ret = { | |
72 | 'msg': 'Created fork of `%s` as `%s`' % (source_name, fork_name), |
|
73 | 'msg': 'Created fork of `%s` as `%s`' % (source_name, fork_name), | |
73 | 'success': True, |
|
74 | 'success': True, | |
74 | 'task': None, |
|
75 | 'task': None, | |
75 | } |
|
76 | } | |
76 | expected = ret |
|
77 | expected = ret | |
77 | try: |
|
78 | try: | |
78 | assert_ok(id_, expected, given=response.body) |
|
79 | assert_ok(id_, expected, given=response.body) | |
79 | finally: |
|
80 | finally: | |
80 | fixture.destroy_repo(fork_name) |
|
81 | fixture.destroy_repo(fork_name) | |
81 |
|
82 | |||
82 | def test_api_fork_repo_non_admin(self, backend): |
|
83 | def test_api_fork_repo_non_admin(self, backend): | |
83 | source_name = backend['minimal'].repo_name |
|
84 | source_name = backend['minimal'].repo_name | |
84 | fork_name = backend.new_repo_name() |
|
85 | fork_name = backend.new_repo_name() | |
85 |
|
86 | |||
86 | id_, params = build_data( |
|
87 | id_, params = build_data( | |
87 | self.apikey_regular, 'fork_repo', |
|
88 | self.apikey_regular, 'fork_repo', | |
88 | repoid=source_name, |
|
89 | repoid=source_name, | |
89 | fork_name=fork_name) |
|
90 | fork_name=fork_name) | |
90 | response = api_call(self.app, params) |
|
91 | response = api_call(self.app, params) | |
91 |
|
92 | |||
92 | expected = { |
|
93 | expected = { | |
93 | 'msg': 'Created fork of `%s` as `%s`' % (source_name, fork_name), |
|
94 | 'msg': 'Created fork of `%s` as `%s`' % (source_name, fork_name), | |
94 | 'success': True, |
|
95 | 'success': True, | |
95 | 'task': None, |
|
96 | 'task': None, | |
96 | } |
|
97 | } | |
97 | try: |
|
98 | try: | |
98 | assert_ok(id_, expected, given=response.body) |
|
99 | assert_ok(id_, expected, given=response.body) | |
99 | finally: |
|
100 | finally: | |
100 | fixture.destroy_repo(fork_name) |
|
101 | fixture.destroy_repo(fork_name) | |
101 |
|
102 | |||
|
103 | def test_api_fork_repo_non_admin_into_group_no_permission(self, backend, user_util): | |||
|
104 | source_name = backend['minimal'].repo_name | |||
|
105 | repo_group = user_util.create_repo_group() | |||
|
106 | repo_group_name = repo_group.group_name | |||
|
107 | fork_name = '%s/api-repo-fork' % repo_group_name | |||
|
108 | ||||
|
109 | id_, params = build_data( | |||
|
110 | self.apikey_regular, 'fork_repo', | |||
|
111 | repoid=source_name, | |||
|
112 | fork_name=fork_name) | |||
|
113 | response = api_call(self.app, params) | |||
|
114 | ||||
|
115 | expected = { | |||
|
116 | 'repo_group': 'Repository group `{}` does not exist'.format( | |||
|
117 | repo_group_name)} | |||
|
118 | try: | |||
|
119 | assert_error(id_, expected, given=response.body) | |||
|
120 | finally: | |||
|
121 | fixture.destroy_repo(fork_name) | |||
|
122 | ||||
102 | def test_api_fork_repo_non_admin_into_group(self, backend, user_util): |
|
123 | def test_api_fork_repo_non_admin_into_group(self, backend, user_util): | |
103 | source_name = backend['minimal'].repo_name |
|
124 | source_name = backend['minimal'].repo_name | |
104 | repo_group = user_util.create_repo_group() |
|
125 | repo_group = user_util.create_repo_group() | |
105 | fork_name = '%s/api-repo-fork' % repo_group.group_name |
|
126 | fork_name = '%s/api-repo-fork' % repo_group.group_name | |
106 |
|
127 | |||
|
128 | RepoGroupModel().grant_user_permission( | |||
|
129 | repo_group, self.TEST_USER_LOGIN, 'group.admin') | |||
|
130 | Session().commit() | |||
|
131 | ||||
107 | id_, params = build_data( |
|
132 | id_, params = build_data( | |
108 | self.apikey_regular, 'fork_repo', |
|
133 | self.apikey_regular, 'fork_repo', | |
109 | repoid=source_name, |
|
134 | repoid=source_name, | |
110 | fork_name=fork_name) |
|
135 | fork_name=fork_name) | |
111 | response = api_call(self.app, params) |
|
136 | response = api_call(self.app, params) | |
112 |
|
137 | |||
113 | expected = { |
|
138 | expected = { | |
114 | 'msg': 'Created fork of `%s` as `%s`' % (source_name, fork_name), |
|
139 | 'msg': 'Created fork of `%s` as `%s`' % (source_name, fork_name), | |
115 | 'success': True, |
|
140 | 'success': True, | |
116 | 'task': None, |
|
141 | 'task': None, | |
117 | } |
|
142 | } | |
118 | try: |
|
143 | try: | |
119 | assert_ok(id_, expected, given=response.body) |
|
144 | assert_ok(id_, expected, given=response.body) | |
120 | finally: |
|
145 | finally: | |
121 | fixture.destroy_repo(fork_name) |
|
146 | fixture.destroy_repo(fork_name) | |
122 |
|
147 | |||
123 | def test_api_fork_repo_non_admin_specify_owner(self, backend): |
|
148 | def test_api_fork_repo_non_admin_specify_owner(self, backend): | |
124 | source_name = backend['minimal'].repo_name |
|
149 | source_name = backend['minimal'].repo_name | |
125 | fork_name = backend.new_repo_name() |
|
150 | fork_name = backend.new_repo_name() | |
126 | id_, params = build_data( |
|
151 | id_, params = build_data( | |
127 | self.apikey_regular, 'fork_repo', |
|
152 | self.apikey_regular, 'fork_repo', | |
128 | repoid=source_name, |
|
153 | repoid=source_name, | |
129 | fork_name=fork_name, |
|
154 | fork_name=fork_name, | |
130 | owner=TEST_USER_ADMIN_LOGIN) |
|
155 | owner=TEST_USER_ADMIN_LOGIN) | |
131 | response = api_call(self.app, params) |
|
156 | response = api_call(self.app, params) | |
132 | expected = 'Only RhodeCode admin can specify `owner` param' |
|
157 | expected = 'Only RhodeCode super-admin can specify `owner` param' | |
133 | assert_error(id_, expected, given=response.body) |
|
158 | assert_error(id_, expected, given=response.body) | |
134 |
|
159 | |||
135 |
def test_api_fork_repo_non_admin_no_permission_ |
|
160 | def test_api_fork_repo_non_admin_no_permission_of_source_repo( | |
|
161 | self, backend): | |||
136 | source_name = backend['minimal'].repo_name |
|
162 | source_name = backend['minimal'].repo_name | |
137 | RepoModel().grant_user_permission(repo=source_name, |
|
163 | RepoModel().grant_user_permission(repo=source_name, | |
138 | user=self.TEST_USER_LOGIN, |
|
164 | user=self.TEST_USER_LOGIN, | |
139 | perm='repository.none') |
|
165 | perm='repository.none') | |
140 | fork_name = backend.new_repo_name() |
|
166 | fork_name = backend.new_repo_name() | |
141 | id_, params = build_data( |
|
167 | id_, params = build_data( | |
142 | self.apikey_regular, 'fork_repo', |
|
168 | self.apikey_regular, 'fork_repo', | |
143 | repoid=backend.repo_name, |
|
169 | repoid=backend.repo_name, | |
144 | fork_name=fork_name) |
|
170 | fork_name=fork_name) | |
145 | response = api_call(self.app, params) |
|
171 | response = api_call(self.app, params) | |
146 | expected = 'repository `%s` does not exist' % (backend.repo_name) |
|
172 | expected = 'repository `%s` does not exist' % (backend.repo_name) | |
147 | assert_error(id_, expected, given=response.body) |
|
173 | assert_error(id_, expected, given=response.body) | |
148 |
|
174 | |||
149 | def test_api_fork_repo_non_admin_no_permission_to_fork_to_root_level( |
|
175 | def test_api_fork_repo_non_admin_no_permission_to_fork_to_root_level( | |
150 | self, backend): |
|
176 | self, backend, user_util): | |
|
177 | ||||
|
178 | regular_user = user_util.create_user() | |||
|
179 | regular_user_api_key = regular_user.api_key | |||
|
180 | usr = UserModel().get_by_username(regular_user.username) | |||
|
181 | usr.inherit_default_permissions = False | |||
|
182 | Session().add(usr) | |||
|
183 | UserModel().grant_perm(regular_user.username, 'hg.fork.repository') | |||
|
184 | ||||
151 | source_name = backend['minimal'].repo_name |
|
185 | source_name = backend['minimal'].repo_name | |
|
186 | fork_name = backend.new_repo_name() | |||
|
187 | id_, params = build_data( | |||
|
188 | regular_user_api_key, 'fork_repo', | |||
|
189 | repoid=source_name, | |||
|
190 | fork_name=fork_name) | |||
|
191 | response = api_call(self.app, params) | |||
|
192 | expected = { | |||
|
193 | "repo_name": "You do not have the permission to " | |||
|
194 | "store repositories in the root location."} | |||
|
195 | assert_error(id_, expected, given=response.body) | |||
152 |
|
196 | |||
153 | usr = UserModel().get_by_username(self.TEST_USER_LOGIN) |
|
197 | def test_api_fork_repo_non_admin_no_permission_to_fork( | |
|
198 | self, backend, user_util): | |||
|
199 | ||||
|
200 | regular_user = user_util.create_user() | |||
|
201 | regular_user_api_key = regular_user.api_key | |||
|
202 | usr = UserModel().get_by_username(regular_user.username) | |||
154 | usr.inherit_default_permissions = False |
|
203 | usr.inherit_default_permissions = False | |
155 | Session().add(usr) |
|
204 | Session().add(usr) | |
156 |
|
205 | |||
|
206 | source_name = backend['minimal'].repo_name | |||
157 | fork_name = backend.new_repo_name() |
|
207 | fork_name = backend.new_repo_name() | |
158 | id_, params = build_data( |
|
208 | id_, params = build_data( | |
159 |
|
|
209 | regular_user_api_key, 'fork_repo', | |
160 | repoid=source_name, |
|
210 | repoid=source_name, | |
161 | fork_name=fork_name) |
|
211 | fork_name=fork_name) | |
162 | response = api_call(self.app, params) |
|
212 | response = api_call(self.app, params) | |
|
213 | ||||
163 | expected = "Access was denied to this resource." |
|
214 | expected = "Access was denied to this resource." | |
164 | assert_error(id_, expected, given=response.body) |
|
215 | assert_error(id_, expected, given=response.body) | |
165 |
|
216 | |||
166 | def test_api_fork_repo_unknown_owner(self, backend): |
|
217 | def test_api_fork_repo_unknown_owner(self, backend): | |
167 | source_name = backend['minimal'].repo_name |
|
218 | source_name = backend['minimal'].repo_name | |
168 | fork_name = backend.new_repo_name() |
|
219 | fork_name = backend.new_repo_name() | |
169 | owner = 'i-dont-exist' |
|
220 | owner = 'i-dont-exist' | |
170 | id_, params = build_data( |
|
221 | id_, params = build_data( | |
171 | self.apikey, 'fork_repo', |
|
222 | self.apikey, 'fork_repo', | |
172 | repoid=source_name, |
|
223 | repoid=source_name, | |
173 | fork_name=fork_name, |
|
224 | fork_name=fork_name, | |
174 | owner=owner) |
|
225 | owner=owner) | |
175 | response = api_call(self.app, params) |
|
226 | response = api_call(self.app, params) | |
176 | expected = 'user `%s` does not exist' % (owner,) |
|
227 | expected = 'user `%s` does not exist' % (owner,) | |
177 | assert_error(id_, expected, given=response.body) |
|
228 | assert_error(id_, expected, given=response.body) | |
178 |
|
229 | |||
179 | def test_api_fork_repo_fork_exists(self, backend): |
|
230 | def test_api_fork_repo_fork_exists(self, backend): | |
180 | source_name = backend['minimal'].repo_name |
|
231 | source_name = backend['minimal'].repo_name | |
181 | fork_name = backend.new_repo_name() |
|
232 | fork_name = backend.new_repo_name() | |
182 | fork_repo = fixture.create_fork(source_name, fork_name) |
|
233 | fork_repo = fixture.create_fork(source_name, fork_name) | |
183 |
|
234 | |||
184 | id_, params = build_data( |
|
235 | id_, params = build_data( | |
185 | self.apikey, 'fork_repo', |
|
236 | self.apikey, 'fork_repo', | |
186 | repoid=source_name, |
|
237 | repoid=source_name, | |
187 | fork_name=fork_name, |
|
238 | fork_name=fork_name, | |
188 | owner=TEST_USER_ADMIN_LOGIN) |
|
239 | owner=TEST_USER_ADMIN_LOGIN) | |
189 | response = api_call(self.app, params) |
|
240 | response = api_call(self.app, params) | |
190 |
|
241 | |||
191 | try: |
|
242 | try: | |
192 | expected = "fork `%s` already exist" % (fork_name,) |
|
243 | expected = { | |
|
244 | 'unique_repo_name': 'Repository with name `{}` already exists'.format( | |||
|
245 | fork_name)} | |||
193 | assert_error(id_, expected, given=response.body) |
|
246 | assert_error(id_, expected, given=response.body) | |
194 | finally: |
|
247 | finally: | |
195 | fixture.destroy_repo(fork_repo.repo_name) |
|
248 | fixture.destroy_repo(fork_repo.repo_name) | |
196 |
|
249 | |||
197 | def test_api_fork_repo_repo_exists(self, backend): |
|
250 | def test_api_fork_repo_repo_exists(self, backend): | |
198 | source_name = backend['minimal'].repo_name |
|
251 | source_name = backend['minimal'].repo_name | |
199 | fork_name = source_name |
|
252 | fork_name = source_name | |
200 |
|
253 | |||
201 | id_, params = build_data( |
|
254 | id_, params = build_data( | |
202 | self.apikey, 'fork_repo', |
|
255 | self.apikey, 'fork_repo', | |
203 | repoid=source_name, |
|
256 | repoid=source_name, | |
204 | fork_name=fork_name, |
|
257 | fork_name=fork_name, | |
205 | owner=TEST_USER_ADMIN_LOGIN) |
|
258 | owner=TEST_USER_ADMIN_LOGIN) | |
206 | response = api_call(self.app, params) |
|
259 | response = api_call(self.app, params) | |
207 |
|
260 | |||
208 | expected = "repo `%s` already exist" % (fork_name,) |
|
261 | expected = { | |
|
262 | 'unique_repo_name': 'Repository with name `{}` already exists'.format( | |||
|
263 | fork_name)} | |||
209 | assert_error(id_, expected, given=response.body) |
|
264 | assert_error(id_, expected, given=response.body) | |
210 |
|
265 | |||
211 | @mock.patch.object(RepoModel, 'create_fork', crash) |
|
266 | @mock.patch.object(RepoModel, 'create_fork', crash) | |
212 | def test_api_fork_repo_exception_occurred(self, backend): |
|
267 | def test_api_fork_repo_exception_occurred(self, backend): | |
213 | source_name = backend['minimal'].repo_name |
|
268 | source_name = backend['minimal'].repo_name | |
214 | fork_name = backend.new_repo_name() |
|
269 | fork_name = backend.new_repo_name() | |
215 | id_, params = build_data( |
|
270 | id_, params = build_data( | |
216 | self.apikey, 'fork_repo', |
|
271 | self.apikey, 'fork_repo', | |
217 | repoid=source_name, |
|
272 | repoid=source_name, | |
218 | fork_name=fork_name, |
|
273 | fork_name=fork_name, | |
219 | owner=TEST_USER_ADMIN_LOGIN) |
|
274 | owner=TEST_USER_ADMIN_LOGIN) | |
220 | response = api_call(self.app, params) |
|
275 | response = api_call(self.app, params) | |
221 |
|
276 | |||
222 | expected = 'failed to fork repository `%s` as `%s`' % (source_name, |
|
277 | expected = 'failed to fork repository `%s` as `%s`' % (source_name, | |
223 | fork_name) |
|
278 | fork_name) | |
224 | assert_error(id_, expected, given=response.body) |
|
279 | assert_error(id_, expected, given=response.body) |
@@ -1,163 +1,189 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2016 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2016 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import mock |
|
21 | import mock | |
22 | import pytest |
|
22 | import pytest | |
23 |
|
23 | |||
24 | from rhodecode.model.repo import RepoModel |
|
24 | from rhodecode.model.repo import RepoModel | |
25 | from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN |
|
25 | from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN | |
26 | from rhodecode.api.tests.utils import ( |
|
26 | from rhodecode.api.tests.utils import ( | |
27 | build_data, api_call, assert_error, assert_ok, crash, jsonify) |
|
27 | build_data, api_call, assert_error, assert_ok, crash, jsonify) | |
28 | from rhodecode.tests.fixture import Fixture |
|
28 | from rhodecode.tests.fixture import Fixture | |
29 |
|
29 | |||
30 |
|
30 | |||
31 | fixture = Fixture() |
|
31 | fixture = Fixture() | |
32 |
|
32 | |||
33 | UPDATE_REPO_NAME = 'api_update_me' |
|
33 | UPDATE_REPO_NAME = 'api_update_me' | |
34 |
|
34 | |||
35 | class SAME_AS_UPDATES(object): """ Constant used for tests below """ |
|
35 | ||
|
36 | class SAME_AS_UPDATES(object): | |||
|
37 | """ Constant used for tests below """ | |||
|
38 | ||||
36 |
|
39 | |||
37 | @pytest.mark.usefixtures("testuser_api", "app") |
|
40 | @pytest.mark.usefixtures("testuser_api", "app") | |
38 | class TestApiUpdateRepo(object): |
|
41 | class TestApiUpdateRepo(object): | |
39 |
|
42 | |||
40 | @pytest.mark.parametrize("updates, expected", [ |
|
43 | @pytest.mark.parametrize("updates, expected", [ | |
41 |
({'owner': TEST_USER_REGULAR_LOGIN}, |
|
44 | ({'owner': TEST_USER_REGULAR_LOGIN}, | |
42 | ({'description': 'new description'}, SAME_AS_UPDATES), |
|
45 | SAME_AS_UPDATES), | |
43 | ({'clone_uri': 'http://foo.com/repo'}, SAME_AS_UPDATES), |
|
46 | ||
44 | ({'clone_uri': None}, {'clone_uri': ''}), |
|
47 | ({'description': 'new description'}, | |
45 | ({'clone_uri': ''}, {'clone_uri': ''}), |
|
48 | SAME_AS_UPDATES), | |
46 | ({'landing_rev': 'branch:master'}, {'landing_rev': ['branch','master']}), |
|
49 | ||
47 | ({'enable_statistics': True}, SAME_AS_UPDATES), |
|
50 | ({'clone_uri': 'http://foo.com/repo'}, | |
48 |
|
|
51 | SAME_AS_UPDATES), | |
49 | ({'enable_downloads': True}, SAME_AS_UPDATES), |
|
52 | ||
50 |
({'n |
|
53 | ({'clone_uri': None}, | |
|
54 | {'clone_uri': ''}), | |||
|
55 | ||||
|
56 | ({'clone_uri': ''}, | |||
|
57 | {'clone_uri': ''}), | |||
|
58 | ||||
|
59 | ({'landing_rev': 'rev:tip'}, | |||
|
60 | {'landing_rev': ['rev', 'tip']}), | |||
|
61 | ||||
|
62 | ({'enable_statistics': True}, | |||
|
63 | SAME_AS_UPDATES), | |||
|
64 | ||||
|
65 | ({'enable_locking': True}, | |||
|
66 | SAME_AS_UPDATES), | |||
|
67 | ||||
|
68 | ({'enable_downloads': True}, | |||
|
69 | SAME_AS_UPDATES), | |||
|
70 | ||||
|
71 | ({'repo_name': 'new_repo_name'}, | |||
|
72 | { | |||
51 | 'repo_name': 'new_repo_name', |
|
73 | 'repo_name': 'new_repo_name', | |
52 |
'url': 'http://test.example.com:80/new_repo_name' |
|
74 | 'url': 'http://test.example.com:80/new_repo_name' | |
53 | }), |
|
75 | }), | |
54 | ({'group': 'test_group_for_update'}, { |
|
76 | ||
55 |
|
|
77 | ({'repo_name': 'test_group_for_update/{}'.format(UPDATE_REPO_NAME), | |
56 | 'url': 'http://test.example.com:80/test_group_for_update/%s' % UPDATE_REPO_NAME |
|
78 | '_group': 'test_group_for_update'}, | |
|
79 | { | |||
|
80 | 'repo_name': 'test_group_for_update/{}'.format(UPDATE_REPO_NAME), | |||
|
81 | 'url': 'http://test.example.com:80/test_group_for_update/{}'.format(UPDATE_REPO_NAME) | |||
57 | }), |
|
82 | }), | |
58 | ]) |
|
83 | ]) | |
59 | def test_api_update_repo(self, updates, expected, backend): |
|
84 | def test_api_update_repo(self, updates, expected, backend): | |
60 | repo_name = UPDATE_REPO_NAME |
|
85 | repo_name = UPDATE_REPO_NAME | |
61 | repo = fixture.create_repo(repo_name, repo_type=backend.alias) |
|
86 | repo = fixture.create_repo(repo_name, repo_type=backend.alias) | |
62 | if updates.get('group'): |
|
87 | if updates.get('_group'): | |
63 | fixture.create_repo_group(updates['group']) |
|
88 | fixture.create_repo_group(updates['_group']) | |
64 |
|
89 | |||
65 | expected_api_data = repo.get_api_data(include_secrets=True) |
|
90 | expected_api_data = repo.get_api_data(include_secrets=True) | |
66 | if expected is SAME_AS_UPDATES: |
|
91 | if expected is SAME_AS_UPDATES: | |
67 | expected_api_data.update(updates) |
|
92 | expected_api_data.update(updates) | |
68 | else: |
|
93 | else: | |
69 | expected_api_data.update(expected) |
|
94 | expected_api_data.update(expected) | |
70 |
|
95 | |||
71 |
|
||||
72 | id_, params = build_data( |
|
96 | id_, params = build_data( | |
73 | self.apikey, 'update_repo', repoid=repo_name, **updates) |
|
97 | self.apikey, 'update_repo', repoid=repo_name, **updates) | |
74 | response = api_call(self.app, params) |
|
98 | response = api_call(self.app, params) | |
75 |
|
99 | |||
76 | if updates.get('name'): |
|
100 | if updates.get('repo_name'): | |
77 | repo_name = updates['name'] |
|
101 | repo_name = updates['repo_name'] | |
78 | if updates.get('group'): |
|
|||
79 | repo_name = '/'.join([updates['group'], repo_name]) |
|
|||
80 |
|
102 | |||
81 | try: |
|
103 | try: | |
82 | expected = { |
|
104 | expected = { | |
83 | 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo_name), |
|
105 | 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo_name), | |
84 | 'repository': jsonify(expected_api_data) |
|
106 | 'repository': jsonify(expected_api_data) | |
85 | } |
|
107 | } | |
86 | assert_ok(id_, expected, given=response.body) |
|
108 | assert_ok(id_, expected, given=response.body) | |
87 | finally: |
|
109 | finally: | |
88 | fixture.destroy_repo(repo_name) |
|
110 | fixture.destroy_repo(repo_name) | |
89 | if updates.get('group'): |
|
111 | if updates.get('_group'): | |
90 | fixture.destroy_repo_group(updates['group']) |
|
112 | fixture.destroy_repo_group(updates['_group']) | |
91 |
|
113 | |||
92 | def test_api_update_repo_fork_of_field(self, backend): |
|
114 | def test_api_update_repo_fork_of_field(self, backend): | |
93 | master_repo = backend.create_repo() |
|
115 | master_repo = backend.create_repo() | |
94 | repo = backend.create_repo() |
|
116 | repo = backend.create_repo() | |
95 | updates = { |
|
117 | updates = { | |
96 | 'fork_of': master_repo.repo_name |
|
118 | 'fork_of': master_repo.repo_name | |
97 | } |
|
119 | } | |
98 | expected_api_data = repo.get_api_data(include_secrets=True) |
|
120 | expected_api_data = repo.get_api_data(include_secrets=True) | |
99 | expected_api_data.update(updates) |
|
121 | expected_api_data.update(updates) | |
100 |
|
122 | |||
101 | id_, params = build_data( |
|
123 | id_, params = build_data( | |
102 | self.apikey, 'update_repo', repoid=repo.repo_name, **updates) |
|
124 | self.apikey, 'update_repo', repoid=repo.repo_name, **updates) | |
103 | response = api_call(self.app, params) |
|
125 | response = api_call(self.app, params) | |
104 | expected = { |
|
126 | expected = { | |
105 | 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo.repo_name), |
|
127 | 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo.repo_name), | |
106 | 'repository': jsonify(expected_api_data) |
|
128 | 'repository': jsonify(expected_api_data) | |
107 | } |
|
129 | } | |
108 | assert_ok(id_, expected, given=response.body) |
|
130 | assert_ok(id_, expected, given=response.body) | |
109 | result = response.json['result']['repository'] |
|
131 | result = response.json['result']['repository'] | |
110 | assert result['fork_of'] == master_repo.repo_name |
|
132 | assert result['fork_of'] == master_repo.repo_name | |
111 |
|
133 | |||
112 | def test_api_update_repo_fork_of_not_found(self, backend): |
|
134 | def test_api_update_repo_fork_of_not_found(self, backend): | |
113 | master_repo_name = 'fake-parent-repo' |
|
135 | master_repo_name = 'fake-parent-repo' | |
114 | repo = backend.create_repo() |
|
136 | repo = backend.create_repo() | |
115 | updates = { |
|
137 | updates = { | |
116 | 'fork_of': master_repo_name |
|
138 | 'fork_of': master_repo_name | |
117 | } |
|
139 | } | |
118 | id_, params = build_data( |
|
140 | id_, params = build_data( | |
119 | self.apikey, 'update_repo', repoid=repo.repo_name, **updates) |
|
141 | self.apikey, 'update_repo', repoid=repo.repo_name, **updates) | |
120 | response = api_call(self.app, params) |
|
142 | response = api_call(self.app, params) | |
121 | expected = 'repository `{}` does not exist'.format(master_repo_name) |
|
143 | expected = { | |
|
144 | 'repo_fork_of': 'Fork with id `{}` does not exists'.format( | |||
|
145 | master_repo_name)} | |||
122 | assert_error(id_, expected, given=response.body) |
|
146 | assert_error(id_, expected, given=response.body) | |
123 |
|
147 | |||
124 | def test_api_update_repo_with_repo_group_not_existing(self): |
|
148 | def test_api_update_repo_with_repo_group_not_existing(self): | |
125 | repo_name = 'admin_owned' |
|
149 | repo_name = 'admin_owned' | |
|
150 | fake_repo_group = 'test_group_for_update' | |||
126 | fixture.create_repo(repo_name) |
|
151 | fixture.create_repo(repo_name) | |
127 | updates = {'group': 'test_group_for_update'} |
|
152 | updates = {'repo_name': '{}/{}'.format(fake_repo_group, repo_name)} | |
128 | id_, params = build_data( |
|
153 | id_, params = build_data( | |
129 | self.apikey, 'update_repo', repoid=repo_name, **updates) |
|
154 | self.apikey, 'update_repo', repoid=repo_name, **updates) | |
130 | response = api_call(self.app, params) |
|
155 | response = api_call(self.app, params) | |
131 | try: |
|
156 | try: | |
132 | expected = 'repository group `%s` does not exist' % ( |
|
157 | expected = { | |
133 | updates['group'],) |
|
158 | 'repo_group': 'Repository group `{}` does not exist'.format(fake_repo_group) | |
|
159 | } | |||
134 | assert_error(id_, expected, given=response.body) |
|
160 | assert_error(id_, expected, given=response.body) | |
135 | finally: |
|
161 | finally: | |
136 | fixture.destroy_repo(repo_name) |
|
162 | fixture.destroy_repo(repo_name) | |
137 |
|
163 | |||
138 | def test_api_update_repo_regular_user_not_allowed(self): |
|
164 | def test_api_update_repo_regular_user_not_allowed(self): | |
139 | repo_name = 'admin_owned' |
|
165 | repo_name = 'admin_owned' | |
140 | fixture.create_repo(repo_name) |
|
166 | fixture.create_repo(repo_name) | |
141 | updates = {'active': False} |
|
167 | updates = {'active': False} | |
142 | id_, params = build_data( |
|
168 | id_, params = build_data( | |
143 | self.apikey_regular, 'update_repo', repoid=repo_name, **updates) |
|
169 | self.apikey_regular, 'update_repo', repoid=repo_name, **updates) | |
144 | response = api_call(self.app, params) |
|
170 | response = api_call(self.app, params) | |
145 | try: |
|
171 | try: | |
146 | expected = 'repository `%s` does not exist' % (repo_name,) |
|
172 | expected = 'repository `%s` does not exist' % (repo_name,) | |
147 | assert_error(id_, expected, given=response.body) |
|
173 | assert_error(id_, expected, given=response.body) | |
148 | finally: |
|
174 | finally: | |
149 | fixture.destroy_repo(repo_name) |
|
175 | fixture.destroy_repo(repo_name) | |
150 |
|
176 | |||
151 | @mock.patch.object(RepoModel, 'update', crash) |
|
177 | @mock.patch.object(RepoModel, 'update', crash) | |
152 | def test_api_update_repo_exception_occurred(self, backend): |
|
178 | def test_api_update_repo_exception_occurred(self, backend): | |
153 | repo_name = UPDATE_REPO_NAME |
|
179 | repo_name = UPDATE_REPO_NAME | |
154 | fixture.create_repo(repo_name, repo_type=backend.alias) |
|
180 | fixture.create_repo(repo_name, repo_type=backend.alias) | |
155 | id_, params = build_data( |
|
181 | id_, params = build_data( | |
156 | self.apikey, 'update_repo', repoid=repo_name, |
|
182 | self.apikey, 'update_repo', repoid=repo_name, | |
157 | owner=TEST_USER_ADMIN_LOGIN,) |
|
183 | owner=TEST_USER_ADMIN_LOGIN,) | |
158 | response = api_call(self.app, params) |
|
184 | response = api_call(self.app, params) | |
159 | try: |
|
185 | try: | |
160 | expected = 'failed to update repo `%s`' % (repo_name,) |
|
186 | expected = 'failed to update repo `%s`' % (repo_name,) | |
161 | assert_error(id_, expected, given=response.body) |
|
187 | assert_error(id_, expected, given=response.body) | |
162 | finally: |
|
188 | finally: | |
163 | fixture.destroy_repo(repo_name) |
|
189 | fixture.destroy_repo(repo_name) |
@@ -1,1888 +1,1918 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2011-2016 RhodeCode GmbH |
|
3 | # Copyright (C) 2011-2016 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import logging |
|
21 | import logging | |
22 | import time |
|
22 | import time | |
23 |
|
23 | |||
24 |
import co |
|
24 | import rhodecode | |
25 |
|
25 | from rhodecode.api import ( | ||
26 | from rhodecode import BACKENDS |
|
26 | jsonrpc_method, JSONRPCError, JSONRPCForbidden, JSONRPCValidationError) | |
27 | from rhodecode.api import jsonrpc_method, JSONRPCError, JSONRPCForbidden, json |
|
|||
28 | from rhodecode.api.utils import ( |
|
27 | from rhodecode.api.utils import ( | |
29 | has_superadmin_permission, Optional, OAttr, get_repo_or_error, |
|
28 | has_superadmin_permission, Optional, OAttr, get_repo_or_error, | |
30 |
get_user_group_or_error, get_user_or_error, |
|
29 | get_user_group_or_error, get_user_or_error, validate_repo_permissions, | |
31 | get_perm_or_error, store_update, get_repo_group_or_error, parse_args, |
|
30 | get_perm_or_error, parse_args, get_origin, build_commit_data, | |
32 | get_origin, build_commit_data) |
|
31 | validate_set_owner_permissions) | |
33 | from rhodecode.lib.auth import ( |
|
32 | from rhodecode.lib.auth import HasPermissionAnyApi, HasUserGroupPermissionAnyApi | |
34 | HasPermissionAnyApi, HasRepoGroupPermissionAnyApi, |
|
|||
35 | HasUserGroupPermissionAnyApi) |
|
|||
36 | from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError |
|
33 | from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError | |
37 | from rhodecode.lib.utils import map_groups |
|
|||
38 | from rhodecode.lib.utils2 import str2bool, time_to_datetime |
|
34 | from rhodecode.lib.utils2 import str2bool, time_to_datetime | |
|
35 | from rhodecode.lib.ext_json import json | |||
39 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
36 | from rhodecode.model.changeset_status import ChangesetStatusModel | |
40 | from rhodecode.model.comment import ChangesetCommentsModel |
|
37 | from rhodecode.model.comment import ChangesetCommentsModel | |
41 | from rhodecode.model.db import ( |
|
38 | from rhodecode.model.db import ( | |
42 | Session, ChangesetStatus, RepositoryField, Repository) |
|
39 | Session, ChangesetStatus, RepositoryField, Repository) | |
43 | from rhodecode.model.repo import RepoModel |
|
40 | from rhodecode.model.repo import RepoModel | |
44 | from rhodecode.model.repo_group import RepoGroupModel |
|
|||
45 | from rhodecode.model.scm import ScmModel, RepoList |
|
41 | from rhodecode.model.scm import ScmModel, RepoList | |
46 | from rhodecode.model.settings import SettingsModel, VcsSettingsModel |
|
42 | from rhodecode.model.settings import SettingsModel, VcsSettingsModel | |
|
43 | from rhodecode.model import validation_schema | |||
47 | from rhodecode.model.validation_schema.schemas import repo_schema |
|
44 | from rhodecode.model.validation_schema.schemas import repo_schema | |
48 |
|
45 | |||
49 | log = logging.getLogger(__name__) |
|
46 | log = logging.getLogger(__name__) | |
50 |
|
47 | |||
51 |
|
48 | |||
52 | @jsonrpc_method() |
|
49 | @jsonrpc_method() | |
53 | def get_repo(request, apiuser, repoid, cache=Optional(True)): |
|
50 | def get_repo(request, apiuser, repoid, cache=Optional(True)): | |
54 | """ |
|
51 | """ | |
55 | Gets an existing repository by its name or repository_id. |
|
52 | Gets an existing repository by its name or repository_id. | |
56 |
|
53 | |||
57 | The members section so the output returns users groups or users |
|
54 | The members section so the output returns users groups or users | |
58 | associated with that repository. |
|
55 | associated with that repository. | |
59 |
|
56 | |||
60 | This command can only be run using an |authtoken| with admin rights, |
|
57 | This command can only be run using an |authtoken| with admin rights, | |
61 | or users with at least read rights to the |repo|. |
|
58 | or users with at least read rights to the |repo|. | |
62 |
|
59 | |||
63 | :param apiuser: This is filled automatically from the |authtoken|. |
|
60 | :param apiuser: This is filled automatically from the |authtoken|. | |
64 | :type apiuser: AuthUser |
|
61 | :type apiuser: AuthUser | |
65 | :param repoid: The repository name or repository id. |
|
62 | :param repoid: The repository name or repository id. | |
66 | :type repoid: str or int |
|
63 | :type repoid: str or int | |
67 | :param cache: use the cached value for last changeset |
|
64 | :param cache: use the cached value for last changeset | |
68 | :type: cache: Optional(bool) |
|
65 | :type: cache: Optional(bool) | |
69 |
|
66 | |||
70 | Example output: |
|
67 | Example output: | |
71 |
|
68 | |||
72 | .. code-block:: bash |
|
69 | .. code-block:: bash | |
73 |
|
70 | |||
74 | { |
|
71 | { | |
75 | "error": null, |
|
72 | "error": null, | |
76 | "id": <repo_id>, |
|
73 | "id": <repo_id>, | |
77 | "result": { |
|
74 | "result": { | |
78 | "clone_uri": null, |
|
75 | "clone_uri": null, | |
79 | "created_on": "timestamp", |
|
76 | "created_on": "timestamp", | |
80 | "description": "repo description", |
|
77 | "description": "repo description", | |
81 | "enable_downloads": false, |
|
78 | "enable_downloads": false, | |
82 | "enable_locking": false, |
|
79 | "enable_locking": false, | |
83 | "enable_statistics": false, |
|
80 | "enable_statistics": false, | |
84 | "followers": [ |
|
81 | "followers": [ | |
85 | { |
|
82 | { | |
86 | "active": true, |
|
83 | "active": true, | |
87 | "admin": false, |
|
84 | "admin": false, | |
88 | "api_key": "****************************************", |
|
85 | "api_key": "****************************************", | |
89 | "api_keys": [ |
|
86 | "api_keys": [ | |
90 | "****************************************" |
|
87 | "****************************************" | |
91 | ], |
|
88 | ], | |
92 | "email": "user@example.com", |
|
89 | "email": "user@example.com", | |
93 | "emails": [ |
|
90 | "emails": [ | |
94 | "user@example.com" |
|
91 | "user@example.com" | |
95 | ], |
|
92 | ], | |
96 | "extern_name": "rhodecode", |
|
93 | "extern_name": "rhodecode", | |
97 | "extern_type": "rhodecode", |
|
94 | "extern_type": "rhodecode", | |
98 | "firstname": "username", |
|
95 | "firstname": "username", | |
99 | "ip_addresses": [], |
|
96 | "ip_addresses": [], | |
100 | "language": null, |
|
97 | "language": null, | |
101 | "last_login": "2015-09-16T17:16:35.854", |
|
98 | "last_login": "2015-09-16T17:16:35.854", | |
102 | "lastname": "surname", |
|
99 | "lastname": "surname", | |
103 | "user_id": <user_id>, |
|
100 | "user_id": <user_id>, | |
104 | "username": "name" |
|
101 | "username": "name" | |
105 | } |
|
102 | } | |
106 | ], |
|
103 | ], | |
107 | "fork_of": "parent-repo", |
|
104 | "fork_of": "parent-repo", | |
108 | "landing_rev": [ |
|
105 | "landing_rev": [ | |
109 | "rev", |
|
106 | "rev", | |
110 | "tip" |
|
107 | "tip" | |
111 | ], |
|
108 | ], | |
112 | "last_changeset": { |
|
109 | "last_changeset": { | |
113 | "author": "User <user@example.com>", |
|
110 | "author": "User <user@example.com>", | |
114 | "branch": "default", |
|
111 | "branch": "default", | |
115 | "date": "timestamp", |
|
112 | "date": "timestamp", | |
116 | "message": "last commit message", |
|
113 | "message": "last commit message", | |
117 | "parents": [ |
|
114 | "parents": [ | |
118 | { |
|
115 | { | |
119 | "raw_id": "commit-id" |
|
116 | "raw_id": "commit-id" | |
120 | } |
|
117 | } | |
121 | ], |
|
118 | ], | |
122 | "raw_id": "commit-id", |
|
119 | "raw_id": "commit-id", | |
123 | "revision": <revision number>, |
|
120 | "revision": <revision number>, | |
124 | "short_id": "short id" |
|
121 | "short_id": "short id" | |
125 | }, |
|
122 | }, | |
126 | "lock_reason": null, |
|
123 | "lock_reason": null, | |
127 | "locked_by": null, |
|
124 | "locked_by": null, | |
128 | "locked_date": null, |
|
125 | "locked_date": null, | |
129 | "members": [ |
|
126 | "members": [ | |
130 | { |
|
127 | { | |
131 | "name": "super-admin-name", |
|
128 | "name": "super-admin-name", | |
132 | "origin": "super-admin", |
|
129 | "origin": "super-admin", | |
133 | "permission": "repository.admin", |
|
130 | "permission": "repository.admin", | |
134 | "type": "user" |
|
131 | "type": "user" | |
135 | }, |
|
132 | }, | |
136 | { |
|
133 | { | |
137 | "name": "owner-name", |
|
134 | "name": "owner-name", | |
138 | "origin": "owner", |
|
135 | "origin": "owner", | |
139 | "permission": "repository.admin", |
|
136 | "permission": "repository.admin", | |
140 | "type": "user" |
|
137 | "type": "user" | |
141 | }, |
|
138 | }, | |
142 | { |
|
139 | { | |
143 | "name": "user-group-name", |
|
140 | "name": "user-group-name", | |
144 | "origin": "permission", |
|
141 | "origin": "permission", | |
145 | "permission": "repository.write", |
|
142 | "permission": "repository.write", | |
146 | "type": "user_group" |
|
143 | "type": "user_group" | |
147 | } |
|
144 | } | |
148 | ], |
|
145 | ], | |
149 | "owner": "owner-name", |
|
146 | "owner": "owner-name", | |
150 | "permissions": [ |
|
147 | "permissions": [ | |
151 | { |
|
148 | { | |
152 | "name": "super-admin-name", |
|
149 | "name": "super-admin-name", | |
153 | "origin": "super-admin", |
|
150 | "origin": "super-admin", | |
154 | "permission": "repository.admin", |
|
151 | "permission": "repository.admin", | |
155 | "type": "user" |
|
152 | "type": "user" | |
156 | }, |
|
153 | }, | |
157 | { |
|
154 | { | |
158 | "name": "owner-name", |
|
155 | "name": "owner-name", | |
159 | "origin": "owner", |
|
156 | "origin": "owner", | |
160 | "permission": "repository.admin", |
|
157 | "permission": "repository.admin", | |
161 | "type": "user" |
|
158 | "type": "user" | |
162 | }, |
|
159 | }, | |
163 | { |
|
160 | { | |
164 | "name": "user-group-name", |
|
161 | "name": "user-group-name", | |
165 | "origin": "permission", |
|
162 | "origin": "permission", | |
166 | "permission": "repository.write", |
|
163 | "permission": "repository.write", | |
167 | "type": "user_group" |
|
164 | "type": "user_group" | |
168 | } |
|
165 | } | |
169 | ], |
|
166 | ], | |
170 | "private": true, |
|
167 | "private": true, | |
171 | "repo_id": 676, |
|
168 | "repo_id": 676, | |
172 | "repo_name": "user-group/repo-name", |
|
169 | "repo_name": "user-group/repo-name", | |
173 | "repo_type": "hg" |
|
170 | "repo_type": "hg" | |
174 | } |
|
171 | } | |
175 | } |
|
172 | } | |
176 | """ |
|
173 | """ | |
177 |
|
174 | |||
178 | repo = get_repo_or_error(repoid) |
|
175 | repo = get_repo_or_error(repoid) | |
179 | cache = Optional.extract(cache) |
|
176 | cache = Optional.extract(cache) | |
|
177 | ||||
180 | include_secrets = False |
|
178 | include_secrets = False | |
181 | if has_superadmin_permission(apiuser): |
|
179 | if has_superadmin_permission(apiuser): | |
182 | include_secrets = True |
|
180 | include_secrets = True | |
183 | else: |
|
181 | else: | |
184 | # check if we have at least read permission for this repo ! |
|
182 | # check if we have at least read permission for this repo ! | |
185 | _perms = ( |
|
183 | _perms = ( | |
186 | 'repository.admin', 'repository.write', 'repository.read',) |
|
184 | 'repository.admin', 'repository.write', 'repository.read',) | |
187 |
|
|
185 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
188 |
|
186 | |||
189 | permissions = [] |
|
187 | permissions = [] | |
190 | for _user in repo.permissions(): |
|
188 | for _user in repo.permissions(): | |
191 | user_data = { |
|
189 | user_data = { | |
192 | 'name': _user.username, |
|
190 | 'name': _user.username, | |
193 | 'permission': _user.permission, |
|
191 | 'permission': _user.permission, | |
194 | 'origin': get_origin(_user), |
|
192 | 'origin': get_origin(_user), | |
195 | 'type': "user", |
|
193 | 'type': "user", | |
196 | } |
|
194 | } | |
197 | permissions.append(user_data) |
|
195 | permissions.append(user_data) | |
198 |
|
196 | |||
199 | for _user_group in repo.permission_user_groups(): |
|
197 | for _user_group in repo.permission_user_groups(): | |
200 | user_group_data = { |
|
198 | user_group_data = { | |
201 | 'name': _user_group.users_group_name, |
|
199 | 'name': _user_group.users_group_name, | |
202 | 'permission': _user_group.permission, |
|
200 | 'permission': _user_group.permission, | |
203 | 'origin': get_origin(_user_group), |
|
201 | 'origin': get_origin(_user_group), | |
204 | 'type': "user_group", |
|
202 | 'type': "user_group", | |
205 | } |
|
203 | } | |
206 | permissions.append(user_group_data) |
|
204 | permissions.append(user_group_data) | |
207 |
|
205 | |||
208 | following_users = [ |
|
206 | following_users = [ | |
209 | user.user.get_api_data(include_secrets=include_secrets) |
|
207 | user.user.get_api_data(include_secrets=include_secrets) | |
210 | for user in repo.followers] |
|
208 | for user in repo.followers] | |
211 |
|
209 | |||
212 | if not cache: |
|
210 | if not cache: | |
213 | repo.update_commit_cache() |
|
211 | repo.update_commit_cache() | |
214 | data = repo.get_api_data(include_secrets=include_secrets) |
|
212 | data = repo.get_api_data(include_secrets=include_secrets) | |
215 | data['members'] = permissions # TODO: this should be deprecated soon |
|
213 | data['members'] = permissions # TODO: this should be deprecated soon | |
216 | data['permissions'] = permissions |
|
214 | data['permissions'] = permissions | |
217 | data['followers'] = following_users |
|
215 | data['followers'] = following_users | |
218 | return data |
|
216 | return data | |
219 |
|
217 | |||
220 |
|
218 | |||
221 | @jsonrpc_method() |
|
219 | @jsonrpc_method() | |
222 | def get_repos(request, apiuser): |
|
220 | def get_repos(request, apiuser): | |
223 | """ |
|
221 | """ | |
224 | Lists all existing repositories. |
|
222 | Lists all existing repositories. | |
225 |
|
223 | |||
226 | This command can only be run using an |authtoken| with admin rights, |
|
224 | This command can only be run using an |authtoken| with admin rights, | |
227 | or users with at least read rights to |repos|. |
|
225 | or users with at least read rights to |repos|. | |
228 |
|
226 | |||
229 | :param apiuser: This is filled automatically from the |authtoken|. |
|
227 | :param apiuser: This is filled automatically from the |authtoken|. | |
230 | :type apiuser: AuthUser |
|
228 | :type apiuser: AuthUser | |
231 |
|
229 | |||
232 | Example output: |
|
230 | Example output: | |
233 |
|
231 | |||
234 | .. code-block:: bash |
|
232 | .. code-block:: bash | |
235 |
|
233 | |||
236 | id : <id_given_in_input> |
|
234 | id : <id_given_in_input> | |
237 | result: [ |
|
235 | result: [ | |
238 | { |
|
236 | { | |
239 | "repo_id" : "<repo_id>", |
|
237 | "repo_id" : "<repo_id>", | |
240 | "repo_name" : "<reponame>" |
|
238 | "repo_name" : "<reponame>" | |
241 | "repo_type" : "<repo_type>", |
|
239 | "repo_type" : "<repo_type>", | |
242 | "clone_uri" : "<clone_uri>", |
|
240 | "clone_uri" : "<clone_uri>", | |
243 | "private": : "<bool>", |
|
241 | "private": : "<bool>", | |
244 | "created_on" : "<datetimecreated>", |
|
242 | "created_on" : "<datetimecreated>", | |
245 | "description" : "<description>", |
|
243 | "description" : "<description>", | |
246 | "landing_rev": "<landing_rev>", |
|
244 | "landing_rev": "<landing_rev>", | |
247 | "owner": "<repo_owner>", |
|
245 | "owner": "<repo_owner>", | |
248 | "fork_of": "<name_of_fork_parent>", |
|
246 | "fork_of": "<name_of_fork_parent>", | |
249 | "enable_downloads": "<bool>", |
|
247 | "enable_downloads": "<bool>", | |
250 | "enable_locking": "<bool>", |
|
248 | "enable_locking": "<bool>", | |
251 | "enable_statistics": "<bool>", |
|
249 | "enable_statistics": "<bool>", | |
252 | }, |
|
250 | }, | |
253 | ... |
|
251 | ... | |
254 | ] |
|
252 | ] | |
255 | error: null |
|
253 | error: null | |
256 | """ |
|
254 | """ | |
257 |
|
255 | |||
258 | include_secrets = has_superadmin_permission(apiuser) |
|
256 | include_secrets = has_superadmin_permission(apiuser) | |
259 | _perms = ('repository.read', 'repository.write', 'repository.admin',) |
|
257 | _perms = ('repository.read', 'repository.write', 'repository.admin',) | |
260 | extras = {'user': apiuser} |
|
258 | extras = {'user': apiuser} | |
261 |
|
259 | |||
262 | repo_list = RepoList( |
|
260 | repo_list = RepoList( | |
263 | RepoModel().get_all(), perm_set=_perms, extra_kwargs=extras) |
|
261 | RepoModel().get_all(), perm_set=_perms, extra_kwargs=extras) | |
264 | return [repo.get_api_data(include_secrets=include_secrets) |
|
262 | return [repo.get_api_data(include_secrets=include_secrets) | |
265 | for repo in repo_list] |
|
263 | for repo in repo_list] | |
266 |
|
264 | |||
267 |
|
265 | |||
268 | @jsonrpc_method() |
|
266 | @jsonrpc_method() | |
269 | def get_repo_changeset(request, apiuser, repoid, revision, |
|
267 | def get_repo_changeset(request, apiuser, repoid, revision, | |
270 | details=Optional('basic')): |
|
268 | details=Optional('basic')): | |
271 | """ |
|
269 | """ | |
272 | Returns information about a changeset. |
|
270 | Returns information about a changeset. | |
273 |
|
271 | |||
274 | Additionally parameters define the amount of details returned by |
|
272 | Additionally parameters define the amount of details returned by | |
275 | this function. |
|
273 | this function. | |
276 |
|
274 | |||
277 | This command can only be run using an |authtoken| with admin rights, |
|
275 | This command can only be run using an |authtoken| with admin rights, | |
278 | or users with at least read rights to the |repo|. |
|
276 | or users with at least read rights to the |repo|. | |
279 |
|
277 | |||
280 | :param apiuser: This is filled automatically from the |authtoken|. |
|
278 | :param apiuser: This is filled automatically from the |authtoken|. | |
281 | :type apiuser: AuthUser |
|
279 | :type apiuser: AuthUser | |
282 | :param repoid: The repository name or repository id |
|
280 | :param repoid: The repository name or repository id | |
283 | :type repoid: str or int |
|
281 | :type repoid: str or int | |
284 | :param revision: revision for which listing should be done |
|
282 | :param revision: revision for which listing should be done | |
285 | :type revision: str |
|
283 | :type revision: str | |
286 | :param details: details can be 'basic|extended|full' full gives diff |
|
284 | :param details: details can be 'basic|extended|full' full gives diff | |
287 | info details like the diff itself, and number of changed files etc. |
|
285 | info details like the diff itself, and number of changed files etc. | |
288 | :type details: Optional(str) |
|
286 | :type details: Optional(str) | |
289 |
|
287 | |||
290 | """ |
|
288 | """ | |
291 | repo = get_repo_or_error(repoid) |
|
289 | repo = get_repo_or_error(repoid) | |
292 | if not has_superadmin_permission(apiuser): |
|
290 | if not has_superadmin_permission(apiuser): | |
293 | _perms = ( |
|
291 | _perms = ( | |
294 | 'repository.admin', 'repository.write', 'repository.read',) |
|
292 | 'repository.admin', 'repository.write', 'repository.read',) | |
295 |
|
|
293 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
296 |
|
294 | |||
297 | changes_details = Optional.extract(details) |
|
295 | changes_details = Optional.extract(details) | |
298 | _changes_details_types = ['basic', 'extended', 'full'] |
|
296 | _changes_details_types = ['basic', 'extended', 'full'] | |
299 | if changes_details not in _changes_details_types: |
|
297 | if changes_details not in _changes_details_types: | |
300 | raise JSONRPCError( |
|
298 | raise JSONRPCError( | |
301 | 'ret_type must be one of %s' % ( |
|
299 | 'ret_type must be one of %s' % ( | |
302 | ','.join(_changes_details_types))) |
|
300 | ','.join(_changes_details_types))) | |
303 |
|
301 | |||
304 | pre_load = ['author', 'branch', 'date', 'message', 'parents', |
|
302 | pre_load = ['author', 'branch', 'date', 'message', 'parents', | |
305 | 'status', '_commit', '_file_paths'] |
|
303 | 'status', '_commit', '_file_paths'] | |
306 |
|
304 | |||
307 | try: |
|
305 | try: | |
308 | cs = repo.get_commit(commit_id=revision, pre_load=pre_load) |
|
306 | cs = repo.get_commit(commit_id=revision, pre_load=pre_load) | |
309 | except TypeError as e: |
|
307 | except TypeError as e: | |
310 | raise JSONRPCError(e.message) |
|
308 | raise JSONRPCError(e.message) | |
311 | _cs_json = cs.__json__() |
|
309 | _cs_json = cs.__json__() | |
312 | _cs_json['diff'] = build_commit_data(cs, changes_details) |
|
310 | _cs_json['diff'] = build_commit_data(cs, changes_details) | |
313 | if changes_details == 'full': |
|
311 | if changes_details == 'full': | |
314 | _cs_json['refs'] = { |
|
312 | _cs_json['refs'] = { | |
315 | 'branches': [cs.branch], |
|
313 | 'branches': [cs.branch], | |
316 | 'bookmarks': getattr(cs, 'bookmarks', []), |
|
314 | 'bookmarks': getattr(cs, 'bookmarks', []), | |
317 | 'tags': cs.tags |
|
315 | 'tags': cs.tags | |
318 | } |
|
316 | } | |
319 | return _cs_json |
|
317 | return _cs_json | |
320 |
|
318 | |||
321 |
|
319 | |||
322 | @jsonrpc_method() |
|
320 | @jsonrpc_method() | |
323 | def get_repo_changesets(request, apiuser, repoid, start_rev, limit, |
|
321 | def get_repo_changesets(request, apiuser, repoid, start_rev, limit, | |
324 | details=Optional('basic')): |
|
322 | details=Optional('basic')): | |
325 | """ |
|
323 | """ | |
326 | Returns a set of commits limited by the number starting |
|
324 | Returns a set of commits limited by the number starting | |
327 | from the `start_rev` option. |
|
325 | from the `start_rev` option. | |
328 |
|
326 | |||
329 | Additional parameters define the amount of details returned by this |
|
327 | Additional parameters define the amount of details returned by this | |
330 | function. |
|
328 | function. | |
331 |
|
329 | |||
332 | This command can only be run using an |authtoken| with admin rights, |
|
330 | This command can only be run using an |authtoken| with admin rights, | |
333 | or users with at least read rights to |repos|. |
|
331 | or users with at least read rights to |repos|. | |
334 |
|
332 | |||
335 | :param apiuser: This is filled automatically from the |authtoken|. |
|
333 | :param apiuser: This is filled automatically from the |authtoken|. | |
336 | :type apiuser: AuthUser |
|
334 | :type apiuser: AuthUser | |
337 | :param repoid: The repository name or repository ID. |
|
335 | :param repoid: The repository name or repository ID. | |
338 | :type repoid: str or int |
|
336 | :type repoid: str or int | |
339 | :param start_rev: The starting revision from where to get changesets. |
|
337 | :param start_rev: The starting revision from where to get changesets. | |
340 | :type start_rev: str |
|
338 | :type start_rev: str | |
341 | :param limit: Limit the number of commits to this amount |
|
339 | :param limit: Limit the number of commits to this amount | |
342 | :type limit: str or int |
|
340 | :type limit: str or int | |
343 | :param details: Set the level of detail returned. Valid option are: |
|
341 | :param details: Set the level of detail returned. Valid option are: | |
344 | ``basic``, ``extended`` and ``full``. |
|
342 | ``basic``, ``extended`` and ``full``. | |
345 | :type details: Optional(str) |
|
343 | :type details: Optional(str) | |
346 |
|
344 | |||
347 | .. note:: |
|
345 | .. note:: | |
348 |
|
346 | |||
349 | Setting the parameter `details` to the value ``full`` is extensive |
|
347 | Setting the parameter `details` to the value ``full`` is extensive | |
350 | and returns details like the diff itself, and the number |
|
348 | and returns details like the diff itself, and the number | |
351 | of changed files. |
|
349 | of changed files. | |
352 |
|
350 | |||
353 | """ |
|
351 | """ | |
354 | repo = get_repo_or_error(repoid) |
|
352 | repo = get_repo_or_error(repoid) | |
355 | if not has_superadmin_permission(apiuser): |
|
353 | if not has_superadmin_permission(apiuser): | |
356 | _perms = ( |
|
354 | _perms = ( | |
357 | 'repository.admin', 'repository.write', 'repository.read',) |
|
355 | 'repository.admin', 'repository.write', 'repository.read',) | |
358 |
|
|
356 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
359 |
|
357 | |||
360 | changes_details = Optional.extract(details) |
|
358 | changes_details = Optional.extract(details) | |
361 | _changes_details_types = ['basic', 'extended', 'full'] |
|
359 | _changes_details_types = ['basic', 'extended', 'full'] | |
362 | if changes_details not in _changes_details_types: |
|
360 | if changes_details not in _changes_details_types: | |
363 | raise JSONRPCError( |
|
361 | raise JSONRPCError( | |
364 | 'ret_type must be one of %s' % ( |
|
362 | 'ret_type must be one of %s' % ( | |
365 | ','.join(_changes_details_types))) |
|
363 | ','.join(_changes_details_types))) | |
366 |
|
364 | |||
367 | limit = int(limit) |
|
365 | limit = int(limit) | |
368 | pre_load = ['author', 'branch', 'date', 'message', 'parents', |
|
366 | pre_load = ['author', 'branch', 'date', 'message', 'parents', | |
369 | 'status', '_commit', '_file_paths'] |
|
367 | 'status', '_commit', '_file_paths'] | |
370 |
|
368 | |||
371 | vcs_repo = repo.scm_instance() |
|
369 | vcs_repo = repo.scm_instance() | |
372 | # SVN needs a special case to distinguish its index and commit id |
|
370 | # SVN needs a special case to distinguish its index and commit id | |
373 | if vcs_repo and vcs_repo.alias == 'svn' and (start_rev == '0'): |
|
371 | if vcs_repo and vcs_repo.alias == 'svn' and (start_rev == '0'): | |
374 | start_rev = vcs_repo.commit_ids[0] |
|
372 | start_rev = vcs_repo.commit_ids[0] | |
375 |
|
373 | |||
376 | try: |
|
374 | try: | |
377 | commits = vcs_repo.get_commits( |
|
375 | commits = vcs_repo.get_commits( | |
378 | start_id=start_rev, pre_load=pre_load) |
|
376 | start_id=start_rev, pre_load=pre_load) | |
379 | except TypeError as e: |
|
377 | except TypeError as e: | |
380 | raise JSONRPCError(e.message) |
|
378 | raise JSONRPCError(e.message) | |
381 | except Exception: |
|
379 | except Exception: | |
382 | log.exception('Fetching of commits failed') |
|
380 | log.exception('Fetching of commits failed') | |
383 | raise JSONRPCError('Error occurred during commit fetching') |
|
381 | raise JSONRPCError('Error occurred during commit fetching') | |
384 |
|
382 | |||
385 | ret = [] |
|
383 | ret = [] | |
386 | for cnt, commit in enumerate(commits): |
|
384 | for cnt, commit in enumerate(commits): | |
387 | if cnt >= limit != -1: |
|
385 | if cnt >= limit != -1: | |
388 | break |
|
386 | break | |
389 | _cs_json = commit.__json__() |
|
387 | _cs_json = commit.__json__() | |
390 | _cs_json['diff'] = build_commit_data(commit, changes_details) |
|
388 | _cs_json['diff'] = build_commit_data(commit, changes_details) | |
391 | if changes_details == 'full': |
|
389 | if changes_details == 'full': | |
392 | _cs_json['refs'] = { |
|
390 | _cs_json['refs'] = { | |
393 | 'branches': [commit.branch], |
|
391 | 'branches': [commit.branch], | |
394 | 'bookmarks': getattr(commit, 'bookmarks', []), |
|
392 | 'bookmarks': getattr(commit, 'bookmarks', []), | |
395 | 'tags': commit.tags |
|
393 | 'tags': commit.tags | |
396 | } |
|
394 | } | |
397 | ret.append(_cs_json) |
|
395 | ret.append(_cs_json) | |
398 | return ret |
|
396 | return ret | |
399 |
|
397 | |||
400 |
|
398 | |||
401 | @jsonrpc_method() |
|
399 | @jsonrpc_method() | |
402 | def get_repo_nodes(request, apiuser, repoid, revision, root_path, |
|
400 | def get_repo_nodes(request, apiuser, repoid, revision, root_path, | |
403 | ret_type=Optional('all'), details=Optional('basic'), |
|
401 | ret_type=Optional('all'), details=Optional('basic'), | |
404 | max_file_bytes=Optional(None)): |
|
402 | max_file_bytes=Optional(None)): | |
405 | """ |
|
403 | """ | |
406 | Returns a list of nodes and children in a flat list for a given |
|
404 | Returns a list of nodes and children in a flat list for a given | |
407 | path at given revision. |
|
405 | path at given revision. | |
408 |
|
406 | |||
409 | It's possible to specify ret_type to show only `files` or `dirs`. |
|
407 | It's possible to specify ret_type to show only `files` or `dirs`. | |
410 |
|
408 | |||
411 | This command can only be run using an |authtoken| with admin rights, |
|
409 | This command can only be run using an |authtoken| with admin rights, | |
412 | or users with at least read rights to |repos|. |
|
410 | or users with at least read rights to |repos|. | |
413 |
|
411 | |||
414 | :param apiuser: This is filled automatically from the |authtoken|. |
|
412 | :param apiuser: This is filled automatically from the |authtoken|. | |
415 | :type apiuser: AuthUser |
|
413 | :type apiuser: AuthUser | |
416 | :param repoid: The repository name or repository ID. |
|
414 | :param repoid: The repository name or repository ID. | |
417 | :type repoid: str or int |
|
415 | :type repoid: str or int | |
418 | :param revision: The revision for which listing should be done. |
|
416 | :param revision: The revision for which listing should be done. | |
419 | :type revision: str |
|
417 | :type revision: str | |
420 | :param root_path: The path from which to start displaying. |
|
418 | :param root_path: The path from which to start displaying. | |
421 | :type root_path: str |
|
419 | :type root_path: str | |
422 | :param ret_type: Set the return type. Valid options are |
|
420 | :param ret_type: Set the return type. Valid options are | |
423 | ``all`` (default), ``files`` and ``dirs``. |
|
421 | ``all`` (default), ``files`` and ``dirs``. | |
424 | :type ret_type: Optional(str) |
|
422 | :type ret_type: Optional(str) | |
425 | :param details: Returns extended information about nodes, such as |
|
423 | :param details: Returns extended information about nodes, such as | |
426 | md5, binary, and or content. The valid options are ``basic`` and |
|
424 | md5, binary, and or content. The valid options are ``basic`` and | |
427 | ``full``. |
|
425 | ``full``. | |
428 | :type details: Optional(str) |
|
426 | :type details: Optional(str) | |
429 | :param max_file_bytes: Only return file content under this file size bytes |
|
427 | :param max_file_bytes: Only return file content under this file size bytes | |
430 | :type details: Optional(int) |
|
428 | :type details: Optional(int) | |
431 |
|
429 | |||
432 | Example output: |
|
430 | Example output: | |
433 |
|
431 | |||
434 | .. code-block:: bash |
|
432 | .. code-block:: bash | |
435 |
|
433 | |||
436 | id : <id_given_in_input> |
|
434 | id : <id_given_in_input> | |
437 | result: [ |
|
435 | result: [ | |
438 | { |
|
436 | { | |
439 | "name" : "<name>" |
|
437 | "name" : "<name>" | |
440 | "type" : "<type>", |
|
438 | "type" : "<type>", | |
441 | "binary": "<true|false>" (only in extended mode) |
|
439 | "binary": "<true|false>" (only in extended mode) | |
442 | "md5" : "<md5 of file content>" (only in extended mode) |
|
440 | "md5" : "<md5 of file content>" (only in extended mode) | |
443 | }, |
|
441 | }, | |
444 | ... |
|
442 | ... | |
445 | ] |
|
443 | ] | |
446 | error: null |
|
444 | error: null | |
447 | """ |
|
445 | """ | |
448 |
|
446 | |||
449 | repo = get_repo_or_error(repoid) |
|
447 | repo = get_repo_or_error(repoid) | |
450 | if not has_superadmin_permission(apiuser): |
|
448 | if not has_superadmin_permission(apiuser): | |
451 | _perms = ( |
|
449 | _perms = ( | |
452 | 'repository.admin', 'repository.write', 'repository.read',) |
|
450 | 'repository.admin', 'repository.write', 'repository.read',) | |
453 |
|
|
451 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
454 |
|
452 | |||
455 | ret_type = Optional.extract(ret_type) |
|
453 | ret_type = Optional.extract(ret_type) | |
456 | details = Optional.extract(details) |
|
454 | details = Optional.extract(details) | |
457 | _extended_types = ['basic', 'full'] |
|
455 | _extended_types = ['basic', 'full'] | |
458 | if details not in _extended_types: |
|
456 | if details not in _extended_types: | |
459 | raise JSONRPCError( |
|
457 | raise JSONRPCError( | |
460 | 'ret_type must be one of %s' % (','.join(_extended_types))) |
|
458 | 'ret_type must be one of %s' % (','.join(_extended_types))) | |
461 | extended_info = False |
|
459 | extended_info = False | |
462 | content = False |
|
460 | content = False | |
463 | if details == 'basic': |
|
461 | if details == 'basic': | |
464 | extended_info = True |
|
462 | extended_info = True | |
465 |
|
463 | |||
466 | if details == 'full': |
|
464 | if details == 'full': | |
467 | extended_info = content = True |
|
465 | extended_info = content = True | |
468 |
|
466 | |||
469 | _map = {} |
|
467 | _map = {} | |
470 | try: |
|
468 | try: | |
471 | # check if repo is not empty by any chance, skip quicker if it is. |
|
469 | # check if repo is not empty by any chance, skip quicker if it is. | |
472 | _scm = repo.scm_instance() |
|
470 | _scm = repo.scm_instance() | |
473 | if _scm.is_empty(): |
|
471 | if _scm.is_empty(): | |
474 | return [] |
|
472 | return [] | |
475 |
|
473 | |||
476 | _d, _f = ScmModel().get_nodes( |
|
474 | _d, _f = ScmModel().get_nodes( | |
477 | repo, revision, root_path, flat=False, |
|
475 | repo, revision, root_path, flat=False, | |
478 | extended_info=extended_info, content=content, |
|
476 | extended_info=extended_info, content=content, | |
479 | max_file_bytes=max_file_bytes) |
|
477 | max_file_bytes=max_file_bytes) | |
480 | _map = { |
|
478 | _map = { | |
481 | 'all': _d + _f, |
|
479 | 'all': _d + _f, | |
482 | 'files': _f, |
|
480 | 'files': _f, | |
483 | 'dirs': _d, |
|
481 | 'dirs': _d, | |
484 | } |
|
482 | } | |
485 | return _map[ret_type] |
|
483 | return _map[ret_type] | |
486 | except KeyError: |
|
484 | except KeyError: | |
487 | raise JSONRPCError( |
|
485 | raise JSONRPCError( | |
488 | 'ret_type must be one of %s' % (','.join(sorted(_map.keys())))) |
|
486 | 'ret_type must be one of %s' % (','.join(sorted(_map.keys())))) | |
489 | except Exception: |
|
487 | except Exception: | |
490 | log.exception("Exception occurred while trying to get repo nodes") |
|
488 | log.exception("Exception occurred while trying to get repo nodes") | |
491 | raise JSONRPCError( |
|
489 | raise JSONRPCError( | |
492 | 'failed to get repo: `%s` nodes' % repo.repo_name |
|
490 | 'failed to get repo: `%s` nodes' % repo.repo_name | |
493 | ) |
|
491 | ) | |
494 |
|
492 | |||
495 |
|
493 | |||
496 | @jsonrpc_method() |
|
494 | @jsonrpc_method() | |
497 | def get_repo_refs(request, apiuser, repoid): |
|
495 | def get_repo_refs(request, apiuser, repoid): | |
498 | """ |
|
496 | """ | |
499 | Returns a dictionary of current references. It returns |
|
497 | Returns a dictionary of current references. It returns | |
500 | bookmarks, branches, closed_branches, and tags for given repository |
|
498 | bookmarks, branches, closed_branches, and tags for given repository | |
501 |
|
499 | |||
502 | It's possible to specify ret_type to show only `files` or `dirs`. |
|
500 | It's possible to specify ret_type to show only `files` or `dirs`. | |
503 |
|
501 | |||
504 | This command can only be run using an |authtoken| with admin rights, |
|
502 | This command can only be run using an |authtoken| with admin rights, | |
505 | or users with at least read rights to |repos|. |
|
503 | or users with at least read rights to |repos|. | |
506 |
|
504 | |||
507 | :param apiuser: This is filled automatically from the |authtoken|. |
|
505 | :param apiuser: This is filled automatically from the |authtoken|. | |
508 | :type apiuser: AuthUser |
|
506 | :type apiuser: AuthUser | |
509 | :param repoid: The repository name or repository ID. |
|
507 | :param repoid: The repository name or repository ID. | |
510 | :type repoid: str or int |
|
508 | :type repoid: str or int | |
511 |
|
509 | |||
512 | Example output: |
|
510 | Example output: | |
513 |
|
511 | |||
514 | .. code-block:: bash |
|
512 | .. code-block:: bash | |
515 |
|
513 | |||
516 | id : <id_given_in_input> |
|
514 | id : <id_given_in_input> | |
517 | result: [ |
|
515 | result: [ | |
518 | TODO... |
|
516 | TODO... | |
519 | ] |
|
517 | ] | |
520 | error: null |
|
518 | error: null | |
521 | """ |
|
519 | """ | |
522 |
|
520 | |||
523 | repo = get_repo_or_error(repoid) |
|
521 | repo = get_repo_or_error(repoid) | |
524 | if not has_superadmin_permission(apiuser): |
|
522 | if not has_superadmin_permission(apiuser): | |
525 | _perms = ('repository.admin', 'repository.write', 'repository.read',) |
|
523 | _perms = ('repository.admin', 'repository.write', 'repository.read',) | |
526 |
|
|
524 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
527 |
|
525 | |||
528 | try: |
|
526 | try: | |
529 | # check if repo is not empty by any chance, skip quicker if it is. |
|
527 | # check if repo is not empty by any chance, skip quicker if it is. | |
530 | vcs_instance = repo.scm_instance() |
|
528 | vcs_instance = repo.scm_instance() | |
531 | refs = vcs_instance.refs() |
|
529 | refs = vcs_instance.refs() | |
532 | return refs |
|
530 | return refs | |
533 | except Exception: |
|
531 | except Exception: | |
534 | log.exception("Exception occurred while trying to get repo refs") |
|
532 | log.exception("Exception occurred while trying to get repo refs") | |
535 | raise JSONRPCError( |
|
533 | raise JSONRPCError( | |
536 | 'failed to get repo: `%s` references' % repo.repo_name |
|
534 | 'failed to get repo: `%s` references' % repo.repo_name | |
537 | ) |
|
535 | ) | |
538 |
|
536 | |||
539 |
|
537 | |||
540 | @jsonrpc_method() |
|
538 | @jsonrpc_method() | |
541 | def create_repo(request, apiuser, repo_name, repo_type, |
|
539 | def create_repo( | |
542 | owner=Optional(OAttr('apiuser')), description=Optional(''), |
|
540 | request, apiuser, repo_name, repo_type, | |
543 | private=Optional(False), clone_uri=Optional(None), |
|
541 | owner=Optional(OAttr('apiuser')), | |
|
542 | description=Optional(''), | |||
|
543 | private=Optional(False), | |||
|
544 | clone_uri=Optional(None), | |||
544 |
|
|
545 | landing_rev=Optional('rev:tip'), | |
545 |
|
|
546 | enable_statistics=Optional(False), | |
546 |
|
|
547 | enable_locking=Optional(False), | |
547 |
|
|
548 | enable_downloads=Optional(False), | |
548 |
|
|
549 | copy_permissions=Optional(False)): | |
549 | """ |
|
550 | """ | |
550 | Creates a repository. |
|
551 | Creates a repository. | |
551 |
|
552 | |||
552 |
* If the repository name contains "/", |
|
553 | * If the repository name contains "/", repository will be created inside | |
553 | groups will be created. |
|
554 | a repository group or nested repository groups | |
554 |
|
555 | |||
555 |
For example "foo/bar/ |
|
556 | For example "foo/bar/repo1" will create |repo| called "repo1" inside | |
556 | (with "foo" as parent). It will also create the "baz" repository |
|
557 | group "foo/bar". You have to have permissions to access and write to | |
557 | with "bar" as |repo| group. |
|
558 | the last repository group ("bar" in this example) | |
558 |
|
559 | |||
559 | This command can only be run using an |authtoken| with at least |
|
560 | This command can only be run using an |authtoken| with at least | |
560 | write permissions to the |repo|. |
|
561 | permissions to create repositories, or write permissions to | |
|
562 | parent repository groups. | |||
561 |
|
563 | |||
562 | :param apiuser: This is filled automatically from the |authtoken|. |
|
564 | :param apiuser: This is filled automatically from the |authtoken|. | |
563 | :type apiuser: AuthUser |
|
565 | :type apiuser: AuthUser | |
564 | :param repo_name: Set the repository name. |
|
566 | :param repo_name: Set the repository name. | |
565 | :type repo_name: str |
|
567 | :type repo_name: str | |
566 | :param repo_type: Set the repository type; 'hg','git', or 'svn'. |
|
568 | :param repo_type: Set the repository type; 'hg','git', or 'svn'. | |
567 | :type repo_type: str |
|
569 | :type repo_type: str | |
568 | :param owner: user_id or username |
|
570 | :param owner: user_id or username | |
569 | :type owner: Optional(str) |
|
571 | :type owner: Optional(str) | |
570 | :param description: Set the repository description. |
|
572 | :param description: Set the repository description. | |
571 | :type description: Optional(str) |
|
573 | :type description: Optional(str) | |
572 | :param private: |
|
574 | :param private: set repository as private | |
573 | :type private: bool |
|
575 | :type private: bool | |
574 | :param clone_uri: |
|
576 | :param clone_uri: set clone_uri | |
575 | :type clone_uri: str |
|
577 | :type clone_uri: str | |
576 | :param landing_rev: <rev_type>:<rev> |
|
578 | :param landing_rev: <rev_type>:<rev> | |
577 | :type landing_rev: str |
|
579 | :type landing_rev: str | |
578 | :param enable_locking: |
|
580 | :param enable_locking: | |
579 | :type enable_locking: bool |
|
581 | :type enable_locking: bool | |
580 | :param enable_downloads: |
|
582 | :param enable_downloads: | |
581 | :type enable_downloads: bool |
|
583 | :type enable_downloads: bool | |
582 | :param enable_statistics: |
|
584 | :param enable_statistics: | |
583 | :type enable_statistics: bool |
|
585 | :type enable_statistics: bool | |
584 | :param copy_permissions: Copy permission from group in which the |
|
586 | :param copy_permissions: Copy permission from group in which the | |
585 | repository is being created. |
|
587 | repository is being created. | |
586 | :type copy_permissions: bool |
|
588 | :type copy_permissions: bool | |
587 |
|
589 | |||
588 |
|
590 | |||
589 | Example output: |
|
591 | Example output: | |
590 |
|
592 | |||
591 | .. code-block:: bash |
|
593 | .. code-block:: bash | |
592 |
|
594 | |||
593 | id : <id_given_in_input> |
|
595 | id : <id_given_in_input> | |
594 | result: { |
|
596 | result: { | |
595 | "msg": "Created new repository `<reponame>`", |
|
597 | "msg": "Created new repository `<reponame>`", | |
596 | "success": true, |
|
598 | "success": true, | |
597 | "task": "<celery task id or None if done sync>" |
|
599 | "task": "<celery task id or None if done sync>" | |
598 | } |
|
600 | } | |
599 | error: null |
|
601 | error: null | |
600 |
|
602 | |||
601 |
|
603 | |||
602 | Example error output: |
|
604 | Example error output: | |
603 |
|
605 | |||
604 | .. code-block:: bash |
|
606 | .. code-block:: bash | |
605 |
|
607 | |||
606 | id : <id_given_in_input> |
|
608 | id : <id_given_in_input> | |
607 | result : null |
|
609 | result : null | |
608 | error : { |
|
610 | error : { | |
609 | 'failed to create repository `<repo_name>`' |
|
611 | 'failed to create repository `<repo_name>`' | |
610 | } |
|
612 | } | |
611 |
|
613 | |||
612 | """ |
|
614 | """ | |
613 | schema = repo_schema.RepoSchema() |
|
|||
614 | try: |
|
|||
615 | data = schema.deserialize({ |
|
|||
616 | 'repo_name': repo_name |
|
|||
617 | }) |
|
|||
618 | except colander.Invalid as e: |
|
|||
619 | raise JSONRPCError("Validation failed: %s" % (e.asdict(),)) |
|
|||
620 | repo_name = data['repo_name'] |
|
|||
621 |
|
615 | |||
622 | (repo_name_cleaned, |
|
616 | owner = validate_set_owner_permissions(apiuser, owner) | |
623 | parent_group_name) = RepoGroupModel()._get_group_name_and_parent( |
|
|||
624 | repo_name) |
|
|||
625 |
|
||||
626 | if not HasPermissionAnyApi( |
|
|||
627 | 'hg.admin', 'hg.create.repository')(user=apiuser): |
|
|||
628 | # check if we have admin permission for this repo group if given ! |
|
|||
629 |
|
||||
630 | if parent_group_name: |
|
|||
631 | repogroupid = parent_group_name |
|
|||
632 | repo_group = get_repo_group_or_error(parent_group_name) |
|
|||
633 |
|
617 | |||
634 | _perms = ('group.admin',) |
|
618 | description = Optional.extract(description) | |
635 | if not HasRepoGroupPermissionAnyApi(*_perms)( |
|
619 | copy_permissions = Optional.extract(copy_permissions) | |
636 | user=apiuser, group_name=repo_group.group_name): |
|
620 | clone_uri = Optional.extract(clone_uri) | |
637 | raise JSONRPCError( |
|
621 | landing_commit_ref = Optional.extract(landing_rev) | |
638 | 'repository group `%s` does not exist' % ( |
|
|||
639 | repogroupid,)) |
|
|||
640 | else: |
|
|||
641 | raise JSONRPCForbidden() |
|
|||
642 |
|
||||
643 | if not has_superadmin_permission(apiuser): |
|
|||
644 | if not isinstance(owner, Optional): |
|
|||
645 | # forbid setting owner for non-admins |
|
|||
646 | raise JSONRPCError( |
|
|||
647 | 'Only RhodeCode admin can specify `owner` param') |
|
|||
648 |
|
||||
649 | if isinstance(owner, Optional): |
|
|||
650 | owner = apiuser.user_id |
|
|||
651 |
|
||||
652 | owner = get_user_or_error(owner) |
|
|||
653 |
|
||||
654 | if RepoModel().get_by_repo_name(repo_name): |
|
|||
655 | raise JSONRPCError("repo `%s` already exist" % repo_name) |
|
|||
656 |
|
622 | |||
657 | defs = SettingsModel().get_default_repo_settings(strip_prefix=True) |
|
623 | defs = SettingsModel().get_default_repo_settings(strip_prefix=True) | |
658 | if isinstance(private, Optional): |
|
624 | if isinstance(private, Optional): | |
659 | private = defs.get('repo_private') or Optional.extract(private) |
|
625 | private = defs.get('repo_private') or Optional.extract(private) | |
660 | if isinstance(repo_type, Optional): |
|
626 | if isinstance(repo_type, Optional): | |
661 | repo_type = defs.get('repo_type') |
|
627 | repo_type = defs.get('repo_type') | |
662 | if isinstance(enable_statistics, Optional): |
|
628 | if isinstance(enable_statistics, Optional): | |
663 | enable_statistics = defs.get('repo_enable_statistics') |
|
629 | enable_statistics = defs.get('repo_enable_statistics') | |
664 | if isinstance(enable_locking, Optional): |
|
630 | if isinstance(enable_locking, Optional): | |
665 | enable_locking = defs.get('repo_enable_locking') |
|
631 | enable_locking = defs.get('repo_enable_locking') | |
666 | if isinstance(enable_downloads, Optional): |
|
632 | if isinstance(enable_downloads, Optional): | |
667 | enable_downloads = defs.get('repo_enable_downloads') |
|
633 | enable_downloads = defs.get('repo_enable_downloads') | |
668 |
|
634 | |||
669 | clone_uri = Optional.extract(clone_uri) |
|
635 | schema = repo_schema.RepoSchema().bind( | |
670 | description = Optional.extract(description) |
|
636 | repo_type_options=rhodecode.BACKENDS.keys(), | |
671 | landing_rev = Optional.extract(landing_rev) |
|
637 | # user caller | |
672 | copy_permissions = Optional.extract(copy_permissions) |
|
638 | user=apiuser) | |
673 |
|
639 | |||
674 | try: |
|
640 | try: | |
675 | # create structure of groups and return the last group |
|
641 | schema_data = schema.deserialize(dict( | |
676 |
|
|
642 | repo_name=repo_name, | |
|
643 | repo_type=repo_type, | |||
|
644 | repo_owner=owner.username, | |||
|
645 | repo_description=description, | |||
|
646 | repo_landing_commit_ref=landing_commit_ref, | |||
|
647 | repo_clone_uri=clone_uri, | |||
|
648 | repo_private=private, | |||
|
649 | repo_copy_permissions=copy_permissions, | |||
|
650 | repo_enable_statistics=enable_statistics, | |||
|
651 | repo_enable_downloads=enable_downloads, | |||
|
652 | repo_enable_locking=enable_locking)) | |||
|
653 | except validation_schema.Invalid as err: | |||
|
654 | raise JSONRPCValidationError(colander_exc=err) | |||
|
655 | ||||
|
656 | try: | |||
677 | data = { |
|
657 | data = { | |
678 | 'repo_name': repo_name_cleaned, |
|
|||
679 | 'repo_name_full': repo_name, |
|
|||
680 | 'repo_type': repo_type, |
|
|||
681 | 'repo_description': description, |
|
|||
682 | 'owner': owner, |
|
658 | 'owner': owner, | |
683 | 'repo_private': private, |
|
659 | 'repo_name': schema_data['repo_group']['repo_name_without_group'], | |
684 | 'clone_uri': clone_uri, |
|
660 | 'repo_name_full': schema_data['repo_name'], | |
685 |
'repo_group': repo_group |
|
661 | 'repo_group': schema_data['repo_group']['repo_group_id'], | |
686 | 'repo_landing_rev': landing_rev, |
|
662 | 'repo_type': schema_data['repo_type'], | |
687 | 'enable_statistics': enable_statistics, |
|
663 | 'repo_description': schema_data['repo_description'], | |
688 | 'enable_locking': enable_locking, |
|
664 | 'repo_private': schema_data['repo_private'], | |
689 | 'enable_downloads': enable_downloads, |
|
665 | 'clone_uri': schema_data['repo_clone_uri'], | |
690 | 'repo_copy_permissions': copy_permissions, |
|
666 | 'repo_landing_rev': schema_data['repo_landing_commit_ref'], | |
|
667 | 'enable_statistics': schema_data['repo_enable_statistics'], | |||
|
668 | 'enable_locking': schema_data['repo_enable_locking'], | |||
|
669 | 'enable_downloads': schema_data['repo_enable_downloads'], | |||
|
670 | 'repo_copy_permissions': schema_data['repo_copy_permissions'], | |||
691 | } |
|
671 | } | |
692 |
|
672 | |||
693 | if repo_type not in BACKENDS.keys(): |
|
|||
694 | raise Exception("Invalid backend type %s" % repo_type) |
|
|||
695 | task = RepoModel().create(form_data=data, cur_user=owner) |
|
673 | task = RepoModel().create(form_data=data, cur_user=owner) | |
696 | from celery.result import BaseAsyncResult |
|
674 | from celery.result import BaseAsyncResult | |
697 | task_id = None |
|
675 | task_id = None | |
698 | if isinstance(task, BaseAsyncResult): |
|
676 | if isinstance(task, BaseAsyncResult): | |
699 | task_id = task.task_id |
|
677 | task_id = task.task_id | |
700 | # no commit, it's done in RepoModel, or async via celery |
|
678 | # no commit, it's done in RepoModel, or async via celery | |
701 | return { |
|
679 | return { | |
702 | 'msg': "Created new repository `%s`" % (repo_name,), |
|
680 | 'msg': "Created new repository `%s`" % (schema_data['repo_name'],), | |
703 | 'success': True, # cannot return the repo data here since fork |
|
681 | 'success': True, # cannot return the repo data here since fork | |
704 |
# can |
|
682 | # can be done async | |
705 | 'task': task_id |
|
683 | 'task': task_id | |
706 | } |
|
684 | } | |
707 | except Exception: |
|
685 | except Exception: | |
708 | log.exception( |
|
686 | log.exception( | |
709 | u"Exception while trying to create the repository %s", |
|
687 | u"Exception while trying to create the repository %s", | |
710 | repo_name) |
|
688 | schema_data['repo_name']) | |
711 | raise JSONRPCError( |
|
689 | raise JSONRPCError( | |
712 | 'failed to create repository `%s`' % (repo_name,)) |
|
690 | 'failed to create repository `%s`' % (schema_data['repo_name'],)) | |
713 |
|
691 | |||
714 |
|
692 | |||
715 | @jsonrpc_method() |
|
693 | @jsonrpc_method() | |
716 | def add_field_to_repo(request, apiuser, repoid, key, label=Optional(''), |
|
694 | def add_field_to_repo(request, apiuser, repoid, key, label=Optional(''), | |
717 | description=Optional('')): |
|
695 | description=Optional('')): | |
718 | """ |
|
696 | """ | |
719 | Adds an extra field to a repository. |
|
697 | Adds an extra field to a repository. | |
720 |
|
698 | |||
721 | This command can only be run using an |authtoken| with at least |
|
699 | This command can only be run using an |authtoken| with at least | |
722 | write permissions to the |repo|. |
|
700 | write permissions to the |repo|. | |
723 |
|
701 | |||
724 | :param apiuser: This is filled automatically from the |authtoken|. |
|
702 | :param apiuser: This is filled automatically from the |authtoken|. | |
725 | :type apiuser: AuthUser |
|
703 | :type apiuser: AuthUser | |
726 | :param repoid: Set the repository name or repository id. |
|
704 | :param repoid: Set the repository name or repository id. | |
727 | :type repoid: str or int |
|
705 | :type repoid: str or int | |
728 | :param key: Create a unique field key for this repository. |
|
706 | :param key: Create a unique field key for this repository. | |
729 | :type key: str |
|
707 | :type key: str | |
730 | :param label: |
|
708 | :param label: | |
731 | :type label: Optional(str) |
|
709 | :type label: Optional(str) | |
732 | :param description: |
|
710 | :param description: | |
733 | :type description: Optional(str) |
|
711 | :type description: Optional(str) | |
734 | """ |
|
712 | """ | |
735 | repo = get_repo_or_error(repoid) |
|
713 | repo = get_repo_or_error(repoid) | |
736 | if not has_superadmin_permission(apiuser): |
|
714 | if not has_superadmin_permission(apiuser): | |
737 | _perms = ('repository.admin',) |
|
715 | _perms = ('repository.admin',) | |
738 |
|
|
716 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
739 |
|
717 | |||
740 | label = Optional.extract(label) or key |
|
718 | label = Optional.extract(label) or key | |
741 | description = Optional.extract(description) |
|
719 | description = Optional.extract(description) | |
742 |
|
720 | |||
743 | field = RepositoryField.get_by_key_name(key, repo) |
|
721 | field = RepositoryField.get_by_key_name(key, repo) | |
744 | if field: |
|
722 | if field: | |
745 | raise JSONRPCError('Field with key ' |
|
723 | raise JSONRPCError('Field with key ' | |
746 | '`%s` exists for repo `%s`' % (key, repoid)) |
|
724 | '`%s` exists for repo `%s`' % (key, repoid)) | |
747 |
|
725 | |||
748 | try: |
|
726 | try: | |
749 | RepoModel().add_repo_field(repo, key, field_label=label, |
|
727 | RepoModel().add_repo_field(repo, key, field_label=label, | |
750 | field_desc=description) |
|
728 | field_desc=description) | |
751 | Session().commit() |
|
729 | Session().commit() | |
752 | return { |
|
730 | return { | |
753 | 'msg': "Added new repository field `%s`" % (key,), |
|
731 | 'msg': "Added new repository field `%s`" % (key,), | |
754 | 'success': True, |
|
732 | 'success': True, | |
755 | } |
|
733 | } | |
756 | except Exception: |
|
734 | except Exception: | |
757 | log.exception("Exception occurred while trying to add field to repo") |
|
735 | log.exception("Exception occurred while trying to add field to repo") | |
758 | raise JSONRPCError( |
|
736 | raise JSONRPCError( | |
759 | 'failed to create new field for repository `%s`' % (repoid,)) |
|
737 | 'failed to create new field for repository `%s`' % (repoid,)) | |
760 |
|
738 | |||
761 |
|
739 | |||
762 | @jsonrpc_method() |
|
740 | @jsonrpc_method() | |
763 | def remove_field_from_repo(request, apiuser, repoid, key): |
|
741 | def remove_field_from_repo(request, apiuser, repoid, key): | |
764 | """ |
|
742 | """ | |
765 | Removes an extra field from a repository. |
|
743 | Removes an extra field from a repository. | |
766 |
|
744 | |||
767 | This command can only be run using an |authtoken| with at least |
|
745 | This command can only be run using an |authtoken| with at least | |
768 | write permissions to the |repo|. |
|
746 | write permissions to the |repo|. | |
769 |
|
747 | |||
770 | :param apiuser: This is filled automatically from the |authtoken|. |
|
748 | :param apiuser: This is filled automatically from the |authtoken|. | |
771 | :type apiuser: AuthUser |
|
749 | :type apiuser: AuthUser | |
772 | :param repoid: Set the repository name or repository ID. |
|
750 | :param repoid: Set the repository name or repository ID. | |
773 | :type repoid: str or int |
|
751 | :type repoid: str or int | |
774 | :param key: Set the unique field key for this repository. |
|
752 | :param key: Set the unique field key for this repository. | |
775 | :type key: str |
|
753 | :type key: str | |
776 | """ |
|
754 | """ | |
777 |
|
755 | |||
778 | repo = get_repo_or_error(repoid) |
|
756 | repo = get_repo_or_error(repoid) | |
779 | if not has_superadmin_permission(apiuser): |
|
757 | if not has_superadmin_permission(apiuser): | |
780 | _perms = ('repository.admin',) |
|
758 | _perms = ('repository.admin',) | |
781 |
|
|
759 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
782 |
|
760 | |||
783 | field = RepositoryField.get_by_key_name(key, repo) |
|
761 | field = RepositoryField.get_by_key_name(key, repo) | |
784 | if not field: |
|
762 | if not field: | |
785 | raise JSONRPCError('Field with key `%s` does not ' |
|
763 | raise JSONRPCError('Field with key `%s` does not ' | |
786 | 'exists for repo `%s`' % (key, repoid)) |
|
764 | 'exists for repo `%s`' % (key, repoid)) | |
787 |
|
765 | |||
788 | try: |
|
766 | try: | |
789 | RepoModel().delete_repo_field(repo, field_key=key) |
|
767 | RepoModel().delete_repo_field(repo, field_key=key) | |
790 | Session().commit() |
|
768 | Session().commit() | |
791 | return { |
|
769 | return { | |
792 | 'msg': "Deleted repository field `%s`" % (key,), |
|
770 | 'msg': "Deleted repository field `%s`" % (key,), | |
793 | 'success': True, |
|
771 | 'success': True, | |
794 | } |
|
772 | } | |
795 | except Exception: |
|
773 | except Exception: | |
796 | log.exception( |
|
774 | log.exception( | |
797 | "Exception occurred while trying to delete field from repo") |
|
775 | "Exception occurred while trying to delete field from repo") | |
798 | raise JSONRPCError( |
|
776 | raise JSONRPCError( | |
799 | 'failed to delete field for repository `%s`' % (repoid,)) |
|
777 | 'failed to delete field for repository `%s`' % (repoid,)) | |
800 |
|
778 | |||
801 |
|
779 | |||
802 | @jsonrpc_method() |
|
780 | @jsonrpc_method() | |
803 | def update_repo(request, apiuser, repoid, name=Optional(None), |
|
781 | def update_repo( | |
804 | owner=Optional(OAttr('apiuser')), |
|
782 | request, apiuser, repoid, repo_name=Optional(None), | |
805 | group=Optional(None), |
|
783 | owner=Optional(OAttr('apiuser')), description=Optional(''), | |
806 | fork_of=Optional(None), |
|
784 | private=Optional(False), clone_uri=Optional(None), | |
807 |
|
|
785 | landing_rev=Optional('rev:tip'), fork_of=Optional(None), | |
808 | clone_uri=Optional(None), landing_rev=Optional('rev:tip'), |
|
|||
809 |
|
|
786 | enable_statistics=Optional(False), | |
810 |
|
|
787 | enable_locking=Optional(False), | |
811 |
|
|
788 | enable_downloads=Optional(False), fields=Optional('')): | |
812 | fields=Optional('')): |
|
|||
813 | """ |
|
789 | """ | |
814 | Updates a repository with the given information. |
|
790 | Updates a repository with the given information. | |
815 |
|
791 | |||
816 | This command can only be run using an |authtoken| with at least |
|
792 | This command can only be run using an |authtoken| with at least | |
817 |
|
|
793 | admin permissions to the |repo|. | |
|
794 | ||||
|
795 | * If the repository name contains "/", repository will be updated | |||
|
796 | accordingly with a repository group or nested repository groups | |||
|
797 | ||||
|
798 | For example repoid=repo-test name="foo/bar/repo-test" will update |repo| | |||
|
799 | called "repo-test" and place it inside group "foo/bar". | |||
|
800 | You have to have permissions to access and write to the last repository | |||
|
801 | group ("bar" in this example) | |||
818 |
|
802 | |||
819 | :param apiuser: This is filled automatically from the |authtoken|. |
|
803 | :param apiuser: This is filled automatically from the |authtoken|. | |
820 | :type apiuser: AuthUser |
|
804 | :type apiuser: AuthUser | |
821 | :param repoid: repository name or repository ID. |
|
805 | :param repoid: repository name or repository ID. | |
822 | :type repoid: str or int |
|
806 | :type repoid: str or int | |
823 |
:param name: Update the |repo| name |
|
807 | :param repo_name: Update the |repo| name, including the | |
824 | :type name: str |
|
808 | repository group it's in. | |
|
809 | :type repo_name: str | |||
825 | :param owner: Set the |repo| owner. |
|
810 | :param owner: Set the |repo| owner. | |
826 | :type owner: str |
|
811 | :type owner: str | |
827 |
:param |
|
812 | :param fork_of: Set the |repo| as fork of another |repo|. | |
828 | :type group: str |
|
|||
829 | :param fork_of: Set the master |repo| name. |
|
|||
830 | :type fork_of: str |
|
813 | :type fork_of: str | |
831 | :param description: Update the |repo| description. |
|
814 | :param description: Update the |repo| description. | |
832 | :type description: str |
|
815 | :type description: str | |
833 | :param private: Set the |repo| as private. (True | False) |
|
816 | :param private: Set the |repo| as private. (True | False) | |
834 | :type private: bool |
|
817 | :type private: bool | |
835 | :param clone_uri: Update the |repo| clone URI. |
|
818 | :param clone_uri: Update the |repo| clone URI. | |
836 | :type clone_uri: str |
|
819 | :type clone_uri: str | |
837 | :param landing_rev: Set the |repo| landing revision. Default is |
|
820 | :param landing_rev: Set the |repo| landing revision. Default is ``rev:tip``. | |
838 | ``tip``. |
|
|||
839 | :type landing_rev: str |
|
821 | :type landing_rev: str | |
840 | :param enable_statistics: Enable statistics on the |repo|, |
|
822 | :param enable_statistics: Enable statistics on the |repo|, (True | False). | |
841 | (True | False). |
|
|||
842 | :type enable_statistics: bool |
|
823 | :type enable_statistics: bool | |
843 | :param enable_locking: Enable |repo| locking. |
|
824 | :param enable_locking: Enable |repo| locking. | |
844 | :type enable_locking: bool |
|
825 | :type enable_locking: bool | |
845 | :param enable_downloads: Enable downloads from the |repo|, |
|
826 | :param enable_downloads: Enable downloads from the |repo|, (True | False). | |
846 | (True | False). |
|
|||
847 | :type enable_downloads: bool |
|
827 | :type enable_downloads: bool | |
848 | :param fields: Add extra fields to the |repo|. Use the following |
|
828 | :param fields: Add extra fields to the |repo|. Use the following | |
849 | example format: ``field_key=field_val,field_key2=fieldval2``. |
|
829 | example format: ``field_key=field_val,field_key2=fieldval2``. | |
850 | Escape ', ' with \, |
|
830 | Escape ', ' with \, | |
851 | :type fields: str |
|
831 | :type fields: str | |
852 | """ |
|
832 | """ | |
|
833 | ||||
853 | repo = get_repo_or_error(repoid) |
|
834 | repo = get_repo_or_error(repoid) | |
|
835 | ||||
854 | include_secrets = False |
|
836 | include_secrets = False | |
855 | if has_superadmin_permission(apiuser): |
|
837 | if not has_superadmin_permission(apiuser): | |
|
838 | validate_repo_permissions(apiuser, repoid, repo, ('repository.admin',)) | |||
|
839 | else: | |||
856 | include_secrets = True |
|
840 | include_secrets = True | |
857 | else: |
|
841 | ||
858 | _perms = ('repository.admin',) |
|
842 | updates = dict( | |
859 | has_repo_permissions(apiuser, repoid, repo, _perms) |
|
843 | repo_name=repo_name | |
|
844 | if not isinstance(repo_name, Optional) else repo.repo_name, | |||
|
845 | ||||
|
846 | fork_id=fork_of | |||
|
847 | if not isinstance(fork_of, Optional) else repo.fork.repo_name if repo.fork else None, | |||
|
848 | ||||
|
849 | user=owner | |||
|
850 | if not isinstance(owner, Optional) else repo.user.username, | |||
860 |
|
851 | |||
861 | updates = { |
|
852 | repo_description=description | |
862 | # update function requires this. |
|
853 | if not isinstance(description, Optional) else repo.description, | |
863 | 'repo_name': repo.just_name |
|
854 | ||
864 | } |
|
855 | repo_private=private | |
865 | repo_group = group |
|
856 | if not isinstance(private, Optional) else repo.private, | |
866 | if not isinstance(repo_group, Optional): |
|
857 | ||
867 | repo_group = get_repo_group_or_error(repo_group) |
|
858 | clone_uri=clone_uri | |
868 | repo_group = repo_group.group_id |
|
859 | if not isinstance(clone_uri, Optional) else repo.clone_uri, | |
|
860 | ||||
|
861 | repo_landing_rev=landing_rev | |||
|
862 | if not isinstance(landing_rev, Optional) else repo._landing_revision, | |||
|
863 | ||||
|
864 | repo_enable_statistics=enable_statistics | |||
|
865 | if not isinstance(enable_statistics, Optional) else repo.enable_statistics, | |||
|
866 | ||||
|
867 | repo_enable_locking=enable_locking | |||
|
868 | if not isinstance(enable_locking, Optional) else repo.enable_locking, | |||
|
869 | ||||
|
870 | repo_enable_downloads=enable_downloads | |||
|
871 | if not isinstance(enable_downloads, Optional) else repo.enable_downloads) | |||
|
872 | ||||
|
873 | ref_choices, _labels = ScmModel().get_repo_landing_revs(repo=repo) | |||
869 |
|
874 | |||
870 | repo_fork_of = fork_of |
|
875 | schema = repo_schema.RepoSchema().bind( | |
871 | if not isinstance(repo_fork_of, Optional): |
|
876 | repo_type_options=rhodecode.BACKENDS.keys(), | |
872 | repo_fork_of = get_repo_or_error(repo_fork_of) |
|
877 | repo_ref_options=ref_choices, | |
873 | repo_fork_of = repo_fork_of.repo_id |
|
878 | # user caller | |
874 |
|
879 | user=apiuser, | ||
|
880 | old_values=repo.get_api_data()) | |||
875 | try: |
|
881 | try: | |
876 | store_update(updates, name, 'repo_name') |
|
882 | schema_data = schema.deserialize(dict( | |
877 | store_update(updates, repo_group, 'repo_group') |
|
883 | # we save old value, users cannot change type | |
878 | store_update(updates, repo_fork_of, 'fork_id') |
|
884 | repo_type=repo.repo_type, | |
879 | store_update(updates, owner, 'user') |
|
885 | ||
880 | store_update(updates, description, 'repo_description') |
|
886 | repo_name=updates['repo_name'], | |
881 | store_update(updates, private, 'repo_private') |
|
887 | repo_owner=updates['user'], | |
882 | store_update(updates, clone_uri, 'clone_uri') |
|
888 | repo_description=updates['repo_description'], | |
883 | store_update(updates, landing_rev, 'repo_landing_rev') |
|
889 | repo_clone_uri=updates['clone_uri'], | |
884 | store_update(updates, enable_statistics, 'repo_enable_statistics') |
|
890 | repo_fork_of=updates['fork_id'], | |
885 | store_update(updates, enable_locking, 'repo_enable_locking') |
|
891 | repo_private=updates['repo_private'], | |
886 | store_update(updates, enable_downloads, 'repo_enable_downloads') |
|
892 | repo_landing_commit_ref=updates['repo_landing_rev'], | |
|
893 | repo_enable_statistics=updates['repo_enable_statistics'], | |||
|
894 | repo_enable_downloads=updates['repo_enable_downloads'], | |||
|
895 | repo_enable_locking=updates['repo_enable_locking'])) | |||
|
896 | except validation_schema.Invalid as err: | |||
|
897 | raise JSONRPCValidationError(colander_exc=err) | |||
|
898 | ||||
|
899 | # save validated data back into the updates dict | |||
|
900 | validated_updates = dict( | |||
|
901 | repo_name=schema_data['repo_group']['repo_name_without_group'], | |||
|
902 | repo_group=schema_data['repo_group']['repo_group_id'], | |||
|
903 | ||||
|
904 | user=schema_data['repo_owner'], | |||
|
905 | repo_description=schema_data['repo_description'], | |||
|
906 | repo_private=schema_data['repo_private'], | |||
|
907 | clone_uri=schema_data['repo_clone_uri'], | |||
|
908 | repo_landing_rev=schema_data['repo_landing_commit_ref'], | |||
|
909 | repo_enable_statistics=schema_data['repo_enable_statistics'], | |||
|
910 | repo_enable_locking=schema_data['repo_enable_locking'], | |||
|
911 | repo_enable_downloads=schema_data['repo_enable_downloads'], | |||
|
912 | ) | |||
|
913 | ||||
|
914 | if schema_data['repo_fork_of']: | |||
|
915 | fork_repo = get_repo_or_error(schema_data['repo_fork_of']) | |||
|
916 | validated_updates['fork_id'] = fork_repo.repo_id | |||
887 |
|
917 | |||
888 |
|
|
918 | # extra fields | |
889 |
|
|
919 | fields = parse_args(Optional.extract(fields), key_prefix='ex_') | |
890 |
|
|
920 | if fields: | |
891 |
|
|
921 | validated_updates.update(fields) | |
892 |
|
922 | |||
893 | RepoModel().update(repo, **updates) |
|
923 | try: | |
|
924 | RepoModel().update(repo, **validated_updates) | |||
894 | Session().commit() |
|
925 | Session().commit() | |
895 | return { |
|
926 | return { | |
896 | 'msg': 'updated repo ID:%s %s' % ( |
|
927 | 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo.repo_name), | |
897 | repo.repo_id, repo.repo_name), |
|
928 | 'repository': repo.get_api_data(include_secrets=include_secrets) | |
898 | 'repository': repo.get_api_data( |
|
|||
899 | include_secrets=include_secrets) |
|
|||
900 | } |
|
929 | } | |
901 | except Exception: |
|
930 | except Exception: | |
902 | log.exception( |
|
931 | log.exception( | |
903 | u"Exception while trying to update the repository %s", |
|
932 | u"Exception while trying to update the repository %s", | |
904 | repoid) |
|
933 | repoid) | |
905 | raise JSONRPCError('failed to update repo `%s`' % repoid) |
|
934 | raise JSONRPCError('failed to update repo `%s`' % repoid) | |
906 |
|
935 | |||
907 |
|
936 | |||
908 | @jsonrpc_method() |
|
937 | @jsonrpc_method() | |
909 | def fork_repo(request, apiuser, repoid, fork_name, |
|
938 | def fork_repo(request, apiuser, repoid, fork_name, | |
910 | owner=Optional(OAttr('apiuser')), |
|
939 | owner=Optional(OAttr('apiuser')), | |
911 |
description=Optional(''), |
|
940 | description=Optional(''), | |
912 |
private=Optional(False), |
|
941 | private=Optional(False), | |
|
942 | clone_uri=Optional(None), | |||
|
943 | landing_rev=Optional('rev:tip'), | |||
|
944 | copy_permissions=Optional(False)): | |||
913 | """ |
|
945 | """ | |
914 | Creates a fork of the specified |repo|. |
|
946 | Creates a fork of the specified |repo|. | |
915 |
|
947 | |||
916 | * If using |RCE| with Celery this will immediately return a success |
|
948 | * If the fork_name contains "/", fork will be created inside | |
917 | message, even though the fork will be created asynchronously. |
|
949 | a repository group or nested repository groups | |
918 |
|
950 | |||
919 | This command can only be run using an |authtoken| with fork |
|
951 | For example "foo/bar/fork-repo" will create fork called "fork-repo" | |
920 | permissions on the |repo|. |
|
952 | inside group "foo/bar". You have to have permissions to access and | |
|
953 | write to the last repository group ("bar" in this example) | |||
|
954 | ||||
|
955 | This command can only be run using an |authtoken| with minimum | |||
|
956 | read permissions of the forked repo, create fork permissions for an user. | |||
921 |
|
957 | |||
922 | :param apiuser: This is filled automatically from the |authtoken|. |
|
958 | :param apiuser: This is filled automatically from the |authtoken|. | |
923 | :type apiuser: AuthUser |
|
959 | :type apiuser: AuthUser | |
924 | :param repoid: Set repository name or repository ID. |
|
960 | :param repoid: Set repository name or repository ID. | |
925 | :type repoid: str or int |
|
961 | :type repoid: str or int | |
926 | :param fork_name: Set the fork name. |
|
962 | :param fork_name: Set the fork name, including it's repository group membership. | |
927 | :type fork_name: str |
|
963 | :type fork_name: str | |
928 | :param owner: Set the fork owner. |
|
964 | :param owner: Set the fork owner. | |
929 | :type owner: str |
|
965 | :type owner: str | |
930 | :param description: Set the fork descripton. |
|
966 | :param description: Set the fork description. | |
931 | :type description: str |
|
967 | :type description: str | |
932 | :param copy_permissions: Copy permissions from parent |repo|. The |
|
968 | :param copy_permissions: Copy permissions from parent |repo|. The | |
933 | default is False. |
|
969 | default is False. | |
934 | :type copy_permissions: bool |
|
970 | :type copy_permissions: bool | |
935 | :param private: Make the fork private. The default is False. |
|
971 | :param private: Make the fork private. The default is False. | |
936 | :type private: bool |
|
972 | :type private: bool | |
937 | :param landing_rev: Set the landing revision. The default is tip. |
|
973 | :param landing_rev: Set the landing revision. The default is tip. | |
938 |
|
974 | |||
939 | Example output: |
|
975 | Example output: | |
940 |
|
976 | |||
941 | .. code-block:: bash |
|
977 | .. code-block:: bash | |
942 |
|
978 | |||
943 | id : <id_for_response> |
|
979 | id : <id_for_response> | |
944 | api_key : "<api_key>" |
|
980 | api_key : "<api_key>" | |
945 | args: { |
|
981 | args: { | |
946 | "repoid" : "<reponame or repo_id>", |
|
982 | "repoid" : "<reponame or repo_id>", | |
947 | "fork_name": "<forkname>", |
|
983 | "fork_name": "<forkname>", | |
948 | "owner": "<username or user_id = Optional(=apiuser)>", |
|
984 | "owner": "<username or user_id = Optional(=apiuser)>", | |
949 | "description": "<description>", |
|
985 | "description": "<description>", | |
950 | "copy_permissions": "<bool>", |
|
986 | "copy_permissions": "<bool>", | |
951 | "private": "<bool>", |
|
987 | "private": "<bool>", | |
952 | "landing_rev": "<landing_rev>" |
|
988 | "landing_rev": "<landing_rev>" | |
953 | } |
|
989 | } | |
954 |
|
990 | |||
955 | Example error output: |
|
991 | Example error output: | |
956 |
|
992 | |||
957 | .. code-block:: bash |
|
993 | .. code-block:: bash | |
958 |
|
994 | |||
959 | id : <id_given_in_input> |
|
995 | id : <id_given_in_input> | |
960 | result: { |
|
996 | result: { | |
961 | "msg": "Created fork of `<reponame>` as `<forkname>`", |
|
997 | "msg": "Created fork of `<reponame>` as `<forkname>`", | |
962 | "success": true, |
|
998 | "success": true, | |
963 | "task": "<celery task id or None if done sync>" |
|
999 | "task": "<celery task id or None if done sync>" | |
964 | } |
|
1000 | } | |
965 | error: null |
|
1001 | error: null | |
966 |
|
1002 | |||
967 | """ |
|
1003 | """ | |
968 | if not has_superadmin_permission(apiuser): |
|
|||
969 | if not HasPermissionAnyApi('hg.fork.repository')(user=apiuser): |
|
|||
970 | raise JSONRPCForbidden() |
|
|||
971 |
|
1004 | |||
972 | repo = get_repo_or_error(repoid) |
|
1005 | repo = get_repo_or_error(repoid) | |
973 | repo_name = repo.repo_name |
|
1006 | repo_name = repo.repo_name | |
974 |
|
1007 | |||
975 | (fork_name_cleaned, |
|
|||
976 | parent_group_name) = RepoGroupModel()._get_group_name_and_parent( |
|
|||
977 | fork_name) |
|
|||
978 |
|
||||
979 | if not has_superadmin_permission(apiuser): |
|
1008 | if not has_superadmin_permission(apiuser): | |
980 | # check if we have at least read permission for |
|
1009 | # check if we have at least read permission for | |
981 | # this repo that we fork ! |
|
1010 | # this repo that we fork ! | |
982 | _perms = ( |
|
1011 | _perms = ( | |
983 | 'repository.admin', 'repository.write', 'repository.read') |
|
1012 | 'repository.admin', 'repository.write', 'repository.read') | |
984 |
|
|
1013 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
985 |
|
1014 | |||
986 | if not isinstance(owner, Optional): |
|
1015 | # check if the regular user has at least fork permissions as well | |
987 | # forbid setting owner for non super admins |
|
1016 | if not HasPermissionAnyApi('hg.fork.repository')(user=apiuser): | |
988 | raise JSONRPCError( |
|
|||
989 | 'Only RhodeCode admin can specify `owner` param' |
|
|||
990 | ) |
|
|||
991 | # check if we have a create.repo permission if not maybe the parent |
|
|||
992 | # group permission |
|
|||
993 | if not HasPermissionAnyApi('hg.create.repository')(user=apiuser): |
|
|||
994 | if parent_group_name: |
|
|||
995 | repogroupid = parent_group_name |
|
|||
996 | repo_group = get_repo_group_or_error(parent_group_name) |
|
|||
997 |
|
||||
998 | _perms = ('group.admin',) |
|
|||
999 | if not HasRepoGroupPermissionAnyApi(*_perms)( |
|
|||
1000 | user=apiuser, group_name=repo_group.group_name): |
|
|||
1001 | raise JSONRPCError( |
|
|||
1002 | 'repository group `%s` does not exist' % ( |
|
|||
1003 | repogroupid,)) |
|
|||
1004 | else: |
|
|||
1005 |
|
|
1017 | raise JSONRPCForbidden() | |
1006 |
|
1018 | |||
1007 | _repo = RepoModel().get_by_repo_name(fork_name) |
|
1019 | # check if user can set owner parameter | |
1008 | if _repo: |
|
1020 | owner = validate_set_owner_permissions(apiuser, owner) | |
1009 | type_ = 'fork' if _repo.fork else 'repo' |
|
|||
1010 | raise JSONRPCError("%s `%s` already exist" % (type_, fork_name)) |
|
|||
1011 |
|
1021 | |||
1012 | if isinstance(owner, Optional): |
|
1022 | description = Optional.extract(description) | |
1013 | owner = apiuser.user_id |
|
1023 | copy_permissions = Optional.extract(copy_permissions) | |
|
1024 | clone_uri = Optional.extract(clone_uri) | |||
|
1025 | landing_commit_ref = Optional.extract(landing_rev) | |||
|
1026 | private = Optional.extract(private) | |||
1014 |
|
1027 | |||
1015 | owner = get_user_or_error(owner) |
|
1028 | schema = repo_schema.RepoSchema().bind( | |
|
1029 | repo_type_options=rhodecode.BACKENDS.keys(), | |||
|
1030 | # user caller | |||
|
1031 | user=apiuser) | |||
1016 |
|
1032 | |||
1017 | try: |
|
1033 | try: | |
1018 | # create structure of groups and return the last group |
|
1034 | schema_data = schema.deserialize(dict( | |
1019 |
|
|
1035 | repo_name=fork_name, | |
1020 | form_data = { |
|
1036 | repo_type=repo.repo_type, | |
1021 | 'repo_name': fork_name_cleaned, |
|
1037 | repo_owner=owner.username, | |
1022 | 'repo_name_full': fork_name, |
|
1038 | repo_description=description, | |
1023 | 'repo_group': repo_group.group_id if repo_group else None, |
|
1039 | repo_landing_commit_ref=landing_commit_ref, | |
1024 | 'repo_type': repo.repo_type, |
|
1040 | repo_clone_uri=clone_uri, | |
1025 | 'description': Optional.extract(description), |
|
1041 | repo_private=private, | |
1026 | 'private': Optional.extract(private), |
|
1042 | repo_copy_permissions=copy_permissions)) | |
1027 | 'copy_permissions': Optional.extract(copy_permissions), |
|
1043 | except validation_schema.Invalid as err: | |
1028 | 'landing_rev': Optional.extract(landing_rev), |
|
1044 | raise JSONRPCValidationError(colander_exc=err) | |
|
1045 | ||||
|
1046 | try: | |||
|
1047 | data = { | |||
1029 | 'fork_parent_id': repo.repo_id, |
|
1048 | 'fork_parent_id': repo.repo_id, | |
|
1049 | ||||
|
1050 | 'repo_name': schema_data['repo_group']['repo_name_without_group'], | |||
|
1051 | 'repo_name_full': schema_data['repo_name'], | |||
|
1052 | 'repo_group': schema_data['repo_group']['repo_group_id'], | |||
|
1053 | 'repo_type': schema_data['repo_type'], | |||
|
1054 | 'description': schema_data['repo_description'], | |||
|
1055 | 'private': schema_data['repo_private'], | |||
|
1056 | 'copy_permissions': schema_data['repo_copy_permissions'], | |||
|
1057 | 'landing_rev': schema_data['repo_landing_commit_ref'], | |||
1030 | } |
|
1058 | } | |
1031 |
|
1059 | |||
1032 |
task = RepoModel().create_fork( |
|
1060 | task = RepoModel().create_fork(data, cur_user=owner) | |
1033 | # no commit, it's done in RepoModel, or async via celery |
|
1061 | # no commit, it's done in RepoModel, or async via celery | |
1034 | from celery.result import BaseAsyncResult |
|
1062 | from celery.result import BaseAsyncResult | |
1035 | task_id = None |
|
1063 | task_id = None | |
1036 | if isinstance(task, BaseAsyncResult): |
|
1064 | if isinstance(task, BaseAsyncResult): | |
1037 | task_id = task.task_id |
|
1065 | task_id = task.task_id | |
1038 | return { |
|
1066 | return { | |
1039 | 'msg': 'Created fork of `%s` as `%s`' % ( |
|
1067 | 'msg': 'Created fork of `%s` as `%s`' % ( | |
1040 |
repo.repo_name, |
|
1068 | repo.repo_name, schema_data['repo_name']), | |
1041 | 'success': True, # cannot return the repo data here since fork |
|
1069 | 'success': True, # cannot return the repo data here since fork | |
1042 | # can be done async |
|
1070 | # can be done async | |
1043 | 'task': task_id |
|
1071 | 'task': task_id | |
1044 | } |
|
1072 | } | |
1045 | except Exception: |
|
1073 | except Exception: | |
1046 | log.exception("Exception occurred while trying to fork a repo") |
|
1074 | log.exception( | |
|
1075 | u"Exception while trying to create fork %s", | |||
|
1076 | schema_data['repo_name']) | |||
1047 | raise JSONRPCError( |
|
1077 | raise JSONRPCError( | |
1048 | 'failed to fork repository `%s` as `%s`' % ( |
|
1078 | 'failed to fork repository `%s` as `%s`' % ( | |
1049 |
repo_name, |
|
1079 | repo_name, schema_data['repo_name'])) | |
1050 |
|
1080 | |||
1051 |
|
1081 | |||
1052 | @jsonrpc_method() |
|
1082 | @jsonrpc_method() | |
1053 | def delete_repo(request, apiuser, repoid, forks=Optional('')): |
|
1083 | def delete_repo(request, apiuser, repoid, forks=Optional('')): | |
1054 | """ |
|
1084 | """ | |
1055 | Deletes a repository. |
|
1085 | Deletes a repository. | |
1056 |
|
1086 | |||
1057 | * When the `forks` parameter is set it's possible to detach or delete |
|
1087 | * When the `forks` parameter is set it's possible to detach or delete | |
1058 | forks of deleted repository. |
|
1088 | forks of deleted repository. | |
1059 |
|
1089 | |||
1060 | This command can only be run using an |authtoken| with admin |
|
1090 | This command can only be run using an |authtoken| with admin | |
1061 | permissions on the |repo|. |
|
1091 | permissions on the |repo|. | |
1062 |
|
1092 | |||
1063 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1093 | :param apiuser: This is filled automatically from the |authtoken|. | |
1064 | :type apiuser: AuthUser |
|
1094 | :type apiuser: AuthUser | |
1065 | :param repoid: Set the repository name or repository ID. |
|
1095 | :param repoid: Set the repository name or repository ID. | |
1066 | :type repoid: str or int |
|
1096 | :type repoid: str or int | |
1067 | :param forks: Set to `detach` or `delete` forks from the |repo|. |
|
1097 | :param forks: Set to `detach` or `delete` forks from the |repo|. | |
1068 | :type forks: Optional(str) |
|
1098 | :type forks: Optional(str) | |
1069 |
|
1099 | |||
1070 | Example error output: |
|
1100 | Example error output: | |
1071 |
|
1101 | |||
1072 | .. code-block:: bash |
|
1102 | .. code-block:: bash | |
1073 |
|
1103 | |||
1074 | id : <id_given_in_input> |
|
1104 | id : <id_given_in_input> | |
1075 | result: { |
|
1105 | result: { | |
1076 | "msg": "Deleted repository `<reponame>`", |
|
1106 | "msg": "Deleted repository `<reponame>`", | |
1077 | "success": true |
|
1107 | "success": true | |
1078 | } |
|
1108 | } | |
1079 | error: null |
|
1109 | error: null | |
1080 | """ |
|
1110 | """ | |
1081 |
|
1111 | |||
1082 | repo = get_repo_or_error(repoid) |
|
1112 | repo = get_repo_or_error(repoid) | |
1083 | if not has_superadmin_permission(apiuser): |
|
1113 | if not has_superadmin_permission(apiuser): | |
1084 | _perms = ('repository.admin',) |
|
1114 | _perms = ('repository.admin',) | |
1085 |
|
|
1115 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
1086 |
|
1116 | |||
1087 | try: |
|
1117 | try: | |
1088 | handle_forks = Optional.extract(forks) |
|
1118 | handle_forks = Optional.extract(forks) | |
1089 | _forks_msg = '' |
|
1119 | _forks_msg = '' | |
1090 | _forks = [f for f in repo.forks] |
|
1120 | _forks = [f for f in repo.forks] | |
1091 | if handle_forks == 'detach': |
|
1121 | if handle_forks == 'detach': | |
1092 | _forks_msg = ' ' + 'Detached %s forks' % len(_forks) |
|
1122 | _forks_msg = ' ' + 'Detached %s forks' % len(_forks) | |
1093 | elif handle_forks == 'delete': |
|
1123 | elif handle_forks == 'delete': | |
1094 | _forks_msg = ' ' + 'Deleted %s forks' % len(_forks) |
|
1124 | _forks_msg = ' ' + 'Deleted %s forks' % len(_forks) | |
1095 | elif _forks: |
|
1125 | elif _forks: | |
1096 | raise JSONRPCError( |
|
1126 | raise JSONRPCError( | |
1097 | 'Cannot delete `%s` it still contains attached forks' % |
|
1127 | 'Cannot delete `%s` it still contains attached forks' % | |
1098 | (repo.repo_name,) |
|
1128 | (repo.repo_name,) | |
1099 | ) |
|
1129 | ) | |
1100 |
|
1130 | |||
1101 | RepoModel().delete(repo, forks=forks) |
|
1131 | RepoModel().delete(repo, forks=forks) | |
1102 | Session().commit() |
|
1132 | Session().commit() | |
1103 | return { |
|
1133 | return { | |
1104 | 'msg': 'Deleted repository `%s`%s' % ( |
|
1134 | 'msg': 'Deleted repository `%s`%s' % ( | |
1105 | repo.repo_name, _forks_msg), |
|
1135 | repo.repo_name, _forks_msg), | |
1106 | 'success': True |
|
1136 | 'success': True | |
1107 | } |
|
1137 | } | |
1108 | except Exception: |
|
1138 | except Exception: | |
1109 | log.exception("Exception occurred while trying to delete repo") |
|
1139 | log.exception("Exception occurred while trying to delete repo") | |
1110 | raise JSONRPCError( |
|
1140 | raise JSONRPCError( | |
1111 | 'failed to delete repository `%s`' % (repo.repo_name,) |
|
1141 | 'failed to delete repository `%s`' % (repo.repo_name,) | |
1112 | ) |
|
1142 | ) | |
1113 |
|
1143 | |||
1114 |
|
1144 | |||
1115 | #TODO: marcink, change name ? |
|
1145 | #TODO: marcink, change name ? | |
1116 | @jsonrpc_method() |
|
1146 | @jsonrpc_method() | |
1117 | def invalidate_cache(request, apiuser, repoid, delete_keys=Optional(False)): |
|
1147 | def invalidate_cache(request, apiuser, repoid, delete_keys=Optional(False)): | |
1118 | """ |
|
1148 | """ | |
1119 | Invalidates the cache for the specified repository. |
|
1149 | Invalidates the cache for the specified repository. | |
1120 |
|
1150 | |||
1121 | This command can only be run using an |authtoken| with admin rights to |
|
1151 | This command can only be run using an |authtoken| with admin rights to | |
1122 | the specified repository. |
|
1152 | the specified repository. | |
1123 |
|
1153 | |||
1124 | This command takes the following options: |
|
1154 | This command takes the following options: | |
1125 |
|
1155 | |||
1126 | :param apiuser: This is filled automatically from |authtoken|. |
|
1156 | :param apiuser: This is filled automatically from |authtoken|. | |
1127 | :type apiuser: AuthUser |
|
1157 | :type apiuser: AuthUser | |
1128 | :param repoid: Sets the repository name or repository ID. |
|
1158 | :param repoid: Sets the repository name or repository ID. | |
1129 | :type repoid: str or int |
|
1159 | :type repoid: str or int | |
1130 | :param delete_keys: This deletes the invalidated keys instead of |
|
1160 | :param delete_keys: This deletes the invalidated keys instead of | |
1131 | just flagging them. |
|
1161 | just flagging them. | |
1132 | :type delete_keys: Optional(``True`` | ``False``) |
|
1162 | :type delete_keys: Optional(``True`` | ``False``) | |
1133 |
|
1163 | |||
1134 | Example output: |
|
1164 | Example output: | |
1135 |
|
1165 | |||
1136 | .. code-block:: bash |
|
1166 | .. code-block:: bash | |
1137 |
|
1167 | |||
1138 | id : <id_given_in_input> |
|
1168 | id : <id_given_in_input> | |
1139 | result : { |
|
1169 | result : { | |
1140 | 'msg': Cache for repository `<repository name>` was invalidated, |
|
1170 | 'msg': Cache for repository `<repository name>` was invalidated, | |
1141 | 'repository': <repository name> |
|
1171 | 'repository': <repository name> | |
1142 | } |
|
1172 | } | |
1143 | error : null |
|
1173 | error : null | |
1144 |
|
1174 | |||
1145 | Example error output: |
|
1175 | Example error output: | |
1146 |
|
1176 | |||
1147 | .. code-block:: bash |
|
1177 | .. code-block:: bash | |
1148 |
|
1178 | |||
1149 | id : <id_given_in_input> |
|
1179 | id : <id_given_in_input> | |
1150 | result : null |
|
1180 | result : null | |
1151 | error : { |
|
1181 | error : { | |
1152 | 'Error occurred during cache invalidation action' |
|
1182 | 'Error occurred during cache invalidation action' | |
1153 | } |
|
1183 | } | |
1154 |
|
1184 | |||
1155 | """ |
|
1185 | """ | |
1156 |
|
1186 | |||
1157 | repo = get_repo_or_error(repoid) |
|
1187 | repo = get_repo_or_error(repoid) | |
1158 | if not has_superadmin_permission(apiuser): |
|
1188 | if not has_superadmin_permission(apiuser): | |
1159 | _perms = ('repository.admin', 'repository.write',) |
|
1189 | _perms = ('repository.admin', 'repository.write',) | |
1160 |
|
|
1190 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
1161 |
|
1191 | |||
1162 | delete = Optional.extract(delete_keys) |
|
1192 | delete = Optional.extract(delete_keys) | |
1163 | try: |
|
1193 | try: | |
1164 | ScmModel().mark_for_invalidation(repo.repo_name, delete=delete) |
|
1194 | ScmModel().mark_for_invalidation(repo.repo_name, delete=delete) | |
1165 | return { |
|
1195 | return { | |
1166 | 'msg': 'Cache for repository `%s` was invalidated' % (repoid,), |
|
1196 | 'msg': 'Cache for repository `%s` was invalidated' % (repoid,), | |
1167 | 'repository': repo.repo_name |
|
1197 | 'repository': repo.repo_name | |
1168 | } |
|
1198 | } | |
1169 | except Exception: |
|
1199 | except Exception: | |
1170 | log.exception( |
|
1200 | log.exception( | |
1171 | "Exception occurred while trying to invalidate repo cache") |
|
1201 | "Exception occurred while trying to invalidate repo cache") | |
1172 | raise JSONRPCError( |
|
1202 | raise JSONRPCError( | |
1173 | 'Error occurred during cache invalidation action' |
|
1203 | 'Error occurred during cache invalidation action' | |
1174 | ) |
|
1204 | ) | |
1175 |
|
1205 | |||
1176 |
|
1206 | |||
1177 | #TODO: marcink, change name ? |
|
1207 | #TODO: marcink, change name ? | |
1178 | @jsonrpc_method() |
|
1208 | @jsonrpc_method() | |
1179 | def lock(request, apiuser, repoid, locked=Optional(None), |
|
1209 | def lock(request, apiuser, repoid, locked=Optional(None), | |
1180 | userid=Optional(OAttr('apiuser'))): |
|
1210 | userid=Optional(OAttr('apiuser'))): | |
1181 | """ |
|
1211 | """ | |
1182 | Sets the lock state of the specified |repo| by the given user. |
|
1212 | Sets the lock state of the specified |repo| by the given user. | |
1183 | From more information, see :ref:`repo-locking`. |
|
1213 | From more information, see :ref:`repo-locking`. | |
1184 |
|
1214 | |||
1185 | * If the ``userid`` option is not set, the repository is locked to the |
|
1215 | * If the ``userid`` option is not set, the repository is locked to the | |
1186 | user who called the method. |
|
1216 | user who called the method. | |
1187 | * If the ``locked`` parameter is not set, the current lock state of the |
|
1217 | * If the ``locked`` parameter is not set, the current lock state of the | |
1188 | repository is displayed. |
|
1218 | repository is displayed. | |
1189 |
|
1219 | |||
1190 | This command can only be run using an |authtoken| with admin rights to |
|
1220 | This command can only be run using an |authtoken| with admin rights to | |
1191 | the specified repository. |
|
1221 | the specified repository. | |
1192 |
|
1222 | |||
1193 | This command takes the following options: |
|
1223 | This command takes the following options: | |
1194 |
|
1224 | |||
1195 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1225 | :param apiuser: This is filled automatically from the |authtoken|. | |
1196 | :type apiuser: AuthUser |
|
1226 | :type apiuser: AuthUser | |
1197 | :param repoid: Sets the repository name or repository ID. |
|
1227 | :param repoid: Sets the repository name or repository ID. | |
1198 | :type repoid: str or int |
|
1228 | :type repoid: str or int | |
1199 | :param locked: Sets the lock state. |
|
1229 | :param locked: Sets the lock state. | |
1200 | :type locked: Optional(``True`` | ``False``) |
|
1230 | :type locked: Optional(``True`` | ``False``) | |
1201 | :param userid: Set the repository lock to this user. |
|
1231 | :param userid: Set the repository lock to this user. | |
1202 | :type userid: Optional(str or int) |
|
1232 | :type userid: Optional(str or int) | |
1203 |
|
1233 | |||
1204 | Example error output: |
|
1234 | Example error output: | |
1205 |
|
1235 | |||
1206 | .. code-block:: bash |
|
1236 | .. code-block:: bash | |
1207 |
|
1237 | |||
1208 | id : <id_given_in_input> |
|
1238 | id : <id_given_in_input> | |
1209 | result : { |
|
1239 | result : { | |
1210 | 'repo': '<reponame>', |
|
1240 | 'repo': '<reponame>', | |
1211 | 'locked': <bool: lock state>, |
|
1241 | 'locked': <bool: lock state>, | |
1212 | 'locked_since': <int: lock timestamp>, |
|
1242 | 'locked_since': <int: lock timestamp>, | |
1213 | 'locked_by': <username of person who made the lock>, |
|
1243 | 'locked_by': <username of person who made the lock>, | |
1214 | 'lock_reason': <str: reason for locking>, |
|
1244 | 'lock_reason': <str: reason for locking>, | |
1215 | 'lock_state_changed': <bool: True if lock state has been changed in this request>, |
|
1245 | 'lock_state_changed': <bool: True if lock state has been changed in this request>, | |
1216 | 'msg': 'Repo `<reponame>` locked by `<username>` on <timestamp>.' |
|
1246 | 'msg': 'Repo `<reponame>` locked by `<username>` on <timestamp>.' | |
1217 | or |
|
1247 | or | |
1218 | 'msg': 'Repo `<repository name>` not locked.' |
|
1248 | 'msg': 'Repo `<repository name>` not locked.' | |
1219 | or |
|
1249 | or | |
1220 | 'msg': 'User `<user name>` set lock state for repo `<repository name>` to `<new lock state>`' |
|
1250 | 'msg': 'User `<user name>` set lock state for repo `<repository name>` to `<new lock state>`' | |
1221 | } |
|
1251 | } | |
1222 | error : null |
|
1252 | error : null | |
1223 |
|
1253 | |||
1224 | Example error output: |
|
1254 | Example error output: | |
1225 |
|
1255 | |||
1226 | .. code-block:: bash |
|
1256 | .. code-block:: bash | |
1227 |
|
1257 | |||
1228 | id : <id_given_in_input> |
|
1258 | id : <id_given_in_input> | |
1229 | result : null |
|
1259 | result : null | |
1230 | error : { |
|
1260 | error : { | |
1231 | 'Error occurred locking repository `<reponame>`' |
|
1261 | 'Error occurred locking repository `<reponame>`' | |
1232 | } |
|
1262 | } | |
1233 | """ |
|
1263 | """ | |
1234 |
|
1264 | |||
1235 | repo = get_repo_or_error(repoid) |
|
1265 | repo = get_repo_or_error(repoid) | |
1236 | if not has_superadmin_permission(apiuser): |
|
1266 | if not has_superadmin_permission(apiuser): | |
1237 | # check if we have at least write permission for this repo ! |
|
1267 | # check if we have at least write permission for this repo ! | |
1238 | _perms = ('repository.admin', 'repository.write',) |
|
1268 | _perms = ('repository.admin', 'repository.write',) | |
1239 |
|
|
1269 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
1240 |
|
1270 | |||
1241 | # make sure normal user does not pass someone else userid, |
|
1271 | # make sure normal user does not pass someone else userid, | |
1242 | # he is not allowed to do that |
|
1272 | # he is not allowed to do that | |
1243 | if not isinstance(userid, Optional) and userid != apiuser.user_id: |
|
1273 | if not isinstance(userid, Optional) and userid != apiuser.user_id: | |
1244 | raise JSONRPCError('userid is not the same as your user') |
|
1274 | raise JSONRPCError('userid is not the same as your user') | |
1245 |
|
1275 | |||
1246 | if isinstance(userid, Optional): |
|
1276 | if isinstance(userid, Optional): | |
1247 | userid = apiuser.user_id |
|
1277 | userid = apiuser.user_id | |
1248 |
|
1278 | |||
1249 | user = get_user_or_error(userid) |
|
1279 | user = get_user_or_error(userid) | |
1250 |
|
1280 | |||
1251 | if isinstance(locked, Optional): |
|
1281 | if isinstance(locked, Optional): | |
1252 | lockobj = repo.locked |
|
1282 | lockobj = repo.locked | |
1253 |
|
1283 | |||
1254 | if lockobj[0] is None: |
|
1284 | if lockobj[0] is None: | |
1255 | _d = { |
|
1285 | _d = { | |
1256 | 'repo': repo.repo_name, |
|
1286 | 'repo': repo.repo_name, | |
1257 | 'locked': False, |
|
1287 | 'locked': False, | |
1258 | 'locked_since': None, |
|
1288 | 'locked_since': None, | |
1259 | 'locked_by': None, |
|
1289 | 'locked_by': None, | |
1260 | 'lock_reason': None, |
|
1290 | 'lock_reason': None, | |
1261 | 'lock_state_changed': False, |
|
1291 | 'lock_state_changed': False, | |
1262 | 'msg': 'Repo `%s` not locked.' % repo.repo_name |
|
1292 | 'msg': 'Repo `%s` not locked.' % repo.repo_name | |
1263 | } |
|
1293 | } | |
1264 | return _d |
|
1294 | return _d | |
1265 | else: |
|
1295 | else: | |
1266 | _user_id, _time, _reason = lockobj |
|
1296 | _user_id, _time, _reason = lockobj | |
1267 | lock_user = get_user_or_error(userid) |
|
1297 | lock_user = get_user_or_error(userid) | |
1268 | _d = { |
|
1298 | _d = { | |
1269 | 'repo': repo.repo_name, |
|
1299 | 'repo': repo.repo_name, | |
1270 | 'locked': True, |
|
1300 | 'locked': True, | |
1271 | 'locked_since': _time, |
|
1301 | 'locked_since': _time, | |
1272 | 'locked_by': lock_user.username, |
|
1302 | 'locked_by': lock_user.username, | |
1273 | 'lock_reason': _reason, |
|
1303 | 'lock_reason': _reason, | |
1274 | 'lock_state_changed': False, |
|
1304 | 'lock_state_changed': False, | |
1275 | 'msg': ('Repo `%s` locked by `%s` on `%s`.' |
|
1305 | 'msg': ('Repo `%s` locked by `%s` on `%s`.' | |
1276 | % (repo.repo_name, lock_user.username, |
|
1306 | % (repo.repo_name, lock_user.username, | |
1277 | json.dumps(time_to_datetime(_time)))) |
|
1307 | json.dumps(time_to_datetime(_time)))) | |
1278 | } |
|
1308 | } | |
1279 | return _d |
|
1309 | return _d | |
1280 |
|
1310 | |||
1281 | # force locked state through a flag |
|
1311 | # force locked state through a flag | |
1282 | else: |
|
1312 | else: | |
1283 | locked = str2bool(locked) |
|
1313 | locked = str2bool(locked) | |
1284 | lock_reason = Repository.LOCK_API |
|
1314 | lock_reason = Repository.LOCK_API | |
1285 | try: |
|
1315 | try: | |
1286 | if locked: |
|
1316 | if locked: | |
1287 | lock_time = time.time() |
|
1317 | lock_time = time.time() | |
1288 | Repository.lock(repo, user.user_id, lock_time, lock_reason) |
|
1318 | Repository.lock(repo, user.user_id, lock_time, lock_reason) | |
1289 | else: |
|
1319 | else: | |
1290 | lock_time = None |
|
1320 | lock_time = None | |
1291 | Repository.unlock(repo) |
|
1321 | Repository.unlock(repo) | |
1292 | _d = { |
|
1322 | _d = { | |
1293 | 'repo': repo.repo_name, |
|
1323 | 'repo': repo.repo_name, | |
1294 | 'locked': locked, |
|
1324 | 'locked': locked, | |
1295 | 'locked_since': lock_time, |
|
1325 | 'locked_since': lock_time, | |
1296 | 'locked_by': user.username, |
|
1326 | 'locked_by': user.username, | |
1297 | 'lock_reason': lock_reason, |
|
1327 | 'lock_reason': lock_reason, | |
1298 | 'lock_state_changed': True, |
|
1328 | 'lock_state_changed': True, | |
1299 | 'msg': ('User `%s` set lock state for repo `%s` to `%s`' |
|
1329 | 'msg': ('User `%s` set lock state for repo `%s` to `%s`' | |
1300 | % (user.username, repo.repo_name, locked)) |
|
1330 | % (user.username, repo.repo_name, locked)) | |
1301 | } |
|
1331 | } | |
1302 | return _d |
|
1332 | return _d | |
1303 | except Exception: |
|
1333 | except Exception: | |
1304 | log.exception( |
|
1334 | log.exception( | |
1305 | "Exception occurred while trying to lock repository") |
|
1335 | "Exception occurred while trying to lock repository") | |
1306 | raise JSONRPCError( |
|
1336 | raise JSONRPCError( | |
1307 | 'Error occurred locking repository `%s`' % repo.repo_name |
|
1337 | 'Error occurred locking repository `%s`' % repo.repo_name | |
1308 | ) |
|
1338 | ) | |
1309 |
|
1339 | |||
1310 |
|
1340 | |||
1311 | @jsonrpc_method() |
|
1341 | @jsonrpc_method() | |
1312 | def comment_commit( |
|
1342 | def comment_commit( | |
1313 | request, apiuser, repoid, commit_id, message, |
|
1343 | request, apiuser, repoid, commit_id, message, | |
1314 | userid=Optional(OAttr('apiuser')), status=Optional(None)): |
|
1344 | userid=Optional(OAttr('apiuser')), status=Optional(None)): | |
1315 | """ |
|
1345 | """ | |
1316 | Set a commit comment, and optionally change the status of the commit. |
|
1346 | Set a commit comment, and optionally change the status of the commit. | |
1317 |
|
1347 | |||
1318 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1348 | :param apiuser: This is filled automatically from the |authtoken|. | |
1319 | :type apiuser: AuthUser |
|
1349 | :type apiuser: AuthUser | |
1320 | :param repoid: Set the repository name or repository ID. |
|
1350 | :param repoid: Set the repository name or repository ID. | |
1321 | :type repoid: str or int |
|
1351 | :type repoid: str or int | |
1322 | :param commit_id: Specify the commit_id for which to set a comment. |
|
1352 | :param commit_id: Specify the commit_id for which to set a comment. | |
1323 | :type commit_id: str |
|
1353 | :type commit_id: str | |
1324 | :param message: The comment text. |
|
1354 | :param message: The comment text. | |
1325 | :type message: str |
|
1355 | :type message: str | |
1326 | :param userid: Set the user name of the comment creator. |
|
1356 | :param userid: Set the user name of the comment creator. | |
1327 | :type userid: Optional(str or int) |
|
1357 | :type userid: Optional(str or int) | |
1328 | :param status: status, one of 'not_reviewed', 'approved', 'rejected', |
|
1358 | :param status: status, one of 'not_reviewed', 'approved', 'rejected', | |
1329 | 'under_review' |
|
1359 | 'under_review' | |
1330 | :type status: str |
|
1360 | :type status: str | |
1331 |
|
1361 | |||
1332 | Example error output: |
|
1362 | Example error output: | |
1333 |
|
1363 | |||
1334 | .. code-block:: json |
|
1364 | .. code-block:: json | |
1335 |
|
1365 | |||
1336 | { |
|
1366 | { | |
1337 | "id" : <id_given_in_input>, |
|
1367 | "id" : <id_given_in_input>, | |
1338 | "result" : { |
|
1368 | "result" : { | |
1339 | "msg": "Commented on commit `<commit_id>` for repository `<repoid>`", |
|
1369 | "msg": "Commented on commit `<commit_id>` for repository `<repoid>`", | |
1340 | "status_change": null or <status>, |
|
1370 | "status_change": null or <status>, | |
1341 | "success": true |
|
1371 | "success": true | |
1342 | }, |
|
1372 | }, | |
1343 | "error" : null |
|
1373 | "error" : null | |
1344 | } |
|
1374 | } | |
1345 |
|
1375 | |||
1346 | """ |
|
1376 | """ | |
1347 | repo = get_repo_or_error(repoid) |
|
1377 | repo = get_repo_or_error(repoid) | |
1348 | if not has_superadmin_permission(apiuser): |
|
1378 | if not has_superadmin_permission(apiuser): | |
1349 | _perms = ('repository.read', 'repository.write', 'repository.admin') |
|
1379 | _perms = ('repository.read', 'repository.write', 'repository.admin') | |
1350 |
|
|
1380 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
1351 |
|
1381 | |||
1352 | if isinstance(userid, Optional): |
|
1382 | if isinstance(userid, Optional): | |
1353 | userid = apiuser.user_id |
|
1383 | userid = apiuser.user_id | |
1354 |
|
1384 | |||
1355 | user = get_user_or_error(userid) |
|
1385 | user = get_user_or_error(userid) | |
1356 | status = Optional.extract(status) |
|
1386 | status = Optional.extract(status) | |
1357 |
|
1387 | |||
1358 | allowed_statuses = [x[0] for x in ChangesetStatus.STATUSES] |
|
1388 | allowed_statuses = [x[0] for x in ChangesetStatus.STATUSES] | |
1359 | if status and status not in allowed_statuses: |
|
1389 | if status and status not in allowed_statuses: | |
1360 | raise JSONRPCError('Bad status, must be on ' |
|
1390 | raise JSONRPCError('Bad status, must be on ' | |
1361 | 'of %s got %s' % (allowed_statuses, status,)) |
|
1391 | 'of %s got %s' % (allowed_statuses, status,)) | |
1362 |
|
1392 | |||
1363 | try: |
|
1393 | try: | |
1364 | rc_config = SettingsModel().get_all_settings() |
|
1394 | rc_config = SettingsModel().get_all_settings() | |
1365 | renderer = rc_config.get('rhodecode_markup_renderer', 'rst') |
|
1395 | renderer = rc_config.get('rhodecode_markup_renderer', 'rst') | |
1366 | status_change_label = ChangesetStatus.get_status_lbl(status) |
|
1396 | status_change_label = ChangesetStatus.get_status_lbl(status) | |
1367 | comm = ChangesetCommentsModel().create( |
|
1397 | comm = ChangesetCommentsModel().create( | |
1368 | message, repo, user, revision=commit_id, |
|
1398 | message, repo, user, revision=commit_id, | |
1369 | status_change=status_change_label, |
|
1399 | status_change=status_change_label, | |
1370 | status_change_type=status, |
|
1400 | status_change_type=status, | |
1371 | renderer=renderer) |
|
1401 | renderer=renderer) | |
1372 | if status: |
|
1402 | if status: | |
1373 | # also do a status change |
|
1403 | # also do a status change | |
1374 | try: |
|
1404 | try: | |
1375 | ChangesetStatusModel().set_status( |
|
1405 | ChangesetStatusModel().set_status( | |
1376 | repo, status, user, comm, revision=commit_id, |
|
1406 | repo, status, user, comm, revision=commit_id, | |
1377 | dont_allow_on_closed_pull_request=True |
|
1407 | dont_allow_on_closed_pull_request=True | |
1378 | ) |
|
1408 | ) | |
1379 | except StatusChangeOnClosedPullRequestError: |
|
1409 | except StatusChangeOnClosedPullRequestError: | |
1380 | log.exception( |
|
1410 | log.exception( | |
1381 | "Exception occurred while trying to change repo commit status") |
|
1411 | "Exception occurred while trying to change repo commit status") | |
1382 | msg = ('Changing status on a changeset associated with ' |
|
1412 | msg = ('Changing status on a changeset associated with ' | |
1383 | 'a closed pull request is not allowed') |
|
1413 | 'a closed pull request is not allowed') | |
1384 | raise JSONRPCError(msg) |
|
1414 | raise JSONRPCError(msg) | |
1385 |
|
1415 | |||
1386 | Session().commit() |
|
1416 | Session().commit() | |
1387 | return { |
|
1417 | return { | |
1388 | 'msg': ( |
|
1418 | 'msg': ( | |
1389 | 'Commented on commit `%s` for repository `%s`' % ( |
|
1419 | 'Commented on commit `%s` for repository `%s`' % ( | |
1390 | comm.revision, repo.repo_name)), |
|
1420 | comm.revision, repo.repo_name)), | |
1391 | 'status_change': status, |
|
1421 | 'status_change': status, | |
1392 | 'success': True, |
|
1422 | 'success': True, | |
1393 | } |
|
1423 | } | |
1394 | except JSONRPCError: |
|
1424 | except JSONRPCError: | |
1395 | # catch any inside errors, and re-raise them to prevent from |
|
1425 | # catch any inside errors, and re-raise them to prevent from | |
1396 | # below global catch to silence them |
|
1426 | # below global catch to silence them | |
1397 | raise |
|
1427 | raise | |
1398 | except Exception: |
|
1428 | except Exception: | |
1399 | log.exception("Exception occurred while trying to comment on commit") |
|
1429 | log.exception("Exception occurred while trying to comment on commit") | |
1400 | raise JSONRPCError( |
|
1430 | raise JSONRPCError( | |
1401 | 'failed to set comment on repository `%s`' % (repo.repo_name,) |
|
1431 | 'failed to set comment on repository `%s`' % (repo.repo_name,) | |
1402 | ) |
|
1432 | ) | |
1403 |
|
1433 | |||
1404 |
|
1434 | |||
1405 | @jsonrpc_method() |
|
1435 | @jsonrpc_method() | |
1406 | def grant_user_permission(request, apiuser, repoid, userid, perm): |
|
1436 | def grant_user_permission(request, apiuser, repoid, userid, perm): | |
1407 | """ |
|
1437 | """ | |
1408 | Grant permissions for the specified user on the given repository, |
|
1438 | Grant permissions for the specified user on the given repository, | |
1409 | or update existing permissions if found. |
|
1439 | or update existing permissions if found. | |
1410 |
|
1440 | |||
1411 | This command can only be run using an |authtoken| with admin |
|
1441 | This command can only be run using an |authtoken| with admin | |
1412 | permissions on the |repo|. |
|
1442 | permissions on the |repo|. | |
1413 |
|
1443 | |||
1414 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1444 | :param apiuser: This is filled automatically from the |authtoken|. | |
1415 | :type apiuser: AuthUser |
|
1445 | :type apiuser: AuthUser | |
1416 | :param repoid: Set the repository name or repository ID. |
|
1446 | :param repoid: Set the repository name or repository ID. | |
1417 | :type repoid: str or int |
|
1447 | :type repoid: str or int | |
1418 | :param userid: Set the user name. |
|
1448 | :param userid: Set the user name. | |
1419 | :type userid: str |
|
1449 | :type userid: str | |
1420 | :param perm: Set the user permissions, using the following format |
|
1450 | :param perm: Set the user permissions, using the following format | |
1421 | ``(repository.(none|read|write|admin))`` |
|
1451 | ``(repository.(none|read|write|admin))`` | |
1422 | :type perm: str |
|
1452 | :type perm: str | |
1423 |
|
1453 | |||
1424 | Example output: |
|
1454 | Example output: | |
1425 |
|
1455 | |||
1426 | .. code-block:: bash |
|
1456 | .. code-block:: bash | |
1427 |
|
1457 | |||
1428 | id : <id_given_in_input> |
|
1458 | id : <id_given_in_input> | |
1429 | result: { |
|
1459 | result: { | |
1430 | "msg" : "Granted perm: `<perm>` for user: `<username>` in repo: `<reponame>`", |
|
1460 | "msg" : "Granted perm: `<perm>` for user: `<username>` in repo: `<reponame>`", | |
1431 | "success": true |
|
1461 | "success": true | |
1432 | } |
|
1462 | } | |
1433 | error: null |
|
1463 | error: null | |
1434 | """ |
|
1464 | """ | |
1435 |
|
1465 | |||
1436 | repo = get_repo_or_error(repoid) |
|
1466 | repo = get_repo_or_error(repoid) | |
1437 | user = get_user_or_error(userid) |
|
1467 | user = get_user_or_error(userid) | |
1438 | perm = get_perm_or_error(perm) |
|
1468 | perm = get_perm_or_error(perm) | |
1439 | if not has_superadmin_permission(apiuser): |
|
1469 | if not has_superadmin_permission(apiuser): | |
1440 | _perms = ('repository.admin',) |
|
1470 | _perms = ('repository.admin',) | |
1441 |
|
|
1471 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
1442 |
|
1472 | |||
1443 | try: |
|
1473 | try: | |
1444 |
|
1474 | |||
1445 | RepoModel().grant_user_permission(repo=repo, user=user, perm=perm) |
|
1475 | RepoModel().grant_user_permission(repo=repo, user=user, perm=perm) | |
1446 |
|
1476 | |||
1447 | Session().commit() |
|
1477 | Session().commit() | |
1448 | return { |
|
1478 | return { | |
1449 | 'msg': 'Granted perm: `%s` for user: `%s` in repo: `%s`' % ( |
|
1479 | 'msg': 'Granted perm: `%s` for user: `%s` in repo: `%s`' % ( | |
1450 | perm.permission_name, user.username, repo.repo_name |
|
1480 | perm.permission_name, user.username, repo.repo_name | |
1451 | ), |
|
1481 | ), | |
1452 | 'success': True |
|
1482 | 'success': True | |
1453 | } |
|
1483 | } | |
1454 | except Exception: |
|
1484 | except Exception: | |
1455 | log.exception( |
|
1485 | log.exception( | |
1456 | "Exception occurred while trying edit permissions for repo") |
|
1486 | "Exception occurred while trying edit permissions for repo") | |
1457 | raise JSONRPCError( |
|
1487 | raise JSONRPCError( | |
1458 | 'failed to edit permission for user: `%s` in repo: `%s`' % ( |
|
1488 | 'failed to edit permission for user: `%s` in repo: `%s`' % ( | |
1459 | userid, repoid |
|
1489 | userid, repoid | |
1460 | ) |
|
1490 | ) | |
1461 | ) |
|
1491 | ) | |
1462 |
|
1492 | |||
1463 |
|
1493 | |||
1464 | @jsonrpc_method() |
|
1494 | @jsonrpc_method() | |
1465 | def revoke_user_permission(request, apiuser, repoid, userid): |
|
1495 | def revoke_user_permission(request, apiuser, repoid, userid): | |
1466 | """ |
|
1496 | """ | |
1467 | Revoke permission for a user on the specified repository. |
|
1497 | Revoke permission for a user on the specified repository. | |
1468 |
|
1498 | |||
1469 | This command can only be run using an |authtoken| with admin |
|
1499 | This command can only be run using an |authtoken| with admin | |
1470 | permissions on the |repo|. |
|
1500 | permissions on the |repo|. | |
1471 |
|
1501 | |||
1472 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1502 | :param apiuser: This is filled automatically from the |authtoken|. | |
1473 | :type apiuser: AuthUser |
|
1503 | :type apiuser: AuthUser | |
1474 | :param repoid: Set the repository name or repository ID. |
|
1504 | :param repoid: Set the repository name or repository ID. | |
1475 | :type repoid: str or int |
|
1505 | :type repoid: str or int | |
1476 | :param userid: Set the user name of revoked user. |
|
1506 | :param userid: Set the user name of revoked user. | |
1477 | :type userid: str or int |
|
1507 | :type userid: str or int | |
1478 |
|
1508 | |||
1479 | Example error output: |
|
1509 | Example error output: | |
1480 |
|
1510 | |||
1481 | .. code-block:: bash |
|
1511 | .. code-block:: bash | |
1482 |
|
1512 | |||
1483 | id : <id_given_in_input> |
|
1513 | id : <id_given_in_input> | |
1484 | result: { |
|
1514 | result: { | |
1485 | "msg" : "Revoked perm for user: `<username>` in repo: `<reponame>`", |
|
1515 | "msg" : "Revoked perm for user: `<username>` in repo: `<reponame>`", | |
1486 | "success": true |
|
1516 | "success": true | |
1487 | } |
|
1517 | } | |
1488 | error: null |
|
1518 | error: null | |
1489 | """ |
|
1519 | """ | |
1490 |
|
1520 | |||
1491 | repo = get_repo_or_error(repoid) |
|
1521 | repo = get_repo_or_error(repoid) | |
1492 | user = get_user_or_error(userid) |
|
1522 | user = get_user_or_error(userid) | |
1493 | if not has_superadmin_permission(apiuser): |
|
1523 | if not has_superadmin_permission(apiuser): | |
1494 | _perms = ('repository.admin',) |
|
1524 | _perms = ('repository.admin',) | |
1495 |
|
|
1525 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
1496 |
|
1526 | |||
1497 | try: |
|
1527 | try: | |
1498 | RepoModel().revoke_user_permission(repo=repo, user=user) |
|
1528 | RepoModel().revoke_user_permission(repo=repo, user=user) | |
1499 | Session().commit() |
|
1529 | Session().commit() | |
1500 | return { |
|
1530 | return { | |
1501 | 'msg': 'Revoked perm for user: `%s` in repo: `%s`' % ( |
|
1531 | 'msg': 'Revoked perm for user: `%s` in repo: `%s`' % ( | |
1502 | user.username, repo.repo_name |
|
1532 | user.username, repo.repo_name | |
1503 | ), |
|
1533 | ), | |
1504 | 'success': True |
|
1534 | 'success': True | |
1505 | } |
|
1535 | } | |
1506 | except Exception: |
|
1536 | except Exception: | |
1507 | log.exception( |
|
1537 | log.exception( | |
1508 | "Exception occurred while trying revoke permissions to repo") |
|
1538 | "Exception occurred while trying revoke permissions to repo") | |
1509 | raise JSONRPCError( |
|
1539 | raise JSONRPCError( | |
1510 | 'failed to edit permission for user: `%s` in repo: `%s`' % ( |
|
1540 | 'failed to edit permission for user: `%s` in repo: `%s`' % ( | |
1511 | userid, repoid |
|
1541 | userid, repoid | |
1512 | ) |
|
1542 | ) | |
1513 | ) |
|
1543 | ) | |
1514 |
|
1544 | |||
1515 |
|
1545 | |||
1516 | @jsonrpc_method() |
|
1546 | @jsonrpc_method() | |
1517 | def grant_user_group_permission(request, apiuser, repoid, usergroupid, perm): |
|
1547 | def grant_user_group_permission(request, apiuser, repoid, usergroupid, perm): | |
1518 | """ |
|
1548 | """ | |
1519 | Grant permission for a user group on the specified repository, |
|
1549 | Grant permission for a user group on the specified repository, | |
1520 | or update existing permissions. |
|
1550 | or update existing permissions. | |
1521 |
|
1551 | |||
1522 | This command can only be run using an |authtoken| with admin |
|
1552 | This command can only be run using an |authtoken| with admin | |
1523 | permissions on the |repo|. |
|
1553 | permissions on the |repo|. | |
1524 |
|
1554 | |||
1525 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1555 | :param apiuser: This is filled automatically from the |authtoken|. | |
1526 | :type apiuser: AuthUser |
|
1556 | :type apiuser: AuthUser | |
1527 | :param repoid: Set the repository name or repository ID. |
|
1557 | :param repoid: Set the repository name or repository ID. | |
1528 | :type repoid: str or int |
|
1558 | :type repoid: str or int | |
1529 | :param usergroupid: Specify the ID of the user group. |
|
1559 | :param usergroupid: Specify the ID of the user group. | |
1530 | :type usergroupid: str or int |
|
1560 | :type usergroupid: str or int | |
1531 | :param perm: Set the user group permissions using the following |
|
1561 | :param perm: Set the user group permissions using the following | |
1532 | format: (repository.(none|read|write|admin)) |
|
1562 | format: (repository.(none|read|write|admin)) | |
1533 | :type perm: str |
|
1563 | :type perm: str | |
1534 |
|
1564 | |||
1535 | Example output: |
|
1565 | Example output: | |
1536 |
|
1566 | |||
1537 | .. code-block:: bash |
|
1567 | .. code-block:: bash | |
1538 |
|
1568 | |||
1539 | id : <id_given_in_input> |
|
1569 | id : <id_given_in_input> | |
1540 | result : { |
|
1570 | result : { | |
1541 | "msg" : "Granted perm: `<perm>` for group: `<usersgroupname>` in repo: `<reponame>`", |
|
1571 | "msg" : "Granted perm: `<perm>` for group: `<usersgroupname>` in repo: `<reponame>`", | |
1542 | "success": true |
|
1572 | "success": true | |
1543 |
|
1573 | |||
1544 | } |
|
1574 | } | |
1545 | error : null |
|
1575 | error : null | |
1546 |
|
1576 | |||
1547 | Example error output: |
|
1577 | Example error output: | |
1548 |
|
1578 | |||
1549 | .. code-block:: bash |
|
1579 | .. code-block:: bash | |
1550 |
|
1580 | |||
1551 | id : <id_given_in_input> |
|
1581 | id : <id_given_in_input> | |
1552 | result : null |
|
1582 | result : null | |
1553 | error : { |
|
1583 | error : { | |
1554 | "failed to edit permission for user group: `<usergroup>` in repo `<repo>`' |
|
1584 | "failed to edit permission for user group: `<usergroup>` in repo `<repo>`' | |
1555 | } |
|
1585 | } | |
1556 |
|
1586 | |||
1557 | """ |
|
1587 | """ | |
1558 |
|
1588 | |||
1559 | repo = get_repo_or_error(repoid) |
|
1589 | repo = get_repo_or_error(repoid) | |
1560 | perm = get_perm_or_error(perm) |
|
1590 | perm = get_perm_or_error(perm) | |
1561 | if not has_superadmin_permission(apiuser): |
|
1591 | if not has_superadmin_permission(apiuser): | |
1562 | _perms = ('repository.admin',) |
|
1592 | _perms = ('repository.admin',) | |
1563 |
|
|
1593 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
1564 |
|
1594 | |||
1565 | user_group = get_user_group_or_error(usergroupid) |
|
1595 | user_group = get_user_group_or_error(usergroupid) | |
1566 | if not has_superadmin_permission(apiuser): |
|
1596 | if not has_superadmin_permission(apiuser): | |
1567 | # check if we have at least read permission for this user group ! |
|
1597 | # check if we have at least read permission for this user group ! | |
1568 | _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',) |
|
1598 | _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',) | |
1569 | if not HasUserGroupPermissionAnyApi(*_perms)( |
|
1599 | if not HasUserGroupPermissionAnyApi(*_perms)( | |
1570 | user=apiuser, user_group_name=user_group.users_group_name): |
|
1600 | user=apiuser, user_group_name=user_group.users_group_name): | |
1571 | raise JSONRPCError( |
|
1601 | raise JSONRPCError( | |
1572 | 'user group `%s` does not exist' % (usergroupid,)) |
|
1602 | 'user group `%s` does not exist' % (usergroupid,)) | |
1573 |
|
1603 | |||
1574 | try: |
|
1604 | try: | |
1575 | RepoModel().grant_user_group_permission( |
|
1605 | RepoModel().grant_user_group_permission( | |
1576 | repo=repo, group_name=user_group, perm=perm) |
|
1606 | repo=repo, group_name=user_group, perm=perm) | |
1577 |
|
1607 | |||
1578 | Session().commit() |
|
1608 | Session().commit() | |
1579 | return { |
|
1609 | return { | |
1580 | 'msg': 'Granted perm: `%s` for user group: `%s` in ' |
|
1610 | 'msg': 'Granted perm: `%s` for user group: `%s` in ' | |
1581 | 'repo: `%s`' % ( |
|
1611 | 'repo: `%s`' % ( | |
1582 | perm.permission_name, user_group.users_group_name, |
|
1612 | perm.permission_name, user_group.users_group_name, | |
1583 | repo.repo_name |
|
1613 | repo.repo_name | |
1584 | ), |
|
1614 | ), | |
1585 | 'success': True |
|
1615 | 'success': True | |
1586 | } |
|
1616 | } | |
1587 | except Exception: |
|
1617 | except Exception: | |
1588 | log.exception( |
|
1618 | log.exception( | |
1589 | "Exception occurred while trying change permission on repo") |
|
1619 | "Exception occurred while trying change permission on repo") | |
1590 | raise JSONRPCError( |
|
1620 | raise JSONRPCError( | |
1591 | 'failed to edit permission for user group: `%s` in ' |
|
1621 | 'failed to edit permission for user group: `%s` in ' | |
1592 | 'repo: `%s`' % ( |
|
1622 | 'repo: `%s`' % ( | |
1593 | usergroupid, repo.repo_name |
|
1623 | usergroupid, repo.repo_name | |
1594 | ) |
|
1624 | ) | |
1595 | ) |
|
1625 | ) | |
1596 |
|
1626 | |||
1597 |
|
1627 | |||
1598 | @jsonrpc_method() |
|
1628 | @jsonrpc_method() | |
1599 | def revoke_user_group_permission(request, apiuser, repoid, usergroupid): |
|
1629 | def revoke_user_group_permission(request, apiuser, repoid, usergroupid): | |
1600 | """ |
|
1630 | """ | |
1601 | Revoke the permissions of a user group on a given repository. |
|
1631 | Revoke the permissions of a user group on a given repository. | |
1602 |
|
1632 | |||
1603 | This command can only be run using an |authtoken| with admin |
|
1633 | This command can only be run using an |authtoken| with admin | |
1604 | permissions on the |repo|. |
|
1634 | permissions on the |repo|. | |
1605 |
|
1635 | |||
1606 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1636 | :param apiuser: This is filled automatically from the |authtoken|. | |
1607 | :type apiuser: AuthUser |
|
1637 | :type apiuser: AuthUser | |
1608 | :param repoid: Set the repository name or repository ID. |
|
1638 | :param repoid: Set the repository name or repository ID. | |
1609 | :type repoid: str or int |
|
1639 | :type repoid: str or int | |
1610 | :param usergroupid: Specify the user group ID. |
|
1640 | :param usergroupid: Specify the user group ID. | |
1611 | :type usergroupid: str or int |
|
1641 | :type usergroupid: str or int | |
1612 |
|
1642 | |||
1613 | Example output: |
|
1643 | Example output: | |
1614 |
|
1644 | |||
1615 | .. code-block:: bash |
|
1645 | .. code-block:: bash | |
1616 |
|
1646 | |||
1617 | id : <id_given_in_input> |
|
1647 | id : <id_given_in_input> | |
1618 | result: { |
|
1648 | result: { | |
1619 | "msg" : "Revoked perm for group: `<usersgroupname>` in repo: `<reponame>`", |
|
1649 | "msg" : "Revoked perm for group: `<usersgroupname>` in repo: `<reponame>`", | |
1620 | "success": true |
|
1650 | "success": true | |
1621 | } |
|
1651 | } | |
1622 | error: null |
|
1652 | error: null | |
1623 | """ |
|
1653 | """ | |
1624 |
|
1654 | |||
1625 | repo = get_repo_or_error(repoid) |
|
1655 | repo = get_repo_or_error(repoid) | |
1626 | if not has_superadmin_permission(apiuser): |
|
1656 | if not has_superadmin_permission(apiuser): | |
1627 | _perms = ('repository.admin',) |
|
1657 | _perms = ('repository.admin',) | |
1628 |
|
|
1658 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
1629 |
|
1659 | |||
1630 | user_group = get_user_group_or_error(usergroupid) |
|
1660 | user_group = get_user_group_or_error(usergroupid) | |
1631 | if not has_superadmin_permission(apiuser): |
|
1661 | if not has_superadmin_permission(apiuser): | |
1632 | # check if we have at least read permission for this user group ! |
|
1662 | # check if we have at least read permission for this user group ! | |
1633 | _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',) |
|
1663 | _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',) | |
1634 | if not HasUserGroupPermissionAnyApi(*_perms)( |
|
1664 | if not HasUserGroupPermissionAnyApi(*_perms)( | |
1635 | user=apiuser, user_group_name=user_group.users_group_name): |
|
1665 | user=apiuser, user_group_name=user_group.users_group_name): | |
1636 | raise JSONRPCError( |
|
1666 | raise JSONRPCError( | |
1637 | 'user group `%s` does not exist' % (usergroupid,)) |
|
1667 | 'user group `%s` does not exist' % (usergroupid,)) | |
1638 |
|
1668 | |||
1639 | try: |
|
1669 | try: | |
1640 | RepoModel().revoke_user_group_permission( |
|
1670 | RepoModel().revoke_user_group_permission( | |
1641 | repo=repo, group_name=user_group) |
|
1671 | repo=repo, group_name=user_group) | |
1642 |
|
1672 | |||
1643 | Session().commit() |
|
1673 | Session().commit() | |
1644 | return { |
|
1674 | return { | |
1645 | 'msg': 'Revoked perm for user group: `%s` in repo: `%s`' % ( |
|
1675 | 'msg': 'Revoked perm for user group: `%s` in repo: `%s`' % ( | |
1646 | user_group.users_group_name, repo.repo_name |
|
1676 | user_group.users_group_name, repo.repo_name | |
1647 | ), |
|
1677 | ), | |
1648 | 'success': True |
|
1678 | 'success': True | |
1649 | } |
|
1679 | } | |
1650 | except Exception: |
|
1680 | except Exception: | |
1651 | log.exception("Exception occurred while trying revoke " |
|
1681 | log.exception("Exception occurred while trying revoke " | |
1652 | "user group permission on repo") |
|
1682 | "user group permission on repo") | |
1653 | raise JSONRPCError( |
|
1683 | raise JSONRPCError( | |
1654 | 'failed to edit permission for user group: `%s` in ' |
|
1684 | 'failed to edit permission for user group: `%s` in ' | |
1655 | 'repo: `%s`' % ( |
|
1685 | 'repo: `%s`' % ( | |
1656 | user_group.users_group_name, repo.repo_name |
|
1686 | user_group.users_group_name, repo.repo_name | |
1657 | ) |
|
1687 | ) | |
1658 | ) |
|
1688 | ) | |
1659 |
|
1689 | |||
1660 |
|
1690 | |||
1661 | @jsonrpc_method() |
|
1691 | @jsonrpc_method() | |
1662 | def pull(request, apiuser, repoid): |
|
1692 | def pull(request, apiuser, repoid): | |
1663 | """ |
|
1693 | """ | |
1664 | Triggers a pull on the given repository from a remote location. You |
|
1694 | Triggers a pull on the given repository from a remote location. You | |
1665 | can use this to keep remote repositories up-to-date. |
|
1695 | can use this to keep remote repositories up-to-date. | |
1666 |
|
1696 | |||
1667 | This command can only be run using an |authtoken| with admin |
|
1697 | This command can only be run using an |authtoken| with admin | |
1668 | rights to the specified repository. For more information, |
|
1698 | rights to the specified repository. For more information, | |
1669 | see :ref:`config-token-ref`. |
|
1699 | see :ref:`config-token-ref`. | |
1670 |
|
1700 | |||
1671 | This command takes the following options: |
|
1701 | This command takes the following options: | |
1672 |
|
1702 | |||
1673 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1703 | :param apiuser: This is filled automatically from the |authtoken|. | |
1674 | :type apiuser: AuthUser |
|
1704 | :type apiuser: AuthUser | |
1675 | :param repoid: The repository name or repository ID. |
|
1705 | :param repoid: The repository name or repository ID. | |
1676 | :type repoid: str or int |
|
1706 | :type repoid: str or int | |
1677 |
|
1707 | |||
1678 | Example output: |
|
1708 | Example output: | |
1679 |
|
1709 | |||
1680 | .. code-block:: bash |
|
1710 | .. code-block:: bash | |
1681 |
|
1711 | |||
1682 | id : <id_given_in_input> |
|
1712 | id : <id_given_in_input> | |
1683 | result : { |
|
1713 | result : { | |
1684 | "msg": "Pulled from `<repository name>`" |
|
1714 | "msg": "Pulled from `<repository name>`" | |
1685 | "repository": "<repository name>" |
|
1715 | "repository": "<repository name>" | |
1686 | } |
|
1716 | } | |
1687 | error : null |
|
1717 | error : null | |
1688 |
|
1718 | |||
1689 | Example error output: |
|
1719 | Example error output: | |
1690 |
|
1720 | |||
1691 | .. code-block:: bash |
|
1721 | .. code-block:: bash | |
1692 |
|
1722 | |||
1693 | id : <id_given_in_input> |
|
1723 | id : <id_given_in_input> | |
1694 | result : null |
|
1724 | result : null | |
1695 | error : { |
|
1725 | error : { | |
1696 | "Unable to pull changes from `<reponame>`" |
|
1726 | "Unable to pull changes from `<reponame>`" | |
1697 | } |
|
1727 | } | |
1698 |
|
1728 | |||
1699 | """ |
|
1729 | """ | |
1700 |
|
1730 | |||
1701 | repo = get_repo_or_error(repoid) |
|
1731 | repo = get_repo_or_error(repoid) | |
1702 | if not has_superadmin_permission(apiuser): |
|
1732 | if not has_superadmin_permission(apiuser): | |
1703 | _perms = ('repository.admin',) |
|
1733 | _perms = ('repository.admin',) | |
1704 |
|
|
1734 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
1705 |
|
1735 | |||
1706 | try: |
|
1736 | try: | |
1707 | ScmModel().pull_changes(repo.repo_name, apiuser.username) |
|
1737 | ScmModel().pull_changes(repo.repo_name, apiuser.username) | |
1708 | return { |
|
1738 | return { | |
1709 | 'msg': 'Pulled from `%s`' % repo.repo_name, |
|
1739 | 'msg': 'Pulled from `%s`' % repo.repo_name, | |
1710 | 'repository': repo.repo_name |
|
1740 | 'repository': repo.repo_name | |
1711 | } |
|
1741 | } | |
1712 | except Exception: |
|
1742 | except Exception: | |
1713 | log.exception("Exception occurred while trying to " |
|
1743 | log.exception("Exception occurred while trying to " | |
1714 | "pull changes from remote location") |
|
1744 | "pull changes from remote location") | |
1715 | raise JSONRPCError( |
|
1745 | raise JSONRPCError( | |
1716 | 'Unable to pull changes from `%s`' % repo.repo_name |
|
1746 | 'Unable to pull changes from `%s`' % repo.repo_name | |
1717 | ) |
|
1747 | ) | |
1718 |
|
1748 | |||
1719 |
|
1749 | |||
1720 | @jsonrpc_method() |
|
1750 | @jsonrpc_method() | |
1721 | def strip(request, apiuser, repoid, revision, branch): |
|
1751 | def strip(request, apiuser, repoid, revision, branch): | |
1722 | """ |
|
1752 | """ | |
1723 | Strips the given revision from the specified repository. |
|
1753 | Strips the given revision from the specified repository. | |
1724 |
|
1754 | |||
1725 | * This will remove the revision and all of its decendants. |
|
1755 | * This will remove the revision and all of its decendants. | |
1726 |
|
1756 | |||
1727 | This command can only be run using an |authtoken| with admin rights to |
|
1757 | This command can only be run using an |authtoken| with admin rights to | |
1728 | the specified repository. |
|
1758 | the specified repository. | |
1729 |
|
1759 | |||
1730 | This command takes the following options: |
|
1760 | This command takes the following options: | |
1731 |
|
1761 | |||
1732 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1762 | :param apiuser: This is filled automatically from the |authtoken|. | |
1733 | :type apiuser: AuthUser |
|
1763 | :type apiuser: AuthUser | |
1734 | :param repoid: The repository name or repository ID. |
|
1764 | :param repoid: The repository name or repository ID. | |
1735 | :type repoid: str or int |
|
1765 | :type repoid: str or int | |
1736 | :param revision: The revision you wish to strip. |
|
1766 | :param revision: The revision you wish to strip. | |
1737 | :type revision: str |
|
1767 | :type revision: str | |
1738 | :param branch: The branch from which to strip the revision. |
|
1768 | :param branch: The branch from which to strip the revision. | |
1739 | :type branch: str |
|
1769 | :type branch: str | |
1740 |
|
1770 | |||
1741 | Example output: |
|
1771 | Example output: | |
1742 |
|
1772 | |||
1743 | .. code-block:: bash |
|
1773 | .. code-block:: bash | |
1744 |
|
1774 | |||
1745 | id : <id_given_in_input> |
|
1775 | id : <id_given_in_input> | |
1746 | result : { |
|
1776 | result : { | |
1747 | "msg": "'Stripped commit <commit_hash> from repo `<repository name>`'" |
|
1777 | "msg": "'Stripped commit <commit_hash> from repo `<repository name>`'" | |
1748 | "repository": "<repository name>" |
|
1778 | "repository": "<repository name>" | |
1749 | } |
|
1779 | } | |
1750 | error : null |
|
1780 | error : null | |
1751 |
|
1781 | |||
1752 | Example error output: |
|
1782 | Example error output: | |
1753 |
|
1783 | |||
1754 | .. code-block:: bash |
|
1784 | .. code-block:: bash | |
1755 |
|
1785 | |||
1756 | id : <id_given_in_input> |
|
1786 | id : <id_given_in_input> | |
1757 | result : null |
|
1787 | result : null | |
1758 | error : { |
|
1788 | error : { | |
1759 | "Unable to strip commit <commit_hash> from repo `<repository name>`" |
|
1789 | "Unable to strip commit <commit_hash> from repo `<repository name>`" | |
1760 | } |
|
1790 | } | |
1761 |
|
1791 | |||
1762 | """ |
|
1792 | """ | |
1763 |
|
1793 | |||
1764 | repo = get_repo_or_error(repoid) |
|
1794 | repo = get_repo_or_error(repoid) | |
1765 | if not has_superadmin_permission(apiuser): |
|
1795 | if not has_superadmin_permission(apiuser): | |
1766 | _perms = ('repository.admin',) |
|
1796 | _perms = ('repository.admin',) | |
1767 |
|
|
1797 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
1768 |
|
1798 | |||
1769 | try: |
|
1799 | try: | |
1770 | ScmModel().strip(repo, revision, branch) |
|
1800 | ScmModel().strip(repo, revision, branch) | |
1771 | return { |
|
1801 | return { | |
1772 | 'msg': 'Stripped commit %s from repo `%s`' % ( |
|
1802 | 'msg': 'Stripped commit %s from repo `%s`' % ( | |
1773 | revision, repo.repo_name), |
|
1803 | revision, repo.repo_name), | |
1774 | 'repository': repo.repo_name |
|
1804 | 'repository': repo.repo_name | |
1775 | } |
|
1805 | } | |
1776 | except Exception: |
|
1806 | except Exception: | |
1777 | log.exception("Exception while trying to strip") |
|
1807 | log.exception("Exception while trying to strip") | |
1778 | raise JSONRPCError( |
|
1808 | raise JSONRPCError( | |
1779 | 'Unable to strip commit %s from repo `%s`' % ( |
|
1809 | 'Unable to strip commit %s from repo `%s`' % ( | |
1780 | revision, repo.repo_name) |
|
1810 | revision, repo.repo_name) | |
1781 | ) |
|
1811 | ) | |
1782 |
|
1812 | |||
1783 |
|
1813 | |||
1784 | @jsonrpc_method() |
|
1814 | @jsonrpc_method() | |
1785 | def get_repo_settings(request, apiuser, repoid, key=Optional(None)): |
|
1815 | def get_repo_settings(request, apiuser, repoid, key=Optional(None)): | |
1786 | """ |
|
1816 | """ | |
1787 | Returns all settings for a repository. If key is given it only returns the |
|
1817 | Returns all settings for a repository. If key is given it only returns the | |
1788 | setting identified by the key or null. |
|
1818 | setting identified by the key or null. | |
1789 |
|
1819 | |||
1790 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1820 | :param apiuser: This is filled automatically from the |authtoken|. | |
1791 | :type apiuser: AuthUser |
|
1821 | :type apiuser: AuthUser | |
1792 | :param repoid: The repository name or repository id. |
|
1822 | :param repoid: The repository name or repository id. | |
1793 | :type repoid: str or int |
|
1823 | :type repoid: str or int | |
1794 | :param key: Key of the setting to return. |
|
1824 | :param key: Key of the setting to return. | |
1795 | :type: key: Optional(str) |
|
1825 | :type: key: Optional(str) | |
1796 |
|
1826 | |||
1797 | Example output: |
|
1827 | Example output: | |
1798 |
|
1828 | |||
1799 | .. code-block:: bash |
|
1829 | .. code-block:: bash | |
1800 |
|
1830 | |||
1801 | { |
|
1831 | { | |
1802 | "error": null, |
|
1832 | "error": null, | |
1803 | "id": 237, |
|
1833 | "id": 237, | |
1804 | "result": { |
|
1834 | "result": { | |
1805 | "extensions_largefiles": true, |
|
1835 | "extensions_largefiles": true, | |
1806 | "hooks_changegroup_push_logger": true, |
|
1836 | "hooks_changegroup_push_logger": true, | |
1807 | "hooks_changegroup_repo_size": false, |
|
1837 | "hooks_changegroup_repo_size": false, | |
1808 | "hooks_outgoing_pull_logger": true, |
|
1838 | "hooks_outgoing_pull_logger": true, | |
1809 | "phases_publish": "True", |
|
1839 | "phases_publish": "True", | |
1810 | "rhodecode_hg_use_rebase_for_merging": true, |
|
1840 | "rhodecode_hg_use_rebase_for_merging": true, | |
1811 | "rhodecode_pr_merge_enabled": true, |
|
1841 | "rhodecode_pr_merge_enabled": true, | |
1812 | "rhodecode_use_outdated_comments": true |
|
1842 | "rhodecode_use_outdated_comments": true | |
1813 | } |
|
1843 | } | |
1814 | } |
|
1844 | } | |
1815 | """ |
|
1845 | """ | |
1816 |
|
1846 | |||
1817 | # Restrict access to this api method to admins only. |
|
1847 | # Restrict access to this api method to admins only. | |
1818 | if not has_superadmin_permission(apiuser): |
|
1848 | if not has_superadmin_permission(apiuser): | |
1819 | raise JSONRPCForbidden() |
|
1849 | raise JSONRPCForbidden() | |
1820 |
|
1850 | |||
1821 | try: |
|
1851 | try: | |
1822 | repo = get_repo_or_error(repoid) |
|
1852 | repo = get_repo_or_error(repoid) | |
1823 | settings_model = VcsSettingsModel(repo=repo) |
|
1853 | settings_model = VcsSettingsModel(repo=repo) | |
1824 | settings = settings_model.get_global_settings() |
|
1854 | settings = settings_model.get_global_settings() | |
1825 | settings.update(settings_model.get_repo_settings()) |
|
1855 | settings.update(settings_model.get_repo_settings()) | |
1826 |
|
1856 | |||
1827 | # If only a single setting is requested fetch it from all settings. |
|
1857 | # If only a single setting is requested fetch it from all settings. | |
1828 | key = Optional.extract(key) |
|
1858 | key = Optional.extract(key) | |
1829 | if key is not None: |
|
1859 | if key is not None: | |
1830 | settings = settings.get(key, None) |
|
1860 | settings = settings.get(key, None) | |
1831 | except Exception: |
|
1861 | except Exception: | |
1832 | msg = 'Failed to fetch settings for repository `{}`'.format(repoid) |
|
1862 | msg = 'Failed to fetch settings for repository `{}`'.format(repoid) | |
1833 | log.exception(msg) |
|
1863 | log.exception(msg) | |
1834 | raise JSONRPCError(msg) |
|
1864 | raise JSONRPCError(msg) | |
1835 |
|
1865 | |||
1836 | return settings |
|
1866 | return settings | |
1837 |
|
1867 | |||
1838 |
|
1868 | |||
1839 | @jsonrpc_method() |
|
1869 | @jsonrpc_method() | |
1840 | def set_repo_settings(request, apiuser, repoid, settings): |
|
1870 | def set_repo_settings(request, apiuser, repoid, settings): | |
1841 | """ |
|
1871 | """ | |
1842 | Update repository settings. Returns true on success. |
|
1872 | Update repository settings. Returns true on success. | |
1843 |
|
1873 | |||
1844 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1874 | :param apiuser: This is filled automatically from the |authtoken|. | |
1845 | :type apiuser: AuthUser |
|
1875 | :type apiuser: AuthUser | |
1846 | :param repoid: The repository name or repository id. |
|
1876 | :param repoid: The repository name or repository id. | |
1847 | :type repoid: str or int |
|
1877 | :type repoid: str or int | |
1848 | :param settings: The new settings for the repository. |
|
1878 | :param settings: The new settings for the repository. | |
1849 | :type: settings: dict |
|
1879 | :type: settings: dict | |
1850 |
|
1880 | |||
1851 | Example output: |
|
1881 | Example output: | |
1852 |
|
1882 | |||
1853 | .. code-block:: bash |
|
1883 | .. code-block:: bash | |
1854 |
|
1884 | |||
1855 | { |
|
1885 | { | |
1856 | "error": null, |
|
1886 | "error": null, | |
1857 | "id": 237, |
|
1887 | "id": 237, | |
1858 | "result": true |
|
1888 | "result": true | |
1859 | } |
|
1889 | } | |
1860 | """ |
|
1890 | """ | |
1861 | # Restrict access to this api method to admins only. |
|
1891 | # Restrict access to this api method to admins only. | |
1862 | if not has_superadmin_permission(apiuser): |
|
1892 | if not has_superadmin_permission(apiuser): | |
1863 | raise JSONRPCForbidden() |
|
1893 | raise JSONRPCForbidden() | |
1864 |
|
1894 | |||
1865 | if type(settings) is not dict: |
|
1895 | if type(settings) is not dict: | |
1866 | raise JSONRPCError('Settings have to be a JSON Object.') |
|
1896 | raise JSONRPCError('Settings have to be a JSON Object.') | |
1867 |
|
1897 | |||
1868 | try: |
|
1898 | try: | |
1869 | settings_model = VcsSettingsModel(repo=repoid) |
|
1899 | settings_model = VcsSettingsModel(repo=repoid) | |
1870 |
|
1900 | |||
1871 | # Merge global, repo and incoming settings. |
|
1901 | # Merge global, repo and incoming settings. | |
1872 | new_settings = settings_model.get_global_settings() |
|
1902 | new_settings = settings_model.get_global_settings() | |
1873 | new_settings.update(settings_model.get_repo_settings()) |
|
1903 | new_settings.update(settings_model.get_repo_settings()) | |
1874 | new_settings.update(settings) |
|
1904 | new_settings.update(settings) | |
1875 |
|
1905 | |||
1876 | # Update the settings. |
|
1906 | # Update the settings. | |
1877 | inherit_global_settings = new_settings.get( |
|
1907 | inherit_global_settings = new_settings.get( | |
1878 | 'inherit_global_settings', False) |
|
1908 | 'inherit_global_settings', False) | |
1879 | settings_model.create_or_update_repo_settings( |
|
1909 | settings_model.create_or_update_repo_settings( | |
1880 | new_settings, inherit_global_settings=inherit_global_settings) |
|
1910 | new_settings, inherit_global_settings=inherit_global_settings) | |
1881 | Session().commit() |
|
1911 | Session().commit() | |
1882 | except Exception: |
|
1912 | except Exception: | |
1883 | msg = 'Failed to update settings for repository `{}`'.format(repoid) |
|
1913 | msg = 'Failed to update settings for repository `{}`'.format(repoid) | |
1884 | log.exception(msg) |
|
1914 | log.exception(msg) | |
1885 | raise JSONRPCError(msg) |
|
1915 | raise JSONRPCError(msg) | |
1886 |
|
1916 | |||
1887 | # Indicate success. |
|
1917 | # Indicate success. | |
1888 | return True |
|
1918 | return True |
@@ -1,1057 +1,1057 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2016 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2016 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | Repository model for rhodecode |
|
22 | Repository model for rhodecode | |
23 | """ |
|
23 | """ | |
24 |
|
24 | |||
25 | import logging |
|
25 | import logging | |
26 | import os |
|
26 | import os | |
27 | import re |
|
27 | import re | |
28 | import shutil |
|
28 | import shutil | |
29 | import time |
|
29 | import time | |
30 | import traceback |
|
30 | import traceback | |
31 | from datetime import datetime |
|
31 | from datetime import datetime | |
32 |
|
32 | |||
33 | from sqlalchemy.sql import func |
|
33 | from sqlalchemy.sql import func | |
34 | from sqlalchemy.sql.expression import true, or_ |
|
34 | from sqlalchemy.sql.expression import true, or_ | |
35 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
35 | from zope.cachedescriptors.property import Lazy as LazyProperty | |
36 |
|
36 | |||
37 | from rhodecode import events |
|
37 | from rhodecode import events | |
38 | from rhodecode.lib import helpers as h |
|
38 | from rhodecode.lib import helpers as h | |
39 | from rhodecode.lib.auth import HasUserGroupPermissionAny |
|
39 | from rhodecode.lib.auth import HasUserGroupPermissionAny | |
40 | from rhodecode.lib.caching_query import FromCache |
|
40 | from rhodecode.lib.caching_query import FromCache | |
41 | from rhodecode.lib.exceptions import AttachedForksError |
|
41 | from rhodecode.lib.exceptions import AttachedForksError | |
42 | from rhodecode.lib.hooks_base import log_delete_repository |
|
42 | from rhodecode.lib.hooks_base import log_delete_repository | |
43 | from rhodecode.lib.markup_renderer import MarkupRenderer |
|
43 | from rhodecode.lib.markup_renderer import MarkupRenderer | |
44 | from rhodecode.lib.utils import make_db_config |
|
44 | from rhodecode.lib.utils import make_db_config | |
45 | from rhodecode.lib.utils2 import ( |
|
45 | from rhodecode.lib.utils2 import ( | |
46 | safe_str, safe_unicode, remove_prefix, obfuscate_url_pw, |
|
46 | safe_str, safe_unicode, remove_prefix, obfuscate_url_pw, | |
47 | get_current_rhodecode_user, safe_int, datetime_to_time, action_logger_generic) |
|
47 | get_current_rhodecode_user, safe_int, datetime_to_time, action_logger_generic) | |
48 | from rhodecode.lib.vcs.backends import get_backend |
|
48 | from rhodecode.lib.vcs.backends import get_backend | |
49 | from rhodecode.lib.vcs.exceptions import NodeDoesNotExistError |
|
49 | from rhodecode.lib.vcs.exceptions import NodeDoesNotExistError | |
50 | from rhodecode.model import BaseModel |
|
50 | from rhodecode.model import BaseModel | |
51 | from rhodecode.model.db import ( |
|
51 | from rhodecode.model.db import ( | |
52 | Repository, UserRepoToPerm, UserGroupRepoToPerm, UserRepoGroupToPerm, |
|
52 | Repository, UserRepoToPerm, UserGroupRepoToPerm, UserRepoGroupToPerm, | |
53 | UserGroupRepoGroupToPerm, User, Permission, Statistics, UserGroup, |
|
53 | UserGroupRepoGroupToPerm, User, Permission, Statistics, UserGroup, | |
54 | RepoGroup, RepositoryField) |
|
54 | RepoGroup, RepositoryField) | |
55 | from rhodecode.model.scm import UserGroupList |
|
55 | from rhodecode.model.scm import UserGroupList | |
56 | from rhodecode.model.settings import VcsSettingsModel |
|
56 | from rhodecode.model.settings import VcsSettingsModel | |
57 |
|
57 | |||
58 |
|
58 | |||
59 | log = logging.getLogger(__name__) |
|
59 | log = logging.getLogger(__name__) | |
60 |
|
60 | |||
61 |
|
61 | |||
62 | class RepoModel(BaseModel): |
|
62 | class RepoModel(BaseModel): | |
63 |
|
63 | |||
64 | cls = Repository |
|
64 | cls = Repository | |
65 |
|
65 | |||
66 | def _get_user_group(self, users_group): |
|
66 | def _get_user_group(self, users_group): | |
67 | return self._get_instance(UserGroup, users_group, |
|
67 | return self._get_instance(UserGroup, users_group, | |
68 | callback=UserGroup.get_by_group_name) |
|
68 | callback=UserGroup.get_by_group_name) | |
69 |
|
69 | |||
70 | def _get_repo_group(self, repo_group): |
|
70 | def _get_repo_group(self, repo_group): | |
71 | return self._get_instance(RepoGroup, repo_group, |
|
71 | return self._get_instance(RepoGroup, repo_group, | |
72 | callback=RepoGroup.get_by_group_name) |
|
72 | callback=RepoGroup.get_by_group_name) | |
73 |
|
73 | |||
74 | def _create_default_perms(self, repository, private): |
|
74 | def _create_default_perms(self, repository, private): | |
75 | # create default permission |
|
75 | # create default permission | |
76 | default = 'repository.read' |
|
76 | default = 'repository.read' | |
77 | def_user = User.get_default_user() |
|
77 | def_user = User.get_default_user() | |
78 | for p in def_user.user_perms: |
|
78 | for p in def_user.user_perms: | |
79 | if p.permission.permission_name.startswith('repository.'): |
|
79 | if p.permission.permission_name.startswith('repository.'): | |
80 | default = p.permission.permission_name |
|
80 | default = p.permission.permission_name | |
81 | break |
|
81 | break | |
82 |
|
82 | |||
83 | default_perm = 'repository.none' if private else default |
|
83 | default_perm = 'repository.none' if private else default | |
84 |
|
84 | |||
85 | repo_to_perm = UserRepoToPerm() |
|
85 | repo_to_perm = UserRepoToPerm() | |
86 | repo_to_perm.permission = Permission.get_by_key(default_perm) |
|
86 | repo_to_perm.permission = Permission.get_by_key(default_perm) | |
87 |
|
87 | |||
88 | repo_to_perm.repository = repository |
|
88 | repo_to_perm.repository = repository | |
89 | repo_to_perm.user_id = def_user.user_id |
|
89 | repo_to_perm.user_id = def_user.user_id | |
90 |
|
90 | |||
91 | return repo_to_perm |
|
91 | return repo_to_perm | |
92 |
|
92 | |||
93 | @LazyProperty |
|
93 | @LazyProperty | |
94 | def repos_path(self): |
|
94 | def repos_path(self): | |
95 | """ |
|
95 | """ | |
96 | Gets the repositories root path from database |
|
96 | Gets the repositories root path from database | |
97 | """ |
|
97 | """ | |
98 | settings_model = VcsSettingsModel(sa=self.sa) |
|
98 | settings_model = VcsSettingsModel(sa=self.sa) | |
99 | return settings_model.get_repos_location() |
|
99 | return settings_model.get_repos_location() | |
100 |
|
100 | |||
101 | def get(self, repo_id, cache=False): |
|
101 | def get(self, repo_id, cache=False): | |
102 | repo = self.sa.query(Repository) \ |
|
102 | repo = self.sa.query(Repository) \ | |
103 | .filter(Repository.repo_id == repo_id) |
|
103 | .filter(Repository.repo_id == repo_id) | |
104 |
|
104 | |||
105 | if cache: |
|
105 | if cache: | |
106 | repo = repo.options(FromCache("sql_cache_short", |
|
106 | repo = repo.options(FromCache("sql_cache_short", | |
107 | "get_repo_%s" % repo_id)) |
|
107 | "get_repo_%s" % repo_id)) | |
108 | return repo.scalar() |
|
108 | return repo.scalar() | |
109 |
|
109 | |||
110 | def get_repo(self, repository): |
|
110 | def get_repo(self, repository): | |
111 | return self._get_repo(repository) |
|
111 | return self._get_repo(repository) | |
112 |
|
112 | |||
113 | def get_by_repo_name(self, repo_name, cache=False): |
|
113 | def get_by_repo_name(self, repo_name, cache=False): | |
114 | repo = self.sa.query(Repository) \ |
|
114 | repo = self.sa.query(Repository) \ | |
115 | .filter(Repository.repo_name == repo_name) |
|
115 | .filter(Repository.repo_name == repo_name) | |
116 |
|
116 | |||
117 | if cache: |
|
117 | if cache: | |
118 | repo = repo.options(FromCache("sql_cache_short", |
|
118 | repo = repo.options(FromCache("sql_cache_short", | |
119 | "get_repo_%s" % repo_name)) |
|
119 | "get_repo_%s" % repo_name)) | |
120 | return repo.scalar() |
|
120 | return repo.scalar() | |
121 |
|
121 | |||
122 | def _extract_id_from_repo_name(self, repo_name): |
|
122 | def _extract_id_from_repo_name(self, repo_name): | |
123 | if repo_name.startswith('/'): |
|
123 | if repo_name.startswith('/'): | |
124 | repo_name = repo_name.lstrip('/') |
|
124 | repo_name = repo_name.lstrip('/') | |
125 | by_id_match = re.match(r'^_(\d{1,})', repo_name) |
|
125 | by_id_match = re.match(r'^_(\d{1,})', repo_name) | |
126 | if by_id_match: |
|
126 | if by_id_match: | |
127 | return by_id_match.groups()[0] |
|
127 | return by_id_match.groups()[0] | |
128 |
|
128 | |||
129 | def get_repo_by_id(self, repo_name): |
|
129 | def get_repo_by_id(self, repo_name): | |
130 | """ |
|
130 | """ | |
131 | Extracts repo_name by id from special urls. |
|
131 | Extracts repo_name by id from special urls. | |
132 | Example url is _11/repo_name |
|
132 | Example url is _11/repo_name | |
133 |
|
133 | |||
134 | :param repo_name: |
|
134 | :param repo_name: | |
135 | :return: repo object if matched else None |
|
135 | :return: repo object if matched else None | |
136 | """ |
|
136 | """ | |
137 | try: |
|
137 | try: | |
138 | _repo_id = self._extract_id_from_repo_name(repo_name) |
|
138 | _repo_id = self._extract_id_from_repo_name(repo_name) | |
139 | if _repo_id: |
|
139 | if _repo_id: | |
140 | return self.get(_repo_id) |
|
140 | return self.get(_repo_id) | |
141 | except Exception: |
|
141 | except Exception: | |
142 | log.exception('Failed to extract repo_name from URL') |
|
142 | log.exception('Failed to extract repo_name from URL') | |
143 |
|
143 | |||
144 | return None |
|
144 | return None | |
145 |
|
145 | |||
146 | def get_url(self, repo): |
|
146 | def get_url(self, repo): | |
147 | return h.url('summary_home', repo_name=safe_str(repo.repo_name), |
|
147 | return h.url('summary_home', repo_name=safe_str(repo.repo_name), | |
148 | qualified=True) |
|
148 | qualified=True) | |
149 |
|
149 | |||
150 | def get_users(self, name_contains=None, limit=20, only_active=True): |
|
150 | def get_users(self, name_contains=None, limit=20, only_active=True): | |
151 |
|
151 | |||
152 | # TODO: mikhail: move this method to the UserModel. |
|
152 | # TODO: mikhail: move this method to the UserModel. | |
153 | query = self.sa.query(User) |
|
153 | query = self.sa.query(User) | |
154 | if only_active: |
|
154 | if only_active: | |
155 | query = query.filter(User.active == true()) |
|
155 | query = query.filter(User.active == true()) | |
156 |
|
156 | |||
157 | if name_contains: |
|
157 | if name_contains: | |
158 | ilike_expression = u'%{}%'.format(safe_unicode(name_contains)) |
|
158 | ilike_expression = u'%{}%'.format(safe_unicode(name_contains)) | |
159 | query = query.filter( |
|
159 | query = query.filter( | |
160 | or_( |
|
160 | or_( | |
161 | User.name.ilike(ilike_expression), |
|
161 | User.name.ilike(ilike_expression), | |
162 | User.lastname.ilike(ilike_expression), |
|
162 | User.lastname.ilike(ilike_expression), | |
163 | User.username.ilike(ilike_expression) |
|
163 | User.username.ilike(ilike_expression) | |
164 | ) |
|
164 | ) | |
165 | ) |
|
165 | ) | |
166 | query = query.limit(limit) |
|
166 | query = query.limit(limit) | |
167 | users = query.all() |
|
167 | users = query.all() | |
168 |
|
168 | |||
169 | _users = [ |
|
169 | _users = [ | |
170 | { |
|
170 | { | |
171 | 'id': user.user_id, |
|
171 | 'id': user.user_id, | |
172 | 'first_name': user.name, |
|
172 | 'first_name': user.name, | |
173 | 'last_name': user.lastname, |
|
173 | 'last_name': user.lastname, | |
174 | 'username': user.username, |
|
174 | 'username': user.username, | |
175 | 'email': user.email, |
|
175 | 'email': user.email, | |
176 | 'icon_link': h.gravatar_url(user.email, 30), |
|
176 | 'icon_link': h.gravatar_url(user.email, 30), | |
177 | 'value_display': h.person(user), |
|
177 | 'value_display': h.person(user), | |
178 | 'value': user.username, |
|
178 | 'value': user.username, | |
179 | 'value_type': 'user', |
|
179 | 'value_type': 'user', | |
180 | 'active': user.active, |
|
180 | 'active': user.active, | |
181 | } |
|
181 | } | |
182 | for user in users |
|
182 | for user in users | |
183 | ] |
|
183 | ] | |
184 | return _users |
|
184 | return _users | |
185 |
|
185 | |||
186 | def get_user_groups(self, name_contains=None, limit=20, only_active=True): |
|
186 | def get_user_groups(self, name_contains=None, limit=20, only_active=True): | |
187 | # TODO: mikhail: move this method to the UserGroupModel. |
|
187 | # TODO: mikhail: move this method to the UserGroupModel. | |
188 | query = self.sa.query(UserGroup) |
|
188 | query = self.sa.query(UserGroup) | |
189 | if only_active: |
|
189 | if only_active: | |
190 | query = query.filter(UserGroup.users_group_active == true()) |
|
190 | query = query.filter(UserGroup.users_group_active == true()) | |
191 |
|
191 | |||
192 | if name_contains: |
|
192 | if name_contains: | |
193 | ilike_expression = u'%{}%'.format(safe_unicode(name_contains)) |
|
193 | ilike_expression = u'%{}%'.format(safe_unicode(name_contains)) | |
194 | query = query.filter( |
|
194 | query = query.filter( | |
195 | UserGroup.users_group_name.ilike(ilike_expression))\ |
|
195 | UserGroup.users_group_name.ilike(ilike_expression))\ | |
196 | .order_by(func.length(UserGroup.users_group_name))\ |
|
196 | .order_by(func.length(UserGroup.users_group_name))\ | |
197 | .order_by(UserGroup.users_group_name) |
|
197 | .order_by(UserGroup.users_group_name) | |
198 |
|
198 | |||
199 | query = query.limit(limit) |
|
199 | query = query.limit(limit) | |
200 | user_groups = query.all() |
|
200 | user_groups = query.all() | |
201 | perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin'] |
|
201 | perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin'] | |
202 | user_groups = UserGroupList(user_groups, perm_set=perm_set) |
|
202 | user_groups = UserGroupList(user_groups, perm_set=perm_set) | |
203 |
|
203 | |||
204 | _groups = [ |
|
204 | _groups = [ | |
205 | { |
|
205 | { | |
206 | 'id': group.users_group_id, |
|
206 | 'id': group.users_group_id, | |
207 | # TODO: marcink figure out a way to generate the url for the |
|
207 | # TODO: marcink figure out a way to generate the url for the | |
208 | # icon |
|
208 | # icon | |
209 | 'icon_link': '', |
|
209 | 'icon_link': '', | |
210 | 'value_display': 'Group: %s (%d members)' % ( |
|
210 | 'value_display': 'Group: %s (%d members)' % ( | |
211 | group.users_group_name, len(group.members),), |
|
211 | group.users_group_name, len(group.members),), | |
212 | 'value': group.users_group_name, |
|
212 | 'value': group.users_group_name, | |
213 | 'value_type': 'user_group', |
|
213 | 'value_type': 'user_group', | |
214 | 'active': group.users_group_active, |
|
214 | 'active': group.users_group_active, | |
215 | } |
|
215 | } | |
216 | for group in user_groups |
|
216 | for group in user_groups | |
217 | ] |
|
217 | ] | |
218 | return _groups |
|
218 | return _groups | |
219 |
|
219 | |||
220 | @classmethod |
|
220 | @classmethod | |
221 | def update_repoinfo(cls, repositories=None): |
|
221 | def update_repoinfo(cls, repositories=None): | |
222 | if not repositories: |
|
222 | if not repositories: | |
223 | repositories = Repository.getAll() |
|
223 | repositories = Repository.getAll() | |
224 | for repo in repositories: |
|
224 | for repo in repositories: | |
225 | repo.update_commit_cache() |
|
225 | repo.update_commit_cache() | |
226 |
|
226 | |||
227 | def get_repos_as_dict(self, repo_list=None, admin=False, |
|
227 | def get_repos_as_dict(self, repo_list=None, admin=False, | |
228 | super_user_actions=False): |
|
228 | super_user_actions=False): | |
229 |
|
229 | |||
230 | from rhodecode.lib.utils import PartialRenderer |
|
230 | from rhodecode.lib.utils import PartialRenderer | |
231 | _render = PartialRenderer('data_table/_dt_elements.html') |
|
231 | _render = PartialRenderer('data_table/_dt_elements.html') | |
232 | c = _render.c |
|
232 | c = _render.c | |
233 |
|
233 | |||
234 | def quick_menu(repo_name): |
|
234 | def quick_menu(repo_name): | |
235 | return _render('quick_menu', repo_name) |
|
235 | return _render('quick_menu', repo_name) | |
236 |
|
236 | |||
237 | def repo_lnk(name, rtype, rstate, private, fork_of): |
|
237 | def repo_lnk(name, rtype, rstate, private, fork_of): | |
238 | return _render('repo_name', name, rtype, rstate, private, fork_of, |
|
238 | return _render('repo_name', name, rtype, rstate, private, fork_of, | |
239 | short_name=not admin, admin=False) |
|
239 | short_name=not admin, admin=False) | |
240 |
|
240 | |||
241 | def last_change(last_change): |
|
241 | def last_change(last_change): | |
242 | return _render("last_change", last_change) |
|
242 | return _render("last_change", last_change) | |
243 |
|
243 | |||
244 | def rss_lnk(repo_name): |
|
244 | def rss_lnk(repo_name): | |
245 | return _render("rss", repo_name) |
|
245 | return _render("rss", repo_name) | |
246 |
|
246 | |||
247 | def atom_lnk(repo_name): |
|
247 | def atom_lnk(repo_name): | |
248 | return _render("atom", repo_name) |
|
248 | return _render("atom", repo_name) | |
249 |
|
249 | |||
250 | def last_rev(repo_name, cs_cache): |
|
250 | def last_rev(repo_name, cs_cache): | |
251 | return _render('revision', repo_name, cs_cache.get('revision'), |
|
251 | return _render('revision', repo_name, cs_cache.get('revision'), | |
252 | cs_cache.get('raw_id'), cs_cache.get('author'), |
|
252 | cs_cache.get('raw_id'), cs_cache.get('author'), | |
253 | cs_cache.get('message')) |
|
253 | cs_cache.get('message')) | |
254 |
|
254 | |||
255 | def desc(desc): |
|
255 | def desc(desc): | |
256 | if c.visual.stylify_metatags: |
|
256 | if c.visual.stylify_metatags: | |
257 | desc = h.urlify_text(h.escaped_stylize(desc)) |
|
257 | desc = h.urlify_text(h.escaped_stylize(desc)) | |
258 | else: |
|
258 | else: | |
259 | desc = h.urlify_text(h.html_escape(desc)) |
|
259 | desc = h.urlify_text(h.html_escape(desc)) | |
260 |
|
260 | |||
261 | return _render('repo_desc', desc) |
|
261 | return _render('repo_desc', desc) | |
262 |
|
262 | |||
263 | def state(repo_state): |
|
263 | def state(repo_state): | |
264 | return _render("repo_state", repo_state) |
|
264 | return _render("repo_state", repo_state) | |
265 |
|
265 | |||
266 | def repo_actions(repo_name): |
|
266 | def repo_actions(repo_name): | |
267 | return _render('repo_actions', repo_name, super_user_actions) |
|
267 | return _render('repo_actions', repo_name, super_user_actions) | |
268 |
|
268 | |||
269 | def user_profile(username): |
|
269 | def user_profile(username): | |
270 | return _render('user_profile', username) |
|
270 | return _render('user_profile', username) | |
271 |
|
271 | |||
272 | repos_data = [] |
|
272 | repos_data = [] | |
273 | for repo in repo_list: |
|
273 | for repo in repo_list: | |
274 | cs_cache = repo.changeset_cache |
|
274 | cs_cache = repo.changeset_cache | |
275 | row = { |
|
275 | row = { | |
276 | "menu": quick_menu(repo.repo_name), |
|
276 | "menu": quick_menu(repo.repo_name), | |
277 |
|
277 | |||
278 | "name": repo_lnk(repo.repo_name, repo.repo_type, |
|
278 | "name": repo_lnk(repo.repo_name, repo.repo_type, | |
279 | repo.repo_state, repo.private, repo.fork), |
|
279 | repo.repo_state, repo.private, repo.fork), | |
280 | "name_raw": repo.repo_name.lower(), |
|
280 | "name_raw": repo.repo_name.lower(), | |
281 |
|
281 | |||
282 | "last_change": last_change(repo.last_db_change), |
|
282 | "last_change": last_change(repo.last_db_change), | |
283 | "last_change_raw": datetime_to_time(repo.last_db_change), |
|
283 | "last_change_raw": datetime_to_time(repo.last_db_change), | |
284 |
|
284 | |||
285 | "last_changeset": last_rev(repo.repo_name, cs_cache), |
|
285 | "last_changeset": last_rev(repo.repo_name, cs_cache), | |
286 | "last_changeset_raw": cs_cache.get('revision'), |
|
286 | "last_changeset_raw": cs_cache.get('revision'), | |
287 |
|
287 | |||
288 | "desc": desc(repo.description), |
|
288 | "desc": desc(repo.description), | |
289 | "owner": user_profile(repo.user.username), |
|
289 | "owner": user_profile(repo.user.username), | |
290 |
|
290 | |||
291 | "state": state(repo.repo_state), |
|
291 | "state": state(repo.repo_state), | |
292 | "rss": rss_lnk(repo.repo_name), |
|
292 | "rss": rss_lnk(repo.repo_name), | |
293 |
|
293 | |||
294 | "atom": atom_lnk(repo.repo_name), |
|
294 | "atom": atom_lnk(repo.repo_name), | |
295 | } |
|
295 | } | |
296 | if admin: |
|
296 | if admin: | |
297 | row.update({ |
|
297 | row.update({ | |
298 | "action": repo_actions(repo.repo_name), |
|
298 | "action": repo_actions(repo.repo_name), | |
299 | }) |
|
299 | }) | |
300 | repos_data.append(row) |
|
300 | repos_data.append(row) | |
301 |
|
301 | |||
302 | return repos_data |
|
302 | return repos_data | |
303 |
|
303 | |||
304 | def _get_defaults(self, repo_name): |
|
304 | def _get_defaults(self, repo_name): | |
305 | """ |
|
305 | """ | |
306 | Gets information about repository, and returns a dict for |
|
306 | Gets information about repository, and returns a dict for | |
307 | usage in forms |
|
307 | usage in forms | |
308 |
|
308 | |||
309 | :param repo_name: |
|
309 | :param repo_name: | |
310 | """ |
|
310 | """ | |
311 |
|
311 | |||
312 | repo_info = Repository.get_by_repo_name(repo_name) |
|
312 | repo_info = Repository.get_by_repo_name(repo_name) | |
313 |
|
313 | |||
314 | if repo_info is None: |
|
314 | if repo_info is None: | |
315 | return None |
|
315 | return None | |
316 |
|
316 | |||
317 | defaults = repo_info.get_dict() |
|
317 | defaults = repo_info.get_dict() | |
318 | defaults['repo_name'] = repo_info.just_name |
|
318 | defaults['repo_name'] = repo_info.just_name | |
319 |
|
319 | |||
320 | groups = repo_info.groups_with_parents |
|
320 | groups = repo_info.groups_with_parents | |
321 | parent_group = groups[-1] if groups else None |
|
321 | parent_group = groups[-1] if groups else None | |
322 |
|
322 | |||
323 | # we use -1 as this is how in HTML, we mark an empty group |
|
323 | # we use -1 as this is how in HTML, we mark an empty group | |
324 | defaults['repo_group'] = getattr(parent_group, 'group_id', -1) |
|
324 | defaults['repo_group'] = getattr(parent_group, 'group_id', -1) | |
325 |
|
325 | |||
326 | keys_to_process = ( |
|
326 | keys_to_process = ( | |
327 | {'k': 'repo_type', 'strip': False}, |
|
327 | {'k': 'repo_type', 'strip': False}, | |
328 | {'k': 'repo_enable_downloads', 'strip': True}, |
|
328 | {'k': 'repo_enable_downloads', 'strip': True}, | |
329 | {'k': 'repo_description', 'strip': True}, |
|
329 | {'k': 'repo_description', 'strip': True}, | |
330 | {'k': 'repo_enable_locking', 'strip': True}, |
|
330 | {'k': 'repo_enable_locking', 'strip': True}, | |
331 | {'k': 'repo_landing_rev', 'strip': True}, |
|
331 | {'k': 'repo_landing_rev', 'strip': True}, | |
332 | {'k': 'clone_uri', 'strip': False}, |
|
332 | {'k': 'clone_uri', 'strip': False}, | |
333 | {'k': 'repo_private', 'strip': True}, |
|
333 | {'k': 'repo_private', 'strip': True}, | |
334 | {'k': 'repo_enable_statistics', 'strip': True} |
|
334 | {'k': 'repo_enable_statistics', 'strip': True} | |
335 | ) |
|
335 | ) | |
336 |
|
336 | |||
337 | for item in keys_to_process: |
|
337 | for item in keys_to_process: | |
338 | attr = item['k'] |
|
338 | attr = item['k'] | |
339 | if item['strip']: |
|
339 | if item['strip']: | |
340 | attr = remove_prefix(item['k'], 'repo_') |
|
340 | attr = remove_prefix(item['k'], 'repo_') | |
341 |
|
341 | |||
342 | val = defaults[attr] |
|
342 | val = defaults[attr] | |
343 | if item['k'] == 'repo_landing_rev': |
|
343 | if item['k'] == 'repo_landing_rev': | |
344 | val = ':'.join(defaults[attr]) |
|
344 | val = ':'.join(defaults[attr]) | |
345 | defaults[item['k']] = val |
|
345 | defaults[item['k']] = val | |
346 | if item['k'] == 'clone_uri': |
|
346 | if item['k'] == 'clone_uri': | |
347 | defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden |
|
347 | defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden | |
348 |
|
348 | |||
349 | # fill owner |
|
349 | # fill owner | |
350 | if repo_info.user: |
|
350 | if repo_info.user: | |
351 | defaults.update({'user': repo_info.user.username}) |
|
351 | defaults.update({'user': repo_info.user.username}) | |
352 | else: |
|
352 | else: | |
353 | replacement_user = User.get_first_super_admin().username |
|
353 | replacement_user = User.get_first_super_admin().username | |
354 | defaults.update({'user': replacement_user}) |
|
354 | defaults.update({'user': replacement_user}) | |
355 |
|
355 | |||
356 | # fill repository users |
|
356 | # fill repository users | |
357 | for p in repo_info.repo_to_perm: |
|
357 | for p in repo_info.repo_to_perm: | |
358 | defaults.update({'u_perm_%s' % p.user.user_id: |
|
358 | defaults.update({'u_perm_%s' % p.user.user_id: | |
359 | p.permission.permission_name}) |
|
359 | p.permission.permission_name}) | |
360 |
|
360 | |||
361 | # fill repository groups |
|
361 | # fill repository groups | |
362 | for p in repo_info.users_group_to_perm: |
|
362 | for p in repo_info.users_group_to_perm: | |
363 | defaults.update({'g_perm_%s' % p.users_group.users_group_id: |
|
363 | defaults.update({'g_perm_%s' % p.users_group.users_group_id: | |
364 | p.permission.permission_name}) |
|
364 | p.permission.permission_name}) | |
365 |
|
365 | |||
366 | return defaults |
|
366 | return defaults | |
367 |
|
367 | |||
368 | def update(self, repo, **kwargs): |
|
368 | def update(self, repo, **kwargs): | |
369 | try: |
|
369 | try: | |
370 | cur_repo = self._get_repo(repo) |
|
370 | cur_repo = self._get_repo(repo) | |
371 | source_repo_name = cur_repo.repo_name |
|
371 | source_repo_name = cur_repo.repo_name | |
372 | if 'user' in kwargs: |
|
372 | if 'user' in kwargs: | |
373 | cur_repo.user = User.get_by_username(kwargs['user']) |
|
373 | cur_repo.user = User.get_by_username(kwargs['user']) | |
374 |
|
374 | |||
375 | if 'repo_group' in kwargs: |
|
375 | if 'repo_group' in kwargs: | |
376 | cur_repo.group = RepoGroup.get(kwargs['repo_group']) |
|
376 | cur_repo.group = RepoGroup.get(kwargs['repo_group']) | |
377 | log.debug('Updating repo %s with params:%s', cur_repo, kwargs) |
|
377 | log.debug('Updating repo %s with params:%s', cur_repo, kwargs) | |
378 |
|
378 | |||
379 | update_keys = [ |
|
379 | update_keys = [ | |
380 | (1, 'repo_enable_downloads'), |
|
|||
381 | (1, 'repo_description'), |
|
380 | (1, 'repo_description'), | |
382 | (1, 'repo_enable_locking'), |
|
|||
383 | (1, 'repo_landing_rev'), |
|
381 | (1, 'repo_landing_rev'), | |
384 | (1, 'repo_private'), |
|
382 | (1, 'repo_private'), | |
|
383 | (1, 'repo_enable_downloads'), | |||
|
384 | (1, 'repo_enable_locking'), | |||
385 | (1, 'repo_enable_statistics'), |
|
385 | (1, 'repo_enable_statistics'), | |
386 | (0, 'clone_uri'), |
|
386 | (0, 'clone_uri'), | |
387 | (0, 'fork_id') |
|
387 | (0, 'fork_id') | |
388 | ] |
|
388 | ] | |
389 | for strip, k in update_keys: |
|
389 | for strip, k in update_keys: | |
390 | if k in kwargs: |
|
390 | if k in kwargs: | |
391 | val = kwargs[k] |
|
391 | val = kwargs[k] | |
392 | if strip: |
|
392 | if strip: | |
393 | k = remove_prefix(k, 'repo_') |
|
393 | k = remove_prefix(k, 'repo_') | |
394 | if k == 'clone_uri': |
|
394 | if k == 'clone_uri': | |
395 | from rhodecode.model.validators import Missing |
|
395 | from rhodecode.model.validators import Missing | |
396 | _change = kwargs.get('clone_uri_change') |
|
396 | _change = kwargs.get('clone_uri_change') | |
397 | if _change in [Missing, 'OLD']: |
|
397 | if _change in [Missing, 'OLD']: | |
398 | # we don't change the value, so use original one |
|
398 | # we don't change the value, so use original one | |
399 | val = cur_repo.clone_uri |
|
399 | val = cur_repo.clone_uri | |
400 |
|
400 | |||
401 | setattr(cur_repo, k, val) |
|
401 | setattr(cur_repo, k, val) | |
402 |
|
402 | |||
403 | new_name = cur_repo.get_new_name(kwargs['repo_name']) |
|
403 | new_name = cur_repo.get_new_name(kwargs['repo_name']) | |
404 | cur_repo.repo_name = new_name |
|
404 | cur_repo.repo_name = new_name | |
405 |
|
405 | |||
406 | # if private flag is set, reset default permission to NONE |
|
406 | # if private flag is set, reset default permission to NONE | |
407 | if kwargs.get('repo_private'): |
|
407 | if kwargs.get('repo_private'): | |
408 | EMPTY_PERM = 'repository.none' |
|
408 | EMPTY_PERM = 'repository.none' | |
409 | RepoModel().grant_user_permission( |
|
409 | RepoModel().grant_user_permission( | |
410 | repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM |
|
410 | repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM | |
411 | ) |
|
411 | ) | |
412 |
|
412 | |||
413 | # handle extra fields |
|
413 | # handle extra fields | |
414 | for field in filter(lambda k: k.startswith(RepositoryField.PREFIX), |
|
414 | for field in filter(lambda k: k.startswith(RepositoryField.PREFIX), | |
415 | kwargs): |
|
415 | kwargs): | |
416 | k = RepositoryField.un_prefix_key(field) |
|
416 | k = RepositoryField.un_prefix_key(field) | |
417 | ex_field = RepositoryField.get_by_key_name( |
|
417 | ex_field = RepositoryField.get_by_key_name( | |
418 | key=k, repo=cur_repo) |
|
418 | key=k, repo=cur_repo) | |
419 | if ex_field: |
|
419 | if ex_field: | |
420 | ex_field.field_value = kwargs[field] |
|
420 | ex_field.field_value = kwargs[field] | |
421 | self.sa.add(ex_field) |
|
421 | self.sa.add(ex_field) | |
422 | self.sa.add(cur_repo) |
|
422 | self.sa.add(cur_repo) | |
423 |
|
423 | |||
424 | if source_repo_name != new_name: |
|
424 | if source_repo_name != new_name: | |
425 | # rename repository |
|
425 | # rename repository | |
426 | self._rename_filesystem_repo( |
|
426 | self._rename_filesystem_repo( | |
427 | old=source_repo_name, new=new_name) |
|
427 | old=source_repo_name, new=new_name) | |
428 |
|
428 | |||
429 | return cur_repo |
|
429 | return cur_repo | |
430 | except Exception: |
|
430 | except Exception: | |
431 | log.error(traceback.format_exc()) |
|
431 | log.error(traceback.format_exc()) | |
432 | raise |
|
432 | raise | |
433 |
|
433 | |||
434 | def _create_repo(self, repo_name, repo_type, description, owner, |
|
434 | def _create_repo(self, repo_name, repo_type, description, owner, | |
435 | private=False, clone_uri=None, repo_group=None, |
|
435 | private=False, clone_uri=None, repo_group=None, | |
436 | landing_rev='rev:tip', fork_of=None, |
|
436 | landing_rev='rev:tip', fork_of=None, | |
437 | copy_fork_permissions=False, enable_statistics=False, |
|
437 | copy_fork_permissions=False, enable_statistics=False, | |
438 | enable_locking=False, enable_downloads=False, |
|
438 | enable_locking=False, enable_downloads=False, | |
439 | copy_group_permissions=False, |
|
439 | copy_group_permissions=False, | |
440 | state=Repository.STATE_PENDING): |
|
440 | state=Repository.STATE_PENDING): | |
441 | """ |
|
441 | """ | |
442 | Create repository inside database with PENDING state, this should be |
|
442 | Create repository inside database with PENDING state, this should be | |
443 | only executed by create() repo. With exception of importing existing |
|
443 | only executed by create() repo. With exception of importing existing | |
444 | repos |
|
444 | repos | |
445 | """ |
|
445 | """ | |
446 | from rhodecode.model.scm import ScmModel |
|
446 | from rhodecode.model.scm import ScmModel | |
447 |
|
447 | |||
448 | owner = self._get_user(owner) |
|
448 | owner = self._get_user(owner) | |
449 | fork_of = self._get_repo(fork_of) |
|
449 | fork_of = self._get_repo(fork_of) | |
450 | repo_group = self._get_repo_group(safe_int(repo_group)) |
|
450 | repo_group = self._get_repo_group(safe_int(repo_group)) | |
451 |
|
451 | |||
452 | try: |
|
452 | try: | |
453 | repo_name = safe_unicode(repo_name) |
|
453 | repo_name = safe_unicode(repo_name) | |
454 | description = safe_unicode(description) |
|
454 | description = safe_unicode(description) | |
455 | # repo name is just a name of repository |
|
455 | # repo name is just a name of repository | |
456 | # while repo_name_full is a full qualified name that is combined |
|
456 | # while repo_name_full is a full qualified name that is combined | |
457 | # with name and path of group |
|
457 | # with name and path of group | |
458 | repo_name_full = repo_name |
|
458 | repo_name_full = repo_name | |
459 | repo_name = repo_name.split(Repository.NAME_SEP)[-1] |
|
459 | repo_name = repo_name.split(Repository.NAME_SEP)[-1] | |
460 |
|
460 | |||
461 | new_repo = Repository() |
|
461 | new_repo = Repository() | |
462 | new_repo.repo_state = state |
|
462 | new_repo.repo_state = state | |
463 | new_repo.enable_statistics = False |
|
463 | new_repo.enable_statistics = False | |
464 | new_repo.repo_name = repo_name_full |
|
464 | new_repo.repo_name = repo_name_full | |
465 | new_repo.repo_type = repo_type |
|
465 | new_repo.repo_type = repo_type | |
466 | new_repo.user = owner |
|
466 | new_repo.user = owner | |
467 | new_repo.group = repo_group |
|
467 | new_repo.group = repo_group | |
468 | new_repo.description = description or repo_name |
|
468 | new_repo.description = description or repo_name | |
469 | new_repo.private = private |
|
469 | new_repo.private = private | |
470 | new_repo.clone_uri = clone_uri |
|
470 | new_repo.clone_uri = clone_uri | |
471 | new_repo.landing_rev = landing_rev |
|
471 | new_repo.landing_rev = landing_rev | |
472 |
|
472 | |||
473 | new_repo.enable_statistics = enable_statistics |
|
473 | new_repo.enable_statistics = enable_statistics | |
474 | new_repo.enable_locking = enable_locking |
|
474 | new_repo.enable_locking = enable_locking | |
475 | new_repo.enable_downloads = enable_downloads |
|
475 | new_repo.enable_downloads = enable_downloads | |
476 |
|
476 | |||
477 | if repo_group: |
|
477 | if repo_group: | |
478 | new_repo.enable_locking = repo_group.enable_locking |
|
478 | new_repo.enable_locking = repo_group.enable_locking | |
479 |
|
479 | |||
480 | if fork_of: |
|
480 | if fork_of: | |
481 | parent_repo = fork_of |
|
481 | parent_repo = fork_of | |
482 | new_repo.fork = parent_repo |
|
482 | new_repo.fork = parent_repo | |
483 |
|
483 | |||
484 | events.trigger(events.RepoPreCreateEvent(new_repo)) |
|
484 | events.trigger(events.RepoPreCreateEvent(new_repo)) | |
485 |
|
485 | |||
486 | self.sa.add(new_repo) |
|
486 | self.sa.add(new_repo) | |
487 |
|
487 | |||
488 | EMPTY_PERM = 'repository.none' |
|
488 | EMPTY_PERM = 'repository.none' | |
489 | if fork_of and copy_fork_permissions: |
|
489 | if fork_of and copy_fork_permissions: | |
490 | repo = fork_of |
|
490 | repo = fork_of | |
491 | user_perms = UserRepoToPerm.query() \ |
|
491 | user_perms = UserRepoToPerm.query() \ | |
492 | .filter(UserRepoToPerm.repository == repo).all() |
|
492 | .filter(UserRepoToPerm.repository == repo).all() | |
493 | group_perms = UserGroupRepoToPerm.query() \ |
|
493 | group_perms = UserGroupRepoToPerm.query() \ | |
494 | .filter(UserGroupRepoToPerm.repository == repo).all() |
|
494 | .filter(UserGroupRepoToPerm.repository == repo).all() | |
495 |
|
495 | |||
496 | for perm in user_perms: |
|
496 | for perm in user_perms: | |
497 | UserRepoToPerm.create( |
|
497 | UserRepoToPerm.create( | |
498 | perm.user, new_repo, perm.permission) |
|
498 | perm.user, new_repo, perm.permission) | |
499 |
|
499 | |||
500 | for perm in group_perms: |
|
500 | for perm in group_perms: | |
501 | UserGroupRepoToPerm.create( |
|
501 | UserGroupRepoToPerm.create( | |
502 | perm.users_group, new_repo, perm.permission) |
|
502 | perm.users_group, new_repo, perm.permission) | |
503 | # in case we copy permissions and also set this repo to private |
|
503 | # in case we copy permissions and also set this repo to private | |
504 | # override the default user permission to make it a private |
|
504 | # override the default user permission to make it a private | |
505 | # repo |
|
505 | # repo | |
506 | if private: |
|
506 | if private: | |
507 | RepoModel(self.sa).grant_user_permission( |
|
507 | RepoModel(self.sa).grant_user_permission( | |
508 | repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM) |
|
508 | repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM) | |
509 |
|
509 | |||
510 | elif repo_group and copy_group_permissions: |
|
510 | elif repo_group and copy_group_permissions: | |
511 | user_perms = UserRepoGroupToPerm.query() \ |
|
511 | user_perms = UserRepoGroupToPerm.query() \ | |
512 | .filter(UserRepoGroupToPerm.group == repo_group).all() |
|
512 | .filter(UserRepoGroupToPerm.group == repo_group).all() | |
513 |
|
513 | |||
514 | group_perms = UserGroupRepoGroupToPerm.query() \ |
|
514 | group_perms = UserGroupRepoGroupToPerm.query() \ | |
515 | .filter(UserGroupRepoGroupToPerm.group == repo_group).all() |
|
515 | .filter(UserGroupRepoGroupToPerm.group == repo_group).all() | |
516 |
|
516 | |||
517 | for perm in user_perms: |
|
517 | for perm in user_perms: | |
518 | perm_name = perm.permission.permission_name.replace( |
|
518 | perm_name = perm.permission.permission_name.replace( | |
519 | 'group.', 'repository.') |
|
519 | 'group.', 'repository.') | |
520 | perm_obj = Permission.get_by_key(perm_name) |
|
520 | perm_obj = Permission.get_by_key(perm_name) | |
521 | UserRepoToPerm.create(perm.user, new_repo, perm_obj) |
|
521 | UserRepoToPerm.create(perm.user, new_repo, perm_obj) | |
522 |
|
522 | |||
523 | for perm in group_perms: |
|
523 | for perm in group_perms: | |
524 | perm_name = perm.permission.permission_name.replace( |
|
524 | perm_name = perm.permission.permission_name.replace( | |
525 | 'group.', 'repository.') |
|
525 | 'group.', 'repository.') | |
526 | perm_obj = Permission.get_by_key(perm_name) |
|
526 | perm_obj = Permission.get_by_key(perm_name) | |
527 | UserGroupRepoToPerm.create( |
|
527 | UserGroupRepoToPerm.create( | |
528 | perm.users_group, new_repo, perm_obj) |
|
528 | perm.users_group, new_repo, perm_obj) | |
529 |
|
529 | |||
530 | if private: |
|
530 | if private: | |
531 | RepoModel(self.sa).grant_user_permission( |
|
531 | RepoModel(self.sa).grant_user_permission( | |
532 | repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM) |
|
532 | repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM) | |
533 |
|
533 | |||
534 | else: |
|
534 | else: | |
535 | perm_obj = self._create_default_perms(new_repo, private) |
|
535 | perm_obj = self._create_default_perms(new_repo, private) | |
536 | self.sa.add(perm_obj) |
|
536 | self.sa.add(perm_obj) | |
537 |
|
537 | |||
538 | # now automatically start following this repository as owner |
|
538 | # now automatically start following this repository as owner | |
539 | ScmModel(self.sa).toggle_following_repo(new_repo.repo_id, |
|
539 | ScmModel(self.sa).toggle_following_repo(new_repo.repo_id, | |
540 | owner.user_id) |
|
540 | owner.user_id) | |
541 |
|
541 | |||
542 | # we need to flush here, in order to check if database won't |
|
542 | # we need to flush here, in order to check if database won't | |
543 | # throw any exceptions, create filesystem dirs at the very end |
|
543 | # throw any exceptions, create filesystem dirs at the very end | |
544 | self.sa.flush() |
|
544 | self.sa.flush() | |
545 | events.trigger(events.RepoCreateEvent(new_repo)) |
|
545 | events.trigger(events.RepoCreateEvent(new_repo)) | |
546 | return new_repo |
|
546 | return new_repo | |
547 |
|
547 | |||
548 | except Exception: |
|
548 | except Exception: | |
549 | log.error(traceback.format_exc()) |
|
549 | log.error(traceback.format_exc()) | |
550 | raise |
|
550 | raise | |
551 |
|
551 | |||
552 | def create(self, form_data, cur_user): |
|
552 | def create(self, form_data, cur_user): | |
553 | """ |
|
553 | """ | |
554 | Create repository using celery tasks |
|
554 | Create repository using celery tasks | |
555 |
|
555 | |||
556 | :param form_data: |
|
556 | :param form_data: | |
557 | :param cur_user: |
|
557 | :param cur_user: | |
558 | """ |
|
558 | """ | |
559 | from rhodecode.lib.celerylib import tasks, run_task |
|
559 | from rhodecode.lib.celerylib import tasks, run_task | |
560 | return run_task(tasks.create_repo, form_data, cur_user) |
|
560 | return run_task(tasks.create_repo, form_data, cur_user) | |
561 |
|
561 | |||
562 | def update_permissions(self, repo, perm_additions=None, perm_updates=None, |
|
562 | def update_permissions(self, repo, perm_additions=None, perm_updates=None, | |
563 | perm_deletions=None, check_perms=True, |
|
563 | perm_deletions=None, check_perms=True, | |
564 | cur_user=None): |
|
564 | cur_user=None): | |
565 | if not perm_additions: |
|
565 | if not perm_additions: | |
566 | perm_additions = [] |
|
566 | perm_additions = [] | |
567 | if not perm_updates: |
|
567 | if not perm_updates: | |
568 | perm_updates = [] |
|
568 | perm_updates = [] | |
569 | if not perm_deletions: |
|
569 | if not perm_deletions: | |
570 | perm_deletions = [] |
|
570 | perm_deletions = [] | |
571 |
|
571 | |||
572 | req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin') |
|
572 | req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin') | |
573 |
|
573 | |||
574 | # update permissions |
|
574 | # update permissions | |
575 | for member_id, perm, member_type in perm_updates: |
|
575 | for member_id, perm, member_type in perm_updates: | |
576 | member_id = int(member_id) |
|
576 | member_id = int(member_id) | |
577 | if member_type == 'user': |
|
577 | if member_type == 'user': | |
578 | # this updates also current one if found |
|
578 | # this updates also current one if found | |
579 | self.grant_user_permission( |
|
579 | self.grant_user_permission( | |
580 | repo=repo, user=member_id, perm=perm) |
|
580 | repo=repo, user=member_id, perm=perm) | |
581 | else: # set for user group |
|
581 | else: # set for user group | |
582 | # check if we have permissions to alter this usergroup |
|
582 | # check if we have permissions to alter this usergroup | |
583 | member_name = UserGroup.get(member_id).users_group_name |
|
583 | member_name = UserGroup.get(member_id).users_group_name | |
584 | if not check_perms or HasUserGroupPermissionAny( |
|
584 | if not check_perms or HasUserGroupPermissionAny( | |
585 | *req_perms)(member_name, user=cur_user): |
|
585 | *req_perms)(member_name, user=cur_user): | |
586 | self.grant_user_group_permission( |
|
586 | self.grant_user_group_permission( | |
587 | repo=repo, group_name=member_id, perm=perm) |
|
587 | repo=repo, group_name=member_id, perm=perm) | |
588 |
|
588 | |||
589 | # set new permissions |
|
589 | # set new permissions | |
590 | for member_id, perm, member_type in perm_additions: |
|
590 | for member_id, perm, member_type in perm_additions: | |
591 | member_id = int(member_id) |
|
591 | member_id = int(member_id) | |
592 | if member_type == 'user': |
|
592 | if member_type == 'user': | |
593 | self.grant_user_permission( |
|
593 | self.grant_user_permission( | |
594 | repo=repo, user=member_id, perm=perm) |
|
594 | repo=repo, user=member_id, perm=perm) | |
595 | else: # set for user group |
|
595 | else: # set for user group | |
596 | # check if we have permissions to alter this usergroup |
|
596 | # check if we have permissions to alter this usergroup | |
597 | member_name = UserGroup.get(member_id).users_group_name |
|
597 | member_name = UserGroup.get(member_id).users_group_name | |
598 | if not check_perms or HasUserGroupPermissionAny( |
|
598 | if not check_perms or HasUserGroupPermissionAny( | |
599 | *req_perms)(member_name, user=cur_user): |
|
599 | *req_perms)(member_name, user=cur_user): | |
600 | self.grant_user_group_permission( |
|
600 | self.grant_user_group_permission( | |
601 | repo=repo, group_name=member_id, perm=perm) |
|
601 | repo=repo, group_name=member_id, perm=perm) | |
602 |
|
602 | |||
603 | # delete permissions |
|
603 | # delete permissions | |
604 | for member_id, perm, member_type in perm_deletions: |
|
604 | for member_id, perm, member_type in perm_deletions: | |
605 | member_id = int(member_id) |
|
605 | member_id = int(member_id) | |
606 | if member_type == 'user': |
|
606 | if member_type == 'user': | |
607 | self.revoke_user_permission(repo=repo, user=member_id) |
|
607 | self.revoke_user_permission(repo=repo, user=member_id) | |
608 | else: # set for user group |
|
608 | else: # set for user group | |
609 | # check if we have permissions to alter this usergroup |
|
609 | # check if we have permissions to alter this usergroup | |
610 | member_name = UserGroup.get(member_id).users_group_name |
|
610 | member_name = UserGroup.get(member_id).users_group_name | |
611 | if not check_perms or HasUserGroupPermissionAny( |
|
611 | if not check_perms or HasUserGroupPermissionAny( | |
612 | *req_perms)(member_name, user=cur_user): |
|
612 | *req_perms)(member_name, user=cur_user): | |
613 | self.revoke_user_group_permission( |
|
613 | self.revoke_user_group_permission( | |
614 | repo=repo, group_name=member_id) |
|
614 | repo=repo, group_name=member_id) | |
615 |
|
615 | |||
616 | def create_fork(self, form_data, cur_user): |
|
616 | def create_fork(self, form_data, cur_user): | |
617 | """ |
|
617 | """ | |
618 | Simple wrapper into executing celery task for fork creation |
|
618 | Simple wrapper into executing celery task for fork creation | |
619 |
|
619 | |||
620 | :param form_data: |
|
620 | :param form_data: | |
621 | :param cur_user: |
|
621 | :param cur_user: | |
622 | """ |
|
622 | """ | |
623 | from rhodecode.lib.celerylib import tasks, run_task |
|
623 | from rhodecode.lib.celerylib import tasks, run_task | |
624 | return run_task(tasks.create_repo_fork, form_data, cur_user) |
|
624 | return run_task(tasks.create_repo_fork, form_data, cur_user) | |
625 |
|
625 | |||
626 | def delete(self, repo, forks=None, fs_remove=True, cur_user=None): |
|
626 | def delete(self, repo, forks=None, fs_remove=True, cur_user=None): | |
627 | """ |
|
627 | """ | |
628 | Delete given repository, forks parameter defines what do do with |
|
628 | Delete given repository, forks parameter defines what do do with | |
629 | attached forks. Throws AttachedForksError if deleted repo has attached |
|
629 | attached forks. Throws AttachedForksError if deleted repo has attached | |
630 | forks |
|
630 | forks | |
631 |
|
631 | |||
632 | :param repo: |
|
632 | :param repo: | |
633 | :param forks: str 'delete' or 'detach' |
|
633 | :param forks: str 'delete' or 'detach' | |
634 | :param fs_remove: remove(archive) repo from filesystem |
|
634 | :param fs_remove: remove(archive) repo from filesystem | |
635 | """ |
|
635 | """ | |
636 | if not cur_user: |
|
636 | if not cur_user: | |
637 | cur_user = getattr(get_current_rhodecode_user(), 'username', None) |
|
637 | cur_user = getattr(get_current_rhodecode_user(), 'username', None) | |
638 | repo = self._get_repo(repo) |
|
638 | repo = self._get_repo(repo) | |
639 | if repo: |
|
639 | if repo: | |
640 | if forks == 'detach': |
|
640 | if forks == 'detach': | |
641 | for r in repo.forks: |
|
641 | for r in repo.forks: | |
642 | r.fork = None |
|
642 | r.fork = None | |
643 | self.sa.add(r) |
|
643 | self.sa.add(r) | |
644 | elif forks == 'delete': |
|
644 | elif forks == 'delete': | |
645 | for r in repo.forks: |
|
645 | for r in repo.forks: | |
646 | self.delete(r, forks='delete') |
|
646 | self.delete(r, forks='delete') | |
647 | elif [f for f in repo.forks]: |
|
647 | elif [f for f in repo.forks]: | |
648 | raise AttachedForksError() |
|
648 | raise AttachedForksError() | |
649 |
|
649 | |||
650 | old_repo_dict = repo.get_dict() |
|
650 | old_repo_dict = repo.get_dict() | |
651 | events.trigger(events.RepoPreDeleteEvent(repo)) |
|
651 | events.trigger(events.RepoPreDeleteEvent(repo)) | |
652 | try: |
|
652 | try: | |
653 | self.sa.delete(repo) |
|
653 | self.sa.delete(repo) | |
654 | if fs_remove: |
|
654 | if fs_remove: | |
655 | self._delete_filesystem_repo(repo) |
|
655 | self._delete_filesystem_repo(repo) | |
656 | else: |
|
656 | else: | |
657 | log.debug('skipping removal from filesystem') |
|
657 | log.debug('skipping removal from filesystem') | |
658 | old_repo_dict.update({ |
|
658 | old_repo_dict.update({ | |
659 | 'deleted_by': cur_user, |
|
659 | 'deleted_by': cur_user, | |
660 | 'deleted_on': time.time(), |
|
660 | 'deleted_on': time.time(), | |
661 | }) |
|
661 | }) | |
662 | log_delete_repository(**old_repo_dict) |
|
662 | log_delete_repository(**old_repo_dict) | |
663 | events.trigger(events.RepoDeleteEvent(repo)) |
|
663 | events.trigger(events.RepoDeleteEvent(repo)) | |
664 | except Exception: |
|
664 | except Exception: | |
665 | log.error(traceback.format_exc()) |
|
665 | log.error(traceback.format_exc()) | |
666 | raise |
|
666 | raise | |
667 |
|
667 | |||
668 | def grant_user_permission(self, repo, user, perm): |
|
668 | def grant_user_permission(self, repo, user, perm): | |
669 | """ |
|
669 | """ | |
670 | Grant permission for user on given repository, or update existing one |
|
670 | Grant permission for user on given repository, or update existing one | |
671 | if found |
|
671 | if found | |
672 |
|
672 | |||
673 | :param repo: Instance of Repository, repository_id, or repository name |
|
673 | :param repo: Instance of Repository, repository_id, or repository name | |
674 | :param user: Instance of User, user_id or username |
|
674 | :param user: Instance of User, user_id or username | |
675 | :param perm: Instance of Permission, or permission_name |
|
675 | :param perm: Instance of Permission, or permission_name | |
676 | """ |
|
676 | """ | |
677 | user = self._get_user(user) |
|
677 | user = self._get_user(user) | |
678 | repo = self._get_repo(repo) |
|
678 | repo = self._get_repo(repo) | |
679 | permission = self._get_perm(perm) |
|
679 | permission = self._get_perm(perm) | |
680 |
|
680 | |||
681 | # check if we have that permission already |
|
681 | # check if we have that permission already | |
682 | obj = self.sa.query(UserRepoToPerm) \ |
|
682 | obj = self.sa.query(UserRepoToPerm) \ | |
683 | .filter(UserRepoToPerm.user == user) \ |
|
683 | .filter(UserRepoToPerm.user == user) \ | |
684 | .filter(UserRepoToPerm.repository == repo) \ |
|
684 | .filter(UserRepoToPerm.repository == repo) \ | |
685 | .scalar() |
|
685 | .scalar() | |
686 | if obj is None: |
|
686 | if obj is None: | |
687 | # create new ! |
|
687 | # create new ! | |
688 | obj = UserRepoToPerm() |
|
688 | obj = UserRepoToPerm() | |
689 | obj.repository = repo |
|
689 | obj.repository = repo | |
690 | obj.user = user |
|
690 | obj.user = user | |
691 | obj.permission = permission |
|
691 | obj.permission = permission | |
692 | self.sa.add(obj) |
|
692 | self.sa.add(obj) | |
693 | log.debug('Granted perm %s to %s on %s', perm, user, repo) |
|
693 | log.debug('Granted perm %s to %s on %s', perm, user, repo) | |
694 | action_logger_generic( |
|
694 | action_logger_generic( | |
695 | 'granted permission: {} to user: {} on repo: {}'.format( |
|
695 | 'granted permission: {} to user: {} on repo: {}'.format( | |
696 | perm, user, repo), namespace='security.repo') |
|
696 | perm, user, repo), namespace='security.repo') | |
697 | return obj |
|
697 | return obj | |
698 |
|
698 | |||
699 | def revoke_user_permission(self, repo, user): |
|
699 | def revoke_user_permission(self, repo, user): | |
700 | """ |
|
700 | """ | |
701 | Revoke permission for user on given repository |
|
701 | Revoke permission for user on given repository | |
702 |
|
702 | |||
703 | :param repo: Instance of Repository, repository_id, or repository name |
|
703 | :param repo: Instance of Repository, repository_id, or repository name | |
704 | :param user: Instance of User, user_id or username |
|
704 | :param user: Instance of User, user_id or username | |
705 | """ |
|
705 | """ | |
706 |
|
706 | |||
707 | user = self._get_user(user) |
|
707 | user = self._get_user(user) | |
708 | repo = self._get_repo(repo) |
|
708 | repo = self._get_repo(repo) | |
709 |
|
709 | |||
710 | obj = self.sa.query(UserRepoToPerm) \ |
|
710 | obj = self.sa.query(UserRepoToPerm) \ | |
711 | .filter(UserRepoToPerm.repository == repo) \ |
|
711 | .filter(UserRepoToPerm.repository == repo) \ | |
712 | .filter(UserRepoToPerm.user == user) \ |
|
712 | .filter(UserRepoToPerm.user == user) \ | |
713 | .scalar() |
|
713 | .scalar() | |
714 | if obj: |
|
714 | if obj: | |
715 | self.sa.delete(obj) |
|
715 | self.sa.delete(obj) | |
716 | log.debug('Revoked perm on %s on %s', repo, user) |
|
716 | log.debug('Revoked perm on %s on %s', repo, user) | |
717 | action_logger_generic( |
|
717 | action_logger_generic( | |
718 | 'revoked permission from user: {} on repo: {}'.format( |
|
718 | 'revoked permission from user: {} on repo: {}'.format( | |
719 | user, repo), namespace='security.repo') |
|
719 | user, repo), namespace='security.repo') | |
720 |
|
720 | |||
721 | def grant_user_group_permission(self, repo, group_name, perm): |
|
721 | def grant_user_group_permission(self, repo, group_name, perm): | |
722 | """ |
|
722 | """ | |
723 | Grant permission for user group on given repository, or update |
|
723 | Grant permission for user group on given repository, or update | |
724 | existing one if found |
|
724 | existing one if found | |
725 |
|
725 | |||
726 | :param repo: Instance of Repository, repository_id, or repository name |
|
726 | :param repo: Instance of Repository, repository_id, or repository name | |
727 | :param group_name: Instance of UserGroup, users_group_id, |
|
727 | :param group_name: Instance of UserGroup, users_group_id, | |
728 | or user group name |
|
728 | or user group name | |
729 | :param perm: Instance of Permission, or permission_name |
|
729 | :param perm: Instance of Permission, or permission_name | |
730 | """ |
|
730 | """ | |
731 | repo = self._get_repo(repo) |
|
731 | repo = self._get_repo(repo) | |
732 | group_name = self._get_user_group(group_name) |
|
732 | group_name = self._get_user_group(group_name) | |
733 | permission = self._get_perm(perm) |
|
733 | permission = self._get_perm(perm) | |
734 |
|
734 | |||
735 | # check if we have that permission already |
|
735 | # check if we have that permission already | |
736 | obj = self.sa.query(UserGroupRepoToPerm) \ |
|
736 | obj = self.sa.query(UserGroupRepoToPerm) \ | |
737 | .filter(UserGroupRepoToPerm.users_group == group_name) \ |
|
737 | .filter(UserGroupRepoToPerm.users_group == group_name) \ | |
738 | .filter(UserGroupRepoToPerm.repository == repo) \ |
|
738 | .filter(UserGroupRepoToPerm.repository == repo) \ | |
739 | .scalar() |
|
739 | .scalar() | |
740 |
|
740 | |||
741 | if obj is None: |
|
741 | if obj is None: | |
742 | # create new |
|
742 | # create new | |
743 | obj = UserGroupRepoToPerm() |
|
743 | obj = UserGroupRepoToPerm() | |
744 |
|
744 | |||
745 | obj.repository = repo |
|
745 | obj.repository = repo | |
746 | obj.users_group = group_name |
|
746 | obj.users_group = group_name | |
747 | obj.permission = permission |
|
747 | obj.permission = permission | |
748 | self.sa.add(obj) |
|
748 | self.sa.add(obj) | |
749 | log.debug('Granted perm %s to %s on %s', perm, group_name, repo) |
|
749 | log.debug('Granted perm %s to %s on %s', perm, group_name, repo) | |
750 | action_logger_generic( |
|
750 | action_logger_generic( | |
751 | 'granted permission: {} to usergroup: {} on repo: {}'.format( |
|
751 | 'granted permission: {} to usergroup: {} on repo: {}'.format( | |
752 | perm, group_name, repo), namespace='security.repo') |
|
752 | perm, group_name, repo), namespace='security.repo') | |
753 |
|
753 | |||
754 | return obj |
|
754 | return obj | |
755 |
|
755 | |||
756 | def revoke_user_group_permission(self, repo, group_name): |
|
756 | def revoke_user_group_permission(self, repo, group_name): | |
757 | """ |
|
757 | """ | |
758 | Revoke permission for user group on given repository |
|
758 | Revoke permission for user group on given repository | |
759 |
|
759 | |||
760 | :param repo: Instance of Repository, repository_id, or repository name |
|
760 | :param repo: Instance of Repository, repository_id, or repository name | |
761 | :param group_name: Instance of UserGroup, users_group_id, |
|
761 | :param group_name: Instance of UserGroup, users_group_id, | |
762 | or user group name |
|
762 | or user group name | |
763 | """ |
|
763 | """ | |
764 | repo = self._get_repo(repo) |
|
764 | repo = self._get_repo(repo) | |
765 | group_name = self._get_user_group(group_name) |
|
765 | group_name = self._get_user_group(group_name) | |
766 |
|
766 | |||
767 | obj = self.sa.query(UserGroupRepoToPerm) \ |
|
767 | obj = self.sa.query(UserGroupRepoToPerm) \ | |
768 | .filter(UserGroupRepoToPerm.repository == repo) \ |
|
768 | .filter(UserGroupRepoToPerm.repository == repo) \ | |
769 | .filter(UserGroupRepoToPerm.users_group == group_name) \ |
|
769 | .filter(UserGroupRepoToPerm.users_group == group_name) \ | |
770 | .scalar() |
|
770 | .scalar() | |
771 | if obj: |
|
771 | if obj: | |
772 | self.sa.delete(obj) |
|
772 | self.sa.delete(obj) | |
773 | log.debug('Revoked perm to %s on %s', repo, group_name) |
|
773 | log.debug('Revoked perm to %s on %s', repo, group_name) | |
774 | action_logger_generic( |
|
774 | action_logger_generic( | |
775 | 'revoked permission from usergroup: {} on repo: {}'.format( |
|
775 | 'revoked permission from usergroup: {} on repo: {}'.format( | |
776 | group_name, repo), namespace='security.repo') |
|
776 | group_name, repo), namespace='security.repo') | |
777 |
|
777 | |||
778 | def delete_stats(self, repo_name): |
|
778 | def delete_stats(self, repo_name): | |
779 | """ |
|
779 | """ | |
780 | removes stats for given repo |
|
780 | removes stats for given repo | |
781 |
|
781 | |||
782 | :param repo_name: |
|
782 | :param repo_name: | |
783 | """ |
|
783 | """ | |
784 | repo = self._get_repo(repo_name) |
|
784 | repo = self._get_repo(repo_name) | |
785 | try: |
|
785 | try: | |
786 | obj = self.sa.query(Statistics) \ |
|
786 | obj = self.sa.query(Statistics) \ | |
787 | .filter(Statistics.repository == repo).scalar() |
|
787 | .filter(Statistics.repository == repo).scalar() | |
788 | if obj: |
|
788 | if obj: | |
789 | self.sa.delete(obj) |
|
789 | self.sa.delete(obj) | |
790 | except Exception: |
|
790 | except Exception: | |
791 | log.error(traceback.format_exc()) |
|
791 | log.error(traceback.format_exc()) | |
792 | raise |
|
792 | raise | |
793 |
|
793 | |||
794 | def add_repo_field(self, repo_name, field_key, field_label, field_value='', |
|
794 | def add_repo_field(self, repo_name, field_key, field_label, field_value='', | |
795 | field_type='str', field_desc=''): |
|
795 | field_type='str', field_desc=''): | |
796 |
|
796 | |||
797 | repo = self._get_repo(repo_name) |
|
797 | repo = self._get_repo(repo_name) | |
798 |
|
798 | |||
799 | new_field = RepositoryField() |
|
799 | new_field = RepositoryField() | |
800 | new_field.repository = repo |
|
800 | new_field.repository = repo | |
801 | new_field.field_key = field_key |
|
801 | new_field.field_key = field_key | |
802 | new_field.field_type = field_type # python type |
|
802 | new_field.field_type = field_type # python type | |
803 | new_field.field_value = field_value |
|
803 | new_field.field_value = field_value | |
804 | new_field.field_desc = field_desc |
|
804 | new_field.field_desc = field_desc | |
805 | new_field.field_label = field_label |
|
805 | new_field.field_label = field_label | |
806 | self.sa.add(new_field) |
|
806 | self.sa.add(new_field) | |
807 | return new_field |
|
807 | return new_field | |
808 |
|
808 | |||
809 | def delete_repo_field(self, repo_name, field_key): |
|
809 | def delete_repo_field(self, repo_name, field_key): | |
810 | repo = self._get_repo(repo_name) |
|
810 | repo = self._get_repo(repo_name) | |
811 | field = RepositoryField.get_by_key_name(field_key, repo) |
|
811 | field = RepositoryField.get_by_key_name(field_key, repo) | |
812 | if field: |
|
812 | if field: | |
813 | self.sa.delete(field) |
|
813 | self.sa.delete(field) | |
814 |
|
814 | |||
815 | def _create_filesystem_repo(self, repo_name, repo_type, repo_group, |
|
815 | def _create_filesystem_repo(self, repo_name, repo_type, repo_group, | |
816 | clone_uri=None, repo_store_location=None, |
|
816 | clone_uri=None, repo_store_location=None, | |
817 | use_global_config=False): |
|
817 | use_global_config=False): | |
818 | """ |
|
818 | """ | |
819 | makes repository on filesystem. It's group aware means it'll create |
|
819 | makes repository on filesystem. It's group aware means it'll create | |
820 | a repository within a group, and alter the paths accordingly of |
|
820 | a repository within a group, and alter the paths accordingly of | |
821 | group location |
|
821 | group location | |
822 |
|
822 | |||
823 | :param repo_name: |
|
823 | :param repo_name: | |
824 | :param alias: |
|
824 | :param alias: | |
825 | :param parent: |
|
825 | :param parent: | |
826 | :param clone_uri: |
|
826 | :param clone_uri: | |
827 | :param repo_store_location: |
|
827 | :param repo_store_location: | |
828 | """ |
|
828 | """ | |
829 | from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group |
|
829 | from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group | |
830 | from rhodecode.model.scm import ScmModel |
|
830 | from rhodecode.model.scm import ScmModel | |
831 |
|
831 | |||
832 | if Repository.NAME_SEP in repo_name: |
|
832 | if Repository.NAME_SEP in repo_name: | |
833 | raise ValueError( |
|
833 | raise ValueError( | |
834 | 'repo_name must not contain groups got `%s`' % repo_name) |
|
834 | 'repo_name must not contain groups got `%s`' % repo_name) | |
835 |
|
835 | |||
836 | if isinstance(repo_group, RepoGroup): |
|
836 | if isinstance(repo_group, RepoGroup): | |
837 | new_parent_path = os.sep.join(repo_group.full_path_splitted) |
|
837 | new_parent_path = os.sep.join(repo_group.full_path_splitted) | |
838 | else: |
|
838 | else: | |
839 | new_parent_path = repo_group or '' |
|
839 | new_parent_path = repo_group or '' | |
840 |
|
840 | |||
841 | if repo_store_location: |
|
841 | if repo_store_location: | |
842 | _paths = [repo_store_location] |
|
842 | _paths = [repo_store_location] | |
843 | else: |
|
843 | else: | |
844 | _paths = [self.repos_path, new_parent_path, repo_name] |
|
844 | _paths = [self.repos_path, new_parent_path, repo_name] | |
845 | # we need to make it str for mercurial |
|
845 | # we need to make it str for mercurial | |
846 | repo_path = os.path.join(*map(lambda x: safe_str(x), _paths)) |
|
846 | repo_path = os.path.join(*map(lambda x: safe_str(x), _paths)) | |
847 |
|
847 | |||
848 | # check if this path is not a repository |
|
848 | # check if this path is not a repository | |
849 | if is_valid_repo(repo_path, self.repos_path): |
|
849 | if is_valid_repo(repo_path, self.repos_path): | |
850 | raise Exception('This path %s is a valid repository' % repo_path) |
|
850 | raise Exception('This path %s is a valid repository' % repo_path) | |
851 |
|
851 | |||
852 | # check if this path is a group |
|
852 | # check if this path is a group | |
853 | if is_valid_repo_group(repo_path, self.repos_path): |
|
853 | if is_valid_repo_group(repo_path, self.repos_path): | |
854 | raise Exception('This path %s is a valid group' % repo_path) |
|
854 | raise Exception('This path %s is a valid group' % repo_path) | |
855 |
|
855 | |||
856 | log.info('creating repo %s in %s from url: `%s`', |
|
856 | log.info('creating repo %s in %s from url: `%s`', | |
857 | repo_name, safe_unicode(repo_path), |
|
857 | repo_name, safe_unicode(repo_path), | |
858 | obfuscate_url_pw(clone_uri)) |
|
858 | obfuscate_url_pw(clone_uri)) | |
859 |
|
859 | |||
860 | backend = get_backend(repo_type) |
|
860 | backend = get_backend(repo_type) | |
861 |
|
861 | |||
862 | config_repo = None if use_global_config else repo_name |
|
862 | config_repo = None if use_global_config else repo_name | |
863 | if config_repo and new_parent_path: |
|
863 | if config_repo and new_parent_path: | |
864 | config_repo = Repository.NAME_SEP.join( |
|
864 | config_repo = Repository.NAME_SEP.join( | |
865 | (new_parent_path, config_repo)) |
|
865 | (new_parent_path, config_repo)) | |
866 | config = make_db_config(clear_session=False, repo=config_repo) |
|
866 | config = make_db_config(clear_session=False, repo=config_repo) | |
867 | config.set('extensions', 'largefiles', '') |
|
867 | config.set('extensions', 'largefiles', '') | |
868 |
|
868 | |||
869 | # patch and reset hooks section of UI config to not run any |
|
869 | # patch and reset hooks section of UI config to not run any | |
870 | # hooks on creating remote repo |
|
870 | # hooks on creating remote repo | |
871 | config.clear_section('hooks') |
|
871 | config.clear_section('hooks') | |
872 |
|
872 | |||
873 | # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice |
|
873 | # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice | |
874 | if repo_type == 'git': |
|
874 | if repo_type == 'git': | |
875 | repo = backend( |
|
875 | repo = backend( | |
876 | repo_path, config=config, create=True, src_url=clone_uri, |
|
876 | repo_path, config=config, create=True, src_url=clone_uri, | |
877 | bare=True) |
|
877 | bare=True) | |
878 | else: |
|
878 | else: | |
879 | repo = backend( |
|
879 | repo = backend( | |
880 | repo_path, config=config, create=True, src_url=clone_uri) |
|
880 | repo_path, config=config, create=True, src_url=clone_uri) | |
881 |
|
881 | |||
882 | ScmModel().install_hooks(repo, repo_type=repo_type) |
|
882 | ScmModel().install_hooks(repo, repo_type=repo_type) | |
883 |
|
883 | |||
884 | log.debug('Created repo %s with %s backend', |
|
884 | log.debug('Created repo %s with %s backend', | |
885 | safe_unicode(repo_name), safe_unicode(repo_type)) |
|
885 | safe_unicode(repo_name), safe_unicode(repo_type)) | |
886 | return repo |
|
886 | return repo | |
887 |
|
887 | |||
888 | def _rename_filesystem_repo(self, old, new): |
|
888 | def _rename_filesystem_repo(self, old, new): | |
889 | """ |
|
889 | """ | |
890 | renames repository on filesystem |
|
890 | renames repository on filesystem | |
891 |
|
891 | |||
892 | :param old: old name |
|
892 | :param old: old name | |
893 | :param new: new name |
|
893 | :param new: new name | |
894 | """ |
|
894 | """ | |
895 | log.info('renaming repo from %s to %s', old, new) |
|
895 | log.info('renaming repo from %s to %s', old, new) | |
896 |
|
896 | |||
897 | old_path = os.path.join(self.repos_path, old) |
|
897 | old_path = os.path.join(self.repos_path, old) | |
898 | new_path = os.path.join(self.repos_path, new) |
|
898 | new_path = os.path.join(self.repos_path, new) | |
899 | if os.path.isdir(new_path): |
|
899 | if os.path.isdir(new_path): | |
900 | raise Exception( |
|
900 | raise Exception( | |
901 | 'Was trying to rename to already existing dir %s' % new_path |
|
901 | 'Was trying to rename to already existing dir %s' % new_path | |
902 | ) |
|
902 | ) | |
903 | shutil.move(old_path, new_path) |
|
903 | shutil.move(old_path, new_path) | |
904 |
|
904 | |||
905 | def _delete_filesystem_repo(self, repo): |
|
905 | def _delete_filesystem_repo(self, repo): | |
906 | """ |
|
906 | """ | |
907 | removes repo from filesystem, the removal is acctually made by |
|
907 | removes repo from filesystem, the removal is acctually made by | |
908 | added rm__ prefix into dir, and rename internat .hg/.git dirs so this |
|
908 | added rm__ prefix into dir, and rename internat .hg/.git dirs so this | |
909 | repository is no longer valid for rhodecode, can be undeleted later on |
|
909 | repository is no longer valid for rhodecode, can be undeleted later on | |
910 | by reverting the renames on this repository |
|
910 | by reverting the renames on this repository | |
911 |
|
911 | |||
912 | :param repo: repo object |
|
912 | :param repo: repo object | |
913 | """ |
|
913 | """ | |
914 | rm_path = os.path.join(self.repos_path, repo.repo_name) |
|
914 | rm_path = os.path.join(self.repos_path, repo.repo_name) | |
915 | repo_group = repo.group |
|
915 | repo_group = repo.group | |
916 | log.info("Removing repository %s", rm_path) |
|
916 | log.info("Removing repository %s", rm_path) | |
917 | # disable hg/git internal that it doesn't get detected as repo |
|
917 | # disable hg/git internal that it doesn't get detected as repo | |
918 | alias = repo.repo_type |
|
918 | alias = repo.repo_type | |
919 |
|
919 | |||
920 | config = make_db_config(clear_session=False) |
|
920 | config = make_db_config(clear_session=False) | |
921 | config.set('extensions', 'largefiles', '') |
|
921 | config.set('extensions', 'largefiles', '') | |
922 | bare = getattr(repo.scm_instance(config=config), 'bare', False) |
|
922 | bare = getattr(repo.scm_instance(config=config), 'bare', False) | |
923 |
|
923 | |||
924 | # skip this for bare git repos |
|
924 | # skip this for bare git repos | |
925 | if not bare: |
|
925 | if not bare: | |
926 | # disable VCS repo |
|
926 | # disable VCS repo | |
927 | vcs_path = os.path.join(rm_path, '.%s' % alias) |
|
927 | vcs_path = os.path.join(rm_path, '.%s' % alias) | |
928 | if os.path.exists(vcs_path): |
|
928 | if os.path.exists(vcs_path): | |
929 | shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias)) |
|
929 | shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias)) | |
930 |
|
930 | |||
931 | _now = datetime.now() |
|
931 | _now = datetime.now() | |
932 | _ms = str(_now.microsecond).rjust(6, '0') |
|
932 | _ms = str(_now.microsecond).rjust(6, '0') | |
933 | _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms), |
|
933 | _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms), | |
934 | repo.just_name) |
|
934 | repo.just_name) | |
935 | if repo_group: |
|
935 | if repo_group: | |
936 | # if repository is in group, prefix the removal path with the group |
|
936 | # if repository is in group, prefix the removal path with the group | |
937 | args = repo_group.full_path_splitted + [_d] |
|
937 | args = repo_group.full_path_splitted + [_d] | |
938 | _d = os.path.join(*args) |
|
938 | _d = os.path.join(*args) | |
939 |
|
939 | |||
940 | if os.path.isdir(rm_path): |
|
940 | if os.path.isdir(rm_path): | |
941 | shutil.move(rm_path, os.path.join(self.repos_path, _d)) |
|
941 | shutil.move(rm_path, os.path.join(self.repos_path, _d)) | |
942 |
|
942 | |||
943 |
|
943 | |||
class ReadmeFinder:
    """
    Utility which knows how to find a readme for a specific commit.

    The main idea is that this is a configurable algorithm. When creating an
    instance you can define parameters, currently only the `default_renderer`.
    Based on this configuration the method :meth:`search` behaves slightly
    different.
    """

    # group(1) captures the extension INCLUDING the leading dot,
    # or None when the file has no extension (e.g. plain "README")
    readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE)
    path_re = re.compile(r'^docs?', re.IGNORECASE)

    # lower value == higher priority; keyed by extension (with leading dot)
    default_priorities = {
        None: 0,
        '.text': 2,
        '.txt': 3,
        '.rst': 1,
        '.rest': 2,
        '.md': 1,
        '.mkdn': 2,
        '.mdown': 3,
        '.markdown': 4,
    }

    # preference order among documentation sub-directories
    path_priority = {
        'doc': 0,
        'docs': 1,
    }

    FALLBACK_PRIORITY = 99

    RENDERER_TO_EXTENSION = {
        'rst': ['.rst', '.rest'],
        # NOTE: extensions must carry the leading dot because readme_re
        # captures them with the dot; the former dot-less 'mkdn' entry
        # could never match (typo fixed to '.mkdn').
        'markdown': ['.md', '.mkdn', '.mdown', '.markdown'],
    }

    def __init__(self, default_renderer=None):
        self._default_renderer = default_renderer
        self._renderer_extensions = self.RENDERER_TO_EXTENSION.get(
            default_renderer, [])

    def search(self, commit, path='/'):
        """
        Find a readme in the given `commit`, starting at `path`.

        Returns the readme file node or None. When no readme lives directly
        under `path`, doc-like sub-directories are searched recursively.
        """
        nodes = commit.get_nodes(path)
        matches = self._match_readmes(nodes)
        matches = self._sort_according_to_priority(matches)
        if matches:
            return matches[0].node

        paths = self._match_paths(nodes)
        paths = self._sort_paths_according_to_priority(paths)
        for sub_path in paths:
            match = self.search(commit, path=sub_path)
            if match:
                return match

        return None

    def _match_readmes(self, nodes):
        # yield a ReadmeMatch for every file node whose basename looks
        # like a readme
        for node in nodes:
            if not node.is_file():
                continue
            path = node.path.rsplit('/', 1)[-1]
            match = self.readme_re.match(path)
            if match:
                extension = match.group(1)
                yield ReadmeMatch(node, match, self._priority(extension))

    def _match_paths(self, nodes):
        # yield paths of directory nodes that look like documentation dirs
        for node in nodes:
            if not node.is_dir():
                continue
            match = self.path_re.match(node.path)
            if match:
                yield node.path

    def _priority(self, extension):
        """
        Return a (renderer_priority, extension_priority) sort key; files
        renderable by the configured default renderer win first, the static
        per-extension ranking breaks ties.
        """
        renderer_priority = (
            0 if extension in self._renderer_extensions else 1)
        extension_priority = self.default_priorities.get(
            extension, self.FALLBACK_PRIORITY)
        return (renderer_priority, extension_priority)

    def _sort_according_to_priority(self, matches):
        # sort ReadmeMatch objects by (priority, path) for deterministic order

        def priority_and_path(match):
            return (match.priority, match.path)

        return sorted(matches, key=priority_and_path)

    def _sort_paths_according_to_priority(self, paths):
        # 'doc' before 'docs', everything else last, alphabetical within ties

        def priority_and_path(path):
            return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path)

        return sorted(paths, key=priority_and_path)
1043 |
|
1043 | |||
1044 |
|
1044 | |||
class ReadmeMatch:
    """
    Pairs a readme file node with its regex match and computed search
    priority, as produced by :class:`ReadmeFinder`.
    """

    def __init__(self, node, match, priority):
        self.node = node      # the file node of the matched readme
        self._match = match   # the re.Match from ReadmeFinder.readme_re
        self.priority = priority  # (renderer_priority, extension_priority)

    @property
    def path(self):
        """Path of the underlying file node."""
        return self.node.path

    def __repr__(self):
        # fix: the closing '>' was missing from the repr format string
        return '<ReadmeMatch {} priority={}>'.format(self.path, self.priority)
@@ -1,911 +1,915 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2016 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2016 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | Scm model for RhodeCode |
|
22 | Scm model for RhodeCode | |
23 | """ |
|
23 | """ | |
24 |
|
24 | |||
25 | import os.path |
|
25 | import os.path | |
26 | import re |
|
26 | import re | |
27 | import sys |
|
27 | import sys | |
28 | import traceback |
|
28 | import traceback | |
29 | import logging |
|
29 | import logging | |
30 | import cStringIO |
|
30 | import cStringIO | |
31 | import pkg_resources |
|
31 | import pkg_resources | |
32 |
|
32 | |||
33 | from pylons.i18n.translation import _ |
|
33 | from pylons.i18n.translation import _ | |
34 | from sqlalchemy import func |
|
34 | from sqlalchemy import func | |
35 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
35 | from zope.cachedescriptors.property import Lazy as LazyProperty | |
36 |
|
36 | |||
37 | import rhodecode |
|
37 | import rhodecode | |
38 | from rhodecode.lib.vcs import get_backend |
|
38 | from rhodecode.lib.vcs import get_backend | |
39 | from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError |
|
39 | from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError | |
40 | from rhodecode.lib.vcs.nodes import FileNode |
|
40 | from rhodecode.lib.vcs.nodes import FileNode | |
41 | from rhodecode.lib.vcs.backends.base import EmptyCommit |
|
41 | from rhodecode.lib.vcs.backends.base import EmptyCommit | |
42 | from rhodecode.lib import helpers as h |
|
42 | from rhodecode.lib import helpers as h | |
43 |
|
43 | |||
44 | from rhodecode.lib.auth import ( |
|
44 | from rhodecode.lib.auth import ( | |
45 | HasRepoPermissionAny, HasRepoGroupPermissionAny, |
|
45 | HasRepoPermissionAny, HasRepoGroupPermissionAny, | |
46 | HasUserGroupPermissionAny) |
|
46 | HasUserGroupPermissionAny) | |
47 | from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError |
|
47 | from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError | |
48 | from rhodecode.lib import hooks_utils, caches |
|
48 | from rhodecode.lib import hooks_utils, caches | |
49 | from rhodecode.lib.utils import ( |
|
49 | from rhodecode.lib.utils import ( | |
50 | get_filesystem_repos, action_logger, make_db_config) |
|
50 | get_filesystem_repos, action_logger, make_db_config) | |
51 | from rhodecode.lib.utils2 import (safe_str, safe_unicode) |
|
51 | from rhodecode.lib.utils2 import (safe_str, safe_unicode) | |
52 | from rhodecode.lib.system_info import get_system_info |
|
52 | from rhodecode.lib.system_info import get_system_info | |
53 | from rhodecode.model import BaseModel |
|
53 | from rhodecode.model import BaseModel | |
54 | from rhodecode.model.db import ( |
|
54 | from rhodecode.model.db import ( | |
55 | Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup, |
|
55 | Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup, | |
56 | PullRequest) |
|
56 | PullRequest) | |
57 | from rhodecode.model.settings import VcsSettingsModel |
|
57 | from rhodecode.model.settings import VcsSettingsModel | |
58 |
|
58 | |||
59 | log = logging.getLogger(__name__) |
|
59 | log = logging.getLogger(__name__) | |
60 |
|
60 | |||
61 |
|
61 | |||
62 | class UserTemp(object): |
|
62 | class UserTemp(object): | |
63 | def __init__(self, user_id): |
|
63 | def __init__(self, user_id): | |
64 | self.user_id = user_id |
|
64 | self.user_id = user_id | |
65 |
|
65 | |||
66 | def __repr__(self): |
|
66 | def __repr__(self): | |
67 | return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id) |
|
67 | return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id) | |
68 |
|
68 | |||
69 |
|
69 | |||
70 | class RepoTemp(object): |
|
70 | class RepoTemp(object): | |
71 | def __init__(self, repo_id): |
|
71 | def __init__(self, repo_id): | |
72 | self.repo_id = repo_id |
|
72 | self.repo_id = repo_id | |
73 |
|
73 | |||
74 | def __repr__(self): |
|
74 | def __repr__(self): | |
75 | return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id) |
|
75 | return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id) | |
76 |
|
76 | |||
77 |
|
77 | |||
78 | class SimpleCachedRepoList(object): |
|
78 | class SimpleCachedRepoList(object): | |
79 | """ |
|
79 | """ | |
80 | Lighter version of of iteration of repos without the scm initialisation, |
|
80 | Lighter version of of iteration of repos without the scm initialisation, | |
81 | and with cache usage |
|
81 | and with cache usage | |
82 | """ |
|
82 | """ | |
83 | def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None): |
|
83 | def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None): | |
84 | self.db_repo_list = db_repo_list |
|
84 | self.db_repo_list = db_repo_list | |
85 | self.repos_path = repos_path |
|
85 | self.repos_path = repos_path | |
86 | self.order_by = order_by |
|
86 | self.order_by = order_by | |
87 | self.reversed = (order_by or '').startswith('-') |
|
87 | self.reversed = (order_by or '').startswith('-') | |
88 | if not perm_set: |
|
88 | if not perm_set: | |
89 | perm_set = ['repository.read', 'repository.write', |
|
89 | perm_set = ['repository.read', 'repository.write', | |
90 | 'repository.admin'] |
|
90 | 'repository.admin'] | |
91 | self.perm_set = perm_set |
|
91 | self.perm_set = perm_set | |
92 |
|
92 | |||
93 | def __len__(self): |
|
93 | def __len__(self): | |
94 | return len(self.db_repo_list) |
|
94 | return len(self.db_repo_list) | |
95 |
|
95 | |||
96 | def __repr__(self): |
|
96 | def __repr__(self): | |
97 | return '<%s (%s)>' % (self.__class__.__name__, self.__len__()) |
|
97 | return '<%s (%s)>' % (self.__class__.__name__, self.__len__()) | |
98 |
|
98 | |||
99 | def __iter__(self): |
|
99 | def __iter__(self): | |
100 | for dbr in self.db_repo_list: |
|
100 | for dbr in self.db_repo_list: | |
101 | # check permission at this level |
|
101 | # check permission at this level | |
102 | has_perm = HasRepoPermissionAny(*self.perm_set)( |
|
102 | has_perm = HasRepoPermissionAny(*self.perm_set)( | |
103 | dbr.repo_name, 'SimpleCachedRepoList check') |
|
103 | dbr.repo_name, 'SimpleCachedRepoList check') | |
104 | if not has_perm: |
|
104 | if not has_perm: | |
105 | continue |
|
105 | continue | |
106 |
|
106 | |||
107 | tmp_d = { |
|
107 | tmp_d = { | |
108 | 'name': dbr.repo_name, |
|
108 | 'name': dbr.repo_name, | |
109 | 'dbrepo': dbr.get_dict(), |
|
109 | 'dbrepo': dbr.get_dict(), | |
110 | 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {} |
|
110 | 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {} | |
111 | } |
|
111 | } | |
112 | yield tmp_d |
|
112 | yield tmp_d | |
113 |
|
113 | |||
114 |
|
114 | |||
115 | class _PermCheckIterator(object): |
|
115 | class _PermCheckIterator(object): | |
116 |
|
116 | |||
117 | def __init__( |
|
117 | def __init__( | |
118 | self, obj_list, obj_attr, perm_set, perm_checker, |
|
118 | self, obj_list, obj_attr, perm_set, perm_checker, | |
119 | extra_kwargs=None): |
|
119 | extra_kwargs=None): | |
120 | """ |
|
120 | """ | |
121 | Creates iterator from given list of objects, additionally |
|
121 | Creates iterator from given list of objects, additionally | |
122 | checking permission for them from perm_set var |
|
122 | checking permission for them from perm_set var | |
123 |
|
123 | |||
124 | :param obj_list: list of db objects |
|
124 | :param obj_list: list of db objects | |
125 | :param obj_attr: attribute of object to pass into perm_checker |
|
125 | :param obj_attr: attribute of object to pass into perm_checker | |
126 | :param perm_set: list of permissions to check |
|
126 | :param perm_set: list of permissions to check | |
127 | :param perm_checker: callable to check permissions against |
|
127 | :param perm_checker: callable to check permissions against | |
128 | """ |
|
128 | """ | |
129 | self.obj_list = obj_list |
|
129 | self.obj_list = obj_list | |
130 | self.obj_attr = obj_attr |
|
130 | self.obj_attr = obj_attr | |
131 | self.perm_set = perm_set |
|
131 | self.perm_set = perm_set | |
132 | self.perm_checker = perm_checker |
|
132 | self.perm_checker = perm_checker | |
133 | self.extra_kwargs = extra_kwargs or {} |
|
133 | self.extra_kwargs = extra_kwargs or {} | |
134 |
|
134 | |||
135 | def __len__(self): |
|
135 | def __len__(self): | |
136 | return len(self.obj_list) |
|
136 | return len(self.obj_list) | |
137 |
|
137 | |||
138 | def __repr__(self): |
|
138 | def __repr__(self): | |
139 | return '<%s (%s)>' % (self.__class__.__name__, self.__len__()) |
|
139 | return '<%s (%s)>' % (self.__class__.__name__, self.__len__()) | |
140 |
|
140 | |||
141 | def __iter__(self): |
|
141 | def __iter__(self): | |
142 | checker = self.perm_checker(*self.perm_set) |
|
142 | checker = self.perm_checker(*self.perm_set) | |
143 | for db_obj in self.obj_list: |
|
143 | for db_obj in self.obj_list: | |
144 | # check permission at this level |
|
144 | # check permission at this level | |
145 | name = getattr(db_obj, self.obj_attr, None) |
|
145 | name = getattr(db_obj, self.obj_attr, None) | |
146 | if not checker(name, self.__class__.__name__, **self.extra_kwargs): |
|
146 | if not checker(name, self.__class__.__name__, **self.extra_kwargs): | |
147 | continue |
|
147 | continue | |
148 |
|
148 | |||
149 | yield db_obj |
|
149 | yield db_obj | |
150 |
|
150 | |||
151 |
|
151 | |||
152 | class RepoList(_PermCheckIterator): |
|
152 | class RepoList(_PermCheckIterator): | |
153 |
|
153 | |||
154 | def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None): |
|
154 | def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None): | |
155 | if not perm_set: |
|
155 | if not perm_set: | |
156 | perm_set = [ |
|
156 | perm_set = [ | |
157 | 'repository.read', 'repository.write', 'repository.admin'] |
|
157 | 'repository.read', 'repository.write', 'repository.admin'] | |
158 |
|
158 | |||
159 | super(RepoList, self).__init__( |
|
159 | super(RepoList, self).__init__( | |
160 | obj_list=db_repo_list, |
|
160 | obj_list=db_repo_list, | |
161 | obj_attr='repo_name', perm_set=perm_set, |
|
161 | obj_attr='repo_name', perm_set=perm_set, | |
162 | perm_checker=HasRepoPermissionAny, |
|
162 | perm_checker=HasRepoPermissionAny, | |
163 | extra_kwargs=extra_kwargs) |
|
163 | extra_kwargs=extra_kwargs) | |
164 |
|
164 | |||
165 |
|
165 | |||
166 | class RepoGroupList(_PermCheckIterator): |
|
166 | class RepoGroupList(_PermCheckIterator): | |
167 |
|
167 | |||
168 | def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None): |
|
168 | def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None): | |
169 | if not perm_set: |
|
169 | if not perm_set: | |
170 | perm_set = ['group.read', 'group.write', 'group.admin'] |
|
170 | perm_set = ['group.read', 'group.write', 'group.admin'] | |
171 |
|
171 | |||
172 | super(RepoGroupList, self).__init__( |
|
172 | super(RepoGroupList, self).__init__( | |
173 | obj_list=db_repo_group_list, |
|
173 | obj_list=db_repo_group_list, | |
174 | obj_attr='group_name', perm_set=perm_set, |
|
174 | obj_attr='group_name', perm_set=perm_set, | |
175 | perm_checker=HasRepoGroupPermissionAny, |
|
175 | perm_checker=HasRepoGroupPermissionAny, | |
176 | extra_kwargs=extra_kwargs) |
|
176 | extra_kwargs=extra_kwargs) | |
177 |
|
177 | |||
178 |
|
178 | |||
179 | class UserGroupList(_PermCheckIterator): |
|
179 | class UserGroupList(_PermCheckIterator): | |
180 |
|
180 | |||
181 | def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None): |
|
181 | def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None): | |
182 | if not perm_set: |
|
182 | if not perm_set: | |
183 | perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin'] |
|
183 | perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin'] | |
184 |
|
184 | |||
185 | super(UserGroupList, self).__init__( |
|
185 | super(UserGroupList, self).__init__( | |
186 | obj_list=db_user_group_list, |
|
186 | obj_list=db_user_group_list, | |
187 | obj_attr='users_group_name', perm_set=perm_set, |
|
187 | obj_attr='users_group_name', perm_set=perm_set, | |
188 | perm_checker=HasUserGroupPermissionAny, |
|
188 | perm_checker=HasUserGroupPermissionAny, | |
189 | extra_kwargs=extra_kwargs) |
|
189 | extra_kwargs=extra_kwargs) | |
190 |
|
190 | |||
191 |
|
191 | |||
192 | class ScmModel(BaseModel): |
|
192 | class ScmModel(BaseModel): | |
193 | """ |
|
193 | """ | |
194 | Generic Scm Model |
|
194 | Generic Scm Model | |
195 | """ |
|
195 | """ | |
196 |
|
196 | |||
197 | @LazyProperty |
|
197 | @LazyProperty | |
198 | def repos_path(self): |
|
198 | def repos_path(self): | |
199 | """ |
|
199 | """ | |
200 | Gets the repositories root path from database |
|
200 | Gets the repositories root path from database | |
201 | """ |
|
201 | """ | |
202 |
|
202 | |||
203 | settings_model = VcsSettingsModel(sa=self.sa) |
|
203 | settings_model = VcsSettingsModel(sa=self.sa) | |
204 | return settings_model.get_repos_location() |
|
204 | return settings_model.get_repos_location() | |
205 |
|
205 | |||
206 | def repo_scan(self, repos_path=None): |
|
206 | def repo_scan(self, repos_path=None): | |
207 | """ |
|
207 | """ | |
208 | Listing of repositories in given path. This path should not be a |
|
208 | Listing of repositories in given path. This path should not be a | |
209 | repository itself. Return a dictionary of repository objects |
|
209 | repository itself. Return a dictionary of repository objects | |
210 |
|
210 | |||
211 | :param repos_path: path to directory containing repositories |
|
211 | :param repos_path: path to directory containing repositories | |
212 | """ |
|
212 | """ | |
213 |
|
213 | |||
214 | if repos_path is None: |
|
214 | if repos_path is None: | |
215 | repos_path = self.repos_path |
|
215 | repos_path = self.repos_path | |
216 |
|
216 | |||
217 | log.info('scanning for repositories in %s', repos_path) |
|
217 | log.info('scanning for repositories in %s', repos_path) | |
218 |
|
218 | |||
219 | config = make_db_config() |
|
219 | config = make_db_config() | |
220 | config.set('extensions', 'largefiles', '') |
|
220 | config.set('extensions', 'largefiles', '') | |
221 | repos = {} |
|
221 | repos = {} | |
222 |
|
222 | |||
223 | for name, path in get_filesystem_repos(repos_path, recursive=True): |
|
223 | for name, path in get_filesystem_repos(repos_path, recursive=True): | |
224 | # name need to be decomposed and put back together using the / |
|
224 | # name need to be decomposed and put back together using the / | |
225 | # since this is internal storage separator for rhodecode |
|
225 | # since this is internal storage separator for rhodecode | |
226 | name = Repository.normalize_repo_name(name) |
|
226 | name = Repository.normalize_repo_name(name) | |
227 |
|
227 | |||
228 | try: |
|
228 | try: | |
229 | if name in repos: |
|
229 | if name in repos: | |
230 | raise RepositoryError('Duplicate repository name %s ' |
|
230 | raise RepositoryError('Duplicate repository name %s ' | |
231 | 'found in %s' % (name, path)) |
|
231 | 'found in %s' % (name, path)) | |
232 | elif path[0] in rhodecode.BACKENDS: |
|
232 | elif path[0] in rhodecode.BACKENDS: | |
233 | klass = get_backend(path[0]) |
|
233 | klass = get_backend(path[0]) | |
234 | repos[name] = klass(path[1], config=config) |
|
234 | repos[name] = klass(path[1], config=config) | |
235 | except OSError: |
|
235 | except OSError: | |
236 | continue |
|
236 | continue | |
237 | log.debug('found %s paths with repositories', len(repos)) |
|
237 | log.debug('found %s paths with repositories', len(repos)) | |
238 | return repos |
|
238 | return repos | |
239 |
|
239 | |||
240 | def get_repos(self, all_repos=None, sort_key=None): |
|
240 | def get_repos(self, all_repos=None, sort_key=None): | |
241 | """ |
|
241 | """ | |
242 | Get all repositories from db and for each repo create it's |
|
242 | Get all repositories from db and for each repo create it's | |
243 | backend instance and fill that backed with information from database |
|
243 | backend instance and fill that backed with information from database | |
244 |
|
244 | |||
245 | :param all_repos: list of repository names as strings |
|
245 | :param all_repos: list of repository names as strings | |
246 | give specific repositories list, good for filtering |
|
246 | give specific repositories list, good for filtering | |
247 |
|
247 | |||
248 | :param sort_key: initial sorting of repositories |
|
248 | :param sort_key: initial sorting of repositories | |
249 | """ |
|
249 | """ | |
250 | if all_repos is None: |
|
250 | if all_repos is None: | |
251 | all_repos = self.sa.query(Repository)\ |
|
251 | all_repos = self.sa.query(Repository)\ | |
252 | .filter(Repository.group_id == None)\ |
|
252 | .filter(Repository.group_id == None)\ | |
253 | .order_by(func.lower(Repository.repo_name)).all() |
|
253 | .order_by(func.lower(Repository.repo_name)).all() | |
254 | repo_iter = SimpleCachedRepoList( |
|
254 | repo_iter = SimpleCachedRepoList( | |
255 | all_repos, repos_path=self.repos_path, order_by=sort_key) |
|
255 | all_repos, repos_path=self.repos_path, order_by=sort_key) | |
256 | return repo_iter |
|
256 | return repo_iter | |
257 |
|
257 | |||
258 | def get_repo_groups(self, all_groups=None): |
|
258 | def get_repo_groups(self, all_groups=None): | |
259 | if all_groups is None: |
|
259 | if all_groups is None: | |
260 | all_groups = RepoGroup.query()\ |
|
260 | all_groups = RepoGroup.query()\ | |
261 | .filter(RepoGroup.group_parent_id == None).all() |
|
261 | .filter(RepoGroup.group_parent_id == None).all() | |
262 | return [x for x in RepoGroupList(all_groups)] |
|
262 | return [x for x in RepoGroupList(all_groups)] | |
263 |
|
263 | |||
264 | def mark_for_invalidation(self, repo_name, delete=False): |
|
264 | def mark_for_invalidation(self, repo_name, delete=False): | |
265 | """ |
|
265 | """ | |
266 | Mark caches of this repo invalid in the database. `delete` flag |
|
266 | Mark caches of this repo invalid in the database. `delete` flag | |
267 | removes the cache entries |
|
267 | removes the cache entries | |
268 |
|
268 | |||
269 | :param repo_name: the repo_name for which caches should be marked |
|
269 | :param repo_name: the repo_name for which caches should be marked | |
270 | invalid, or deleted |
|
270 | invalid, or deleted | |
271 | :param delete: delete the entry keys instead of setting bool |
|
271 | :param delete: delete the entry keys instead of setting bool | |
272 | flag on them |
|
272 | flag on them | |
273 | """ |
|
273 | """ | |
274 | CacheKey.set_invalidate(repo_name, delete=delete) |
|
274 | CacheKey.set_invalidate(repo_name, delete=delete) | |
275 | repo = Repository.get_by_repo_name(repo_name) |
|
275 | repo = Repository.get_by_repo_name(repo_name) | |
276 |
|
276 | |||
277 | if repo: |
|
277 | if repo: | |
278 | config = repo._config |
|
278 | config = repo._config | |
279 | config.set('extensions', 'largefiles', '') |
|
279 | config.set('extensions', 'largefiles', '') | |
280 | repo.update_commit_cache(config=config, cs_cache=None) |
|
280 | repo.update_commit_cache(config=config, cs_cache=None) | |
281 | caches.clear_repo_caches(repo_name) |
|
281 | caches.clear_repo_caches(repo_name) | |
282 |
|
282 | |||
283 | def toggle_following_repo(self, follow_repo_id, user_id): |
|
283 | def toggle_following_repo(self, follow_repo_id, user_id): | |
284 |
|
284 | |||
285 | f = self.sa.query(UserFollowing)\ |
|
285 | f = self.sa.query(UserFollowing)\ | |
286 | .filter(UserFollowing.follows_repo_id == follow_repo_id)\ |
|
286 | .filter(UserFollowing.follows_repo_id == follow_repo_id)\ | |
287 | .filter(UserFollowing.user_id == user_id).scalar() |
|
287 | .filter(UserFollowing.user_id == user_id).scalar() | |
288 |
|
288 | |||
289 | if f is not None: |
|
289 | if f is not None: | |
290 | try: |
|
290 | try: | |
291 | self.sa.delete(f) |
|
291 | self.sa.delete(f) | |
292 | action_logger(UserTemp(user_id), |
|
292 | action_logger(UserTemp(user_id), | |
293 | 'stopped_following_repo', |
|
293 | 'stopped_following_repo', | |
294 | RepoTemp(follow_repo_id)) |
|
294 | RepoTemp(follow_repo_id)) | |
295 | return |
|
295 | return | |
296 | except Exception: |
|
296 | except Exception: | |
297 | log.error(traceback.format_exc()) |
|
297 | log.error(traceback.format_exc()) | |
298 | raise |
|
298 | raise | |
299 |
|
299 | |||
300 | try: |
|
300 | try: | |
301 | f = UserFollowing() |
|
301 | f = UserFollowing() | |
302 | f.user_id = user_id |
|
302 | f.user_id = user_id | |
303 | f.follows_repo_id = follow_repo_id |
|
303 | f.follows_repo_id = follow_repo_id | |
304 | self.sa.add(f) |
|
304 | self.sa.add(f) | |
305 |
|
305 | |||
306 | action_logger(UserTemp(user_id), |
|
306 | action_logger(UserTemp(user_id), | |
307 | 'started_following_repo', |
|
307 | 'started_following_repo', | |
308 | RepoTemp(follow_repo_id)) |
|
308 | RepoTemp(follow_repo_id)) | |
309 | except Exception: |
|
309 | except Exception: | |
310 | log.error(traceback.format_exc()) |
|
310 | log.error(traceback.format_exc()) | |
311 | raise |
|
311 | raise | |
312 |
|
312 | |||
313 | def toggle_following_user(self, follow_user_id, user_id): |
|
313 | def toggle_following_user(self, follow_user_id, user_id): | |
314 | f = self.sa.query(UserFollowing)\ |
|
314 | f = self.sa.query(UserFollowing)\ | |
315 | .filter(UserFollowing.follows_user_id == follow_user_id)\ |
|
315 | .filter(UserFollowing.follows_user_id == follow_user_id)\ | |
316 | .filter(UserFollowing.user_id == user_id).scalar() |
|
316 | .filter(UserFollowing.user_id == user_id).scalar() | |
317 |
|
317 | |||
318 | if f is not None: |
|
318 | if f is not None: | |
319 | try: |
|
319 | try: | |
320 | self.sa.delete(f) |
|
320 | self.sa.delete(f) | |
321 | return |
|
321 | return | |
322 | except Exception: |
|
322 | except Exception: | |
323 | log.error(traceback.format_exc()) |
|
323 | log.error(traceback.format_exc()) | |
324 | raise |
|
324 | raise | |
325 |
|
325 | |||
326 | try: |
|
326 | try: | |
327 | f = UserFollowing() |
|
327 | f = UserFollowing() | |
328 | f.user_id = user_id |
|
328 | f.user_id = user_id | |
329 | f.follows_user_id = follow_user_id |
|
329 | f.follows_user_id = follow_user_id | |
330 | self.sa.add(f) |
|
330 | self.sa.add(f) | |
331 | except Exception: |
|
331 | except Exception: | |
332 | log.error(traceback.format_exc()) |
|
332 | log.error(traceback.format_exc()) | |
333 | raise |
|
333 | raise | |
334 |
|
334 | |||
335 | def is_following_repo(self, repo_name, user_id, cache=False): |
|
335 | def is_following_repo(self, repo_name, user_id, cache=False): | |
336 | r = self.sa.query(Repository)\ |
|
336 | r = self.sa.query(Repository)\ | |
337 | .filter(Repository.repo_name == repo_name).scalar() |
|
337 | .filter(Repository.repo_name == repo_name).scalar() | |
338 |
|
338 | |||
339 | f = self.sa.query(UserFollowing)\ |
|
339 | f = self.sa.query(UserFollowing)\ | |
340 | .filter(UserFollowing.follows_repository == r)\ |
|
340 | .filter(UserFollowing.follows_repository == r)\ | |
341 | .filter(UserFollowing.user_id == user_id).scalar() |
|
341 | .filter(UserFollowing.user_id == user_id).scalar() | |
342 |
|
342 | |||
343 | return f is not None |
|
343 | return f is not None | |
344 |
|
344 | |||
345 | def is_following_user(self, username, user_id, cache=False): |
|
345 | def is_following_user(self, username, user_id, cache=False): | |
346 | u = User.get_by_username(username) |
|
346 | u = User.get_by_username(username) | |
347 |
|
347 | |||
348 | f = self.sa.query(UserFollowing)\ |
|
348 | f = self.sa.query(UserFollowing)\ | |
349 | .filter(UserFollowing.follows_user == u)\ |
|
349 | .filter(UserFollowing.follows_user == u)\ | |
350 | .filter(UserFollowing.user_id == user_id).scalar() |
|
350 | .filter(UserFollowing.user_id == user_id).scalar() | |
351 |
|
351 | |||
352 | return f is not None |
|
352 | return f is not None | |
353 |
|
353 | |||
354 | def get_followers(self, repo): |
|
354 | def get_followers(self, repo): | |
355 | repo = self._get_repo(repo) |
|
355 | repo = self._get_repo(repo) | |
356 |
|
356 | |||
357 | return self.sa.query(UserFollowing)\ |
|
357 | return self.sa.query(UserFollowing)\ | |
358 | .filter(UserFollowing.follows_repository == repo).count() |
|
358 | .filter(UserFollowing.follows_repository == repo).count() | |
359 |
|
359 | |||
360 | def get_forks(self, repo): |
|
360 | def get_forks(self, repo): | |
361 | repo = self._get_repo(repo) |
|
361 | repo = self._get_repo(repo) | |
362 | return self.sa.query(Repository)\ |
|
362 | return self.sa.query(Repository)\ | |
363 | .filter(Repository.fork == repo).count() |
|
363 | .filter(Repository.fork == repo).count() | |
364 |
|
364 | |||
365 | def get_pull_requests(self, repo): |
|
365 | def get_pull_requests(self, repo): | |
366 | repo = self._get_repo(repo) |
|
366 | repo = self._get_repo(repo) | |
367 | return self.sa.query(PullRequest)\ |
|
367 | return self.sa.query(PullRequest)\ | |
368 | .filter(PullRequest.target_repo == repo)\ |
|
368 | .filter(PullRequest.target_repo == repo)\ | |
369 | .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count() |
|
369 | .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count() | |
370 |
|
370 | |||
371 | def mark_as_fork(self, repo, fork, user): |
|
371 | def mark_as_fork(self, repo, fork, user): | |
372 | repo = self._get_repo(repo) |
|
372 | repo = self._get_repo(repo) | |
373 | fork = self._get_repo(fork) |
|
373 | fork = self._get_repo(fork) | |
374 | if fork and repo.repo_id == fork.repo_id: |
|
374 | if fork and repo.repo_id == fork.repo_id: | |
375 | raise Exception("Cannot set repository as fork of itself") |
|
375 | raise Exception("Cannot set repository as fork of itself") | |
376 |
|
376 | |||
377 | if fork and repo.repo_type != fork.repo_type: |
|
377 | if fork and repo.repo_type != fork.repo_type: | |
378 | raise RepositoryError( |
|
378 | raise RepositoryError( | |
379 | "Cannot set repository as fork of repository with other type") |
|
379 | "Cannot set repository as fork of repository with other type") | |
380 |
|
380 | |||
381 | repo.fork = fork |
|
381 | repo.fork = fork | |
382 | self.sa.add(repo) |
|
382 | self.sa.add(repo) | |
383 | return repo |
|
383 | return repo | |
384 |
|
384 | |||
385 | def pull_changes(self, repo, username): |
|
385 | def pull_changes(self, repo, username): | |
386 | dbrepo = self._get_repo(repo) |
|
386 | dbrepo = self._get_repo(repo) | |
387 | clone_uri = dbrepo.clone_uri |
|
387 | clone_uri = dbrepo.clone_uri | |
388 | if not clone_uri: |
|
388 | if not clone_uri: | |
389 | raise Exception("This repository doesn't have a clone uri") |
|
389 | raise Exception("This repository doesn't have a clone uri") | |
390 |
|
390 | |||
391 | repo = dbrepo.scm_instance(cache=False) |
|
391 | repo = dbrepo.scm_instance(cache=False) | |
392 | # TODO: marcink fix this an re-enable since we need common logic |
|
392 | # TODO: marcink fix this an re-enable since we need common logic | |
393 | # for hg/git remove hooks so we don't trigger them on fetching |
|
393 | # for hg/git remove hooks so we don't trigger them on fetching | |
394 | # commits from remote |
|
394 | # commits from remote | |
395 | repo.config.clear_section('hooks') |
|
395 | repo.config.clear_section('hooks') | |
396 |
|
396 | |||
397 | repo_name = dbrepo.repo_name |
|
397 | repo_name = dbrepo.repo_name | |
398 | try: |
|
398 | try: | |
399 | # TODO: we need to make sure those operations call proper hooks ! |
|
399 | # TODO: we need to make sure those operations call proper hooks ! | |
400 | repo.pull(clone_uri) |
|
400 | repo.pull(clone_uri) | |
401 |
|
401 | |||
402 | self.mark_for_invalidation(repo_name) |
|
402 | self.mark_for_invalidation(repo_name) | |
403 | except Exception: |
|
403 | except Exception: | |
404 | log.error(traceback.format_exc()) |
|
404 | log.error(traceback.format_exc()) | |
405 | raise |
|
405 | raise | |
406 |
|
406 | |||
407 | def commit_change(self, repo, repo_name, commit, user, author, message, |
|
407 | def commit_change(self, repo, repo_name, commit, user, author, message, | |
408 | content, f_path): |
|
408 | content, f_path): | |
409 | """ |
|
409 | """ | |
410 | Commits changes |
|
410 | Commits changes | |
411 |
|
411 | |||
412 | :param repo: SCM instance |
|
412 | :param repo: SCM instance | |
413 |
|
413 | |||
414 | """ |
|
414 | """ | |
415 | user = self._get_user(user) |
|
415 | user = self._get_user(user) | |
416 |
|
416 | |||
417 | # decoding here will force that we have proper encoded values |
|
417 | # decoding here will force that we have proper encoded values | |
418 | # in any other case this will throw exceptions and deny commit |
|
418 | # in any other case this will throw exceptions and deny commit | |
419 | content = safe_str(content) |
|
419 | content = safe_str(content) | |
420 | path = safe_str(f_path) |
|
420 | path = safe_str(f_path) | |
421 | # message and author needs to be unicode |
|
421 | # message and author needs to be unicode | |
422 | # proper backend should then translate that into required type |
|
422 | # proper backend should then translate that into required type | |
423 | message = safe_unicode(message) |
|
423 | message = safe_unicode(message) | |
424 | author = safe_unicode(author) |
|
424 | author = safe_unicode(author) | |
425 | imc = repo.in_memory_commit |
|
425 | imc = repo.in_memory_commit | |
426 | imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path))) |
|
426 | imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path))) | |
427 | try: |
|
427 | try: | |
428 | # TODO: handle pre-push action ! |
|
428 | # TODO: handle pre-push action ! | |
429 | tip = imc.commit( |
|
429 | tip = imc.commit( | |
430 | message=message, author=author, parents=[commit], |
|
430 | message=message, author=author, parents=[commit], | |
431 | branch=commit.branch) |
|
431 | branch=commit.branch) | |
432 | except Exception as e: |
|
432 | except Exception as e: | |
433 | log.error(traceback.format_exc()) |
|
433 | log.error(traceback.format_exc()) | |
434 | raise IMCCommitError(str(e)) |
|
434 | raise IMCCommitError(str(e)) | |
435 | finally: |
|
435 | finally: | |
436 | # always clear caches, if commit fails we want fresh object also |
|
436 | # always clear caches, if commit fails we want fresh object also | |
437 | self.mark_for_invalidation(repo_name) |
|
437 | self.mark_for_invalidation(repo_name) | |
438 |
|
438 | |||
439 | # We trigger the post-push action |
|
439 | # We trigger the post-push action | |
440 | hooks_utils.trigger_post_push_hook( |
|
440 | hooks_utils.trigger_post_push_hook( | |
441 | username=user.username, action='push_local', repo_name=repo_name, |
|
441 | username=user.username, action='push_local', repo_name=repo_name, | |
442 | repo_alias=repo.alias, commit_ids=[tip.raw_id]) |
|
442 | repo_alias=repo.alias, commit_ids=[tip.raw_id]) | |
443 | return tip |
|
443 | return tip | |
444 |
|
444 | |||
445 | def _sanitize_path(self, f_path): |
|
445 | def _sanitize_path(self, f_path): | |
446 | if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path: |
|
446 | if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path: | |
447 | raise NonRelativePathError('%s is not an relative path' % f_path) |
|
447 | raise NonRelativePathError('%s is not an relative path' % f_path) | |
448 | if f_path: |
|
448 | if f_path: | |
449 | f_path = os.path.normpath(f_path) |
|
449 | f_path = os.path.normpath(f_path) | |
450 | return f_path |
|
450 | return f_path | |
451 |
|
451 | |||
452 | def get_dirnode_metadata(self, commit, dir_node): |
|
452 | def get_dirnode_metadata(self, commit, dir_node): | |
453 | if not dir_node.is_dir(): |
|
453 | if not dir_node.is_dir(): | |
454 | return [] |
|
454 | return [] | |
455 |
|
455 | |||
456 | data = [] |
|
456 | data = [] | |
457 | for node in dir_node: |
|
457 | for node in dir_node: | |
458 | if not node.is_file(): |
|
458 | if not node.is_file(): | |
459 | # we skip file-nodes |
|
459 | # we skip file-nodes | |
460 | continue |
|
460 | continue | |
461 |
|
461 | |||
462 | last_commit = node.last_commit |
|
462 | last_commit = node.last_commit | |
463 | last_commit_date = last_commit.date |
|
463 | last_commit_date = last_commit.date | |
464 | data.append({ |
|
464 | data.append({ | |
465 | 'name': node.name, |
|
465 | 'name': node.name, | |
466 | 'size': h.format_byte_size_binary(node.size), |
|
466 | 'size': h.format_byte_size_binary(node.size), | |
467 | 'modified_at': h.format_date(last_commit_date), |
|
467 | 'modified_at': h.format_date(last_commit_date), | |
468 | 'modified_ts': last_commit_date.isoformat(), |
|
468 | 'modified_ts': last_commit_date.isoformat(), | |
469 | 'revision': last_commit.revision, |
|
469 | 'revision': last_commit.revision, | |
470 | 'short_id': last_commit.short_id, |
|
470 | 'short_id': last_commit.short_id, | |
471 | 'message': h.escape(last_commit.message), |
|
471 | 'message': h.escape(last_commit.message), | |
472 | 'author': h.escape(last_commit.author), |
|
472 | 'author': h.escape(last_commit.author), | |
473 | 'user_profile': h.gravatar_with_user(last_commit.author), |
|
473 | 'user_profile': h.gravatar_with_user(last_commit.author), | |
474 | }) |
|
474 | }) | |
475 |
|
475 | |||
476 | return data |
|
476 | return data | |
477 |
|
477 | |||
478 | def get_nodes(self, repo_name, commit_id, root_path='/', flat=True, |
|
478 | def get_nodes(self, repo_name, commit_id, root_path='/', flat=True, | |
479 | extended_info=False, content=False, max_file_bytes=None): |
|
479 | extended_info=False, content=False, max_file_bytes=None): | |
480 | """ |
|
480 | """ | |
481 | recursive walk in root dir and return a set of all path in that dir |
|
481 | recursive walk in root dir and return a set of all path in that dir | |
482 | based on repository walk function |
|
482 | based on repository walk function | |
483 |
|
483 | |||
484 | :param repo_name: name of repository |
|
484 | :param repo_name: name of repository | |
485 | :param commit_id: commit id for which to list nodes |
|
485 | :param commit_id: commit id for which to list nodes | |
486 | :param root_path: root path to list |
|
486 | :param root_path: root path to list | |
487 | :param flat: return as a list, if False returns a dict with description |
|
487 | :param flat: return as a list, if False returns a dict with description | |
488 | :param max_file_bytes: will not return file contents over this limit |
|
488 | :param max_file_bytes: will not return file contents over this limit | |
489 |
|
489 | |||
490 | """ |
|
490 | """ | |
491 | _files = list() |
|
491 | _files = list() | |
492 | _dirs = list() |
|
492 | _dirs = list() | |
493 | try: |
|
493 | try: | |
494 | _repo = self._get_repo(repo_name) |
|
494 | _repo = self._get_repo(repo_name) | |
495 | commit = _repo.scm_instance().get_commit(commit_id=commit_id) |
|
495 | commit = _repo.scm_instance().get_commit(commit_id=commit_id) | |
496 | root_path = root_path.lstrip('/') |
|
496 | root_path = root_path.lstrip('/') | |
497 | for __, dirs, files in commit.walk(root_path): |
|
497 | for __, dirs, files in commit.walk(root_path): | |
498 | for f in files: |
|
498 | for f in files: | |
499 | _content = None |
|
499 | _content = None | |
500 | _data = f.unicode_path |
|
500 | _data = f.unicode_path | |
501 | over_size_limit = (max_file_bytes is not None |
|
501 | over_size_limit = (max_file_bytes is not None | |
502 | and f.size > max_file_bytes) |
|
502 | and f.size > max_file_bytes) | |
503 |
|
503 | |||
504 | if not flat: |
|
504 | if not flat: | |
505 | _data = { |
|
505 | _data = { | |
506 | "name": f.unicode_path, |
|
506 | "name": f.unicode_path, | |
507 | "type": "file", |
|
507 | "type": "file", | |
508 | } |
|
508 | } | |
509 | if extended_info: |
|
509 | if extended_info: | |
510 | _data.update({ |
|
510 | _data.update({ | |
511 | "md5": f.md5, |
|
511 | "md5": f.md5, | |
512 | "binary": f.is_binary, |
|
512 | "binary": f.is_binary, | |
513 | "size": f.size, |
|
513 | "size": f.size, | |
514 | "extension": f.extension, |
|
514 | "extension": f.extension, | |
515 | "mimetype": f.mimetype, |
|
515 | "mimetype": f.mimetype, | |
516 | "lines": f.lines()[0] |
|
516 | "lines": f.lines()[0] | |
517 | }) |
|
517 | }) | |
518 |
|
518 | |||
519 | if content: |
|
519 | if content: | |
520 | full_content = None |
|
520 | full_content = None | |
521 | if not f.is_binary and not over_size_limit: |
|
521 | if not f.is_binary and not over_size_limit: | |
522 | full_content = safe_str(f.content) |
|
522 | full_content = safe_str(f.content) | |
523 |
|
523 | |||
524 | _data.update({ |
|
524 | _data.update({ | |
525 | "content": full_content, |
|
525 | "content": full_content, | |
526 | }) |
|
526 | }) | |
527 | _files.append(_data) |
|
527 | _files.append(_data) | |
528 | for d in dirs: |
|
528 | for d in dirs: | |
529 | _data = d.unicode_path |
|
529 | _data = d.unicode_path | |
530 | if not flat: |
|
530 | if not flat: | |
531 | _data = { |
|
531 | _data = { | |
532 | "name": d.unicode_path, |
|
532 | "name": d.unicode_path, | |
533 | "type": "dir", |
|
533 | "type": "dir", | |
534 | } |
|
534 | } | |
535 | if extended_info: |
|
535 | if extended_info: | |
536 | _data.update({ |
|
536 | _data.update({ | |
537 | "md5": None, |
|
537 | "md5": None, | |
538 | "binary": None, |
|
538 | "binary": None, | |
539 | "size": None, |
|
539 | "size": None, | |
540 | "extension": None, |
|
540 | "extension": None, | |
541 | }) |
|
541 | }) | |
542 | if content: |
|
542 | if content: | |
543 | _data.update({ |
|
543 | _data.update({ | |
544 | "content": None |
|
544 | "content": None | |
545 | }) |
|
545 | }) | |
546 | _dirs.append(_data) |
|
546 | _dirs.append(_data) | |
547 | except RepositoryError: |
|
547 | except RepositoryError: | |
548 | log.debug("Exception in get_nodes", exc_info=True) |
|
548 | log.debug("Exception in get_nodes", exc_info=True) | |
549 | raise |
|
549 | raise | |
550 |
|
550 | |||
551 | return _dirs, _files |
|
551 | return _dirs, _files | |
552 |
|
552 | |||
553 | def create_nodes(self, user, repo, message, nodes, parent_commit=None, |
|
553 | def create_nodes(self, user, repo, message, nodes, parent_commit=None, | |
554 | author=None, trigger_push_hook=True): |
|
554 | author=None, trigger_push_hook=True): | |
555 | """ |
|
555 | """ | |
556 | Commits given multiple nodes into repo |
|
556 | Commits given multiple nodes into repo | |
557 |
|
557 | |||
558 | :param user: RhodeCode User object or user_id, the commiter |
|
558 | :param user: RhodeCode User object or user_id, the commiter | |
559 | :param repo: RhodeCode Repository object |
|
559 | :param repo: RhodeCode Repository object | |
560 | :param message: commit message |
|
560 | :param message: commit message | |
561 | :param nodes: mapping {filename:{'content':content},...} |
|
561 | :param nodes: mapping {filename:{'content':content},...} | |
562 | :param parent_commit: parent commit, can be empty than it's |
|
562 | :param parent_commit: parent commit, can be empty than it's | |
563 | initial commit |
|
563 | initial commit | |
564 | :param author: author of commit, cna be different that commiter |
|
564 | :param author: author of commit, cna be different that commiter | |
565 | only for git |
|
565 | only for git | |
566 | :param trigger_push_hook: trigger push hooks |
|
566 | :param trigger_push_hook: trigger push hooks | |
567 |
|
567 | |||
568 | :returns: new commited commit |
|
568 | :returns: new commited commit | |
569 | """ |
|
569 | """ | |
570 |
|
570 | |||
571 | user = self._get_user(user) |
|
571 | user = self._get_user(user) | |
572 | scm_instance = repo.scm_instance(cache=False) |
|
572 | scm_instance = repo.scm_instance(cache=False) | |
573 |
|
573 | |||
574 | processed_nodes = [] |
|
574 | processed_nodes = [] | |
575 | for f_path in nodes: |
|
575 | for f_path in nodes: | |
576 | f_path = self._sanitize_path(f_path) |
|
576 | f_path = self._sanitize_path(f_path) | |
577 | content = nodes[f_path]['content'] |
|
577 | content = nodes[f_path]['content'] | |
578 | f_path = safe_str(f_path) |
|
578 | f_path = safe_str(f_path) | |
579 | # decoding here will force that we have proper encoded values |
|
579 | # decoding here will force that we have proper encoded values | |
580 | # in any other case this will throw exceptions and deny commit |
|
580 | # in any other case this will throw exceptions and deny commit | |
581 | if isinstance(content, (basestring,)): |
|
581 | if isinstance(content, (basestring,)): | |
582 | content = safe_str(content) |
|
582 | content = safe_str(content) | |
583 | elif isinstance(content, (file, cStringIO.OutputType,)): |
|
583 | elif isinstance(content, (file, cStringIO.OutputType,)): | |
584 | content = content.read() |
|
584 | content = content.read() | |
585 | else: |
|
585 | else: | |
586 | raise Exception('Content is of unrecognized type %s' % ( |
|
586 | raise Exception('Content is of unrecognized type %s' % ( | |
587 | type(content) |
|
587 | type(content) | |
588 | )) |
|
588 | )) | |
589 | processed_nodes.append((f_path, content)) |
|
589 | processed_nodes.append((f_path, content)) | |
590 |
|
590 | |||
591 | message = safe_unicode(message) |
|
591 | message = safe_unicode(message) | |
592 | commiter = user.full_contact |
|
592 | commiter = user.full_contact | |
593 | author = safe_unicode(author) if author else commiter |
|
593 | author = safe_unicode(author) if author else commiter | |
594 |
|
594 | |||
595 | imc = scm_instance.in_memory_commit |
|
595 | imc = scm_instance.in_memory_commit | |
596 |
|
596 | |||
597 | if not parent_commit: |
|
597 | if not parent_commit: | |
598 | parent_commit = EmptyCommit(alias=scm_instance.alias) |
|
598 | parent_commit = EmptyCommit(alias=scm_instance.alias) | |
599 |
|
599 | |||
600 | if isinstance(parent_commit, EmptyCommit): |
|
600 | if isinstance(parent_commit, EmptyCommit): | |
601 | # EmptyCommit means we we're editing empty repository |
|
601 | # EmptyCommit means we we're editing empty repository | |
602 | parents = None |
|
602 | parents = None | |
603 | else: |
|
603 | else: | |
604 | parents = [parent_commit] |
|
604 | parents = [parent_commit] | |
605 | # add multiple nodes |
|
605 | # add multiple nodes | |
606 | for path, content in processed_nodes: |
|
606 | for path, content in processed_nodes: | |
607 | imc.add(FileNode(path, content=content)) |
|
607 | imc.add(FileNode(path, content=content)) | |
608 | # TODO: handle pre push scenario |
|
608 | # TODO: handle pre push scenario | |
609 | tip = imc.commit(message=message, |
|
609 | tip = imc.commit(message=message, | |
610 | author=author, |
|
610 | author=author, | |
611 | parents=parents, |
|
611 | parents=parents, | |
612 | branch=parent_commit.branch) |
|
612 | branch=parent_commit.branch) | |
613 |
|
613 | |||
614 | self.mark_for_invalidation(repo.repo_name) |
|
614 | self.mark_for_invalidation(repo.repo_name) | |
615 | if trigger_push_hook: |
|
615 | if trigger_push_hook: | |
616 | hooks_utils.trigger_post_push_hook( |
|
616 | hooks_utils.trigger_post_push_hook( | |
617 | username=user.username, action='push_local', |
|
617 | username=user.username, action='push_local', | |
618 | repo_name=repo.repo_name, repo_alias=scm_instance.alias, |
|
618 | repo_name=repo.repo_name, repo_alias=scm_instance.alias, | |
619 | commit_ids=[tip.raw_id]) |
|
619 | commit_ids=[tip.raw_id]) | |
620 | return tip |
|
620 | return tip | |
621 |
|
621 | |||
622 | def update_nodes(self, user, repo, message, nodes, parent_commit=None, |
|
622 | def update_nodes(self, user, repo, message, nodes, parent_commit=None, | |
623 | author=None, trigger_push_hook=True): |
|
623 | author=None, trigger_push_hook=True): | |
624 | user = self._get_user(user) |
|
624 | user = self._get_user(user) | |
625 | scm_instance = repo.scm_instance(cache=False) |
|
625 | scm_instance = repo.scm_instance(cache=False) | |
626 |
|
626 | |||
627 | message = safe_unicode(message) |
|
627 | message = safe_unicode(message) | |
628 | commiter = user.full_contact |
|
628 | commiter = user.full_contact | |
629 | author = safe_unicode(author) if author else commiter |
|
629 | author = safe_unicode(author) if author else commiter | |
630 |
|
630 | |||
631 | imc = scm_instance.in_memory_commit |
|
631 | imc = scm_instance.in_memory_commit | |
632 |
|
632 | |||
633 | if not parent_commit: |
|
633 | if not parent_commit: | |
634 | parent_commit = EmptyCommit(alias=scm_instance.alias) |
|
634 | parent_commit = EmptyCommit(alias=scm_instance.alias) | |
635 |
|
635 | |||
636 | if isinstance(parent_commit, EmptyCommit): |
|
636 | if isinstance(parent_commit, EmptyCommit): | |
637 | # EmptyCommit means we we're editing empty repository |
|
637 | # EmptyCommit means we we're editing empty repository | |
638 | parents = None |
|
638 | parents = None | |
639 | else: |
|
639 | else: | |
640 | parents = [parent_commit] |
|
640 | parents = [parent_commit] | |
641 |
|
641 | |||
642 | # add multiple nodes |
|
642 | # add multiple nodes | |
643 | for _filename, data in nodes.items(): |
|
643 | for _filename, data in nodes.items(): | |
644 | # new filename, can be renamed from the old one, also sanitaze |
|
644 | # new filename, can be renamed from the old one, also sanitaze | |
645 | # the path for any hack around relative paths like ../../ etc. |
|
645 | # the path for any hack around relative paths like ../../ etc. | |
646 | filename = self._sanitize_path(data['filename']) |
|
646 | filename = self._sanitize_path(data['filename']) | |
647 | old_filename = self._sanitize_path(_filename) |
|
647 | old_filename = self._sanitize_path(_filename) | |
648 | content = data['content'] |
|
648 | content = data['content'] | |
649 |
|
649 | |||
650 | filenode = FileNode(old_filename, content=content) |
|
650 | filenode = FileNode(old_filename, content=content) | |
651 | op = data['op'] |
|
651 | op = data['op'] | |
652 | if op == 'add': |
|
652 | if op == 'add': | |
653 | imc.add(filenode) |
|
653 | imc.add(filenode) | |
654 | elif op == 'del': |
|
654 | elif op == 'del': | |
655 | imc.remove(filenode) |
|
655 | imc.remove(filenode) | |
656 | elif op == 'mod': |
|
656 | elif op == 'mod': | |
657 | if filename != old_filename: |
|
657 | if filename != old_filename: | |
658 | # TODO: handle renames more efficient, needs vcs lib |
|
658 | # TODO: handle renames more efficient, needs vcs lib | |
659 | # changes |
|
659 | # changes | |
660 | imc.remove(filenode) |
|
660 | imc.remove(filenode) | |
661 | imc.add(FileNode(filename, content=content)) |
|
661 | imc.add(FileNode(filename, content=content)) | |
662 | else: |
|
662 | else: | |
663 | imc.change(filenode) |
|
663 | imc.change(filenode) | |
664 |
|
664 | |||
665 | try: |
|
665 | try: | |
666 | # TODO: handle pre push scenario |
|
666 | # TODO: handle pre push scenario | |
667 | # commit changes |
|
667 | # commit changes | |
668 | tip = imc.commit(message=message, |
|
668 | tip = imc.commit(message=message, | |
669 | author=author, |
|
669 | author=author, | |
670 | parents=parents, |
|
670 | parents=parents, | |
671 | branch=parent_commit.branch) |
|
671 | branch=parent_commit.branch) | |
672 | except NodeNotChangedError: |
|
672 | except NodeNotChangedError: | |
673 | raise |
|
673 | raise | |
674 | except Exception as e: |
|
674 | except Exception as e: | |
675 | log.exception("Unexpected exception during call to imc.commit") |
|
675 | log.exception("Unexpected exception during call to imc.commit") | |
676 | raise IMCCommitError(str(e)) |
|
676 | raise IMCCommitError(str(e)) | |
677 | finally: |
|
677 | finally: | |
678 | # always clear caches, if commit fails we want fresh object also |
|
678 | # always clear caches, if commit fails we want fresh object also | |
679 | self.mark_for_invalidation(repo.repo_name) |
|
679 | self.mark_for_invalidation(repo.repo_name) | |
680 |
|
680 | |||
681 | if trigger_push_hook: |
|
681 | if trigger_push_hook: | |
682 | hooks_utils.trigger_post_push_hook( |
|
682 | hooks_utils.trigger_post_push_hook( | |
683 | username=user.username, action='push_local', |
|
683 | username=user.username, action='push_local', | |
684 | repo_name=repo.repo_name, repo_alias=scm_instance.alias, |
|
684 | repo_name=repo.repo_name, repo_alias=scm_instance.alias, | |
685 | commit_ids=[tip.raw_id]) |
|
685 | commit_ids=[tip.raw_id]) | |
686 |
|
686 | |||
687 | def delete_nodes(self, user, repo, message, nodes, parent_commit=None, |
|
687 | def delete_nodes(self, user, repo, message, nodes, parent_commit=None, | |
688 | author=None, trigger_push_hook=True): |
|
688 | author=None, trigger_push_hook=True): | |
689 | """ |
|
689 | """ | |
690 | Deletes given multiple nodes into `repo` |
|
690 | Deletes given multiple nodes into `repo` | |
691 |
|
691 | |||
692 | :param user: RhodeCode User object or user_id, the committer |
|
692 | :param user: RhodeCode User object or user_id, the committer | |
693 | :param repo: RhodeCode Repository object |
|
693 | :param repo: RhodeCode Repository object | |
694 | :param message: commit message |
|
694 | :param message: commit message | |
695 | :param nodes: mapping {filename:{'content':content},...} |
|
695 | :param nodes: mapping {filename:{'content':content},...} | |
696 | :param parent_commit: parent commit, can be empty than it's initial |
|
696 | :param parent_commit: parent commit, can be empty than it's initial | |
697 | commit |
|
697 | commit | |
698 | :param author: author of commit, cna be different that commiter only |
|
698 | :param author: author of commit, cna be different that commiter only | |
699 | for git |
|
699 | for git | |
700 | :param trigger_push_hook: trigger push hooks |
|
700 | :param trigger_push_hook: trigger push hooks | |
701 |
|
701 | |||
702 | :returns: new commit after deletion |
|
702 | :returns: new commit after deletion | |
703 | """ |
|
703 | """ | |
704 |
|
704 | |||
705 | user = self._get_user(user) |
|
705 | user = self._get_user(user) | |
706 | scm_instance = repo.scm_instance(cache=False) |
|
706 | scm_instance = repo.scm_instance(cache=False) | |
707 |
|
707 | |||
708 | processed_nodes = [] |
|
708 | processed_nodes = [] | |
709 | for f_path in nodes: |
|
709 | for f_path in nodes: | |
710 | f_path = self._sanitize_path(f_path) |
|
710 | f_path = self._sanitize_path(f_path) | |
711 | # content can be empty but for compatabilty it allows same dicts |
|
711 | # content can be empty but for compatabilty it allows same dicts | |
712 | # structure as add_nodes |
|
712 | # structure as add_nodes | |
713 | content = nodes[f_path].get('content') |
|
713 | content = nodes[f_path].get('content') | |
714 | processed_nodes.append((f_path, content)) |
|
714 | processed_nodes.append((f_path, content)) | |
715 |
|
715 | |||
716 | message = safe_unicode(message) |
|
716 | message = safe_unicode(message) | |
717 | commiter = user.full_contact |
|
717 | commiter = user.full_contact | |
718 | author = safe_unicode(author) if author else commiter |
|
718 | author = safe_unicode(author) if author else commiter | |
719 |
|
719 | |||
720 | imc = scm_instance.in_memory_commit |
|
720 | imc = scm_instance.in_memory_commit | |
721 |
|
721 | |||
722 | if not parent_commit: |
|
722 | if not parent_commit: | |
723 | parent_commit = EmptyCommit(alias=scm_instance.alias) |
|
723 | parent_commit = EmptyCommit(alias=scm_instance.alias) | |
724 |
|
724 | |||
725 | if isinstance(parent_commit, EmptyCommit): |
|
725 | if isinstance(parent_commit, EmptyCommit): | |
726 | # EmptyCommit means we we're editing empty repository |
|
726 | # EmptyCommit means we we're editing empty repository | |
727 | parents = None |
|
727 | parents = None | |
728 | else: |
|
728 | else: | |
729 | parents = [parent_commit] |
|
729 | parents = [parent_commit] | |
730 | # add multiple nodes |
|
730 | # add multiple nodes | |
731 | for path, content in processed_nodes: |
|
731 | for path, content in processed_nodes: | |
732 | imc.remove(FileNode(path, content=content)) |
|
732 | imc.remove(FileNode(path, content=content)) | |
733 |
|
733 | |||
734 | # TODO: handle pre push scenario |
|
734 | # TODO: handle pre push scenario | |
735 | tip = imc.commit(message=message, |
|
735 | tip = imc.commit(message=message, | |
736 | author=author, |
|
736 | author=author, | |
737 | parents=parents, |
|
737 | parents=parents, | |
738 | branch=parent_commit.branch) |
|
738 | branch=parent_commit.branch) | |
739 |
|
739 | |||
740 | self.mark_for_invalidation(repo.repo_name) |
|
740 | self.mark_for_invalidation(repo.repo_name) | |
741 | if trigger_push_hook: |
|
741 | if trigger_push_hook: | |
742 | hooks_utils.trigger_post_push_hook( |
|
742 | hooks_utils.trigger_post_push_hook( | |
743 | username=user.username, action='push_local', |
|
743 | username=user.username, action='push_local', | |
744 | repo_name=repo.repo_name, repo_alias=scm_instance.alias, |
|
744 | repo_name=repo.repo_name, repo_alias=scm_instance.alias, | |
745 | commit_ids=[tip.raw_id]) |
|
745 | commit_ids=[tip.raw_id]) | |
746 | return tip |
|
746 | return tip | |
747 |
|
747 | |||
748 | def strip(self, repo, commit_id, branch): |
|
748 | def strip(self, repo, commit_id, branch): | |
749 | scm_instance = repo.scm_instance(cache=False) |
|
749 | scm_instance = repo.scm_instance(cache=False) | |
750 | scm_instance.config.clear_section('hooks') |
|
750 | scm_instance.config.clear_section('hooks') | |
751 | scm_instance.strip(commit_id, branch) |
|
751 | scm_instance.strip(commit_id, branch) | |
752 | self.mark_for_invalidation(repo.repo_name) |
|
752 | self.mark_for_invalidation(repo.repo_name) | |
753 |
|
753 | |||
754 | def get_unread_journal(self): |
|
754 | def get_unread_journal(self): | |
755 | return self.sa.query(UserLog).count() |
|
755 | return self.sa.query(UserLog).count() | |
756 |
|
756 | |||
757 | def get_repo_landing_revs(self, repo=None): |
|
757 | def get_repo_landing_revs(self, repo=None): | |
758 | """ |
|
758 | """ | |
759 | Generates select option with tags branches and bookmarks (for hg only) |
|
759 | Generates select option with tags branches and bookmarks (for hg only) | |
760 | grouped by type |
|
760 | grouped by type | |
761 |
|
761 | |||
762 | :param repo: |
|
762 | :param repo: | |
763 | """ |
|
763 | """ | |
764 |
|
764 | |||
765 | hist_l = [] |
|
|||
766 | choices = [] |
|
|||
767 | repo = self._get_repo(repo) |
|
765 | repo = self._get_repo(repo) | |
768 | hist_l.append(['rev:tip', _('latest tip')]) |
|
766 | ||
769 | choices.append('rev:tip') |
|
767 | hist_l = [ | |
|
768 | ['rev:tip', _('latest tip')] | |||
|
769 | ] | |||
|
770 | choices = [ | |||
|
771 | 'rev:tip' | |||
|
772 | ] | |||
|
773 | ||||
770 | if not repo: |
|
774 | if not repo: | |
771 | return choices, hist_l |
|
775 | return choices, hist_l | |
772 |
|
776 | |||
773 | repo = repo.scm_instance() |
|
777 | repo = repo.scm_instance() | |
774 |
|
778 | |||
775 | branches_group = ( |
|
779 | branches_group = ( | |
776 | [(u'branch:%s' % safe_unicode(b), safe_unicode(b)) |
|
780 | [(u'branch:%s' % safe_unicode(b), safe_unicode(b)) | |
777 | for b in repo.branches], |
|
781 | for b in repo.branches], | |
778 | _("Branches")) |
|
782 | _("Branches")) | |
779 | hist_l.append(branches_group) |
|
783 | hist_l.append(branches_group) | |
780 | choices.extend([x[0] for x in branches_group[0]]) |
|
784 | choices.extend([x[0] for x in branches_group[0]]) | |
781 |
|
785 | |||
782 | if repo.alias == 'hg': |
|
786 | if repo.alias == 'hg': | |
783 | bookmarks_group = ( |
|
787 | bookmarks_group = ( | |
784 | [(u'book:%s' % safe_unicode(b), safe_unicode(b)) |
|
788 | [(u'book:%s' % safe_unicode(b), safe_unicode(b)) | |
785 | for b in repo.bookmarks], |
|
789 | for b in repo.bookmarks], | |
786 | _("Bookmarks")) |
|
790 | _("Bookmarks")) | |
787 | hist_l.append(bookmarks_group) |
|
791 | hist_l.append(bookmarks_group) | |
788 | choices.extend([x[0] for x in bookmarks_group[0]]) |
|
792 | choices.extend([x[0] for x in bookmarks_group[0]]) | |
789 |
|
793 | |||
790 | tags_group = ( |
|
794 | tags_group = ( | |
791 | [(u'tag:%s' % safe_unicode(t), safe_unicode(t)) |
|
795 | [(u'tag:%s' % safe_unicode(t), safe_unicode(t)) | |
792 | for t in repo.tags], |
|
796 | for t in repo.tags], | |
793 | _("Tags")) |
|
797 | _("Tags")) | |
794 | hist_l.append(tags_group) |
|
798 | hist_l.append(tags_group) | |
795 | choices.extend([x[0] for x in tags_group[0]]) |
|
799 | choices.extend([x[0] for x in tags_group[0]]) | |
796 |
|
800 | |||
797 | return choices, hist_l |
|
801 | return choices, hist_l | |
798 |
|
802 | |||
799 | def install_git_hook(self, repo, force_create=False): |
|
803 | def install_git_hook(self, repo, force_create=False): | |
800 | """ |
|
804 | """ | |
801 | Creates a rhodecode hook inside a git repository |
|
805 | Creates a rhodecode hook inside a git repository | |
802 |
|
806 | |||
803 | :param repo: Instance of VCS repo |
|
807 | :param repo: Instance of VCS repo | |
804 | :param force_create: Create even if same name hook exists |
|
808 | :param force_create: Create even if same name hook exists | |
805 | """ |
|
809 | """ | |
806 |
|
810 | |||
807 | loc = os.path.join(repo.path, 'hooks') |
|
811 | loc = os.path.join(repo.path, 'hooks') | |
808 | if not repo.bare: |
|
812 | if not repo.bare: | |
809 | loc = os.path.join(repo.path, '.git', 'hooks') |
|
813 | loc = os.path.join(repo.path, '.git', 'hooks') | |
810 | if not os.path.isdir(loc): |
|
814 | if not os.path.isdir(loc): | |
811 | os.makedirs(loc, mode=0777) |
|
815 | os.makedirs(loc, mode=0777) | |
812 |
|
816 | |||
813 | tmpl_post = pkg_resources.resource_string( |
|
817 | tmpl_post = pkg_resources.resource_string( | |
814 | 'rhodecode', '/'.join( |
|
818 | 'rhodecode', '/'.join( | |
815 | ('config', 'hook_templates', 'git_post_receive.py.tmpl'))) |
|
819 | ('config', 'hook_templates', 'git_post_receive.py.tmpl'))) | |
816 | tmpl_pre = pkg_resources.resource_string( |
|
820 | tmpl_pre = pkg_resources.resource_string( | |
817 | 'rhodecode', '/'.join( |
|
821 | 'rhodecode', '/'.join( | |
818 | ('config', 'hook_templates', 'git_pre_receive.py.tmpl'))) |
|
822 | ('config', 'hook_templates', 'git_pre_receive.py.tmpl'))) | |
819 |
|
823 | |||
820 | for h_type, tmpl in [('pre', tmpl_pre), ('post', tmpl_post)]: |
|
824 | for h_type, tmpl in [('pre', tmpl_pre), ('post', tmpl_post)]: | |
821 | _hook_file = os.path.join(loc, '%s-receive' % h_type) |
|
825 | _hook_file = os.path.join(loc, '%s-receive' % h_type) | |
822 | log.debug('Installing git hook in repo %s', repo) |
|
826 | log.debug('Installing git hook in repo %s', repo) | |
823 | _rhodecode_hook = _check_rhodecode_hook(_hook_file) |
|
827 | _rhodecode_hook = _check_rhodecode_hook(_hook_file) | |
824 |
|
828 | |||
825 | if _rhodecode_hook or force_create: |
|
829 | if _rhodecode_hook or force_create: | |
826 | log.debug('writing %s hook file !', h_type) |
|
830 | log.debug('writing %s hook file !', h_type) | |
827 | try: |
|
831 | try: | |
828 | with open(_hook_file, 'wb') as f: |
|
832 | with open(_hook_file, 'wb') as f: | |
829 | tmpl = tmpl.replace('_TMPL_', rhodecode.__version__) |
|
833 | tmpl = tmpl.replace('_TMPL_', rhodecode.__version__) | |
830 | tmpl = tmpl.replace('_ENV_', sys.executable) |
|
834 | tmpl = tmpl.replace('_ENV_', sys.executable) | |
831 | f.write(tmpl) |
|
835 | f.write(tmpl) | |
832 | os.chmod(_hook_file, 0755) |
|
836 | os.chmod(_hook_file, 0755) | |
833 | except IOError: |
|
837 | except IOError: | |
834 | log.exception('error writing hook file %s', _hook_file) |
|
838 | log.exception('error writing hook file %s', _hook_file) | |
835 | else: |
|
839 | else: | |
836 | log.debug('skipping writing hook file') |
|
840 | log.debug('skipping writing hook file') | |
837 |
|
841 | |||
838 | def install_svn_hooks(self, repo, force_create=False): |
|
842 | def install_svn_hooks(self, repo, force_create=False): | |
839 | """ |
|
843 | """ | |
840 | Creates rhodecode hooks inside a svn repository |
|
844 | Creates rhodecode hooks inside a svn repository | |
841 |
|
845 | |||
842 | :param repo: Instance of VCS repo |
|
846 | :param repo: Instance of VCS repo | |
843 | :param force_create: Create even if same name hook exists |
|
847 | :param force_create: Create even if same name hook exists | |
844 | """ |
|
848 | """ | |
845 | hooks_path = os.path.join(repo.path, 'hooks') |
|
849 | hooks_path = os.path.join(repo.path, 'hooks') | |
846 | if not os.path.isdir(hooks_path): |
|
850 | if not os.path.isdir(hooks_path): | |
847 | os.makedirs(hooks_path) |
|
851 | os.makedirs(hooks_path) | |
848 | post_commit_tmpl = pkg_resources.resource_string( |
|
852 | post_commit_tmpl = pkg_resources.resource_string( | |
849 | 'rhodecode', '/'.join( |
|
853 | 'rhodecode', '/'.join( | |
850 | ('config', 'hook_templates', 'svn_post_commit_hook.py.tmpl'))) |
|
854 | ('config', 'hook_templates', 'svn_post_commit_hook.py.tmpl'))) | |
851 | pre_commit_template = pkg_resources.resource_string( |
|
855 | pre_commit_template = pkg_resources.resource_string( | |
852 | 'rhodecode', '/'.join( |
|
856 | 'rhodecode', '/'.join( | |
853 | ('config', 'hook_templates', 'svn_pre_commit_hook.py.tmpl'))) |
|
857 | ('config', 'hook_templates', 'svn_pre_commit_hook.py.tmpl'))) | |
854 | templates = { |
|
858 | templates = { | |
855 | 'post-commit': post_commit_tmpl, |
|
859 | 'post-commit': post_commit_tmpl, | |
856 | 'pre-commit': pre_commit_template |
|
860 | 'pre-commit': pre_commit_template | |
857 | } |
|
861 | } | |
858 | for filename in templates: |
|
862 | for filename in templates: | |
859 | _hook_file = os.path.join(hooks_path, filename) |
|
863 | _hook_file = os.path.join(hooks_path, filename) | |
860 | _rhodecode_hook = _check_rhodecode_hook(_hook_file) |
|
864 | _rhodecode_hook = _check_rhodecode_hook(_hook_file) | |
861 | if _rhodecode_hook or force_create: |
|
865 | if _rhodecode_hook or force_create: | |
862 | log.debug('writing %s hook file !', filename) |
|
866 | log.debug('writing %s hook file !', filename) | |
863 | template = templates[filename] |
|
867 | template = templates[filename] | |
864 | try: |
|
868 | try: | |
865 | with open(_hook_file, 'wb') as f: |
|
869 | with open(_hook_file, 'wb') as f: | |
866 | template = template.replace( |
|
870 | template = template.replace( | |
867 | '_TMPL_', rhodecode.__version__) |
|
871 | '_TMPL_', rhodecode.__version__) | |
868 | template = template.replace('_ENV_', sys.executable) |
|
872 | template = template.replace('_ENV_', sys.executable) | |
869 | f.write(template) |
|
873 | f.write(template) | |
870 | os.chmod(_hook_file, 0755) |
|
874 | os.chmod(_hook_file, 0755) | |
871 | except IOError: |
|
875 | except IOError: | |
872 | log.exception('error writing hook file %s', filename) |
|
876 | log.exception('error writing hook file %s', filename) | |
873 | else: |
|
877 | else: | |
874 | log.debug('skipping writing hook file') |
|
878 | log.debug('skipping writing hook file') | |
875 |
|
879 | |||
876 | def install_hooks(self, repo, repo_type): |
|
880 | def install_hooks(self, repo, repo_type): | |
877 | if repo_type == 'git': |
|
881 | if repo_type == 'git': | |
878 | self.install_git_hook(repo) |
|
882 | self.install_git_hook(repo) | |
879 | elif repo_type == 'svn': |
|
883 | elif repo_type == 'svn': | |
880 | self.install_svn_hooks(repo) |
|
884 | self.install_svn_hooks(repo) | |
881 |
|
885 | |||
882 | def get_server_info(self, environ=None): |
|
886 | def get_server_info(self, environ=None): | |
883 | server_info = get_system_info(environ) |
|
887 | server_info = get_system_info(environ) | |
884 | return server_info |
|
888 | return server_info | |
885 |
|
889 | |||
886 |
|
890 | |||
887 | def _check_rhodecode_hook(hook_path): |
|
891 | def _check_rhodecode_hook(hook_path): | |
888 | """ |
|
892 | """ | |
889 | Check if the hook was created by RhodeCode |
|
893 | Check if the hook was created by RhodeCode | |
890 | """ |
|
894 | """ | |
891 | if not os.path.exists(hook_path): |
|
895 | if not os.path.exists(hook_path): | |
892 | return True |
|
896 | return True | |
893 |
|
897 | |||
894 | log.debug('hook exists, checking if it is from rhodecode') |
|
898 | log.debug('hook exists, checking if it is from rhodecode') | |
895 | hook_content = _read_hook(hook_path) |
|
899 | hook_content = _read_hook(hook_path) | |
896 | matches = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content) |
|
900 | matches = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content) | |
897 | if matches: |
|
901 | if matches: | |
898 | try: |
|
902 | try: | |
899 | version = matches.groups()[0] |
|
903 | version = matches.groups()[0] | |
900 | log.debug('got %s, it is rhodecode', version) |
|
904 | log.debug('got %s, it is rhodecode', version) | |
901 | return True |
|
905 | return True | |
902 | except Exception: |
|
906 | except Exception: | |
903 | log.exception("Exception while reading the hook version.") |
|
907 | log.exception("Exception while reading the hook version.") | |
904 |
|
908 | |||
905 | return False |
|
909 | return False | |
906 |
|
910 | |||
907 |
|
911 | |||
908 | def _read_hook(hook_path): |
|
912 | def _read_hook(hook_path): | |
909 | with open(hook_path, 'rb') as f: |
|
913 | with open(hook_path, 'rb') as f: | |
910 | content = f.read() |
|
914 | content = f.read() | |
911 | return content |
|
915 | return content |
@@ -1,89 +1,88 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2016-2016 RhodeCode GmbH |
|
3 | # Copyright (C) 2016-2016 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import unicodedata |
|
21 | import unicodedata | |
22 |
|
22 | |||
23 |
|
23 | |||
24 |
|
||||
25 | def strip_preparer(value): |
|
24 | def strip_preparer(value): | |
26 | """ |
|
25 | """ | |
27 | strips given values using .strip() function |
|
26 | strips given values using .strip() function | |
28 | """ |
|
27 | """ | |
29 |
|
28 | |||
30 | if value: |
|
29 | if value: | |
31 | value = value.strip() |
|
30 | value = value.strip() | |
32 | return value |
|
31 | return value | |
33 |
|
32 | |||
34 |
|
33 | |||
35 | def slugify_preparer(value): |
|
34 | def slugify_preparer(value): | |
36 | """ |
|
35 | """ | |
37 | Slugify given value to a safe representation for url/id |
|
36 | Slugify given value to a safe representation for url/id | |
38 | """ |
|
37 | """ | |
39 | from rhodecode.lib.utils import repo_name_slug |
|
38 | from rhodecode.lib.utils import repo_name_slug | |
40 | if value: |
|
39 | if value: | |
41 | value = repo_name_slug(value.lower()) |
|
40 | value = repo_name_slug(value.lower()) | |
42 | return value |
|
41 | return value | |
43 |
|
42 | |||
44 |
|
43 | |||
45 | def non_ascii_strip_preparer(value): |
|
44 | def non_ascii_strip_preparer(value): | |
46 | """ |
|
45 | """ | |
47 | trie to replace non-ascii letters to their ascii representation |
|
46 | trie to replace non-ascii letters to their ascii representation | |
48 | eg:: |
|
47 | eg:: | |
49 |
|
48 | |||
50 | `ΕΌoΕw` converts into `zolw` |
|
49 | `ΕΌoΕw` converts into `zolw` | |
51 | """ |
|
50 | """ | |
52 | if value: |
|
51 | if value: | |
53 | value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore') |
|
52 | value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore') | |
54 | return value |
|
53 | return value | |
55 |
|
54 | |||
56 |
|
55 | |||
57 | def unique_list_preparer(value): |
|
56 | def unique_list_preparer(value): | |
58 | """ |
|
57 | """ | |
59 | Converts an list to a list with only unique values |
|
58 | Converts an list to a list with only unique values | |
60 | """ |
|
59 | """ | |
61 |
|
60 | |||
62 | def make_unique(value): |
|
61 | def make_unique(value): | |
63 | seen = [] |
|
62 | seen = [] | |
64 | return [c for c in value if |
|
63 | return [c for c in value if | |
65 | not (c in seen or seen.append(c))] |
|
64 | not (c in seen or seen.append(c))] | |
66 |
|
65 | |||
67 | if isinstance(value, list): |
|
66 | if isinstance(value, list): | |
68 | ret_val = make_unique(value) |
|
67 | ret_val = make_unique(value) | |
69 | elif isinstance(value, set): |
|
68 | elif isinstance(value, set): | |
70 | ret_val = list(value) |
|
69 | ret_val = list(value) | |
71 | elif isinstance(value, tuple): |
|
70 | elif isinstance(value, tuple): | |
72 | ret_val = make_unique(value) |
|
71 | ret_val = make_unique(value) | |
73 | elif value is None: |
|
72 | elif value is None: | |
74 | ret_val = [] |
|
73 | ret_val = [] | |
75 | else: |
|
74 | else: | |
76 | ret_val = [value] |
|
75 | ret_val = [value] | |
77 |
|
76 | |||
78 | return ret_val |
|
77 | return ret_val | |
79 |
|
78 | |||
80 |
|
79 | |||
81 | def unique_list_from_str_preparer(value): |
|
80 | def unique_list_from_str_preparer(value): | |
82 | """ |
|
81 | """ | |
83 | Converts an list to a list with only unique values |
|
82 | Converts an list to a list with only unique values | |
84 | """ |
|
83 | """ | |
85 | from rhodecode.lib.utils2 import aslist |
|
84 | from rhodecode.lib.utils2 import aslist | |
86 |
|
85 | |||
87 | if isinstance(value, basestring): |
|
86 | if isinstance(value, basestring): | |
88 | value = aslist(value, ',') |
|
87 | value = aslist(value, ',') | |
89 | return unique_list_preparer(value) No newline at end of file |
|
88 | return unique_list_preparer(value) |
@@ -1,27 +1,321 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2016-2016 RhodeCode GmbH |
|
3 | # Copyright (C) 2016-2016 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import colander |
|
21 | import colander | |
22 |
|
22 | |||
|
23 | from rhodecode.translation import _ | |||
23 | from rhodecode.model.validation_schema import validators, preparers, types |
|
24 | from rhodecode.model.validation_schema import validators, preparers, types | |
24 |
|
25 | |||
|
26 | DEFAULT_LANDING_REF = 'rev:tip' | |||
|
27 | ||||
|
28 | ||||
|
29 | def get_group_and_repo(repo_name): | |||
|
30 | from rhodecode.model.repo_group import RepoGroupModel | |||
|
31 | return RepoGroupModel()._get_group_name_and_parent( | |||
|
32 | repo_name, get_object=True) | |||
|
33 | ||||
|
34 | ||||
|
35 | @colander.deferred | |||
|
36 | def deferred_repo_type_validator(node, kw): | |||
|
37 | options = kw.get('repo_type_options', []) | |||
|
38 | return colander.OneOf([x for x in options]) | |||
|
39 | ||||
|
40 | ||||
|
41 | @colander.deferred | |||
|
42 | def deferred_repo_owner_validator(node, kw): | |||
|
43 | ||||
|
44 | def repo_owner_validator(node, value): | |||
|
45 | from rhodecode.model.db import User | |||
|
46 | existing = User.get_by_username(value) | |||
|
47 | if not existing: | |||
|
48 | msg = _(u'Repo owner with id `{}` does not exists').format(value) | |||
|
49 | raise colander.Invalid(node, msg) | |||
|
50 | ||||
|
51 | return repo_owner_validator | |||
|
52 | ||||
|
53 | ||||
|
54 | @colander.deferred | |||
|
55 | def deferred_landing_ref_validator(node, kw): | |||
|
56 | options = kw.get('repo_ref_options', [DEFAULT_LANDING_REF]) | |||
|
57 | return colander.OneOf([x for x in options]) | |||
|
58 | ||||
|
59 | ||||
|
60 | @colander.deferred | |||
|
61 | def deferred_fork_of_validator(node, kw): | |||
|
62 | old_values = kw.get('old_values') or {} | |||
|
63 | ||||
|
64 | def fork_of_validator(node, value): | |||
|
65 | from rhodecode.model.db import Repository, RepoGroup | |||
|
66 | existing = Repository.get_by_repo_name(value) | |||
|
67 | if not existing: | |||
|
68 | msg = _(u'Fork with id `{}` does not exists').format(value) | |||
|
69 | raise colander.Invalid(node, msg) | |||
|
70 | elif old_values['repo_name'] == existing.repo_name: | |||
|
71 | msg = _(u'Cannot set fork of ' | |||
|
72 | u'parameter of this repository to itself').format(value) | |||
|
73 | raise colander.Invalid(node, msg) | |||
|
74 | ||||
|
75 | return fork_of_validator | |||
|
76 | ||||
|
77 | ||||
|
78 | @colander.deferred | |||
|
79 | def deferred_can_write_to_group_validator(node, kw): | |||
|
80 | request_user = kw.get('user') | |||
|
81 | old_values = kw.get('old_values') or {} | |||
|
82 | ||||
|
83 | def can_write_to_group_validator(node, value): | |||
|
84 | """ | |||
|
85 | Checks if given repo path is writable by user. This includes checks if | |||
|
86 | user is allowed to create repositories under root path or under | |||
|
87 | repo group paths | |||
|
88 | """ | |||
|
89 | ||||
|
90 | from rhodecode.lib.auth import ( | |||
|
91 | HasPermissionAny, HasRepoGroupPermissionAny) | |||
|
92 | from rhodecode.model.repo_group import RepoGroupModel | |||
|
93 | ||||
|
94 | messages = { | |||
|
95 | 'invalid_repo_group': | |||
|
96 | _(u"Repository group `{}` does not exist"), | |||
|
97 | # permissions denied we expose as not existing, to prevent | |||
|
98 | # resource discovery | |||
|
99 | 'permission_denied': | |||
|
100 | _(u"Repository group `{}` does not exist"), | |||
|
101 | 'permission_denied_root': | |||
|
102 | _(u"You do not have the permission to store " | |||
|
103 | u"repositories in the root location.") | |||
|
104 | } | |||
|
105 | ||||
|
106 | value = value['repo_group_name'] | |||
|
107 | ||||
|
108 | is_root_location = value is types.RootLocation | |||
|
109 | # NOT initialized validators, we must call them | |||
|
110 | can_create_repos_at_root = HasPermissionAny( | |||
|
111 | 'hg.admin', 'hg.create.repository') | |||
|
112 | ||||
|
113 | # if values is root location, we simply need to check if we can write | |||
|
114 | # to root location ! | |||
|
115 | if is_root_location: | |||
|
116 | if can_create_repos_at_root(user=request_user): | |||
|
117 | # we can create repo group inside tool-level. No more checks | |||
|
118 | # are required | |||
|
119 | return | |||
|
120 | else: | |||
|
121 | # "fake" node name as repo_name, otherwise we oddly report | |||
|
122 | # the error as if it was coming form repo_group | |||
|
123 | # however repo_group is empty when using root location. | |||
|
124 | node.name = 'repo_name' | |||
|
125 | raise colander.Invalid(node, messages['permission_denied_root']) | |||
|
126 | ||||
|
127 | # parent group not exists ? throw an error | |||
|
128 | repo_group = RepoGroupModel().get_by_group_name(value) | |||
|
129 | if value and not repo_group: | |||
|
130 | raise colander.Invalid( | |||
|
131 | node, messages['invalid_repo_group'].format(value)) | |||
|
132 | ||||
|
133 | gr_name = repo_group.group_name | |||
|
134 | ||||
|
135 | # create repositories with write permission on group is set to true | |||
|
136 | create_on_write = HasPermissionAny( | |||
|
137 | 'hg.create.write_on_repogroup.true')(user=request_user) | |||
|
138 | ||||
|
139 | group_admin = HasRepoGroupPermissionAny('group.admin')( | |||
|
140 | gr_name, 'can write into group validator', user=request_user) | |||
|
141 | group_write = HasRepoGroupPermissionAny('group.write')( | |||
|
142 | gr_name, 'can write into group validator', user=request_user) | |||
|
143 | ||||
|
144 | forbidden = not (group_admin or (group_write and create_on_write)) | |||
|
145 | ||||
|
146 | # TODO: handling of old values, and detecting no-change in path | |||
|
147 | # to skip permission checks in such cases. This only needs to be | |||
|
148 | # implemented if we use this schema in forms as well | |||
|
149 | ||||
|
150 | # gid = (old_data['repo_group'].get('group_id') | |||
|
151 | # if (old_data and 'repo_group' in old_data) else None) | |||
|
152 | # value_changed = gid != safe_int(value) | |||
|
153 | # new = not old_data | |||
|
154 | ||||
|
155 | # do check if we changed the value, there's a case that someone got | |||
|
156 | # revoked write permissions to a repository, he still created, we | |||
|
157 | # don't need to check permission if he didn't change the value of | |||
|
158 | # groups in form box | |||
|
159 | # if value_changed or new: | |||
|
160 | # # parent group need to be existing | |||
|
161 | # TODO: ENDS HERE | |||
|
162 | ||||
|
163 | if repo_group and forbidden: | |||
|
164 | msg = messages['permission_denied'].format(value) | |||
|
165 | raise colander.Invalid(node, msg) | |||
|
166 | ||||
|
167 | return can_write_to_group_validator | |||
|
168 | ||||
25 |
|
169 | |||
26 | class RepoSchema(colander.Schema): |
|
170 | @colander.deferred | |
27 | repo_name = colander.SchemaNode(types.GroupNameType()) |
|
171 | def deferred_unique_name_validator(node, kw): | |
|
172 | request_user = kw.get('user') | |||
|
173 | old_values = kw.get('old_values') or {} | |||
|
174 | ||||
|
175 | def unique_name_validator(node, value): | |||
|
176 | from rhodecode.model.db import Repository, RepoGroup | |||
|
177 | name_changed = value != old_values.get('repo_name') | |||
|
178 | ||||
|
179 | existing = Repository.get_by_repo_name(value) | |||
|
180 | if name_changed and existing: | |||
|
181 | msg = _(u'Repository with name `{}` already exists').format(value) | |||
|
182 | raise colander.Invalid(node, msg) | |||
|
183 | ||||
|
184 | existing_group = RepoGroup.get_by_group_name(value) | |||
|
185 | if name_changed and existing_group: | |||
|
186 | msg = _(u'Repository group with name `{}` already exists').format( | |||
|
187 | value) | |||
|
188 | raise colander.Invalid(node, msg) | |||
|
189 | return unique_name_validator | |||
|
190 | ||||
|
191 | ||||
|
192 | @colander.deferred | |||
|
193 | def deferred_repo_name_validator(node, kw): | |||
|
194 | return validators.valid_name_validator | |||
|
195 | ||||
|
196 | ||||
|
197 | class GroupType(colander.Mapping): | |||
|
198 | def _validate(self, node, value): | |||
|
199 | try: | |||
|
200 | return dict(repo_group_name=value) | |||
|
201 | except Exception as e: | |||
|
202 | raise colander.Invalid( | |||
|
203 | node, '"${val}" is not a mapping type: ${err}'.format( | |||
|
204 | val=value, err=e)) | |||
|
205 | ||||
|
206 | def deserialize(self, node, cstruct): | |||
|
207 | if cstruct is colander.null: | |||
|
208 | return cstruct | |||
|
209 | ||||
|
210 | appstruct = super(GroupType, self).deserialize(node, cstruct) | |||
|
211 | validated_name = appstruct['repo_group_name'] | |||
|
212 | ||||
|
213 | # inject group based on once deserialized data | |||
|
214 | (repo_name_without_group, | |||
|
215 | parent_group_name, | |||
|
216 | parent_group) = get_group_and_repo(validated_name) | |||
|
217 | ||||
|
218 | appstruct['repo_name_without_group'] = repo_name_without_group | |||
|
219 | appstruct['repo_group_name'] = parent_group_name or types.RootLocation | |||
|
220 | if parent_group: | |||
|
221 | appstruct['repo_group_id'] = parent_group.group_id | |||
|
222 | ||||
|
223 | return appstruct | |||
|
224 | ||||
|
225 | ||||
|
226 | class GroupSchema(colander.SchemaNode): | |||
|
227 | schema_type = GroupType | |||
|
228 | validator = deferred_can_write_to_group_validator | |||
|
229 | missing = colander.null | |||
|
230 | ||||
|
231 | ||||
|
232 | class RepoGroup(GroupSchema): | |||
|
233 | repo_group_name = colander.SchemaNode( | |||
|
234 | types.GroupNameType()) | |||
|
235 | repo_group_id = colander.SchemaNode( | |||
|
236 | colander.String(), missing=None) | |||
|
237 | repo_name_without_group = colander.SchemaNode( | |||
|
238 | colander.String(), missing=None) | |||
|
239 | ||||
|
240 | ||||
|
241 | class RepoGroupAccessSchema(colander.MappingSchema): | |||
|
242 | repo_group = RepoGroup() | |||
|
243 | ||||
|
244 | ||||
|
245 | class RepoNameUniqueSchema(colander.MappingSchema): | |||
|
246 | unique_repo_name = colander.SchemaNode( | |||
|
247 | colander.String(), | |||
|
248 | validator=deferred_unique_name_validator) | |||
|
249 | ||||
|
250 | ||||
|
251 | class RepoSchema(colander.MappingSchema): | |||
|
252 | ||||
|
253 | repo_name = colander.SchemaNode( | |||
|
254 | types.RepoNameType(), | |||
|
255 | validator=deferred_repo_name_validator) | |||
|
256 | ||||
|
257 | repo_type = colander.SchemaNode( | |||
|
258 | colander.String(), | |||
|
259 | validator=deferred_repo_type_validator) | |||
|
260 | ||||
|
261 | repo_owner = colander.SchemaNode( | |||
|
262 | colander.String(), | |||
|
263 | validator=deferred_repo_owner_validator) | |||
|
264 | ||||
|
265 | repo_description = colander.SchemaNode( | |||
|
266 | colander.String(), missing='') | |||
|
267 | ||||
|
268 | repo_landing_commit_ref = colander.SchemaNode( | |||
|
269 | colander.String(), | |||
|
270 | validator=deferred_landing_ref_validator, | |||
|
271 | preparers=[preparers.strip_preparer], | |||
|
272 | missing=DEFAULT_LANDING_REF) | |||
|
273 | ||||
|
274 | repo_clone_uri = colander.SchemaNode( | |||
|
275 | colander.String(), | |||
|
276 | validator=colander.All(colander.Length(min=1)), | |||
|
277 | preparers=[preparers.strip_preparer], | |||
|
278 | missing='') | |||
|
279 | ||||
|
280 | repo_fork_of = colander.SchemaNode( | |||
|
281 | colander.String(), | |||
|
282 | validator=deferred_fork_of_validator, | |||
|
283 | missing=None) | |||
|
284 | ||||
|
285 | repo_private = colander.SchemaNode( | |||
|
286 | types.StringBooleanType(), | |||
|
287 | missing=False) | |||
|
288 | repo_copy_permissions = colander.SchemaNode( | |||
|
289 | types.StringBooleanType(), | |||
|
290 | missing=False) | |||
|
291 | repo_enable_statistics = colander.SchemaNode( | |||
|
292 | types.StringBooleanType(), | |||
|
293 | missing=False) | |||
|
294 | repo_enable_downloads = colander.SchemaNode( | |||
|
295 | types.StringBooleanType(), | |||
|
296 | missing=False) | |||
|
297 | repo_enable_locking = colander.SchemaNode( | |||
|
298 | types.StringBooleanType(), | |||
|
299 | missing=False) | |||
|
300 | ||||
|
301 | def deserialize(self, cstruct): | |||
|
302 | """ | |||
|
303 | Custom deserialize that allows to chain validation, and verify | |||
|
304 | permissions, and as last step uniqueness | |||
|
305 | """ | |||
|
306 | ||||
|
307 | # first pass, to validate given data | |||
|
308 | appstruct = super(RepoSchema, self).deserialize(cstruct) | |||
|
309 | validated_name = appstruct['repo_name'] | |||
|
310 | ||||
|
311 | # second pass to validate permissions to repo_group | |||
|
312 | second = RepoGroupAccessSchema().bind(**self.bindings) | |||
|
313 | appstruct_second = second.deserialize({'repo_group': validated_name}) | |||
|
314 | # save result | |||
|
315 | appstruct['repo_group'] = appstruct_second['repo_group'] | |||
|
316 | ||||
|
317 | # thirds to validate uniqueness | |||
|
318 | third = RepoNameUniqueSchema().bind(**self.bindings) | |||
|
319 | third.deserialize({'unique_repo_name': validated_name}) | |||
|
320 | ||||
|
321 | return appstruct |
@@ -1,44 +1,43 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2016-2016 RhodeCode GmbH |
|
3 | # Copyright (C) 2016-2016 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 |
|
21 | |||
22 | import colander |
|
22 | import colander | |
23 |
|
23 | |||
24 |
|
24 | |||
25 | class SearchParamsSchema(colander.MappingSchema): |
|
25 | class SearchParamsSchema(colander.MappingSchema): | |
26 | search_query = colander.SchemaNode( |
|
26 | search_query = colander.SchemaNode( | |
27 | colander.String(), |
|
27 | colander.String(), | |
28 | missing='') |
|
28 | missing='') | |
29 | search_type = colander.SchemaNode( |
|
29 | search_type = colander.SchemaNode( | |
30 | colander.String(), |
|
30 | colander.String(), | |
31 | missing='content', |
|
31 | missing='content', | |
32 | validator=colander.OneOf(['content', 'path', 'commit', 'repository'])) |
|
32 | validator=colander.OneOf(['content', 'path', 'commit', 'repository'])) | |
33 | search_sort = colander.SchemaNode( |
|
33 | search_sort = colander.SchemaNode( | |
34 | colander.String(), |
|
34 | colander.String(), | |
35 | missing='newfirst', |
|
35 | missing='newfirst', | |
36 | validator=colander.OneOf( |
|
36 | validator=colander.OneOf(['oldfirst', 'newfirst'])) | |
37 | ['oldfirst', 'newfirst'])) |
|
|||
38 | page_limit = colander.SchemaNode( |
|
37 | page_limit = colander.SchemaNode( | |
39 | colander.Integer(), |
|
38 | colander.Integer(), | |
40 | missing=10, |
|
39 | missing=10, | |
41 | validator=colander.Range(1, 500)) |
|
40 | validator=colander.Range(1, 500)) | |
42 | requested_page = colander.SchemaNode( |
|
41 | requested_page = colander.SchemaNode( | |
43 | colander.Integer(), |
|
42 | colander.Integer(), | |
44 | missing=1) |
|
43 | missing=1) |
@@ -1,137 +1,188 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2016-2016 RhodeCode GmbH |
|
3 | # Copyright (C) 2016-2016 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
|
21 | import re | |||
|
22 | ||||
21 | import colander |
|
23 | import colander | |
|
24 | from rhodecode.model.validation_schema import preparers | |||
|
25 | from rhodecode.model.db import User, UserGroup | |||
|
26 | ||||
|
27 | ||||
|
28 | class _RootLocation(object): | |||
|
29 | pass | |||
|
30 | ||||
|
31 | RootLocation = _RootLocation() | |||
|
32 | ||||
|
33 | ||||
|
34 | def _normalize(seperator, path): | |||
|
35 | ||||
|
36 | if not path: | |||
|
37 | return '' | |||
|
38 | elif path is colander.null: | |||
|
39 | return colander.null | |||
|
40 | ||||
|
41 | parts = path.split(seperator) | |||
22 |
|
42 | |||
23 | from rhodecode.model.db import User, UserGroup |
|
43 | def bad_parts(value): | |
|
44 | if not value: | |||
|
45 | return False | |||
|
46 | if re.match(r'^[.]+$', value): | |||
|
47 | return False | |||
|
48 | ||||
|
49 | return True | |||
|
50 | ||||
|
51 | def slugify(value): | |||
|
52 | value = preparers.slugify_preparer(value) | |||
|
53 | value = re.sub(r'[.]{2,}', '.', value) | |||
|
54 | return value | |||
|
55 | ||||
|
56 | clean_parts = [slugify(item) for item in parts if item] | |||
|
57 | path = filter(bad_parts, clean_parts) | |||
|
58 | return seperator.join(path) | |||
|
59 | ||||
|
60 | ||||
|
61 | class RepoNameType(colander.String): | |||
|
62 | SEPARATOR = '/' | |||
|
63 | ||||
|
64 | def deserialize(self, node, cstruct): | |||
|
65 | result = super(RepoNameType, self).deserialize(node, cstruct) | |||
|
66 | if cstruct is colander.null: | |||
|
67 | return colander.null | |||
|
68 | return self._normalize(result) | |||
|
69 | ||||
|
70 | def _normalize(self, path): | |||
|
71 | return _normalize(self.SEPARATOR, path) | |||
24 |
|
72 | |||
25 |
|
73 | |||
26 | class GroupNameType(colander.String): |
|
74 | class GroupNameType(colander.String): | |
27 | SEPARATOR = '/' |
|
75 | SEPARATOR = '/' | |
28 |
|
76 | |||
29 | def deserialize(self, node, cstruct): |
|
77 | def deserialize(self, node, cstruct): | |
30 | result = super(GroupNameType, self).deserialize(node, cstruct) |
|
78 | if cstruct is RootLocation: | |
31 | return self._replace_extra_slashes(result) |
|
79 | return cstruct | |
32 |
|
80 | |||
33 | def _replace_extra_slashes(self, path): |
|
81 | result = super(GroupNameType, self).deserialize(node, cstruct) | |
34 | path = path.split(self.SEPARATOR) |
|
82 | if cstruct is colander.null: | |
35 | path = [item for item in path if item] |
|
83 | return colander.null | |
36 |
return self. |
|
84 | return self._normalize(result) | |
|
85 | ||||
|
86 | def _normalize(self, path): | |||
|
87 | return _normalize(self.SEPARATOR, path) | |||
37 |
|
88 | |||
38 |
|
89 | |||
39 | class StringBooleanType(colander.String): |
|
90 | class StringBooleanType(colander.String): | |
40 | true_values = ['true', 't', 'yes', 'y', 'on', '1'] |
|
91 | true_values = ['true', 't', 'yes', 'y', 'on', '1'] | |
41 | false_values = ['false', 'f', 'no', 'n', 'off', '0'] |
|
92 | false_values = ['false', 'f', 'no', 'n', 'off', '0'] | |
42 |
|
93 | |||
43 | def serialize(self, node, appstruct): |
|
94 | def serialize(self, node, appstruct): | |
44 | if appstruct is colander.null: |
|
95 | if appstruct is colander.null: | |
45 | return colander.null |
|
96 | return colander.null | |
46 | if not isinstance(appstruct, bool): |
|
97 | if not isinstance(appstruct, bool): | |
47 | raise colander.Invalid(node, '%r is not a boolean' % appstruct) |
|
98 | raise colander.Invalid(node, '%r is not a boolean' % appstruct) | |
48 |
|
99 | |||
49 | return appstruct and 'true' or 'false' |
|
100 | return appstruct and 'true' or 'false' | |
50 |
|
101 | |||
51 | def deserialize(self, node, cstruct): |
|
102 | def deserialize(self, node, cstruct): | |
52 | if cstruct is colander.null: |
|
103 | if cstruct is colander.null: | |
53 | return colander.null |
|
104 | return colander.null | |
54 |
|
105 | |||
55 | if isinstance(cstruct, bool): |
|
106 | if isinstance(cstruct, bool): | |
56 | return cstruct |
|
107 | return cstruct | |
57 |
|
108 | |||
58 | if not isinstance(cstruct, basestring): |
|
109 | if not isinstance(cstruct, basestring): | |
59 | raise colander.Invalid(node, '%r is not a string' % cstruct) |
|
110 | raise colander.Invalid(node, '%r is not a string' % cstruct) | |
60 |
|
111 | |||
61 | value = cstruct.lower() |
|
112 | value = cstruct.lower() | |
62 | if value in self.true_values: |
|
113 | if value in self.true_values: | |
63 | return True |
|
114 | return True | |
64 | elif value in self.false_values: |
|
115 | elif value in self.false_values: | |
65 | return False |
|
116 | return False | |
66 | else: |
|
117 | else: | |
67 | raise colander.Invalid( |
|
118 | raise colander.Invalid( | |
68 | node, '{} value cannot be translated to bool'.format(value)) |
|
119 | node, '{} value cannot be translated to bool'.format(value)) | |
69 |
|
120 | |||
70 |
|
121 | |||
71 | class UserOrUserGroupType(colander.SchemaType): |
|
122 | class UserOrUserGroupType(colander.SchemaType): | |
72 | """ colander Schema type for valid rhodecode user and/or usergroup """ |
|
123 | """ colander Schema type for valid rhodecode user and/or usergroup """ | |
73 | scopes = ('user', 'usergroup') |
|
124 | scopes = ('user', 'usergroup') | |
74 |
|
125 | |||
75 | def __init__(self): |
|
126 | def __init__(self): | |
76 | self.users = 'user' in self.scopes |
|
127 | self.users = 'user' in self.scopes | |
77 | self.usergroups = 'usergroup' in self.scopes |
|
128 | self.usergroups = 'usergroup' in self.scopes | |
78 |
|
129 | |||
79 | def serialize(self, node, appstruct): |
|
130 | def serialize(self, node, appstruct): | |
80 | if appstruct is colander.null: |
|
131 | if appstruct is colander.null: | |
81 | return colander.null |
|
132 | return colander.null | |
82 |
|
133 | |||
83 | if self.users: |
|
134 | if self.users: | |
84 | if isinstance(appstruct, User): |
|
135 | if isinstance(appstruct, User): | |
85 | if self.usergroups: |
|
136 | if self.usergroups: | |
86 | return 'user:%s' % appstruct.username |
|
137 | return 'user:%s' % appstruct.username | |
87 | return appstruct.username |
|
138 | return appstruct.username | |
88 |
|
139 | |||
89 | if self.usergroups: |
|
140 | if self.usergroups: | |
90 | if isinstance(appstruct, UserGroup): |
|
141 | if isinstance(appstruct, UserGroup): | |
91 | if self.users: |
|
142 | if self.users: | |
92 | return 'usergroup:%s' % appstruct.users_group_name |
|
143 | return 'usergroup:%s' % appstruct.users_group_name | |
93 | return appstruct.users_group_name |
|
144 | return appstruct.users_group_name | |
94 |
|
145 | |||
95 | raise colander.Invalid( |
|
146 | raise colander.Invalid( | |
96 | node, '%s is not a valid %s' % (appstruct, ' or '.join(self.scopes))) |
|
147 | node, '%s is not a valid %s' % (appstruct, ' or '.join(self.scopes))) | |
97 |
|
148 | |||
98 | def deserialize(self, node, cstruct): |
|
149 | def deserialize(self, node, cstruct): | |
99 | if cstruct is colander.null: |
|
150 | if cstruct is colander.null: | |
100 | return colander.null |
|
151 | return colander.null | |
101 |
|
152 | |||
102 | user, usergroup = None, None |
|
153 | user, usergroup = None, None | |
103 | if self.users: |
|
154 | if self.users: | |
104 | if cstruct.startswith('user:'): |
|
155 | if cstruct.startswith('user:'): | |
105 | user = User.get_by_username(cstruct.split(':')[1]) |
|
156 | user = User.get_by_username(cstruct.split(':')[1]) | |
106 | else: |
|
157 | else: | |
107 | user = User.get_by_username(cstruct) |
|
158 | user = User.get_by_username(cstruct) | |
108 |
|
159 | |||
109 | if self.usergroups: |
|
160 | if self.usergroups: | |
110 | if cstruct.startswith('usergroup:'): |
|
161 | if cstruct.startswith('usergroup:'): | |
111 | usergroup = UserGroup.get_by_group_name(cstruct.split(':')[1]) |
|
162 | usergroup = UserGroup.get_by_group_name(cstruct.split(':')[1]) | |
112 | else: |
|
163 | else: | |
113 | usergroup = UserGroup.get_by_group_name(cstruct) |
|
164 | usergroup = UserGroup.get_by_group_name(cstruct) | |
114 |
|
165 | |||
115 | if self.users and self.usergroups: |
|
166 | if self.users and self.usergroups: | |
116 | if user and usergroup: |
|
167 | if user and usergroup: | |
117 | raise colander.Invalid(node, ( |
|
168 | raise colander.Invalid(node, ( | |
118 | '%s is both a user and usergroup, specify which ' |
|
169 | '%s is both a user and usergroup, specify which ' | |
119 | 'one was wanted by prepending user: or usergroup: to the ' |
|
170 | 'one was wanted by prepending user: or usergroup: to the ' | |
120 | 'name') % cstruct) |
|
171 | 'name') % cstruct) | |
121 |
|
172 | |||
122 | if self.users and user: |
|
173 | if self.users and user: | |
123 | return user |
|
174 | return user | |
124 |
|
175 | |||
125 | if self.usergroups and usergroup: |
|
176 | if self.usergroups and usergroup: | |
126 | return usergroup |
|
177 | return usergroup | |
127 |
|
178 | |||
128 | raise colander.Invalid( |
|
179 | raise colander.Invalid( | |
129 | node, '%s is not a valid %s' % (cstruct, ' or '.join(self.scopes))) |
|
180 | node, '%s is not a valid %s' % (cstruct, ' or '.join(self.scopes))) | |
130 |
|
181 | |||
131 |
|
182 | |||
132 | class UserType(UserOrUserGroupType): |
|
183 | class UserType(UserOrUserGroupType): | |
133 | scopes = ('user',) |
|
184 | scopes = ('user',) | |
134 |
|
185 | |||
135 |
|
186 | |||
136 | class UserGroupType(UserOrUserGroupType): |
|
187 | class UserGroupType(UserOrUserGroupType): | |
137 | scopes = ('usergroup',) |
|
188 | scopes = ('usergroup',) |
@@ -1,38 +1,48 b'' | |||||
1 | import os |
|
1 | import os | |
2 | import re |
|
2 | import re | |
3 |
|
3 | |||
4 | import ipaddress |
|
4 | import ipaddress | |
5 | import colander |
|
5 | import colander | |
6 |
|
6 | |||
7 | from rhodecode.translation import _ |
|
7 | from rhodecode.translation import _ | |
8 | from rhodecode.lib.utils2 import glob2re |
|
8 | from rhodecode.lib.utils2 import glob2re | |
9 |
|
9 | |||
10 |
|
10 | |||
11 | def ip_addr_validator(node, value): |
|
11 | def ip_addr_validator(node, value): | |
12 | try: |
|
12 | try: | |
13 | # this raises an ValueError if address is not IpV4 or IpV6 |
|
13 | # this raises an ValueError if address is not IpV4 or IpV6 | |
14 | ipaddress.ip_network(value, strict=False) |
|
14 | ipaddress.ip_network(value, strict=False) | |
15 | except ValueError: |
|
15 | except ValueError: | |
16 | msg = _(u'Please enter a valid IPv4 or IpV6 address') |
|
16 | msg = _(u'Please enter a valid IPv4 or IpV6 address') | |
17 | raise colander.Invalid(node, msg) |
|
17 | raise colander.Invalid(node, msg) | |
18 |
|
18 | |||
19 |
|
19 | |||
20 | class IpAddrValidator(object): |
|
20 | class IpAddrValidator(object): | |
21 | def __init__(self, strict=True): |
|
21 | def __init__(self, strict=True): | |
22 | self.strict = strict |
|
22 | self.strict = strict | |
23 |
|
23 | |||
24 | def __call__(self, node, value): |
|
24 | def __call__(self, node, value): | |
25 | try: |
|
25 | try: | |
26 | # this raises an ValueError if address is not IpV4 or IpV6 |
|
26 | # this raises an ValueError if address is not IpV4 or IpV6 | |
27 | ipaddress.ip_network(value, strict=self.strict) |
|
27 | ipaddress.ip_network(value, strict=self.strict) | |
28 | except ValueError: |
|
28 | except ValueError: | |
29 | msg = _(u'Please enter a valid IPv4 or IpV6 address') |
|
29 | msg = _(u'Please enter a valid IPv4 or IpV6 address') | |
30 | raise colander.Invalid(node, msg) |
|
30 | raise colander.Invalid(node, msg) | |
31 |
|
31 | |||
32 |
|
32 | |||
33 | def glob_validator(node, value): |
|
33 | def glob_validator(node, value): | |
34 | try: |
|
34 | try: | |
35 | re.compile('^' + glob2re(value) + '$') |
|
35 | re.compile('^' + glob2re(value) + '$') | |
36 | except Exception: |
|
36 | except Exception: | |
37 | msg = _(u'Invalid glob pattern') |
|
37 | msg = _(u'Invalid glob pattern') | |
38 | raise colander.Invalid(node, msg) |
|
38 | raise colander.Invalid(node, msg) | |
|
39 | ||||
|
40 | ||||
|
41 | def valid_name_validator(node, value): | |||
|
42 | from rhodecode.model.validation_schema import types | |||
|
43 | if value is types.RootLocation: | |||
|
44 | return | |||
|
45 | ||||
|
46 | msg = _('Name must start with a letter or number. Got `{}`').format(value) | |||
|
47 | if not re.match(r'^[a-zA-z0-9]{1,}', value): | |||
|
48 | raise colander.Invalid(node, msg) |
@@ -1,171 +1,167 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2016-2016 RhodeCode GmbH |
|
3 | # Copyright (C) 2016-2016 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import colander |
|
21 | import colander | |
22 | import pytest |
|
22 | import pytest | |
23 |
|
23 | |||
24 | from rhodecode.model import validation_schema |
|
|||
25 |
|
||||
26 | from rhodecode.integrations import integration_type_registry |
|
24 | from rhodecode.integrations import integration_type_registry | |
27 | from rhodecode.integrations.types.base import IntegrationTypeBase |
|
25 | from rhodecode.integrations.types.base import IntegrationTypeBase | |
28 | from rhodecode.model.validation_schema.schemas.integration_schema import ( |
|
26 | from rhodecode.model.validation_schema.schemas.integration_schema import ( | |
29 | make_integration_schema |
|
27 | make_integration_schema | |
30 | ) |
|
28 | ) | |
31 |
|
29 | |||
32 |
|
30 | |||
33 | @pytest.mark.usefixtures('app', 'autologin_user') |
|
31 | @pytest.mark.usefixtures('app', 'autologin_user') | |
34 | class TestIntegrationSchema(object): |
|
32 | class TestIntegrationSchema(object): | |
35 |
|
33 | |||
36 |
def test_deserialize_integration_schema_perms( |
|
34 | def test_deserialize_integration_schema_perms( | |
37 | test_repo_group, |
|
35 | self, backend_random, test_repo_group, StubIntegrationType): | |
38 | StubIntegrationType): |
|
|||
39 |
|
36 | |||
40 | repo = backend_random.repo |
|
37 | repo = backend_random.repo | |
41 | repo_group = test_repo_group |
|
38 | repo_group = test_repo_group | |
42 |
|
39 | |||
43 |
|
||||
44 | empty_perms_dict = { |
|
40 | empty_perms_dict = { | |
45 | 'global': [], |
|
41 | 'global': [], | |
46 | 'repositories': {}, |
|
42 | 'repositories': {}, | |
47 | 'repositories_groups': {}, |
|
43 | 'repositories_groups': {}, | |
48 | } |
|
44 | } | |
49 |
|
45 | |||
50 | perms_tests = [ |
|
46 | perms_tests = [ | |
51 | ( |
|
47 | ( | |
52 | 'repo:%s' % repo.repo_name, |
|
48 | 'repo:%s' % repo.repo_name, | |
53 | { |
|
49 | { | |
54 | 'child_repos_only': None, |
|
50 | 'child_repos_only': None, | |
55 | 'repo_group': None, |
|
51 | 'repo_group': None, | |
56 | 'repo': repo, |
|
52 | 'repo': repo, | |
57 | }, |
|
53 | }, | |
58 | [ |
|
54 | [ | |
59 | ({}, False), |
|
55 | ({}, False), | |
60 | ({'global': ['hg.admin']}, True), |
|
56 | ({'global': ['hg.admin']}, True), | |
61 | ({'global': []}, False), |
|
57 | ({'global': []}, False), | |
62 | ({'repositories': {repo.repo_name: 'repository.admin'}}, True), |
|
58 | ({'repositories': {repo.repo_name: 'repository.admin'}}, True), | |
63 | ({'repositories': {repo.repo_name: 'repository.read'}}, False), |
|
59 | ({'repositories': {repo.repo_name: 'repository.read'}}, False), | |
64 | ({'repositories': {repo.repo_name: 'repository.write'}}, False), |
|
60 | ({'repositories': {repo.repo_name: 'repository.write'}}, False), | |
65 | ({'repositories': {repo.repo_name: 'repository.none'}}, False), |
|
61 | ({'repositories': {repo.repo_name: 'repository.none'}}, False), | |
66 | ] |
|
62 | ] | |
67 | ), |
|
63 | ), | |
68 | ( |
|
64 | ( | |
69 | 'repogroup:%s' % repo_group.group_name, |
|
65 | 'repogroup:%s' % repo_group.group_name, | |
70 | { |
|
66 | { | |
71 | 'repo': None, |
|
67 | 'repo': None, | |
72 | 'repo_group': repo_group, |
|
68 | 'repo_group': repo_group, | |
73 | 'child_repos_only': True, |
|
69 | 'child_repos_only': True, | |
74 | }, |
|
70 | }, | |
75 | [ |
|
71 | [ | |
76 | ({}, False), |
|
72 | ({}, False), | |
77 | ({'global': ['hg.admin']}, True), |
|
73 | ({'global': ['hg.admin']}, True), | |
78 | ({'global': []}, False), |
|
74 | ({'global': []}, False), | |
79 | ({'repositories_groups': |
|
75 | ({'repositories_groups': | |
80 | {repo_group.group_name: 'group.admin'}}, True), |
|
76 | {repo_group.group_name: 'group.admin'}}, True), | |
81 | ({'repositories_groups': |
|
77 | ({'repositories_groups': | |
82 | {repo_group.group_name: 'group.read'}}, False), |
|
78 | {repo_group.group_name: 'group.read'}}, False), | |
83 | ({'repositories_groups': |
|
79 | ({'repositories_groups': | |
84 | {repo_group.group_name: 'group.write'}}, False), |
|
80 | {repo_group.group_name: 'group.write'}}, False), | |
85 | ({'repositories_groups': |
|
81 | ({'repositories_groups': | |
86 | {repo_group.group_name: 'group.none'}}, False), |
|
82 | {repo_group.group_name: 'group.none'}}, False), | |
87 | ] |
|
83 | ] | |
88 | ), |
|
84 | ), | |
89 | ( |
|
85 | ( | |
90 | 'repogroup-recursive:%s' % repo_group.group_name, |
|
86 | 'repogroup-recursive:%s' % repo_group.group_name, | |
91 | { |
|
87 | { | |
92 | 'repo': None, |
|
88 | 'repo': None, | |
93 | 'repo_group': repo_group, |
|
89 | 'repo_group': repo_group, | |
94 | 'child_repos_only': False, |
|
90 | 'child_repos_only': False, | |
95 | }, |
|
91 | }, | |
96 | [ |
|
92 | [ | |
97 | ({}, False), |
|
93 | ({}, False), | |
98 | ({'global': ['hg.admin']}, True), |
|
94 | ({'global': ['hg.admin']}, True), | |
99 | ({'global': []}, False), |
|
95 | ({'global': []}, False), | |
100 | ({'repositories_groups': |
|
96 | ({'repositories_groups': | |
101 | {repo_group.group_name: 'group.admin'}}, True), |
|
97 | {repo_group.group_name: 'group.admin'}}, True), | |
102 | ({'repositories_groups': |
|
98 | ({'repositories_groups': | |
103 | {repo_group.group_name: 'group.read'}}, False), |
|
99 | {repo_group.group_name: 'group.read'}}, False), | |
104 | ({'repositories_groups': |
|
100 | ({'repositories_groups': | |
105 | {repo_group.group_name: 'group.write'}}, False), |
|
101 | {repo_group.group_name: 'group.write'}}, False), | |
106 | ({'repositories_groups': |
|
102 | ({'repositories_groups': | |
107 | {repo_group.group_name: 'group.none'}}, False), |
|
103 | {repo_group.group_name: 'group.none'}}, False), | |
108 | ] |
|
104 | ] | |
109 | ), |
|
105 | ), | |
110 | ( |
|
106 | ( | |
111 | 'global', |
|
107 | 'global', | |
112 | { |
|
108 | { | |
113 | 'repo': None, |
|
109 | 'repo': None, | |
114 | 'repo_group': None, |
|
110 | 'repo_group': None, | |
115 | 'child_repos_only': False, |
|
111 | 'child_repos_only': False, | |
116 | }, [ |
|
112 | }, [ | |
117 | ({}, False), |
|
113 | ({}, False), | |
118 | ({'global': ['hg.admin']}, True), |
|
114 | ({'global': ['hg.admin']}, True), | |
119 | ({'global': []}, False), |
|
115 | ({'global': []}, False), | |
120 | ] |
|
116 | ] | |
121 | ), |
|
117 | ), | |
122 | ( |
|
118 | ( | |
123 | 'root-repos', |
|
119 | 'root-repos', | |
124 | { |
|
120 | { | |
125 | 'repo': None, |
|
121 | 'repo': None, | |
126 | 'repo_group': None, |
|
122 | 'repo_group': None, | |
127 | 'child_repos_only': True, |
|
123 | 'child_repos_only': True, | |
128 | }, [ |
|
124 | }, [ | |
129 | ({}, False), |
|
125 | ({}, False), | |
130 | ({'global': ['hg.admin']}, True), |
|
126 | ({'global': ['hg.admin']}, True), | |
131 | ({'global': []}, False), |
|
127 | ({'global': []}, False), | |
132 | ] |
|
128 | ] | |
133 | ), |
|
129 | ), | |
134 | ] |
|
130 | ] | |
135 |
|
131 | |||
136 | for scope_input, scope_output, perms_allowed in perms_tests: |
|
132 | for scope_input, scope_output, perms_allowed in perms_tests: | |
137 | for perms_update, allowed in perms_allowed: |
|
133 | for perms_update, allowed in perms_allowed: | |
138 | perms = dict(empty_perms_dict, **perms_update) |
|
134 | perms = dict(empty_perms_dict, **perms_update) | |
139 |
|
135 | |||
140 | schema = make_integration_schema( |
|
136 | schema = make_integration_schema( | |
141 | IntegrationType=StubIntegrationType |
|
137 | IntegrationType=StubIntegrationType | |
142 | ).bind(permissions=perms) |
|
138 | ).bind(permissions=perms) | |
143 |
|
139 | |||
144 | input_data = { |
|
140 | input_data = { | |
145 | 'options': { |
|
141 | 'options': { | |
146 | 'enabled': 'true', |
|
142 | 'enabled': 'true', | |
147 | 'scope': scope_input, |
|
143 | 'scope': scope_input, | |
148 | 'name': 'test integration', |
|
144 | 'name': 'test integration', | |
149 | }, |
|
145 | }, | |
150 | 'settings': { |
|
146 | 'settings': { | |
151 | 'test_string_field': 'stringy', |
|
147 | 'test_string_field': 'stringy', | |
152 | 'test_int_field': '100', |
|
148 | 'test_int_field': '100', | |
153 | } |
|
149 | } | |
154 | } |
|
150 | } | |
155 |
|
151 | |||
156 | if not allowed: |
|
152 | if not allowed: | |
157 | with pytest.raises(colander.Invalid): |
|
153 | with pytest.raises(colander.Invalid): | |
158 | schema.deserialize(input_data) |
|
154 | schema.deserialize(input_data) | |
159 | else: |
|
155 | else: | |
160 | assert schema.deserialize(input_data) == { |
|
156 | assert schema.deserialize(input_data) == { | |
161 | 'options': { |
|
157 | 'options': { | |
162 | 'enabled': True, |
|
158 | 'enabled': True, | |
163 | 'scope': scope_output, |
|
159 | 'scope': scope_output, | |
164 | 'name': 'test integration', |
|
160 | 'name': 'test integration', | |
165 | }, |
|
161 | }, | |
166 | 'settings': { |
|
162 | 'settings': { | |
167 | 'test_string_field': 'stringy', |
|
163 | 'test_string_field': 'stringy', | |
168 | 'test_int_field': 100, |
|
164 | 'test_int_field': 100, | |
169 | } |
|
165 | } | |
170 | } |
|
166 | } | |
171 |
|
167 |
@@ -1,46 +1,102 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2016-2016 RhodeCode GmbH |
|
3 | # Copyright (C) 2016-2016 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import colander |
|
21 | import colander | |
22 | import pytest |
|
22 | import pytest | |
23 |
|
23 | |||
24 |
from rhodecode.model.validation_schema.types import |
|
24 | from rhodecode.model.validation_schema.types import ( | |
|
25 | GroupNameType, RepoNameType, StringBooleanType) | |||
25 |
|
26 | |||
26 |
|
27 | |||
27 | class TestGroupNameType(object): |
|
28 | class TestGroupNameType(object): | |
28 | @pytest.mark.parametrize('given, expected', [ |
|
29 | @pytest.mark.parametrize('given, expected', [ | |
29 | ('//group1/group2//', 'group1/group2'), |
|
30 | ('//group1/group2//', 'group1/group2'), | |
30 | ('//group1///group2//', 'group1/group2'), |
|
31 | ('//group1///group2//', 'group1/group2'), | |
31 | ('group1/group2///group3', 'group1/group2/group3') |
|
32 | ('group1/group2///group3', 'group1/group2/group3'), | |
32 | ]) |
|
33 | ]) | |
33 | def test_replace_extra_slashes_cleans_up_extra_slashes( |
|
34 | def test_normalize_path(self, given, expected): | |
34 | self, given, expected): |
|
35 | result = GroupNameType()._normalize(given) | |
35 | type_ = GroupNameType() |
|
|||
36 | result = type_._replace_extra_slashes(given) |
|
|||
37 | assert result == expected |
|
36 | assert result == expected | |
38 |
|
37 | |||
39 | def test_deserialize_cleans_up_extra_slashes(self): |
|
38 | @pytest.mark.parametrize('given, expected', [ | |
|
39 | ('//group1/group2//', 'group1/group2'), | |||
|
40 | ('//group1///group2//', 'group1/group2'), | |||
|
41 | ('group1/group2///group3', 'group1/group2/group3'), | |||
|
42 | ('v1.2', 'v1.2'), | |||
|
43 | ('/v1.2', 'v1.2'), | |||
|
44 | ('.dirs', '.dirs'), | |||
|
45 | ('..dirs', '.dirs'), | |||
|
46 | ('./..dirs', '.dirs'), | |||
|
47 | ('dir/;name;/;[];/sub', 'dir/name/sub'), | |||
|
48 | (',/,/,d,,,', 'd'), | |||
|
49 | ('/;/#/,d,,,', 'd'), | |||
|
50 | ('long../../..name', 'long./.name'), | |||
|
51 | ('long../..name', 'long./.name'), | |||
|
52 | ('../', ''), | |||
|
53 | ('\'../"../', ''), | |||
|
54 | ('c,/,/..//./,c,,,/.d/../.........c', 'c/c/.d/.c'), | |||
|
55 | ('c,/,/..//./,c,,,', 'c/c'), | |||
|
56 | ('d../..d', 'd./.d'), | |||
|
57 | ('d../../d', 'd./d'), | |||
|
58 | ||||
|
59 | ('d\;\./\,\./d', 'd./d'), | |||
|
60 | ('d\.\./\.\./d', 'd./d'), | |||
|
61 | ('d\.\./\..\../d', 'd./d'), | |||
|
62 | ]) | |||
|
63 | def test_deserialize_clean_up_name(self, given, expected): | |||
40 | class TestSchema(colander.Schema): |
|
64 | class TestSchema(colander.Schema): | |
41 | field = colander.SchemaNode(GroupNameType()) |
|
65 | field_group = colander.SchemaNode(GroupNameType()) | |
|
66 | field_repo = colander.SchemaNode(RepoNameType()) | |||
42 |
|
67 | |||
43 | schema = TestSchema() |
|
68 | schema = TestSchema() | |
44 | cleaned_data = schema.deserialize( |
|
69 | cleaned_data = schema.deserialize({ | |
45 | {'field': '//group1/group2///group3//'}) |
|
70 | 'field_group': given, | |
46 | assert cleaned_data['field'] == 'group1/group2/group3' |
|
71 | 'field_repo': given | |
|
72 | }) | |||
|
73 | assert cleaned_data['field_group'] == expected | |||
|
74 | assert cleaned_data['field_repo'] == expected | |||
|
75 | ||||
|
76 | ||||
|
77 | class TestStringBooleanType(object): | |||
|
78 | ||||
|
79 | def _get_schema(self): | |||
|
80 | class Schema(colander.MappingSchema): | |||
|
81 | bools = colander.SchemaNode(StringBooleanType()) | |||
|
82 | return Schema() | |||
|
83 | ||||
|
84 | @pytest.mark.parametrize('given, expected', [ | |||
|
85 | ('1', True), | |||
|
86 | ('yEs', True), | |||
|
87 | ('true', True), | |||
|
88 | ||||
|
89 | ('0', False), | |||
|
90 | ('NO', False), | |||
|
91 | ('FALSE', False), | |||
|
92 | ||||
|
93 | ]) | |||
|
94 | def test_convert_type(self, given, expected): | |||
|
95 | schema = self._get_schema() | |||
|
96 | result = schema.deserialize({'bools':given}) | |||
|
97 | assert result['bools'] == expected | |||
|
98 | ||||
|
99 | def test_try_convert_bad_type(self): | |||
|
100 | schema = self._get_schema() | |||
|
101 | with pytest.raises(colander.Invalid): | |||
|
102 | result = schema.deserialize({'bools': 'boom'}) |
General Comments 0
You need to be logged in to leave comments.
Login now