observers: code cleanups and fixed tests.
marcink
r4519:ea50ffa9 stable
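
This commit wires pull-request observers through the API test suite: `PullRequestModel().create()` is now called with an `observers=()` argument, and the `update_pull_request` responses asserted below gain an `updated_observers` entry next to `updated_reviewers`. As a rough illustration only, inferred from the updated tests rather than from the API reference (the pull request id `42` and the empty `pull_request` dict are placeholders), the result shape the tests now expect looks like this:

    # Sketch of the `update_pull_request` result asserted by the updated tests.
    # `42` and the empty dict are placeholders, not values returned by RhodeCode.
    expected_update_result = {
        "msg": "Updated pull request `42`",
        "pull_request": {},
        "updated_commits": {"added": [], "common": [], "removed": []},
        "updated_reviewers": {"added": [], "removed": []},
        "updated_observers": {"added": [], "removed": []},
    }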
@@ -1,368 +1,368 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2020 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import pytest

from rhodecode.model.db import User
from rhodecode.model.pull_request import PullRequestModel
from rhodecode.model.repo import RepoModel
from rhodecode.model.user import UserModel
from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN
from rhodecode.api.tests.utils import build_data, api_call, assert_error


@pytest.mark.usefixtures("testuser_api", "app")
class TestCreatePullRequestApi(object):
    finalizers = []

    def teardown_method(self, method):
        if self.finalizers:
            for finalizer in self.finalizers:
                finalizer()
            self.finalizers = []

    def test_create_with_wrong_data(self):
        required_data = {
            'source_repo': 'tests/source_repo',
            'target_repo': 'tests/target_repo',
            'source_ref': 'branch:default:initial',
            'target_ref': 'branch:default:new-feature',
        }
        for key in required_data:
            data = required_data.copy()
            data.pop(key)
            id_, params = build_data(
                self.apikey, 'create_pull_request', **data)
            response = api_call(self.app, params)

            expected = 'Missing non optional `{}` arg in JSON DATA'.format(key)
            assert_error(id_, expected, given=response.body)

    @pytest.mark.backends("git", "hg")
    @pytest.mark.parametrize('source_ref', [
        'bookmarg:default:initial'
    ])
    def test_create_with_wrong_refs_data(self, backend, source_ref):

        data = self._prepare_data(backend)
        data['source_ref'] = source_ref

        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)

        response = api_call(self.app, params)

        expected = "Ref `{}` type is not allowed. " \
                   "Only:['bookmark', 'book', 'tag', 'branch'] " \
                   "are possible.".format(source_ref)
        assert_error(id_, expected, given=response.body)

    @pytest.mark.backends("git", "hg")
    def test_create_with_correct_data(self, backend):
        data = self._prepare_data(backend)
        RepoModel().revoke_user_permission(
            self.source.repo_name, User.DEFAULT_USER)
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        expected_message = "Created new pull request `{title}`".format(
            title=data['title'])
        result = response.json
        assert result['error'] is None
        assert result['result']['msg'] == expected_message
        pull_request_id = result['result']['pull_request_id']
        pull_request = PullRequestModel().get(pull_request_id)
        assert pull_request.title == data['title']
        assert pull_request.description == data['description']
        assert pull_request.source_ref == data['source_ref']
        assert pull_request.target_ref == data['target_ref']
        assert pull_request.source_repo.repo_name == data['source_repo']
        assert pull_request.target_repo.repo_name == data['target_repo']
        assert pull_request.revisions == [self.commit_ids['change']]
        assert len(pull_request.reviewers) == 1

    @pytest.mark.backends("git", "hg")
    def test_create_with_empty_description(self, backend):
        data = self._prepare_data(backend)
        data.pop('description')
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        expected_message = "Created new pull request `{title}`".format(
            title=data['title'])
        result = response.json
        assert result['error'] is None
        assert result['result']['msg'] == expected_message
        pull_request_id = result['result']['pull_request_id']
        pull_request = PullRequestModel().get(pull_request_id)
        assert pull_request.description == ''

    @pytest.mark.backends("git", "hg")
    def test_create_with_empty_title(self, backend):
        data = self._prepare_data(backend)
        data.pop('title')
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        result = response.json
        pull_request_id = result['result']['pull_request_id']
        pull_request = PullRequestModel().get(pull_request_id)
        data['ref'] = backend.default_branch_name
        title = '{source_repo}#{ref} to {target_repo}'.format(**data)
        assert pull_request.title == title

    @pytest.mark.backends("git", "hg")
    def test_create_with_reviewers_specified_by_names(
            self, backend, no_notifications):
        data = self._prepare_data(backend)
        reviewers = [
            {'username': TEST_USER_REGULAR_LOGIN,
             'reasons': ['{} added manually'.format(TEST_USER_REGULAR_LOGIN)]},
            {'username': TEST_USER_ADMIN_LOGIN,
             'reasons': ['{} added manually'.format(TEST_USER_ADMIN_LOGIN)],
             'mandatory': True},
        ]
        data['reviewers'] = reviewers

        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)

        expected_message = "Created new pull request `{title}`".format(
            title=data['title'])
        result = response.json
        assert result['error'] is None
        assert result['result']['msg'] == expected_message
        pull_request_id = result['result']['pull_request_id']
        pull_request = PullRequestModel().get(pull_request_id)

        actual_reviewers = []
        for rev in pull_request.reviewers:
            entry = {
                'username': rev.user.username,
                'reasons': rev.reasons,
            }
            if rev.mandatory:
                entry['mandatory'] = rev.mandatory
            actual_reviewers.append(entry)

        owner_username = pull_request.target_repo.user.username
        for spec_reviewer in reviewers[::]:
            # default reviewer will be added who is an owner of the repo
            # this get's overridden by a add owner to reviewers rule
            if spec_reviewer['username'] == owner_username:
                spec_reviewer['reasons'] = [u'Default reviewer', u'Repository owner']
                # since owner is more important, we don't inherit mandatory flag
                del spec_reviewer['mandatory']

        assert sorted(actual_reviewers, key=lambda e: e['username']) \
            == sorted(reviewers, key=lambda e: e['username'])

    @pytest.mark.backends("git", "hg")
    def test_create_with_reviewers_specified_by_ids(
            self, backend, no_notifications):
        data = self._prepare_data(backend)
        reviewers = [
            {'username': UserModel().get_by_username(
                TEST_USER_REGULAR_LOGIN).user_id,
             'reasons': ['added manually']},
            {'username': UserModel().get_by_username(
                TEST_USER_ADMIN_LOGIN).user_id,
             'reasons': ['added manually']},
        ]

        data['reviewers'] = reviewers
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)

        expected_message = "Created new pull request `{title}`".format(
            title=data['title'])
        result = response.json
        assert result['error'] is None
        assert result['result']['msg'] == expected_message
        pull_request_id = result['result']['pull_request_id']
        pull_request = PullRequestModel().get(pull_request_id)

        actual_reviewers = []
        for rev in pull_request.reviewers:
            entry = {
                'username': rev.user.user_id,
                'reasons': rev.reasons,
            }
            if rev.mandatory:
                entry['mandatory'] = rev.mandatory
            actual_reviewers.append(entry)

        owner_user_id = pull_request.target_repo.user.user_id
        for spec_reviewer in reviewers[::]:
            # default reviewer will be added who is an owner of the repo
            # this get's overridden by a add owner to reviewers rule
            if spec_reviewer['username'] == owner_user_id:
                spec_reviewer['reasons'] = [u'Default reviewer', u'Repository owner']

        assert sorted(actual_reviewers, key=lambda e: e['username']) \
            == sorted(reviewers, key=lambda e: e['username'])

    @pytest.mark.backends("git", "hg")
    def test_create_fails_when_the_reviewer_is_not_found(self, backend):
        data = self._prepare_data(backend)
        data['reviewers'] = [{'username': 'somebody'}]
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        expected_message = 'user `somebody` does not exist'
        assert_error(id_, expected_message, given=response.body)

    @pytest.mark.backends("git", "hg")
    def test_cannot_create_with_reviewers_in_wrong_format(self, backend):
        data = self._prepare_data(backend)
        reviewers = ','.join([TEST_USER_REGULAR_LOGIN, TEST_USER_ADMIN_LOGIN])
        data['reviewers'] = reviewers
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        expected_message = {u'': '"test_regular,test_admin" is not iterable'}
        assert_error(id_, expected_message, given=response.body)

    @pytest.mark.backends("git", "hg")
    def test_create_with_no_commit_hashes(self, backend):
        data = self._prepare_data(backend)
        expected_source_ref = data['source_ref']
        expected_target_ref = data['target_ref']
        data['source_ref'] = 'branch:{}'.format(backend.default_branch_name)
        data['target_ref'] = 'branch:{}'.format(backend.default_branch_name)
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        expected_message = "Created new pull request `{title}`".format(
            title=data['title'])
        result = response.json
        assert result['result']['msg'] == expected_message
        pull_request_id = result['result']['pull_request_id']
        pull_request = PullRequestModel().get(pull_request_id)
        assert pull_request.source_ref == expected_source_ref
        assert pull_request.target_ref == expected_target_ref

    @pytest.mark.backends("git", "hg")
    @pytest.mark.parametrize("data_key", ["source_repo", "target_repo"])
    def test_create_fails_with_wrong_repo(self, backend, data_key):
        repo_name = 'fake-repo'
        data = self._prepare_data(backend)
        data[data_key] = repo_name
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        expected_message = 'repository `{}` does not exist'.format(repo_name)
        assert_error(id_, expected_message, given=response.body)

    @pytest.mark.backends("git", "hg")
    @pytest.mark.parametrize("data_key", ["source_ref", "target_ref"])
    def test_create_fails_with_non_existing_branch(self, backend, data_key):
        branch_name = 'test-branch'
        data = self._prepare_data(backend)
        data[data_key] = "branch:{}".format(branch_name)
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        expected_message = 'The specified value:{type}:`{name}` ' \
                           'does not exist, or is not allowed.'.format(type='branch',
                                                                       name=branch_name)
        assert_error(id_, expected_message, given=response.body)

    @pytest.mark.backends("git", "hg")
    @pytest.mark.parametrize("data_key", ["source_ref", "target_ref"])
    def test_create_fails_with_ref_in_a_wrong_format(self, backend, data_key):
        data = self._prepare_data(backend)
        ref = 'stange-ref'
        data[data_key] = ref
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        expected_message = (
            'Ref `{ref}` given in a wrong format. Please check the API'
            ' documentation for more details'.format(ref=ref))
        assert_error(id_, expected_message, given=response.body)

    @pytest.mark.backends("git", "hg")
    @pytest.mark.parametrize("data_key", ["source_ref", "target_ref"])
    def test_create_fails_with_non_existing_ref(self, backend, data_key):
        commit_id = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa10'
        ref = self._get_full_ref(backend, commit_id)
        data = self._prepare_data(backend)
        data[data_key] = ref
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        expected_message = 'Ref `{}` does not exist'.format(ref)
        assert_error(id_, expected_message, given=response.body)

    @pytest.mark.backends("git", "hg")
    def test_create_fails_when_no_revisions(self, backend):
        data = self._prepare_data(backend, source_head='initial')
        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
-        expected_message = 'no commits found'
+        expected_message = 'no commits found for merge between specified references'
        assert_error(id_, expected_message, given=response.body)

    @pytest.mark.backends("git", "hg")
    def test_create_fails_when_no_permissions(self, backend):
        data = self._prepare_data(backend)
        RepoModel().revoke_user_permission(
            self.source.repo_name, self.test_user)
        RepoModel().revoke_user_permission(
            self.source.repo_name, User.DEFAULT_USER)

        id_, params = build_data(
            self.apikey_regular, 'create_pull_request', **data)
        response = api_call(self.app, params)
        expected_message = 'repository `{}` does not exist'.format(
            self.source.repo_name)
        assert_error(id_, expected_message, given=response.body)

    def _prepare_data(
            self, backend, source_head='change', target_head='initial'):
        commits = [
            {'message': 'initial'},
            {'message': 'change'},
            {'message': 'new-feature', 'parents': ['initial']},
        ]
        self.commit_ids = backend.create_master_repo(commits)
        self.source = backend.create_repo(heads=[source_head])
        self.target = backend.create_repo(heads=[target_head])

        data = {
            'source_repo': self.source.repo_name,
            'target_repo': self.target.repo_name,
            'source_ref': self._get_full_ref(
                backend, self.commit_ids[source_head]),
            'target_ref': self._get_full_ref(
                backend, self.commit_ids[target_head]),
            'title': 'Test PR 1',
            'description': 'Test'
        }
        RepoModel().grant_user_permission(
            self.source.repo_name, self.TEST_USER_LOGIN, 'repository.read')
        return data

    def _get_full_ref(self, backend, commit_id):
        return 'branch:{branch}:{commit_id}'.format(
            branch=backend.default_branch_name, commit_id=commit_id)
@@ -1,80 +1,82 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2020 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/


import pytest

from rhodecode.model.meta import Session
from rhodecode.model.pull_request import PullRequestModel
from rhodecode.api.tests.utils import (
    build_data, api_call, assert_error)


@pytest.mark.usefixtures("testuser_api", "app")
class TestGetPullRequest(object):
+
    @pytest.mark.backends("git", "hg")
    def test_api_get_pull_requests(self, pr_util):
        pull_request = pr_util.create_pull_request()
        pull_request_2 = PullRequestModel().create(
            created_by=pull_request.author,
            source_repo=pull_request.source_repo,
            source_ref=pull_request.source_ref,
            target_repo=pull_request.target_repo,
            target_ref=pull_request.target_ref,
            revisions=pull_request.revisions,
            reviewers=(),
+            observers=(),
            title=pull_request.title,
            description=pull_request.description,
        )
        Session().commit()
        id_, params = build_data(
            self.apikey, 'get_pull_requests',
            repoid=pull_request.target_repo.repo_name)
        response = api_call(self.app, params)
        assert response.status == '200 OK'
        assert len(response.json['result']) == 2

        PullRequestModel().close_pull_request(
            pull_request_2, pull_request_2.author)
        Session().commit()

        id_, params = build_data(
            self.apikey, 'get_pull_requests',
            repoid=pull_request.target_repo.repo_name,
            status='new')
        response = api_call(self.app, params)
        assert response.status == '200 OK'
        assert len(response.json['result']) == 1

        id_, params = build_data(
            self.apikey, 'get_pull_requests',
            repoid=pull_request.target_repo.repo_name,
            status='closed')
        response = api_call(self.app, params)
        assert response.status == '200 OK'
        assert len(response.json['result']) == 1

    @pytest.mark.backends("git", "hg")
    def test_api_get_pull_requests_repo_error(self):
        id_, params = build_data(self.apikey, 'get_pull_requests', repoid=666)
        response = api_call(self.app, params)

        expected = 'repository `666` does not exist'
        assert_error(id_, expected, given=response.body)
@@ -1,212 +1,215 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2020 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import pytest

from rhodecode.lib.vcs.nodes import FileNode
from rhodecode.model.db import User
from rhodecode.model.pull_request import PullRequestModel
from rhodecode.tests import TEST_USER_ADMIN_LOGIN
from rhodecode.api.tests.utils import (
    build_data, api_call, assert_ok, assert_error)


@pytest.mark.usefixtures("testuser_api", "app")
class TestUpdatePullRequest(object):

    @pytest.mark.backends("git", "hg")
    def test_api_update_pull_request_title_or_description(
            self, pr_util, no_notifications):
        pull_request = pr_util.create_pull_request()

        id_, params = build_data(
            self.apikey, 'update_pull_request',
            repoid=pull_request.target_repo.repo_name,
            pullrequestid=pull_request.pull_request_id,
            title='New TITLE OF A PR',
            description='New DESC OF A PR',
        )
        response = api_call(self.app, params)

        expected = {
            "msg": "Updated pull request `{}`".format(
                pull_request.pull_request_id),
            "pull_request": response.json['result']['pull_request'],
            "updated_commits": {"added": [], "common": [], "removed": []},
            "updated_reviewers": {"added": [], "removed": []},
+            "updated_observers": {"added": [], "removed": []},
        }

        response_json = response.json['result']
        assert response_json == expected
        pr = response_json['pull_request']
        assert pr['title'] == 'New TITLE OF A PR'
        assert pr['description'] == 'New DESC OF A PR'

    @pytest.mark.backends("git", "hg")
    def test_api_try_update_closed_pull_request(
            self, pr_util, no_notifications):
        pull_request = pr_util.create_pull_request()
        PullRequestModel().close_pull_request(
            pull_request, TEST_USER_ADMIN_LOGIN)

        id_, params = build_data(
            self.apikey, 'update_pull_request',
            repoid=pull_request.target_repo.repo_name,
            pullrequestid=pull_request.pull_request_id)
        response = api_call(self.app, params)

        expected = 'pull request `{}` update failed, pull request ' \
                   'is closed'.format(pull_request.pull_request_id)

        assert_error(id_, expected, response.body)

    @pytest.mark.backends("git", "hg")
    def test_api_update_update_commits(self, pr_util, no_notifications):
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
            {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]},
        ]
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])
        pr_util.update_source_repository(head='c')
        repo = pull_request.source_repo.scm_instance()
        commits = [x for x in repo.get_commits()]

        added_commit_id = commits[-1].raw_id  # c commit
        common_commit_id = commits[1].raw_id  # b commit is common ancestor
        total_commits = [added_commit_id, common_commit_id]

        id_, params = build_data(
            self.apikey, 'update_pull_request',
            repoid=pull_request.target_repo.repo_name,
            pullrequestid=pull_request.pull_request_id,
            update_commits=True
        )
        response = api_call(self.app, params)

        expected = {
            "msg": "Updated pull request `{}`".format(
                pull_request.pull_request_id),
            "pull_request": response.json['result']['pull_request'],
            "updated_commits": {"added": [added_commit_id],
                                "common": [common_commit_id],
                                "total": total_commits,
                                "removed": []},
            "updated_reviewers": {"added": [], "removed": []},
+            "updated_observers": {"added": [], "removed": []},
        }

        assert_ok(id_, expected, response.body)

    @pytest.mark.backends("git", "hg")
    def test_api_update_change_reviewers(
            self, user_util, pr_util, no_notifications):
        a = user_util.create_user()
        b = user_util.create_user()
        c = user_util.create_user()
        new_reviewers = [
-            {'username': b.username,'reasons': ['updated via API'],
+            {'username': b.username, 'reasons': ['updated via API'],
             'mandatory':False},
            {'username': c.username, 'reasons': ['updated via API'],
             'mandatory':False},
        ]

        added = [b.username, c.username]
        removed = [a.username]

        pull_request = pr_util.create_pull_request(
-            reviewers=[(a.username, ['added via API'], False, [])])
+            reviewers=[(a.username, ['added via API'], False, 'reviewer', [])])

        id_, params = build_data(
            self.apikey, 'update_pull_request',
            repoid=pull_request.target_repo.repo_name,
            pullrequestid=pull_request.pull_request_id,
            reviewers=new_reviewers)
        response = api_call(self.app, params)
        expected = {
            "msg": "Updated pull request `{}`".format(
                pull_request.pull_request_id),
            "pull_request": response.json['result']['pull_request'],
            "updated_commits": {"added": [], "common": [], "removed": []},
            "updated_reviewers": {"added": added, "removed": removed},
+            "updated_observers": {"added": [], "removed": []},
        }

        assert_ok(id_, expected, response.body)

    @pytest.mark.backends("git", "hg")
    def test_api_update_bad_user_in_reviewers(self, pr_util):
        pull_request = pr_util.create_pull_request()

        id_, params = build_data(
            self.apikey, 'update_pull_request',
            repoid=pull_request.target_repo.repo_name,
            pullrequestid=pull_request.pull_request_id,
            reviewers=[{'username': 'bad_name'}])
        response = api_call(self.app, params)

        expected = 'user `bad_name` does not exist'

        assert_error(id_, expected, response.body)

    @pytest.mark.backends("git", "hg")
    def test_api_update_repo_error(self, pr_util):
        pull_request = pr_util.create_pull_request()
        id_, params = build_data(
            self.apikey, 'update_pull_request',
            repoid='fake',
            pullrequestid=pull_request.pull_request_id,
            reviewers=[{'username': 'bad_name'}])
        response = api_call(self.app, params)

        expected = 'repository `fake` does not exist'

        response_json = response.json['error']
        assert response_json == expected

    @pytest.mark.backends("git", "hg")
    def test_api_update_pull_request_error(self, pr_util):
        pull_request = pr_util.create_pull_request()

        id_, params = build_data(
            self.apikey, 'update_pull_request',
            repoid=pull_request.target_repo.repo_name,
            pullrequestid=999999,
            reviewers=[{'username': 'bad_name'}])
        response = api_call(self.app, params)

        expected = 'pull request `999999` does not exist'
        assert_error(id_, expected, response.body)

    @pytest.mark.backends("git", "hg")
    def test_api_update_pull_request_no_perms_to_update(
            self, user_util, pr_util):
        user = user_util.create_user()
        pull_request = pr_util.create_pull_request()

        id_, params = build_data(
            user.api_key, 'update_pull_request',
            repoid=pull_request.target_repo.repo_name,
            pullrequestid=pull_request.pull_request_id,)
        response = api_call(self.app, params)

        expected = ('pull request `%s` update failed, '
                    'no permission to update.') % pull_request.pull_request_id

        assert_error(id_, expected, response.body)
@@ -1,1056 +1,1118 @@
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2020 RhodeCode GmbH
3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 import logging
22 import logging
23
23
24 from rhodecode.api import jsonrpc_method, JSONRPCError, JSONRPCValidationError
24 from rhodecode.api import jsonrpc_method, JSONRPCError, JSONRPCValidationError
25 from rhodecode.api.utils import (
25 from rhodecode.api.utils import (
26 has_superadmin_permission, Optional, OAttr, get_repo_or_error,
26 has_superadmin_permission, Optional, OAttr, get_repo_or_error,
27 get_pull_request_or_error, get_commit_or_error, get_user_or_error,
27 get_pull_request_or_error, get_commit_or_error, get_user_or_error,
28 validate_repo_permissions, resolve_ref_or_error, validate_set_owner_permissions)
28 validate_repo_permissions, resolve_ref_or_error, validate_set_owner_permissions)
29 from rhodecode.lib import channelstream
29 from rhodecode.lib import channelstream
30 from rhodecode.lib.auth import (HasRepoPermissionAnyApi)
30 from rhodecode.lib.auth import (HasRepoPermissionAnyApi)
31 from rhodecode.lib.base import vcs_operation_context
31 from rhodecode.lib.base import vcs_operation_context
32 from rhodecode.lib.utils2 import str2bool
32 from rhodecode.lib.utils2 import str2bool
33 from rhodecode.lib.vcs.backends.base import unicode_to_reference
33 from rhodecode.model.changeset_status import ChangesetStatusModel
34 from rhodecode.model.changeset_status import ChangesetStatusModel
34 from rhodecode.model.comment import CommentsModel
35 from rhodecode.model.comment import CommentsModel
35 from rhodecode.model.db import Session, ChangesetStatus, ChangesetComment, PullRequest
36 from rhodecode.model.db import (
37 Session, ChangesetStatus, ChangesetComment, PullRequest, PullRequestReviewers)
36 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
38 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
37 from rhodecode.model.settings import SettingsModel
39 from rhodecode.model.settings import SettingsModel
38 from rhodecode.model.validation_schema import Invalid
40 from rhodecode.model.validation_schema import Invalid
39 from rhodecode.model.validation_schema.schemas.reviewer_schema import ReviewerListSchema
41 from rhodecode.model.validation_schema.schemas.reviewer_schema import ReviewerListSchema
40
42
41 log = logging.getLogger(__name__)
43 log = logging.getLogger(__name__)
42
44
43
45
44 @jsonrpc_method()
46 @jsonrpc_method()
45 def get_pull_request(request, apiuser, pullrequestid, repoid=Optional(None),
47 def get_pull_request(request, apiuser, pullrequestid, repoid=Optional(None),
46 merge_state=Optional(False)):
48 merge_state=Optional(False)):
47 """
49 """
48 Get a pull request based on the given ID.
50 Get a pull request based on the given ID.
49
51
50 :param apiuser: This is filled automatically from the |authtoken|.
52 :param apiuser: This is filled automatically from the |authtoken|.
51 :type apiuser: AuthUser
53 :type apiuser: AuthUser
52 :param repoid: Optional, repository name or repository ID from where
54 :param repoid: Optional, repository name or repository ID from where
53 the pull request was opened.
55 the pull request was opened.
54 :type repoid: str or int
56 :type repoid: str or int
55 :param pullrequestid: ID of the requested pull request.
57 :param pullrequestid: ID of the requested pull request.
56 :type pullrequestid: int
58 :type pullrequestid: int
57 :param merge_state: Optional calculate merge state for each repository.
59 :param merge_state: Optional calculate merge state for each repository.
58 This could result in longer time to fetch the data
60 This could result in longer time to fetch the data
59 :type merge_state: bool
61 :type merge_state: bool
60
62
61 Example output:
63 Example output:
62
64
63 .. code-block:: bash
65 .. code-block:: bash
64
66
65 "id": <id_given_in_input>,
67 "id": <id_given_in_input>,
66 "result":
68 "result":
67 {
69 {
68 "pull_request_id": "<pull_request_id>",
70 "pull_request_id": "<pull_request_id>",
69 "url": "<url>",
71 "url": "<url>",
70 "title": "<title>",
72 "title": "<title>",
71 "description": "<description>",
73 "description": "<description>",
72 "status" : "<status>",
74 "status" : "<status>",
73 "created_on": "<date_time_created>",
75 "created_on": "<date_time_created>",
74 "updated_on": "<date_time_updated>",
76 "updated_on": "<date_time_updated>",
75 "versions": "<number_or_versions_of_pr>",
77 "versions": "<number_or_versions_of_pr>",
76 "commit_ids": [
78 "commit_ids": [
77 ...
79 ...
78 "<commit_id>",
80 "<commit_id>",
79 "<commit_id>",
81 "<commit_id>",
80 ...
82 ...
81 ],
83 ],
82 "review_status": "<review_status>",
84 "review_status": "<review_status>",
83 "mergeable": {
85 "mergeable": {
84 "status": "<bool>",
86 "status": "<bool>",
85 "message": "<message>",
87 "message": "<message>",
86 },
88 },
87 "source": {
89 "source": {
88 "clone_url": "<clone_url>",
90 "clone_url": "<clone_url>",
89 "repository": "<repository_name>",
91 "repository": "<repository_name>",
90 "reference":
92 "reference":
91 {
93 {
92 "name": "<name>",
94 "name": "<name>",
93 "type": "<type>",
95 "type": "<type>",
94 "commit_id": "<commit_id>",
96 "commit_id": "<commit_id>",
95 }
97 }
96 },
98 },
97 "target": {
99 "target": {
98 "clone_url": "<clone_url>",
100 "clone_url": "<clone_url>",
99 "repository": "<repository_name>",
101 "repository": "<repository_name>",
100 "reference":
102 "reference":
101 {
103 {
102 "name": "<name>",
104 "name": "<name>",
103 "type": "<type>",
105 "type": "<type>",
104 "commit_id": "<commit_id>",
106 "commit_id": "<commit_id>",
105 }
107 }
106 },
108 },
107 "merge": {
109 "merge": {
108 "clone_url": "<clone_url>",
110 "clone_url": "<clone_url>",
109 "reference":
111 "reference":
110 {
112 {
111 "name": "<name>",
113 "name": "<name>",
112 "type": "<type>",
114 "type": "<type>",
113 "commit_id": "<commit_id>",
115 "commit_id": "<commit_id>",
114 }
116 }
115 },
117 },
116 "author": <user_obj>,
118 "author": <user_obj>,
117 "reviewers": [
119 "reviewers": [
118 ...
120 ...
119 {
121 {
120 "user": "<user_obj>",
122 "user": "<user_obj>",
121 "review_status": "<review_status>",
123 "review_status": "<review_status>",
122 }
124 }
123 ...
125 ...
124 ]
126 ]
125 },
127 },
126 "error": null
128 "error": null
127 """
129 """
128
130
129 pull_request = get_pull_request_or_error(pullrequestid)
131 pull_request = get_pull_request_or_error(pullrequestid)
130 if Optional.extract(repoid):
132 if Optional.extract(repoid):
131 repo = get_repo_or_error(repoid)
133 repo = get_repo_or_error(repoid)
132 else:
134 else:
133 repo = pull_request.target_repo
135 repo = pull_request.target_repo
134
136
135 if not PullRequestModel().check_user_read(pull_request, apiuser, api=True):
137 if not PullRequestModel().check_user_read(pull_request, apiuser, api=True):
136 raise JSONRPCError('repository `%s` or pull request `%s` '
138 raise JSONRPCError('repository `%s` or pull request `%s` '
137 'does not exist' % (repoid, pullrequestid))
139 'does not exist' % (repoid, pullrequestid))
138
140
139 # NOTE(marcink): only calculate and return merge state if the pr state is 'created'
141 # NOTE(marcink): only calculate and return merge state if the pr state is 'created'
140 # otherwise we can lock the repo on calculation of merge state while update/merge
142 # otherwise we can lock the repo on calculation of merge state while update/merge
141 # is happening.
143 # is happening.
142 pr_created = pull_request.pull_request_state == pull_request.STATE_CREATED
144 pr_created = pull_request.pull_request_state == pull_request.STATE_CREATED
143 merge_state = Optional.extract(merge_state, binary=True) and pr_created
145 merge_state = Optional.extract(merge_state, binary=True) and pr_created
144 data = pull_request.get_api_data(with_merge_state=merge_state)
146 data = pull_request.get_api_data(with_merge_state=merge_state)
145 return data
147 return data
146
148
147
149
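
For orientation, a rough client-side sketch of calling `get_pull_request` over the JSON-RPC API with Python's `requests` is shown below. The server URL, auth token and pull request ID are placeholders, and the `id`/`auth_token`/`method`/`args` payload layout is assumed from the general RhodeCode API convention rather than defined in this module.

.. code-block:: python

    # minimal, hypothetical client sketch -- URL, token and IDs are placeholders
    import requests

    API_URL = 'https://code.example.com/_admin/api'
    AUTH_TOKEN = '<auth_token>'

    def api_call(method, **args):
        """POST one JSON-RPC request and return the decoded 'result'."""
        payload = {'id': 1, 'auth_token': AUTH_TOKEN, 'method': method, 'args': args}
        response = requests.post(API_URL, json=payload).json()
        if response.get('error'):
            raise RuntimeError(response['error'])
        return response['result']

    pr = api_call('get_pull_request', pullrequestid=1, merge_state=True)
    print(pr['title'], pr['review_status'], pr['mergeable']['status'])
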
148 @jsonrpc_method()
150 @jsonrpc_method()
149 def get_pull_requests(request, apiuser, repoid, status=Optional('new'),
151 def get_pull_requests(request, apiuser, repoid, status=Optional('new'),
150 merge_state=Optional(False)):
152 merge_state=Optional(False)):
151 """
153 """
152 Get all pull requests from the repository specified in `repoid`.
154 Get all pull requests from the repository specified in `repoid`.
153
155
154 :param apiuser: This is filled automatically from the |authtoken|.
156 :param apiuser: This is filled automatically from the |authtoken|.
155 :type apiuser: AuthUser
157 :type apiuser: AuthUser
156 :param repoid: Repository name or repository ID.
158 :param repoid: Repository name or repository ID.
157 :type repoid: str or int
159 :type repoid: str or int
158 :param status: Only return pull requests with the specified status.
160 :param status: Only return pull requests with the specified status.
159 Valid options are:
161 Valid options are:
160 * ``new`` (default)
162 * ``new`` (default)
161 * ``open``
163 * ``open``
162 * ``closed``
164 * ``closed``
163 :type status: str
165 :type status: str
164 :param merge_state: Optional. Calculate the merge state for each pull request.
166 :param merge_state: Optional. Calculate the merge state for each pull request.
165 This could result in a longer time to fetch the data.
167 This could result in a longer time to fetch the data.
166 :type merge_state: bool
168 :type merge_state: bool
167
169
168 Example output:
170 Example output:
169
171
170 .. code-block:: bash
172 .. code-block:: bash
171
173
172 "id": <id_given_in_input>,
174 "id": <id_given_in_input>,
173 "result":
175 "result":
174 [
176 [
175 ...
177 ...
176 {
178 {
177 "pull_request_id": "<pull_request_id>",
179 "pull_request_id": "<pull_request_id>",
178 "url": "<url>",
180 "url": "<url>",
179 "title" : "<title>",
181 "title" : "<title>",
180 "description": "<description>",
182 "description": "<description>",
181 "status": "<status>",
183 "status": "<status>",
182 "created_on": "<date_time_created>",
184 "created_on": "<date_time_created>",
183 "updated_on": "<date_time_updated>",
185 "updated_on": "<date_time_updated>",
184 "commit_ids": [
186 "commit_ids": [
185 ...
187 ...
186 "<commit_id>",
188 "<commit_id>",
187 "<commit_id>",
189 "<commit_id>",
188 ...
190 ...
189 ],
191 ],
190 "review_status": "<review_status>",
192 "review_status": "<review_status>",
191 "mergeable": {
193 "mergeable": {
192 "status": "<bool>",
194 "status": "<bool>",
193 "message: "<message>",
195 "message: "<message>",
194 },
196 },
195 "source": {
197 "source": {
196 "clone_url": "<clone_url>",
198 "clone_url": "<clone_url>",
197 "reference":
199 "reference":
198 {
200 {
199 "name": "<name>",
201 "name": "<name>",
200 "type": "<type>",
202 "type": "<type>",
201 "commit_id": "<commit_id>",
203 "commit_id": "<commit_id>",
202 }
204 }
203 },
205 },
204 "target": {
206 "target": {
205 "clone_url": "<clone_url>",
207 "clone_url": "<clone_url>",
206 "reference":
208 "reference":
207 {
209 {
208 "name": "<name>",
210 "name": "<name>",
209 "type": "<type>",
211 "type": "<type>",
210 "commit_id": "<commit_id>",
212 "commit_id": "<commit_id>",
211 }
213 }
212 },
214 },
213 "merge": {
215 "merge": {
214 "clone_url": "<clone_url>",
216 "clone_url": "<clone_url>",
215 "reference":
217 "reference":
216 {
218 {
217 "name": "<name>",
219 "name": "<name>",
218 "type": "<type>",
220 "type": "<type>",
219 "commit_id": "<commit_id>",
221 "commit_id": "<commit_id>",
220 }
222 }
221 },
223 },
222 "author": <user_obj>,
224 "author": <user_obj>,
223 "reviewers": [
225 "reviewers": [
224 ...
226 ...
225 {
227 {
226 "user": "<user_obj>",
228 "user": "<user_obj>",
227 "review_status": "<review_status>",
229 "review_status": "<review_status>",
228 }
230 }
229 ...
231 ...
230 ]
232 ]
231 }
233 }
232 ...
234 ...
233 ],
235 ],
234 "error": null
236 "error": null
235
237
236 """
238 """
237 repo = get_repo_or_error(repoid)
239 repo = get_repo_or_error(repoid)
238 if not has_superadmin_permission(apiuser):
240 if not has_superadmin_permission(apiuser):
239 _perms = (
241 _perms = (
240 'repository.admin', 'repository.write', 'repository.read',)
242 'repository.admin', 'repository.write', 'repository.read',)
241 validate_repo_permissions(apiuser, repoid, repo, _perms)
243 validate_repo_permissions(apiuser, repoid, repo, _perms)
242
244
243 status = Optional.extract(status)
245 status = Optional.extract(status)
244 merge_state = Optional.extract(merge_state, binary=True)
246 merge_state = Optional.extract(merge_state, binary=True)
245 pull_requests = PullRequestModel().get_all(repo, statuses=[status],
247 pull_requests = PullRequestModel().get_all(repo, statuses=[status],
246 order_by='id', order_dir='desc')
248 order_by='id', order_dir='desc')
247 data = [pr.get_api_data(with_merge_state=merge_state) for pr in pull_requests]
249 data = [pr.get_api_data(with_merge_state=merge_state) for pr in pull_requests]
248 return data
250 return data
249
251
250
252
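
Reusing the hypothetical `api_call` helper sketched after `get_pull_request`, listing pull requests filtered by status could look like this; the repository name is a placeholder.

.. code-block:: python

    # list open pull requests of a repository (reuses the api_call sketch above)
    open_prs = api_call('get_pull_requests', repoid='group/my-repo', status='open')
    for pr in open_prs:
        print(pr['pull_request_id'], pr['review_status'], pr['title'])
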
251 @jsonrpc_method()
253 @jsonrpc_method()
252 def merge_pull_request(
254 def merge_pull_request(
253 request, apiuser, pullrequestid, repoid=Optional(None),
255 request, apiuser, pullrequestid, repoid=Optional(None),
254 userid=Optional(OAttr('apiuser'))):
256 userid=Optional(OAttr('apiuser'))):
255 """
257 """
256 Merge the pull request specified by `pullrequestid` into its target
258 Merge the pull request specified by `pullrequestid` into its target
257 repository.
259 repository.
258
260
259 :param apiuser: This is filled automatically from the |authtoken|.
261 :param apiuser: This is filled automatically from the |authtoken|.
260 :type apiuser: AuthUser
262 :type apiuser: AuthUser
261 :param repoid: Optional, repository name or repository ID of the
263 :param repoid: Optional, repository name or repository ID of the
262 target repository to which the |pr| is to be merged.
264 target repository to which the |pr| is to be merged.
263 :type repoid: str or int
265 :type repoid: str or int
264 :param pullrequestid: ID of the pull request which shall be merged.
266 :param pullrequestid: ID of the pull request which shall be merged.
265 :type pullrequestid: int
267 :type pullrequestid: int
266 :param userid: Merge the pull request as this user.
268 :param userid: Merge the pull request as this user.
267 :type userid: Optional(str or int)
269 :type userid: Optional(str or int)
268
270
269 Example output:
271 Example output:
270
272
271 .. code-block:: bash
273 .. code-block:: bash
272
274
273 "id": <id_given_in_input>,
275 "id": <id_given_in_input>,
274 "result": {
276 "result": {
275 "executed": "<bool>",
277 "executed": "<bool>",
276 "failure_reason": "<int>",
278 "failure_reason": "<int>",
277 "merge_status_message": "<str>",
279 "merge_status_message": "<str>",
278 "merge_commit_id": "<merge_commit_id>",
280 "merge_commit_id": "<merge_commit_id>",
279 "possible": "<bool>",
281 "possible": "<bool>",
280 "merge_ref": {
282 "merge_ref": {
281 "commit_id": "<commit_id>",
283 "commit_id": "<commit_id>",
282 "type": "<type>",
284 "type": "<type>",
283 "name": "<name>"
285 "name": "<name>"
284 }
286 }
285 },
287 },
286 "error": null
288 "error": null
287 """
289 """
288 pull_request = get_pull_request_or_error(pullrequestid)
290 pull_request = get_pull_request_or_error(pullrequestid)
289 if Optional.extract(repoid):
291 if Optional.extract(repoid):
290 repo = get_repo_or_error(repoid)
292 repo = get_repo_or_error(repoid)
291 else:
293 else:
292 repo = pull_request.target_repo
294 repo = pull_request.target_repo
293 auth_user = apiuser
295 auth_user = apiuser
294
296
295 if not isinstance(userid, Optional):
297 if not isinstance(userid, Optional):
296 is_repo_admin = HasRepoPermissionAnyApi('repository.admin')(
298 is_repo_admin = HasRepoPermissionAnyApi('repository.admin')(
297 user=apiuser, repo_name=repo.repo_name)
299 user=apiuser, repo_name=repo.repo_name)
298 if has_superadmin_permission(apiuser) or is_repo_admin:
300 if has_superadmin_permission(apiuser) or is_repo_admin:
299 apiuser = get_user_or_error(userid)
301 apiuser = get_user_or_error(userid)
300 auth_user = apiuser.AuthUser()
302 auth_user = apiuser.AuthUser()
301 else:
303 else:
302 raise JSONRPCError('userid is not the same as your user')
304 raise JSONRPCError('userid is not the same as your user')
303
305
304 if pull_request.pull_request_state != PullRequest.STATE_CREATED:
306 if pull_request.pull_request_state != PullRequest.STATE_CREATED:
305 raise JSONRPCError(
307 raise JSONRPCError(
306 'Operation forbidden because pull request is in state {}, '
308 'Operation forbidden because pull request is in state {}, '
307 'only state {} is allowed.'.format(
309 'only state {} is allowed.'.format(
308 pull_request.pull_request_state, PullRequest.STATE_CREATED))
310 pull_request.pull_request_state, PullRequest.STATE_CREATED))
309
311
310 with pull_request.set_state(PullRequest.STATE_UPDATING):
312 with pull_request.set_state(PullRequest.STATE_UPDATING):
311 check = MergeCheck.validate(pull_request, auth_user=auth_user,
313 check = MergeCheck.validate(pull_request, auth_user=auth_user,
312 translator=request.translate)
314 translator=request.translate)
313 merge_possible = not check.failed
315 merge_possible = not check.failed
314
316
315 if not merge_possible:
317 if not merge_possible:
316 error_messages = []
318 error_messages = []
317 for err_type, error_msg in check.errors:
319 for err_type, error_msg in check.errors:
318 error_msg = request.translate(error_msg)
320 error_msg = request.translate(error_msg)
319 error_messages.append(error_msg)
321 error_messages.append(error_msg)
320
322
321 reasons = ','.join(error_messages)
323 reasons = ','.join(error_messages)
322 raise JSONRPCError(
324 raise JSONRPCError(
323 'merge not possible for following reasons: {}'.format(reasons))
325 'merge not possible for following reasons: {}'.format(reasons))
324
326
325 target_repo = pull_request.target_repo
327 target_repo = pull_request.target_repo
326 extras = vcs_operation_context(
328 extras = vcs_operation_context(
327 request.environ, repo_name=target_repo.repo_name,
329 request.environ, repo_name=target_repo.repo_name,
328 username=auth_user.username, action='push',
330 username=auth_user.username, action='push',
329 scm=target_repo.repo_type)
331 scm=target_repo.repo_type)
330 with pull_request.set_state(PullRequest.STATE_UPDATING):
332 with pull_request.set_state(PullRequest.STATE_UPDATING):
331 merge_response = PullRequestModel().merge_repo(
333 merge_response = PullRequestModel().merge_repo(
332 pull_request, apiuser, extras=extras)
334 pull_request, apiuser, extras=extras)
333 if merge_response.executed:
335 if merge_response.executed:
334 PullRequestModel().close_pull_request(pull_request.pull_request_id, auth_user)
336 PullRequestModel().close_pull_request(pull_request.pull_request_id, auth_user)
335
337
336 Session().commit()
338 Session().commit()
337
339
338 # In previous versions the merge response directly contained the merge
340 # In previous versions the merge response directly contained the merge
339 # commit id. It is now contained in the merge reference object. To be
341 # commit id. It is now contained in the merge reference object. To be
340 # backwards compatible we have to extract it again.
342 # backwards compatible we have to extract it again.
341 merge_response = merge_response.asdict()
343 merge_response = merge_response.asdict()
342 merge_response['merge_commit_id'] = merge_response['merge_ref'].commit_id
344 merge_response['merge_commit_id'] = merge_response['merge_ref'].commit_id
343
345
344 return merge_response
346 return merge_response
345
347
346
348
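
A caller would normally check the `executed` flag and the backwards-compatible `merge_commit_id` documented above; a short sketch with the same hypothetical `api_call` helper:

.. code-block:: python

    # attempt the merge and report the outcome fields documented above
    result = api_call('merge_pull_request', pullrequestid=1)
    if result['executed']:
        print('merged as commit', result['merge_commit_id'])
    else:
        print('merge not possible:', result['merge_status_message'])
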
347 @jsonrpc_method()
349 @jsonrpc_method()
348 def get_pull_request_comments(
350 def get_pull_request_comments(
349 request, apiuser, pullrequestid, repoid=Optional(None)):
351 request, apiuser, pullrequestid, repoid=Optional(None)):
350 """
352 """
351 Get all comments of the pull request specified by `pullrequestid`.
353 Get all comments of the pull request specified by `pullrequestid`.
352
354
353 :param apiuser: This is filled automatically from the |authtoken|.
355 :param apiuser: This is filled automatically from the |authtoken|.
354 :type apiuser: AuthUser
356 :type apiuser: AuthUser
355 :param repoid: Optional repository name or repository ID.
357 :param repoid: Optional repository name or repository ID.
356 :type repoid: str or int
358 :type repoid: str or int
357 :param pullrequestid: The pull request ID.
359 :param pullrequestid: The pull request ID.
358 :type pullrequestid: int
360 :type pullrequestid: int
359
361
360 Example output:
362 Example output:
361
363
362 .. code-block:: bash
364 .. code-block:: bash
363
365
364 id : <id_given_in_input>
366 id : <id_given_in_input>
365 result : [
367 result : [
366 {
368 {
367 "comment_author": {
369 "comment_author": {
368 "active": true,
370 "active": true,
369 "full_name_or_username": "Tom Gore",
371 "full_name_or_username": "Tom Gore",
370 "username": "admin"
372 "username": "admin"
371 },
373 },
372 "comment_created_on": "2017-01-02T18:43:45.533",
374 "comment_created_on": "2017-01-02T18:43:45.533",
373 "comment_f_path": null,
375 "comment_f_path": null,
374 "comment_id": 25,
376 "comment_id": 25,
375 "comment_lineno": null,
377 "comment_lineno": null,
376 "comment_status": {
378 "comment_status": {
377 "status": "under_review",
379 "status": "under_review",
378 "status_lbl": "Under Review"
380 "status_lbl": "Under Review"
379 },
381 },
380 "comment_text": "Example text",
382 "comment_text": "Example text",
381 "comment_type": null,
383 "comment_type": null,
382 "comment_last_version: 0,
384 "comment_last_version: 0,
383 "pull_request_version": null,
385 "pull_request_version": null,
384 "comment_commit_id": None,
386 "comment_commit_id": None,
385 "comment_pull_request_id": <pull_request_id>
387 "comment_pull_request_id": <pull_request_id>
386 }
388 }
387 ],
389 ],
388 error : null
390 error : null
389 """
391 """
390
392
391 pull_request = get_pull_request_or_error(pullrequestid)
393 pull_request = get_pull_request_or_error(pullrequestid)
392 if Optional.extract(repoid):
394 if Optional.extract(repoid):
393 repo = get_repo_or_error(repoid)
395 repo = get_repo_or_error(repoid)
394 else:
396 else:
395 repo = pull_request.target_repo
397 repo = pull_request.target_repo
396
398
397 if not PullRequestModel().check_user_read(
399 if not PullRequestModel().check_user_read(
398 pull_request, apiuser, api=True):
400 pull_request, apiuser, api=True):
399 raise JSONRPCError('repository `%s` or pull request `%s` '
401 raise JSONRPCError('repository `%s` or pull request `%s` '
400 'does not exist' % (repoid, pullrequestid))
402 'does not exist' % (repoid, pullrequestid))
401
403
402 (pull_request_latest,
404 (pull_request_latest,
403 pull_request_at_ver,
405 pull_request_at_ver,
404 pull_request_display_obj,
406 pull_request_display_obj,
405 at_version) = PullRequestModel().get_pr_version(
407 at_version) = PullRequestModel().get_pr_version(
406 pull_request.pull_request_id, version=None)
408 pull_request.pull_request_id, version=None)
407
409
408 versions = pull_request_display_obj.versions()
410 versions = pull_request_display_obj.versions()
409 ver_map = {
411 ver_map = {
410 ver.pull_request_version_id: cnt
412 ver.pull_request_version_id: cnt
411 for cnt, ver in enumerate(versions, 1)
413 for cnt, ver in enumerate(versions, 1)
412 }
414 }
413
415
414 # GENERAL COMMENTS with versions #
416 # GENERAL COMMENTS with versions #
415 q = CommentsModel()._all_general_comments_of_pull_request(pull_request)
417 q = CommentsModel()._all_general_comments_of_pull_request(pull_request)
416 q = q.order_by(ChangesetComment.comment_id.asc())
418 q = q.order_by(ChangesetComment.comment_id.asc())
417 general_comments = q.all()
419 general_comments = q.all()
418
420
419 # INLINE COMMENTS with versions #
421 # INLINE COMMENTS with versions #
420 q = CommentsModel()._all_inline_comments_of_pull_request(pull_request)
422 q = CommentsModel()._all_inline_comments_of_pull_request(pull_request)
421 q = q.order_by(ChangesetComment.comment_id.asc())
423 q = q.order_by(ChangesetComment.comment_id.asc())
422 inline_comments = q.all()
424 inline_comments = q.all()
423
425
424 data = []
426 data = []
425 for comment in inline_comments + general_comments:
427 for comment in inline_comments + general_comments:
426 full_data = comment.get_api_data()
428 full_data = comment.get_api_data()
427 pr_version_id = None
429 pr_version_id = None
428 if comment.pull_request_version_id:
430 if comment.pull_request_version_id:
429 pr_version_id = 'v{}'.format(
431 pr_version_id = 'v{}'.format(
430 ver_map[comment.pull_request_version_id])
432 ver_map[comment.pull_request_version_id])
431
433
432 # sanitize some entries
434 # sanitize some entries
433
435
434 full_data['pull_request_version'] = pr_version_id
436 full_data['pull_request_version'] = pr_version_id
435 full_data['comment_author'] = {
437 full_data['comment_author'] = {
436 'username': full_data['comment_author'].username,
438 'username': full_data['comment_author'].username,
437 'full_name_or_username': full_data['comment_author'].full_name_or_username,
439 'full_name_or_username': full_data['comment_author'].full_name_or_username,
438 'active': full_data['comment_author'].active,
440 'active': full_data['comment_author'].active,
439 }
441 }
440
442
441 if full_data['comment_status']:
443 if full_data['comment_status']:
442 full_data['comment_status'] = {
444 full_data['comment_status'] = {
443 'status': full_data['comment_status'][0].status,
445 'status': full_data['comment_status'][0].status,
444 'status_lbl': full_data['comment_status'][0].status_lbl,
446 'status_lbl': full_data['comment_status'][0].status_lbl,
445 }
447 }
446 else:
448 else:
447 full_data['comment_status'] = {}
449 full_data['comment_status'] = {}
448
450
449 data.append(full_data)
451 data.append(full_data)
450 return data
452 return data
451
453
452
454
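
Because the returned list mixes inline and general comments, clients usually post-process it. The sketch below (again relying on the hypothetical `api_call` helper) separates TODO comments from plain notes using the `comment_type` field shown in the example output.

.. code-block:: python

    # split TODO comments from plain notes in the combined comment list
    comments = api_call('get_pull_request_comments', pullrequestid=1)
    todos = [c for c in comments if c['comment_type'] == 'todo']
    notes = [c for c in comments if c['comment_type'] != 'todo']
    print('todos: %d, other comments: %d' % (len(todos), len(notes)))
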
453 @jsonrpc_method()
455 @jsonrpc_method()
454 def comment_pull_request(
456 def comment_pull_request(
455 request, apiuser, pullrequestid, repoid=Optional(None),
457 request, apiuser, pullrequestid, repoid=Optional(None),
456 message=Optional(None), commit_id=Optional(None), status=Optional(None),
458 message=Optional(None), commit_id=Optional(None), status=Optional(None),
457 comment_type=Optional(ChangesetComment.COMMENT_TYPE_NOTE),
459 comment_type=Optional(ChangesetComment.COMMENT_TYPE_NOTE),
458 resolves_comment_id=Optional(None), extra_recipients=Optional([]),
460 resolves_comment_id=Optional(None), extra_recipients=Optional([]),
459 userid=Optional(OAttr('apiuser')), send_email=Optional(True)):
461 userid=Optional(OAttr('apiuser')), send_email=Optional(True)):
460 """
462 """
461 Comment on the pull request specified with the `pullrequestid`,
463 Comment on the pull request specified with the `pullrequestid`,
462 in the |repo| specified by the `repoid`, and optionally change the
464 in the |repo| specified by the `repoid`, and optionally change the
463 review status.
465 review status.
464
466
465 :param apiuser: This is filled automatically from the |authtoken|.
467 :param apiuser: This is filled automatically from the |authtoken|.
466 :type apiuser: AuthUser
468 :type apiuser: AuthUser
467 :param repoid: Optional repository name or repository ID.
469 :param repoid: Optional repository name or repository ID.
468 :type repoid: str or int
470 :type repoid: str or int
469 :param pullrequestid: The pull request ID.
471 :param pullrequestid: The pull request ID.
470 :type pullrequestid: int
472 :type pullrequestid: int
471 :param commit_id: Specify the commit_id for which to set a comment. If the
473 :param commit_id: Specify the commit_id for which to set a comment. If the
472 given commit_id is different than the latest commit in the PR, the status
474 given commit_id is different than the latest commit in the PR, the status
473 change won't be performed.
475 change won't be performed.
474 :type commit_id: str
476 :type commit_id: str
475 :param message: The text content of the comment.
477 :param message: The text content of the comment.
476 :type message: str
478 :type message: str
477 :param status: (**Optional**) Set the approval status of the pull
479 :param status: (**Optional**) Set the approval status of the pull
478 request. One of: 'not_reviewed', 'approved', 'rejected',
480 request. One of: 'not_reviewed', 'approved', 'rejected',
479 'under_review'
481 'under_review'
480 :type status: str
482 :type status: str
481 :param comment_type: Comment type, one of: 'note', 'todo'
483 :param comment_type: Comment type, one of: 'note', 'todo'
482 :type comment_type: Optional(str), default: 'note'
484 :type comment_type: Optional(str), default: 'note'
483 :param resolves_comment_id: id of comment which this one will resolve
485 :param resolves_comment_id: id of comment which this one will resolve
484 :type resolves_comment_id: Optional(int)
486 :type resolves_comment_id: Optional(int)
485 :param extra_recipients: list of user ids or usernames to add
487 :param extra_recipients: list of user ids or usernames to add
486 notifications for this comment. Acts like a CC for notifications.
488 notifications for this comment. Acts like a CC for notifications.
487 :type extra_recipients: Optional(list)
489 :type extra_recipients: Optional(list)
488 :param userid: Comment on the pull request as this user
490 :param userid: Comment on the pull request as this user
489 :type userid: Optional(str or int)
491 :type userid: Optional(str or int)
490 :param send_email: Define if this comment should also send email notification
492 :param send_email: Define if this comment should also send email notification
491 :type send_email: Optional(bool)
493 :type send_email: Optional(bool)
492
494
493 Example output:
495 Example output:
494
496
495 .. code-block:: bash
497 .. code-block:: bash
496
498
497 id : <id_given_in_input>
499 id : <id_given_in_input>
498 result : {
500 result : {
499 "pull_request_id": "<Integer>",
501 "pull_request_id": "<Integer>",
500 "comment_id": "<Integer>",
502 "comment_id": "<Integer>",
501 "status": {"given": <given_status>,
503 "status": {"given": <given_status>,
502 "was_changed": <bool status_was_actually_changed> },
504 "was_changed": <bool status_was_actually_changed> },
503 },
505 },
504 error : null
506 error : null
505 """
507 """
506 _ = request.translate
508 _ = request.translate
507
509
508 pull_request = get_pull_request_or_error(pullrequestid)
510 pull_request = get_pull_request_or_error(pullrequestid)
509 if Optional.extract(repoid):
511 if Optional.extract(repoid):
510 repo = get_repo_or_error(repoid)
512 repo = get_repo_or_error(repoid)
511 else:
513 else:
512 repo = pull_request.target_repo
514 repo = pull_request.target_repo
513
515
514 db_repo_name = repo.repo_name
516 db_repo_name = repo.repo_name
515 auth_user = apiuser
517 auth_user = apiuser
516 if not isinstance(userid, Optional):
518 if not isinstance(userid, Optional):
517 is_repo_admin = HasRepoPermissionAnyApi('repository.admin')(
519 is_repo_admin = HasRepoPermissionAnyApi('repository.admin')(
518 user=apiuser, repo_name=db_repo_name)
520 user=apiuser, repo_name=db_repo_name)
519 if has_superadmin_permission(apiuser) or is_repo_admin:
521 if has_superadmin_permission(apiuser) or is_repo_admin:
520 apiuser = get_user_or_error(userid)
522 apiuser = get_user_or_error(userid)
521 auth_user = apiuser.AuthUser()
523 auth_user = apiuser.AuthUser()
522 else:
524 else:
523 raise JSONRPCError('userid is not the same as your user')
525 raise JSONRPCError('userid is not the same as your user')
524
526
525 if pull_request.is_closed():
527 if pull_request.is_closed():
526 raise JSONRPCError(
528 raise JSONRPCError(
527 'pull request `%s` comment failed, pull request is closed' % (
529 'pull request `%s` comment failed, pull request is closed' % (
528 pullrequestid,))
530 pullrequestid,))
529
531
530 if not PullRequestModel().check_user_read(
532 if not PullRequestModel().check_user_read(
531 pull_request, apiuser, api=True):
533 pull_request, apiuser, api=True):
532 raise JSONRPCError('repository `%s` does not exist' % (repoid,))
534 raise JSONRPCError('repository `%s` does not exist' % (repoid,))
533 message = Optional.extract(message)
535 message = Optional.extract(message)
534 status = Optional.extract(status)
536 status = Optional.extract(status)
535 commit_id = Optional.extract(commit_id)
537 commit_id = Optional.extract(commit_id)
536 comment_type = Optional.extract(comment_type)
538 comment_type = Optional.extract(comment_type)
537 resolves_comment_id = Optional.extract(resolves_comment_id)
539 resolves_comment_id = Optional.extract(resolves_comment_id)
538 extra_recipients = Optional.extract(extra_recipients)
540 extra_recipients = Optional.extract(extra_recipients)
539 send_email = Optional.extract(send_email, binary=True)
541 send_email = Optional.extract(send_email, binary=True)
540
542
541 if not message and not status:
543 if not message and not status:
542 raise JSONRPCError(
544 raise JSONRPCError(
543 'Both message and status parameters are missing. '
545 'Both message and status parameters are missing. '
544 'At least one is required.')
546 'At least one is required.')
545
547
546 if (status not in (st[0] for st in ChangesetStatus.STATUSES) and
548 if (status not in (st[0] for st in ChangesetStatus.STATUSES) and
547 status is not None):
549 status is not None):
548 raise JSONRPCError('Unknown comment status: `%s`' % status)
550 raise JSONRPCError('Unknown comment status: `%s`' % status)
549
551
550 if commit_id and commit_id not in pull_request.revisions:
552 if commit_id and commit_id not in pull_request.revisions:
551 raise JSONRPCError(
553 raise JSONRPCError(
552 'Invalid commit_id `%s` for this pull request.' % commit_id)
554 'Invalid commit_id `%s` for this pull request.' % commit_id)
553
555
554 allowed_to_change_status = PullRequestModel().check_user_change_status(
556 allowed_to_change_status = PullRequestModel().check_user_change_status(
555 pull_request, apiuser)
557 pull_request, apiuser)
556
558
557 # if commit_id is passed, re-validate if the user is allowed to change status
559 # if commit_id is passed, re-validate if the user is allowed to change status
558 # based on latest commit_id from the PR
560 # based on latest commit_id from the PR
559 if commit_id:
561 if commit_id:
560 commit_idx = pull_request.revisions.index(commit_id)
562 commit_idx = pull_request.revisions.index(commit_id)
561 if commit_idx != 0:
563 if commit_idx != 0:
562 allowed_to_change_status = False
564 allowed_to_change_status = False
563
565
564 if resolves_comment_id:
566 if resolves_comment_id:
565 comment = ChangesetComment.get(resolves_comment_id)
567 comment = ChangesetComment.get(resolves_comment_id)
566 if not comment:
568 if not comment:
567 raise JSONRPCError(
569 raise JSONRPCError(
568 'Invalid resolves_comment_id `%s` for this pull request.'
570 'Invalid resolves_comment_id `%s` for this pull request.'
569 % resolves_comment_id)
571 % resolves_comment_id)
570 if comment.comment_type != ChangesetComment.COMMENT_TYPE_TODO:
572 if comment.comment_type != ChangesetComment.COMMENT_TYPE_TODO:
571 raise JSONRPCError(
573 raise JSONRPCError(
572 'Comment `%s` is wrong type for setting status to resolved.'
574 'Comment `%s` is wrong type for setting status to resolved.'
573 % resolves_comment_id)
575 % resolves_comment_id)
574
576
575 text = message
577 text = message
576 status_label = ChangesetStatus.get_status_lbl(status)
578 status_label = ChangesetStatus.get_status_lbl(status)
577 if status and allowed_to_change_status:
579 if status and allowed_to_change_status:
578 st_message = ('Status change %(transition_icon)s %(status)s'
580 st_message = ('Status change %(transition_icon)s %(status)s'
579 % {'transition_icon': '>', 'status': status_label})
581 % {'transition_icon': '>', 'status': status_label})
580 text = message or st_message
582 text = message or st_message
581
583
582 rc_config = SettingsModel().get_all_settings()
584 rc_config = SettingsModel().get_all_settings()
583 renderer = rc_config.get('rhodecode_markup_renderer', 'rst')
585 renderer = rc_config.get('rhodecode_markup_renderer', 'rst')
584
586
585 status_change = status and allowed_to_change_status
587 status_change = status and allowed_to_change_status
586 comment = CommentsModel().create(
588 comment = CommentsModel().create(
587 text=text,
589 text=text,
588 repo=pull_request.target_repo.repo_id,
590 repo=pull_request.target_repo.repo_id,
589 user=apiuser.user_id,
591 user=apiuser.user_id,
590 pull_request=pull_request.pull_request_id,
592 pull_request=pull_request.pull_request_id,
591 f_path=None,
593 f_path=None,
592 line_no=None,
594 line_no=None,
593 status_change=(status_label if status_change else None),
595 status_change=(status_label if status_change else None),
594 status_change_type=(status if status_change else None),
596 status_change_type=(status if status_change else None),
595 closing_pr=False,
597 closing_pr=False,
596 renderer=renderer,
598 renderer=renderer,
597 comment_type=comment_type,
599 comment_type=comment_type,
598 resolves_comment_id=resolves_comment_id,
600 resolves_comment_id=resolves_comment_id,
599 auth_user=auth_user,
601 auth_user=auth_user,
600 extra_recipients=extra_recipients,
602 extra_recipients=extra_recipients,
601 send_email=send_email
603 send_email=send_email
602 )
604 )
603 is_inline = bool(comment.f_path and comment.line_no)
605 is_inline = comment.is_inline
604
606
605 if allowed_to_change_status and status:
607 if allowed_to_change_status and status:
606 old_calculated_status = pull_request.calculated_review_status()
608 old_calculated_status = pull_request.calculated_review_status()
607 ChangesetStatusModel().set_status(
609 ChangesetStatusModel().set_status(
608 pull_request.target_repo.repo_id,
610 pull_request.target_repo.repo_id,
609 status,
611 status,
610 apiuser.user_id,
612 apiuser.user_id,
611 comment,
613 comment,
612 pull_request=pull_request.pull_request_id
614 pull_request=pull_request.pull_request_id
613 )
615 )
614 Session().flush()
616 Session().flush()
615
617
616 Session().commit()
618 Session().commit()
617
619
618 PullRequestModel().trigger_pull_request_hook(
620 PullRequestModel().trigger_pull_request_hook(
619 pull_request, apiuser, 'comment',
621 pull_request, apiuser, 'comment',
620 data={'comment': comment})
622 data={'comment': comment})
621
623
622 if allowed_to_change_status and status:
624 if allowed_to_change_status and status:
623 # we now calculate the status of pull request, and based on that
625 # we now calculate the status of pull request, and based on that
624 # calculation we set the commits status
626 # calculation we set the commits status
625 calculated_status = pull_request.calculated_review_status()
627 calculated_status = pull_request.calculated_review_status()
626 if old_calculated_status != calculated_status:
628 if old_calculated_status != calculated_status:
627 PullRequestModel().trigger_pull_request_hook(
629 PullRequestModel().trigger_pull_request_hook(
628 pull_request, apiuser, 'review_status_change',
630 pull_request, apiuser, 'review_status_change',
629 data={'status': calculated_status})
631 data={'status': calculated_status})
630
632
631 data = {
633 data = {
632 'pull_request_id': pull_request.pull_request_id,
634 'pull_request_id': pull_request.pull_request_id,
633 'comment_id': comment.comment_id if comment else None,
635 'comment_id': comment.comment_id if comment else None,
634 'status': {'given': status, 'was_changed': status_change},
636 'status': {'given': status, 'was_changed': status_change},
635 }
637 }
636
638
637 comment_broadcast_channel = channelstream.comment_channel(
639 comment_broadcast_channel = channelstream.comment_channel(
638 db_repo_name, pull_request_obj=pull_request)
640 db_repo_name, pull_request_obj=pull_request)
639
641
640 comment_data = data
642 comment_data = data
641 comment_type = 'inline' if is_inline else 'general'
643 comment_type = 'inline' if is_inline else 'general'
642 channelstream.comment_channelstream_push(
644 channelstream.comment_channelstream_push(
643 request, comment_broadcast_channel, apiuser,
645 request, comment_broadcast_channel, apiuser,
644 _('posted a new {} comment').format(comment_type),
646 _('posted a new {} comment').format(comment_type),
645 comment_data=comment_data)
647 comment_data=comment_data)
646
648
647 return data
649 return data
648
650
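
As a usage sketch, approving a pull request with a short message could look like this (same hypothetical `api_call` helper; IDs and text are placeholders):

.. code-block:: python

    # comment and change the review status in a single call
    result = api_call(
        'comment_pull_request',
        pullrequestid=1,
        message='Looks good to me',
        status='approved')
    # result['status'] carries the given status and whether it actually changed
    print(result['comment_id'], result['status'])
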
651 def _reviewers_validation(obj_list):
652 schema = ReviewerListSchema()
653 try:
654 reviewer_objects = schema.deserialize(obj_list)
655 except Invalid as err:
656 raise JSONRPCValidationError(colander_exc=err)
657
658 # validate users
659 for reviewer_object in reviewer_objects:
660 user = get_user_or_error(reviewer_object['username'])
661 reviewer_object['user_id'] = user.user_id
662 return reviewer_objects
663
649
664
650 @jsonrpc_method()
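
For reference, `_reviewers_validation` above deserializes the reviewer and observer payloads described in the docstrings below and resolves each username to a user. An illustrative payload (values made up) looks like this:

.. code-block:: python

    # illustrative payload accepted by ReviewerListSchema (values are made up)
    reviewers = [
        {'username': 'lisa', 'reasons': ['original author'], 'mandatory': True},
        {'username': 'nick', 'reasons': ['code owner'], 'mandatory': False},
    ]
    # after validation each entry additionally carries the resolved 'user_id'
    # of the matched user
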
665 @jsonrpc_method()
651 def create_pull_request(
666 def create_pull_request(
652 request, apiuser, source_repo, target_repo, source_ref, target_ref,
667 request, apiuser, source_repo, target_repo, source_ref, target_ref,
653 owner=Optional(OAttr('apiuser')), title=Optional(''), description=Optional(''),
668 owner=Optional(OAttr('apiuser')), title=Optional(''), description=Optional(''),
654 description_renderer=Optional(''), reviewers=Optional(None)):
669 description_renderer=Optional(''),
670 reviewers=Optional(None), observers=Optional(None)):
655 """
671 """
656 Creates a new pull request.
672 Creates a new pull request.
657
673
658 Accepts refs in the following formats:
674 Accepts refs in the following formats:
659
675
660 * branch:<branch_name>:<sha>
676 * branch:<branch_name>:<sha>
661 * branch:<branch_name>
677 * branch:<branch_name>
662 * bookmark:<bookmark_name>:<sha> (Mercurial only)
678 * bookmark:<bookmark_name>:<sha> (Mercurial only)
663 * bookmark:<bookmark_name> (Mercurial only)
679 * bookmark:<bookmark_name> (Mercurial only)
664
680
665 :param apiuser: This is filled automatically from the |authtoken|.
681 :param apiuser: This is filled automatically from the |authtoken|.
666 :type apiuser: AuthUser
682 :type apiuser: AuthUser
667 :param source_repo: Set the source repository name.
683 :param source_repo: Set the source repository name.
668 :type source_repo: str
684 :type source_repo: str
669 :param target_repo: Set the target repository name.
685 :param target_repo: Set the target repository name.
670 :type target_repo: str
686 :type target_repo: str
671 :param source_ref: Set the source ref name.
687 :param source_ref: Set the source ref name.
672 :type source_ref: str
688 :type source_ref: str
673 :param target_ref: Set the target ref name.
689 :param target_ref: Set the target ref name.
674 :type target_ref: str
690 :type target_ref: str
675 :param owner: user_id or username of the pull request owner
691 :param owner: user_id or username of the pull request owner
676 :type owner: Optional(str)
692 :type owner: Optional(str)
677 :param title: Optionally set the pull request title; it is generated otherwise.
693 :param title: Optionally set the pull request title; it is generated otherwise.
678 :type title: str
694 :type title: str
679 :param description: Set the pull request description.
695 :param description: Set the pull request description.
680 :type description: Optional(str)
696 :type description: Optional(str)
681 :type description_renderer: Optional(str)
697 :type description_renderer: Optional(str)
682 :param description_renderer: Set pull request renderer for the description.
698 :param description_renderer: Set pull request renderer for the description.
683 It should be 'rst', 'markdown' or 'plain'. If not given, the default
699 It should be 'rst', 'markdown' or 'plain'. If not given, the default
684 system renderer will be used.
700 system renderer will be used.
685 :param reviewers: Set the new pull request reviewers list.
701 :param reviewers: Set the new pull request reviewers list.
686 Reviewers defined by review rules will be added automatically to the
702 Reviewers defined by review rules will be added automatically to the
687 defined list.
703 defined list.
688 :type reviewers: Optional(list)
704 :type reviewers: Optional(list)
689 Accepts username strings or objects of the format:
705 Accepts username strings or objects of the format:
690
706
691 [{'username': 'nick', 'reasons': ['original author'], 'mandatory': <bool>}]
707 [{'username': 'nick', 'reasons': ['original author'], 'mandatory': <bool>}]
708 :param observers: Set the new pull request observers list.
709 Observers defined by review rules will be added automatically to the
710 defined list. This feature is only available in RhodeCode EE.
711 :type observers: Optional(list)
712 Accepts username strings or objects of the format:
713
714 [{'username': 'nick', 'reasons': ['original author']}]
692 """
715 """
693
716
694 source_db_repo = get_repo_or_error(source_repo)
717 source_db_repo = get_repo_or_error(source_repo)
695 target_db_repo = get_repo_or_error(target_repo)
718 target_db_repo = get_repo_or_error(target_repo)
696 if not has_superadmin_permission(apiuser):
719 if not has_superadmin_permission(apiuser):
697 _perms = ('repository.admin', 'repository.write', 'repository.read',)
720 _perms = ('repository.admin', 'repository.write', 'repository.read',)
698 validate_repo_permissions(apiuser, source_repo, source_db_repo, _perms)
721 validate_repo_permissions(apiuser, source_repo, source_db_repo, _perms)
699
722
700 owner = validate_set_owner_permissions(apiuser, owner)
723 owner = validate_set_owner_permissions(apiuser, owner)
701
724
702 full_source_ref = resolve_ref_or_error(source_ref, source_db_repo)
725 full_source_ref = resolve_ref_or_error(source_ref, source_db_repo)
703 full_target_ref = resolve_ref_or_error(target_ref, target_db_repo)
726 full_target_ref = resolve_ref_or_error(target_ref, target_db_repo)
704
727
705 source_commit = get_commit_or_error(full_source_ref, source_db_repo)
728 get_commit_or_error(full_source_ref, source_db_repo)
706 target_commit = get_commit_or_error(full_target_ref, target_db_repo)
729 get_commit_or_error(full_target_ref, target_db_repo)
707
730
708 reviewer_objects = Optional.extract(reviewers) or []
731 reviewer_objects = Optional.extract(reviewers) or []
732 observer_objects = Optional.extract(observers) or []
709
733
710 # serialize and validate passed in given reviewers
734 # serialize and validate passed in given reviewers
711 if reviewer_objects:
735 if reviewer_objects:
712 schema = ReviewerListSchema()
736 reviewer_objects = _reviewers_validation(reviewer_objects)
713 try:
714 reviewer_objects = schema.deserialize(reviewer_objects)
715 except Invalid as err:
716 raise JSONRPCValidationError(colander_exc=err)
717
737
718 # validate users
738 if observer_objects:
719 for reviewer_object in reviewer_objects:
739 observer_objects = _reviewers_validation(observer_objects)
720 user = get_user_or_error(reviewer_object['username'])
721 reviewer_object['user_id'] = user.user_id
722
740
723 get_default_reviewers_data, validate_default_reviewers, validate_observers = \
741 get_default_reviewers_data, validate_default_reviewers, validate_observers = \
724 PullRequestModel().get_reviewer_functions()
742 PullRequestModel().get_reviewer_functions()
725
743
744 source_ref_obj = unicode_to_reference(full_source_ref)
745 target_ref_obj = unicode_to_reference(full_target_ref)
746
726 # recalculate reviewers logic, to make sure we can validate this
747 # recalculate reviewers logic, to make sure we can validate this
727 default_reviewers_data = get_default_reviewers_data(
748 default_reviewers_data = get_default_reviewers_data(
728 owner,
749 owner,
729 source_repo,
750 source_db_repo,
730 Reference(source_type, source_name, source_commit_id),
751 source_ref_obj,
731 target_repo,
752 target_db_repo,
732 Reference(target_type, target_name, target_commit_id)
753 target_ref_obj,
733 )
754 )
734
755
735 # now MERGE our given with the calculated
756 # now MERGE our given with the calculated from the default rules
736 reviewer_objects = default_reviewers_data['reviewers'] + reviewer_objects
757 just_reviewers = [
758 x for x in default_reviewers_data['reviewers']
759 if x['role'] == PullRequestReviewers.ROLE_REVIEWER]
760 reviewer_objects = just_reviewers + reviewer_objects
737
761
738 try:
762 try:
739 reviewers = validate_default_reviewers(
763 reviewers = validate_default_reviewers(
740 reviewer_objects, default_reviewers_data)
764 reviewer_objects, default_reviewers_data)
741 except ValueError as e:
765 except ValueError as e:
742 raise JSONRPCError('Reviewers Validation: {}'.format(e))
766 raise JSONRPCError('Reviewers Validation: {}'.format(e))
743
767
768 # now MERGE our given with the calculated from the default rules
769 just_observers = [
770 x for x in default_reviewers_data['reviewers']
771 if x['role'] == PullRequestReviewers.ROLE_OBSERVER]
772 observer_objects = just_observers + observer_objects
773
774 try:
775 observers = validate_observers(
776 observer_objects, default_reviewers_data)
777 except ValueError as e:
778 raise JSONRPCError('Observer Validation: {}'.format(e))
779
744 title = Optional.extract(title)
780 title = Optional.extract(title)
745 if not title:
781 if not title:
746 title_source_ref = source_ref.split(':', 2)[1]
782 title_source_ref = source_ref_obj.name
747 title = PullRequestModel().generate_pullrequest_title(
783 title = PullRequestModel().generate_pullrequest_title(
748 source=source_repo,
784 source=source_repo,
749 source_ref=title_source_ref,
785 source_ref=title_source_ref,
750 target=target_repo
786 target=target_repo
751 )
787 )
752
788
753 diff_info = default_reviewers_data['diff_info']
789 diff_info = default_reviewers_data['diff_info']
754 common_ancestor_id = diff_info['ancestor']
790 common_ancestor_id = diff_info['ancestor']
755 commits = diff_info['commits']
791 # NOTE(marcink): reversed is consistent with how we open it in the WEB interface
792 commits = [commit['commit_id'] for commit in reversed(diff_info['commits'])]
756
793
757 if not common_ancestor_id:
794 if not common_ancestor_id:
758 raise JSONRPCError('no common ancestor found')
795 raise JSONRPCError('no common ancestor found between specified references')
759
796
760 if not commits:
797 if not commits:
761 raise JSONRPCError('no commits found')
798 raise JSONRPCError('no commits found for merge between specified references')
762
763 # NOTE(marcink): reversed is consistent with how we open it in the WEB interface
764 revisions = [commit.raw_id for commit in reversed(commits)]
765
799
766 # recalculate target ref based on ancestor
800 # recalculate target ref based on ancestor
767 target_ref_type, target_ref_name, __ = full_target_ref.split(':')
801 full_target_ref = ':'.join((target_ref_obj.type, target_ref_obj.name, common_ancestor_id))
768 full_target_ref = ':'.join((target_ref_type, target_ref_name, common_ancestor_id))
769
802
770 # fetch renderer, if set fallback to plain in case of PR
803 # fetch renderer, if set fallback to plain in case of PR
771 rc_config = SettingsModel().get_all_settings()
804 rc_config = SettingsModel().get_all_settings()
772 default_system_renderer = rc_config.get('rhodecode_markup_renderer', 'plain')
805 default_system_renderer = rc_config.get('rhodecode_markup_renderer', 'plain')
773 description = Optional.extract(description)
806 description = Optional.extract(description)
774 description_renderer = Optional.extract(description_renderer) or default_system_renderer
807 description_renderer = Optional.extract(description_renderer) or default_system_renderer
775
808
776 pull_request = PullRequestModel().create(
809 pull_request = PullRequestModel().create(
777 created_by=owner.user_id,
810 created_by=owner.user_id,
778 source_repo=source_repo,
811 source_repo=source_repo,
779 source_ref=full_source_ref,
812 source_ref=full_source_ref,
780 target_repo=target_repo,
813 target_repo=target_repo,
781 target_ref=full_target_ref,
814 target_ref=full_target_ref,
782 common_ancestor_id=common_ancestor_id,
815 common_ancestor_id=common_ancestor_id,
783 revisions=revisions,
816 revisions=commits,
784 reviewers=reviewers,
817 reviewers=reviewers,
818 observers=observers,
785 title=title,
819 title=title,
786 description=description,
820 description=description,
787 description_renderer=description_renderer,
821 description_renderer=description_renderer,
788 reviewer_data=default_reviewers_data,
822 reviewer_data=default_reviewers_data,
789 auth_user=apiuser
823 auth_user=apiuser
790 )
824 )
791
825
792 Session().commit()
826 Session().commit()
793 data = {
827 data = {
794 'msg': 'Created new pull request `{}`'.format(title),
828 'msg': 'Created new pull request `{}`'.format(title),
795 'pull_request_id': pull_request.pull_request_id,
829 'pull_request_id': pull_request.pull_request_id,
796 }
830 }
797 return data
831 return data
798
832
799
833
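
Tying it together, a hypothetical `create_pull_request` call with explicit reviewers and observers could look like the sketch below; all repository names, refs and usernames are placeholders, with refs given in the `branch:<branch_name>` format accepted above.

.. code-block:: python

    # open a pull request between two branches (reuses the api_call sketch above)
    result = api_call(
        'create_pull_request',
        source_repo='group/my-fork',
        target_repo='group/my-repo',
        source_ref='branch:feature-x',
        target_ref='branch:master',
        title='Feature X',
        reviewers=[{'username': 'lisa', 'reasons': ['domain expert'], 'mandatory': True}],
        observers=['nick'])
    print(result['msg'], result['pull_request_id'])
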
800 @jsonrpc_method()
834 @jsonrpc_method()
801 def update_pull_request(
835 def update_pull_request(
802 request, apiuser, pullrequestid, repoid=Optional(None),
836 request, apiuser, pullrequestid, repoid=Optional(None),
803 title=Optional(''), description=Optional(''), description_renderer=Optional(''),
837 title=Optional(''), description=Optional(''), description_renderer=Optional(''),
804 reviewers=Optional(None), update_commits=Optional(None)):
838 reviewers=Optional(None), observers=Optional(None), update_commits=Optional(None)):
805 """
839 """
806 Updates a pull request.
840 Updates a pull request.
807
841
808 :param apiuser: This is filled automatically from the |authtoken|.
842 :param apiuser: This is filled automatically from the |authtoken|.
809 :type apiuser: AuthUser
843 :type apiuser: AuthUser
810 :param repoid: Optional repository name or repository ID.
844 :param repoid: Optional repository name or repository ID.
811 :type repoid: str or int
845 :type repoid: str or int
812 :param pullrequestid: The pull request ID.
846 :param pullrequestid: The pull request ID.
813 :type pullrequestid: int
847 :type pullrequestid: int
814 :param title: Set the pull request title.
848 :param title: Set the pull request title.
815 :type title: str
849 :type title: str
816 :param description: Update pull request description.
850 :param description: Update pull request description.
817 :type description: Optional(str)
851 :type description: Optional(str)
818 :type description_renderer: Optional(str)
852 :type description_renderer: Optional(str)
819 :param description_renderer: Update pull request renderer for the description.
853 :param description_renderer: Update pull request renderer for the description.
820 It should be 'rst', 'markdown' or 'plain'
854 It should be 'rst', 'markdown' or 'plain'
821 :param reviewers: Update pull request reviewers list with new value.
855 :param reviewers: Update pull request reviewers list with new value.
822 :type reviewers: Optional(list)
856 :type reviewers: Optional(list)
823 Accepts username strings or objects of the format:
857 Accepts username strings or objects of the format:
824
858
825 [{'username': 'nick', 'reasons': ['original author'], 'mandatory': <bool>}]
859 [{'username': 'nick', 'reasons': ['original author'], 'mandatory': <bool>}]
860 :param observers: Update pull request observers list with new value.
861 :type observers: Optional(list)
862 Accepts username strings or objects of the format:
826
863
864 [{'username': 'nick', 'reasons': ['should be aware about this PR']}]
827 :param update_commits: Trigger update of commits for this pull request
865 :param update_commits: Trigger update of commits for this pull request
828 :type update_commits: Optional(bool)
866 :type update_commits: Optional(bool)
829
867
830 Example output:
868 Example output:
831
869
832 .. code-block:: bash
870 .. code-block:: bash
833
871
834 id : <id_given_in_input>
872 id : <id_given_in_input>
835 result : {
873 result : {
836 "msg": "Updated pull request `63`",
874 "msg": "Updated pull request `63`",
837 "pull_request": <pull_request_object>,
875 "pull_request": <pull_request_object>,
838 "updated_reviewers": {
876 "updated_reviewers": {
839 "added": [
877 "added": [
840 "username"
878 "username"
841 ],
879 ],
842 "removed": []
880 "removed": []
843 },
881 },
882 "updated_observers": {
883 "added": [
884 "username"
885 ],
886 "removed": []
887 },
844 "updated_commits": {
888 "updated_commits": {
845 "added": [
889 "added": [
846 "<sha1_hash>"
890 "<sha1_hash>"
847 ],
891 ],
848 "common": [
892 "common": [
849 "<sha1_hash>",
893 "<sha1_hash>",
850 "<sha1_hash>",
894 "<sha1_hash>",
851 ],
895 ],
852 "removed": []
896 "removed": []
853 }
897 }
854 }
898 }
855 error : null
899 error : null
856 """
900 """
857
901
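
A client-side sketch of this call with the same hypothetical `api_call` helper; note that `update_commits` only succeeds while the pull request is in the 'created' state, as enforced further below.

.. code-block:: python

    # pull new commits from the source ref and inspect what changed
    result = api_call('update_pull_request', pullrequestid=1, update_commits=True)
    print(result['msg'])
    print('new commits:', result['updated_commits']['added'])
    print('reviewers added:', result['updated_reviewers']['added'])
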
858 pull_request = get_pull_request_or_error(pullrequestid)
902 pull_request = get_pull_request_or_error(pullrequestid)
859 if Optional.extract(repoid):
903 if Optional.extract(repoid):
860 repo = get_repo_or_error(repoid)
904 repo = get_repo_or_error(repoid)
861 else:
905 else:
862 repo = pull_request.target_repo
906 repo = pull_request.target_repo
863
907
864 if not PullRequestModel().check_user_update(
908 if not PullRequestModel().check_user_update(
865 pull_request, apiuser, api=True):
909 pull_request, apiuser, api=True):
866 raise JSONRPCError(
910 raise JSONRPCError(
867 'pull request `%s` update failed, no permission to update.' % (
911 'pull request `%s` update failed, no permission to update.' % (
868 pullrequestid,))
912 pullrequestid,))
869 if pull_request.is_closed():
913 if pull_request.is_closed():
870 raise JSONRPCError(
914 raise JSONRPCError(
871 'pull request `%s` update failed, pull request is closed' % (
915 'pull request `%s` update failed, pull request is closed' % (
872 pullrequestid,))
916 pullrequestid,))
873
917
874 reviewer_objects = Optional.extract(reviewers) or []
918 reviewer_objects = Optional.extract(reviewers) or []
875
919 observer_objects = Optional.extract(observers) or []
876 if reviewer_objects:
877 schema = ReviewerListSchema()
878 try:
879 reviewer_objects = schema.deserialize(reviewer_objects)
880 except Invalid as err:
881 raise JSONRPCValidationError(colander_exc=err)
882
883 # validate users
884 for reviewer_object in reviewer_objects:
885 user = get_user_or_error(reviewer_object['username'])
886 reviewer_object['user_id'] = user.user_id
887
888 get_default_reviewers_data, get_validated_reviewers, validate_observers = \
889 PullRequestModel().get_reviewer_functions()
890
891 # re-use stored rules
892 reviewer_rules = pull_request.reviewer_data
893 try:
894 reviewers = get_validated_reviewers(reviewer_objects, reviewer_rules)
895 except ValueError as e:
896 raise JSONRPCError('Reviewers Validation: {}'.format(e))
897 else:
898 reviewers = []
899
920
900 title = Optional.extract(title)
921 title = Optional.extract(title)
901 description = Optional.extract(description)
922 description = Optional.extract(description)
902 description_renderer = Optional.extract(description_renderer)
923 description_renderer = Optional.extract(description_renderer)
903
924
904 # Update title/description
925 # Update title/description
905 title_changed = False
926 title_changed = False
906 if title or description:
927 if title or description:
907 PullRequestModel().edit(
928 PullRequestModel().edit(
908 pull_request,
929 pull_request,
909 title or pull_request.title,
930 title or pull_request.title,
910 description or pull_request.description,
931 description or pull_request.description,
911 description_renderer or pull_request.description_renderer,
932 description_renderer or pull_request.description_renderer,
912 apiuser)
933 apiuser)
913 Session().commit()
934 Session().commit()
914 title_changed = True
935 title_changed = True
915
936
916 commit_changes = {"added": [], "common": [], "removed": []}
937 commit_changes = {"added": [], "common": [], "removed": []}
917
938
918 # Update commits
939 # Update commits
919 commits_changed = False
940 commits_changed = False
920 if str2bool(Optional.extract(update_commits)):
941 if str2bool(Optional.extract(update_commits)):
921
942
922 if pull_request.pull_request_state != PullRequest.STATE_CREATED:
943 if pull_request.pull_request_state != PullRequest.STATE_CREATED:
923 raise JSONRPCError(
944 raise JSONRPCError(
924 'Operation forbidden because pull request is in state {}, '
945 'Operation forbidden because pull request is in state {}, '
925 'only state {} is allowed.'.format(
946 'only state {} is allowed.'.format(
926 pull_request.pull_request_state, PullRequest.STATE_CREATED))
947 pull_request.pull_request_state, PullRequest.STATE_CREATED))
927
948
928 with pull_request.set_state(PullRequest.STATE_UPDATING):
949 with pull_request.set_state(PullRequest.STATE_UPDATING):
929 if PullRequestModel().has_valid_update_type(pull_request):
950 if PullRequestModel().has_valid_update_type(pull_request):
930 db_user = apiuser.get_instance()
951 db_user = apiuser.get_instance()
931 update_response = PullRequestModel().update_commits(
952 update_response = PullRequestModel().update_commits(
932 pull_request, db_user)
953 pull_request, db_user)
933 commit_changes = update_response.changes or commit_changes
954 commit_changes = update_response.changes or commit_changes
934 Session().commit()
955 Session().commit()
935 commits_changed = True
956 commits_changed = True
936
957
937 # Update reviewers
958 # Update reviewers
959 # serialize and validate passed in given reviewers
960 if reviewer_objects:
961 reviewer_objects = _reviewers_validation(reviewer_objects)
962
963 if observer_objects:
964 observer_objects = _reviewers_validation(observer_objects)
965
966 # re-use stored rules
967 default_reviewers_data = pull_request.reviewer_data
968
969 __, validate_default_reviewers, validate_observers = \
970 PullRequestModel().get_reviewer_functions()
971
972 if reviewer_objects:
973 try:
974 reviewers = validate_default_reviewers(reviewer_objects, default_reviewers_data)
975 except ValueError as e:
976 raise JSONRPCError('Reviewers Validation: {}'.format(e))
977 else:
978 reviewers = []
979
980 if observer_objects:
981 try:
982 observers = validate_observers(observer_objects, default_reviewers_data)
983 except ValueError as e:
984 raise JSONRPCError('Observer Validation: {}'.format(e))
985 else:
986 observers = []
987
938 reviewers_changed = False
988 reviewers_changed = False
939 reviewers_changes = {"added": [], "removed": []}
989 reviewers_changes = {"added": [], "removed": []}
940 if reviewers:
990 if reviewers:
941 old_calculated_status = pull_request.calculated_review_status()
991 old_calculated_status = pull_request.calculated_review_status()
942 added_reviewers, removed_reviewers = \
992 added_reviewers, removed_reviewers = \
943 PullRequestModel().update_reviewers(pull_request, reviewers, apiuser)
993 PullRequestModel().update_reviewers(pull_request, reviewers, apiuser.get_instance())
944
994
945 reviewers_changes['added'] = sorted(
995 reviewers_changes['added'] = sorted(
946 [get_user_or_error(n).username for n in added_reviewers])
996 [get_user_or_error(n).username for n in added_reviewers])
947 reviewers_changes['removed'] = sorted(
997 reviewers_changes['removed'] = sorted(
948 [get_user_or_error(n).username for n in removed_reviewers])
998 [get_user_or_error(n).username for n in removed_reviewers])
949 Session().commit()
999 Session().commit()
950
1000
951 # trigger status changed if change in reviewers changes the status
1001 # trigger status changed if change in reviewers changes the status
952 calculated_status = pull_request.calculated_review_status()
1002 calculated_status = pull_request.calculated_review_status()
953 if old_calculated_status != calculated_status:
1003 if old_calculated_status != calculated_status:
954 PullRequestModel().trigger_pull_request_hook(
1004 PullRequestModel().trigger_pull_request_hook(
955 pull_request, apiuser, 'review_status_change',
1005 pull_request, apiuser, 'review_status_change',
956 data={'status': calculated_status})
1006 data={'status': calculated_status})
957 reviewers_changed = True
1007 reviewers_changed = True
958
1008
959 observers_changed = False
1009 observers_changed = False
1010 observers_changes = {"added": [], "removed": []}
1011 if observers:
1012 added_observers, removed_observers = \
1013 PullRequestModel().update_observers(pull_request, observers, apiuser.get_instance())
1014
1015 observers_changes['added'] = sorted(
1016 [get_user_or_error(n).username for n in added_observers])
1017 observers_changes['removed'] = sorted(
1018 [get_user_or_error(n).username for n in removed_observers])
1019 Session().commit()
1020
1021 observers_changed = True
960
1022
961 # push changed to channelstream
1023 # push changed to channelstream
962 if commits_changed or reviewers_changed or observers_changed:
1024 if commits_changed or reviewers_changed or observers_changed:
963 pr_broadcast_channel = channelstream.pr_channel(pull_request)
1025 pr_broadcast_channel = channelstream.pr_channel(pull_request)
964 msg = 'Pull request was updated.'
1026 msg = 'Pull request was updated.'
965 channelstream.pr_update_channelstream_push(
1027 channelstream.pr_update_channelstream_push(
966 request, pr_broadcast_channel, apiuser, msg)
1028 request, pr_broadcast_channel, apiuser, msg)
967
1029
968 data = {
1030 data = {
969 'msg': 'Updated pull request `{}`'.format(
1031 'msg': 'Updated pull request `{}`'.format(pull_request.pull_request_id),
970 pull_request.pull_request_id),
971 'pull_request': pull_request.get_api_data(),
1032 'pull_request': pull_request.get_api_data(),
972 'updated_commits': commit_changes,
1033 'updated_commits': commit_changes,
973 'updated_reviewers': reviewers_changes
1034 'updated_reviewers': reviewers_changes,
1035 'updated_observers': observers_changes,
974 }
1036 }
975
1037
976 return data
1038 return data
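For reference, a minimal sketch of calling `update_pull_request` over the JSON-RPC API. The host, the `/_admin/api` endpoint path, the auth token and the `rc_api` helper are assumptions for illustration; only the method name and argument names come from the code above, and the reviewer/observer entries show just the `username` key that the validation reads (the schema may expect additional keys).

.. code-block:: python

    # Minimal JSON-RPC sketch; URL and token are placeholders, rc_api is hypothetical.
    import requests

    API_URL = 'https://code.example.com/_admin/api'  # assumed endpoint
    AUTH_TOKEN = 'SECRET_AUTH_TOKEN'                 # placeholder token

    def rc_api(method, **args):
        """POST one JSON-RPC request and return the decoded response."""
        payload = {'id': 1, 'auth_token': AUTH_TOKEN, 'method': method, 'args': args}
        resp = requests.post(API_URL, json=payload)
        resp.raise_for_status()
        return resp.json()

    # Update the title, pull in new commits and replace reviewers/observers.
    result = rc_api(
        'update_pull_request',
        pullrequestid=42,
        title='Reworked error handling',
        update_commits=True,
        reviewers=[{'username': 'reviewer-1'}],  # minimal shape read by the validation
        observers=[{'username': 'observer-1'}],
    )
    print(result['result']['updated_reviewers'])
    print(result['result']['updated_observers'])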
977
1039
978
1040
979 @jsonrpc_method()
1041 @jsonrpc_method()
980 def close_pull_request(
1042 def close_pull_request(
981 request, apiuser, pullrequestid, repoid=Optional(None),
1043 request, apiuser, pullrequestid, repoid=Optional(None),
982 userid=Optional(OAttr('apiuser')), message=Optional('')):
1044 userid=Optional(OAttr('apiuser')), message=Optional('')):
983 """
1045 """
984 Close the pull request specified by `pullrequestid`.
1046 Close the pull request specified by `pullrequestid`.
985
1047
986 :param apiuser: This is filled automatically from the |authtoken|.
1048 :param apiuser: This is filled automatically from the |authtoken|.
987 :type apiuser: AuthUser
1049 :type apiuser: AuthUser
988 :param repoid: Repository name or repository ID to which the pull
1050 :param repoid: Repository name or repository ID to which the pull
989 request belongs.
1051 request belongs.
990 :type repoid: str or int
1052 :type repoid: str or int
991 :param pullrequestid: ID of the pull request to be closed.
1053 :param pullrequestid: ID of the pull request to be closed.
992 :type pullrequestid: int
1054 :type pullrequestid: int
993 :param userid: Close the pull request as this user.
1055 :param userid: Close the pull request as this user.
994 :type userid: Optional(str or int)
1056 :type userid: Optional(str or int)
995 :param message: Optional message to close the Pull Request with. If not
1057 :param message: Optional message to close the Pull Request with. If not
996 specified it will be generated automatically.
1058 specified it will be generated automatically.
997 :type message: Optional(str)
1059 :type message: Optional(str)
998
1060
999 Example output:
1061 Example output:
1000
1062
1001 .. code-block:: bash
1063 .. code-block:: bash
1002
1064
1003 "id": <id_given_in_input>,
1065 "id": <id_given_in_input>,
1004 "result": {
1066 "result": {
1005 "pull_request_id": "<int>",
1067 "pull_request_id": "<int>",
1006 "close_status": "<str:status_lbl>,
1068 "close_status": "<str:status_lbl>,
1007 "closed": "<bool>"
1069 "closed": "<bool>"
1008 },
1070 },
1009 "error": null
1071 "error": null
1010
1072
1011 """
1073 """
1012 _ = request.translate
1074 _ = request.translate
1013
1075
1014 pull_request = get_pull_request_or_error(pullrequestid)
1076 pull_request = get_pull_request_or_error(pullrequestid)
1015 if Optional.extract(repoid):
1077 if Optional.extract(repoid):
1016 repo = get_repo_or_error(repoid)
1078 repo = get_repo_or_error(repoid)
1017 else:
1079 else:
1018 repo = pull_request.target_repo
1080 repo = pull_request.target_repo
1019
1081
1020 is_repo_admin = HasRepoPermissionAnyApi('repository.admin')(
1082 is_repo_admin = HasRepoPermissionAnyApi('repository.admin')(
1021 user=apiuser, repo_name=repo.repo_name)
1083 user=apiuser, repo_name=repo.repo_name)
1022 if not isinstance(userid, Optional):
1084 if not isinstance(userid, Optional):
1023 if has_superadmin_permission(apiuser) or is_repo_admin:
1085 if has_superadmin_permission(apiuser) or is_repo_admin:
1024 apiuser = get_user_or_error(userid)
1086 apiuser = get_user_or_error(userid)
1025 else:
1087 else:
1026 raise JSONRPCError('userid is not the same as your user')
1088 raise JSONRPCError('userid is not the same as your user')
1027
1089
1028 if pull_request.is_closed():
1090 if pull_request.is_closed():
1029 raise JSONRPCError(
1091 raise JSONRPCError(
1030 'pull request `%s` is already closed' % (pullrequestid,))
1092 'pull request `%s` is already closed' % (pullrequestid,))
1031
1093
1032 # only owner or admin or person with write permissions
1094 # only owner or admin or person with write permissions
1033 allowed_to_close = PullRequestModel().check_user_update(
1095 allowed_to_close = PullRequestModel().check_user_update(
1034 pull_request, apiuser, api=True)
1096 pull_request, apiuser, api=True)
1035
1097
1036 if not allowed_to_close:
1098 if not allowed_to_close:
1037 raise JSONRPCError(
1099 raise JSONRPCError(
1038 'pull request `%s` close failed, no permission to close.' % (
1100 'pull request `%s` close failed, no permission to close.' % (
1039 pullrequestid,))
1101 pullrequestid,))
1040
1102
1041 # message we're using to close the PR, else it's automatically generated
1103 # message we're using to close the PR, else it's automatically generated
1042 message = Optional.extract(message)
1104 message = Optional.extract(message)
1043
1105
1044 # finally close the PR, with proper message comment
1106 # finally close the PR, with proper message comment
1045 comment, status = PullRequestModel().close_pull_request_with_comment(
1107 comment, status = PullRequestModel().close_pull_request_with_comment(
1046 pull_request, apiuser, repo, message=message, auth_user=apiuser)
1108 pull_request, apiuser, repo, message=message, auth_user=apiuser)
1047 status_lbl = ChangesetStatus.get_status_lbl(status)
1109 status_lbl = ChangesetStatus.get_status_lbl(status)
1048
1110
1049 Session().commit()
1111 Session().commit()
1050
1112
1051 data = {
1113 data = {
1052 'pull_request_id': pull_request.pull_request_id,
1114 'pull_request_id': pull_request.pull_request_id,
1053 'close_status': status_lbl,
1115 'close_status': status_lbl,
1054 'closed': True,
1116 'closed': True,
1055 }
1117 }
1056 return data
1118 return data
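A similar sketch for `close_pull_request`, reusing the hypothetical `rc_api` helper from the previous example; the pull request id and message are illustrative.

.. code-block:: python

    # Close a pull request with an explicit message (rc_api as sketched above).
    result = rc_api(
        'close_pull_request',
        pullrequestid=42,
        message='Superseded by a reworked pull request.',
    )
    closed = result['result']
    print(closed['pull_request_id'], closed['close_status'], closed['closed'])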
@@ -1,2523 +1,2523 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2020 RhodeCode GmbH
3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22 import time
22 import time
23
23
24 import rhodecode
24 import rhodecode
25 from rhodecode.api import (
25 from rhodecode.api import (
26 jsonrpc_method, JSONRPCError, JSONRPCForbidden, JSONRPCValidationError)
26 jsonrpc_method, JSONRPCError, JSONRPCForbidden, JSONRPCValidationError)
27 from rhodecode.api.utils import (
27 from rhodecode.api.utils import (
28 has_superadmin_permission, Optional, OAttr, get_repo_or_error,
28 has_superadmin_permission, Optional, OAttr, get_repo_or_error,
29 get_user_group_or_error, get_user_or_error, validate_repo_permissions,
29 get_user_group_or_error, get_user_or_error, validate_repo_permissions,
30 get_perm_or_error, parse_args, get_origin, build_commit_data,
30 get_perm_or_error, parse_args, get_origin, build_commit_data,
31 validate_set_owner_permissions)
31 validate_set_owner_permissions)
32 from rhodecode.lib import audit_logger, rc_cache, channelstream
32 from rhodecode.lib import audit_logger, rc_cache, channelstream
33 from rhodecode.lib import repo_maintenance
33 from rhodecode.lib import repo_maintenance
34 from rhodecode.lib.auth import (
34 from rhodecode.lib.auth import (
35 HasPermissionAnyApi, HasUserGroupPermissionAnyApi,
35 HasPermissionAnyApi, HasUserGroupPermissionAnyApi,
36 HasRepoPermissionAnyApi)
36 HasRepoPermissionAnyApi)
37 from rhodecode.lib.celerylib.utils import get_task_id
37 from rhodecode.lib.celerylib.utils import get_task_id
38 from rhodecode.lib.utils2 import (
38 from rhodecode.lib.utils2 import (
39 str2bool, time_to_datetime, safe_str, safe_int, safe_unicode)
39 str2bool, time_to_datetime, safe_str, safe_int, safe_unicode)
40 from rhodecode.lib.ext_json import json
40 from rhodecode.lib.ext_json import json
41 from rhodecode.lib.exceptions import (
41 from rhodecode.lib.exceptions import (
42 StatusChangeOnClosedPullRequestError, CommentVersionMismatch)
42 StatusChangeOnClosedPullRequestError, CommentVersionMismatch)
43 from rhodecode.lib.vcs import RepositoryError
43 from rhodecode.lib.vcs import RepositoryError
44 from rhodecode.lib.vcs.exceptions import NodeDoesNotExistError
44 from rhodecode.lib.vcs.exceptions import NodeDoesNotExistError
45 from rhodecode.model.changeset_status import ChangesetStatusModel
45 from rhodecode.model.changeset_status import ChangesetStatusModel
46 from rhodecode.model.comment import CommentsModel
46 from rhodecode.model.comment import CommentsModel
47 from rhodecode.model.db import (
47 from rhodecode.model.db import (
48 Session, ChangesetStatus, RepositoryField, Repository, RepoGroup,
48 Session, ChangesetStatus, RepositoryField, Repository, RepoGroup,
49 ChangesetComment)
49 ChangesetComment)
50 from rhodecode.model.permission import PermissionModel
50 from rhodecode.model.permission import PermissionModel
51 from rhodecode.model.pull_request import PullRequestModel
51 from rhodecode.model.pull_request import PullRequestModel
52 from rhodecode.model.repo import RepoModel
52 from rhodecode.model.repo import RepoModel
53 from rhodecode.model.scm import ScmModel, RepoList
53 from rhodecode.model.scm import ScmModel, RepoList
54 from rhodecode.model.settings import SettingsModel, VcsSettingsModel
54 from rhodecode.model.settings import SettingsModel, VcsSettingsModel
55 from rhodecode.model import validation_schema
55 from rhodecode.model import validation_schema
56 from rhodecode.model.validation_schema.schemas import repo_schema
56 from rhodecode.model.validation_schema.schemas import repo_schema
57
57
58 log = logging.getLogger(__name__)
58 log = logging.getLogger(__name__)
59
59
60
60
61 @jsonrpc_method()
61 @jsonrpc_method()
62 def get_repo(request, apiuser, repoid, cache=Optional(True)):
62 def get_repo(request, apiuser, repoid, cache=Optional(True)):
63 """
63 """
64 Gets an existing repository by its name or repository_id.
64 Gets an existing repository by its name or repository_id.
65
65
66 The members section of the output returns the user groups or users
66 The members section of the output returns the user groups or users
67 associated with that repository.
67 associated with that repository.
68
68
69 This command can only be run using an |authtoken| with admin rights,
69 This command can only be run using an |authtoken| with admin rights,
70 or users with at least read rights to the |repo|.
70 or users with at least read rights to the |repo|.
71
71
72 :param apiuser: This is filled automatically from the |authtoken|.
72 :param apiuser: This is filled automatically from the |authtoken|.
73 :type apiuser: AuthUser
73 :type apiuser: AuthUser
74 :param repoid: The repository name or repository id.
74 :param repoid: The repository name or repository id.
75 :type repoid: str or int
75 :type repoid: str or int
76 :param cache: use the cached value for last changeset
76 :param cache: use the cached value for last changeset
77 :type cache: Optional(bool)
77 :type cache: Optional(bool)
78
78
79 Example output:
79 Example output:
80
80
81 .. code-block:: bash
81 .. code-block:: bash
82
82
83 {
83 {
84 "error": null,
84 "error": null,
85 "id": <repo_id>,
85 "id": <repo_id>,
86 "result": {
86 "result": {
87 "clone_uri": null,
87 "clone_uri": null,
88 "created_on": "timestamp",
88 "created_on": "timestamp",
89 "description": "repo description",
89 "description": "repo description",
90 "enable_downloads": false,
90 "enable_downloads": false,
91 "enable_locking": false,
91 "enable_locking": false,
92 "enable_statistics": false,
92 "enable_statistics": false,
93 "followers": [
93 "followers": [
94 {
94 {
95 "active": true,
95 "active": true,
96 "admin": false,
96 "admin": false,
97 "api_key": "****************************************",
97 "api_key": "****************************************",
98 "api_keys": [
98 "api_keys": [
99 "****************************************"
99 "****************************************"
100 ],
100 ],
101 "email": "user@example.com",
101 "email": "user@example.com",
102 "emails": [
102 "emails": [
103 "user@example.com"
103 "user@example.com"
104 ],
104 ],
105 "extern_name": "rhodecode",
105 "extern_name": "rhodecode",
106 "extern_type": "rhodecode",
106 "extern_type": "rhodecode",
107 "firstname": "username",
107 "firstname": "username",
108 "ip_addresses": [],
108 "ip_addresses": [],
109 "language": null,
109 "language": null,
110 "last_login": "2015-09-16T17:16:35.854",
110 "last_login": "2015-09-16T17:16:35.854",
111 "lastname": "surname",
111 "lastname": "surname",
112 "user_id": <user_id>,
112 "user_id": <user_id>,
113 "username": "name"
113 "username": "name"
114 }
114 }
115 ],
115 ],
116 "fork_of": "parent-repo",
116 "fork_of": "parent-repo",
117 "landing_rev": [
117 "landing_rev": [
118 "rev",
118 "rev",
119 "tip"
119 "tip"
120 ],
120 ],
121 "last_changeset": {
121 "last_changeset": {
122 "author": "User <user@example.com>",
122 "author": "User <user@example.com>",
123 "branch": "default",
123 "branch": "default",
124 "date": "timestamp",
124 "date": "timestamp",
125 "message": "last commit message",
125 "message": "last commit message",
126 "parents": [
126 "parents": [
127 {
127 {
128 "raw_id": "commit-id"
128 "raw_id": "commit-id"
129 }
129 }
130 ],
130 ],
131 "raw_id": "commit-id",
131 "raw_id": "commit-id",
132 "revision": <revision number>,
132 "revision": <revision number>,
133 "short_id": "short id"
133 "short_id": "short id"
134 },
134 },
135 "lock_reason": null,
135 "lock_reason": null,
136 "locked_by": null,
136 "locked_by": null,
137 "locked_date": null,
137 "locked_date": null,
138 "owner": "owner-name",
138 "owner": "owner-name",
139 "permissions": [
139 "permissions": [
140 {
140 {
141 "name": "super-admin-name",
141 "name": "super-admin-name",
142 "origin": "super-admin",
142 "origin": "super-admin",
143 "permission": "repository.admin",
143 "permission": "repository.admin",
144 "type": "user"
144 "type": "user"
145 },
145 },
146 {
146 {
147 "name": "owner-name",
147 "name": "owner-name",
148 "origin": "owner",
148 "origin": "owner",
149 "permission": "repository.admin",
149 "permission": "repository.admin",
150 "type": "user"
150 "type": "user"
151 },
151 },
152 {
152 {
153 "name": "user-group-name",
153 "name": "user-group-name",
154 "origin": "permission",
154 "origin": "permission",
155 "permission": "repository.write",
155 "permission": "repository.write",
156 "type": "user_group"
156 "type": "user_group"
157 }
157 }
158 ],
158 ],
159 "private": true,
159 "private": true,
160 "repo_id": 676,
160 "repo_id": 676,
161 "repo_name": "user-group/repo-name",
161 "repo_name": "user-group/repo-name",
162 "repo_type": "hg"
162 "repo_type": "hg"
163 }
163 }
164 }
164 }
165 """
165 """
166
166
167 repo = get_repo_or_error(repoid)
167 repo = get_repo_or_error(repoid)
168 cache = Optional.extract(cache)
168 cache = Optional.extract(cache)
169
169
170 include_secrets = False
170 include_secrets = False
171 if has_superadmin_permission(apiuser):
171 if has_superadmin_permission(apiuser):
172 include_secrets = True
172 include_secrets = True
173 else:
173 else:
174 # check if we have at least read permission for this repo !
174 # check if we have at least read permission for this repo !
175 _perms = (
175 _perms = (
176 'repository.admin', 'repository.write', 'repository.read',)
176 'repository.admin', 'repository.write', 'repository.read',)
177 validate_repo_permissions(apiuser, repoid, repo, _perms)
177 validate_repo_permissions(apiuser, repoid, repo, _perms)
178
178
179 permissions = []
179 permissions = []
180 for _user in repo.permissions():
180 for _user in repo.permissions():
181 user_data = {
181 user_data = {
182 'name': _user.username,
182 'name': _user.username,
183 'permission': _user.permission,
183 'permission': _user.permission,
184 'origin': get_origin(_user),
184 'origin': get_origin(_user),
185 'type': "user",
185 'type': "user",
186 }
186 }
187 permissions.append(user_data)
187 permissions.append(user_data)
188
188
189 for _user_group in repo.permission_user_groups():
189 for _user_group in repo.permission_user_groups():
190 user_group_data = {
190 user_group_data = {
191 'name': _user_group.users_group_name,
191 'name': _user_group.users_group_name,
192 'permission': _user_group.permission,
192 'permission': _user_group.permission,
193 'origin': get_origin(_user_group),
193 'origin': get_origin(_user_group),
194 'type': "user_group",
194 'type': "user_group",
195 }
195 }
196 permissions.append(user_group_data)
196 permissions.append(user_group_data)
197
197
198 following_users = [
198 following_users = [
199 user.user.get_api_data(include_secrets=include_secrets)
199 user.user.get_api_data(include_secrets=include_secrets)
200 for user in repo.followers]
200 for user in repo.followers]
201
201
202 if not cache:
202 if not cache:
203 repo.update_commit_cache()
203 repo.update_commit_cache()
204 data = repo.get_api_data(include_secrets=include_secrets)
204 data = repo.get_api_data(include_secrets=include_secrets)
205 data['permissions'] = permissions
205 data['permissions'] = permissions
206 data['followers'] = following_users
206 data['followers'] = following_users
207 return data
207 return data
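A small usage sketch for `get_repo`, again with the hypothetical `rc_api` helper; passing `cache=False` forces the commit-cache update performed above. The repository name is a placeholder and the printed fields follow the example output in the docstring.

.. code-block:: python

    # Fetch one repository, bypassing the cached last-changeset data.
    repo = rc_api('get_repo', repoid='user-group/repo-name', cache=False)['result']
    print(repo['repo_id'], repo['last_changeset']['short_id'])
    for perm in repo['permissions']:
        print(perm['type'], perm['name'], perm['permission'])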
208
208
209
209
210 @jsonrpc_method()
210 @jsonrpc_method()
211 def get_repos(request, apiuser, root=Optional(None), traverse=Optional(True)):
211 def get_repos(request, apiuser, root=Optional(None), traverse=Optional(True)):
212 """
212 """
213 Lists all existing repositories.
213 Lists all existing repositories.
214
214
215 This command can only be run using an |authtoken| with admin rights,
215 This command can only be run using an |authtoken| with admin rights,
216 or users with at least read rights to |repos|.
216 or users with at least read rights to |repos|.
217
217
218 :param apiuser: This is filled automatically from the |authtoken|.
218 :param apiuser: This is filled automatically from the |authtoken|.
219 :type apiuser: AuthUser
219 :type apiuser: AuthUser
220 :param root: specify root repository group to fetch repositories.
220 :param root: specify root repository group to fetch repositories.
221 filters the returned repositories to be members of given root group.
221 filters the returned repositories to be members of given root group.
222 :type root: Optional(None)
222 :type root: Optional(None)
223 :param traverse: traverse given root into subrepositories. With this flag
223 :param traverse: traverse given root into subrepositories. With this flag
224 set to False, it will only return top-level repositories from `root`.
224 set to False, it will only return top-level repositories from `root`.
225 If root is empty it will return just top-level repositories.
225 If root is empty it will return just top-level repositories.
226 :type traverse: Optional(True)
226 :type traverse: Optional(True)
227
227
228
228
229 Example output:
229 Example output:
230
230
231 .. code-block:: bash
231 .. code-block:: bash
232
232
233 id : <id_given_in_input>
233 id : <id_given_in_input>
234 result: [
234 result: [
235 {
235 {
236 "repo_id" : "<repo_id>",
236 "repo_id" : "<repo_id>",
237 "repo_name" : "<reponame>"
237 "repo_name" : "<reponame>"
238 "repo_type" : "<repo_type>",
238 "repo_type" : "<repo_type>",
239 "clone_uri" : "<clone_uri>",
239 "clone_uri" : "<clone_uri>",
240 "private": : "<bool>",
240 "private": : "<bool>",
241 "created_on" : "<datetimecreated>",
241 "created_on" : "<datetimecreated>",
242 "description" : "<description>",
242 "description" : "<description>",
243 "landing_rev": "<landing_rev>",
243 "landing_rev": "<landing_rev>",
244 "owner": "<repo_owner>",
244 "owner": "<repo_owner>",
245 "fork_of": "<name_of_fork_parent>",
245 "fork_of": "<name_of_fork_parent>",
246 "enable_downloads": "<bool>",
246 "enable_downloads": "<bool>",
247 "enable_locking": "<bool>",
247 "enable_locking": "<bool>",
248 "enable_statistics": "<bool>",
248 "enable_statistics": "<bool>",
249 },
249 },
250 ...
250 ...
251 ]
251 ]
252 error: null
252 error: null
253 """
253 """
254
254
255 include_secrets = has_superadmin_permission(apiuser)
255 include_secrets = has_superadmin_permission(apiuser)
256 _perms = ('repository.read', 'repository.write', 'repository.admin',)
256 _perms = ('repository.read', 'repository.write', 'repository.admin',)
257 extras = {'user': apiuser}
257 extras = {'user': apiuser}
258
258
259 root = Optional.extract(root)
259 root = Optional.extract(root)
260 traverse = Optional.extract(traverse, binary=True)
260 traverse = Optional.extract(traverse, binary=True)
261
261
262 if root:
262 if root:
263 # verify parent existence, if it's empty return an error
263 # verify parent existence, if it's empty return an error
264 parent = RepoGroup.get_by_group_name(root)
264 parent = RepoGroup.get_by_group_name(root)
265 if not parent:
265 if not parent:
266 raise JSONRPCError(
266 raise JSONRPCError(
267 'Root repository group `{}` does not exist'.format(root))
267 'Root repository group `{}` does not exist'.format(root))
268
268
269 if traverse:
269 if traverse:
270 repos = RepoModel().get_repos_for_root(root=root, traverse=traverse)
270 repos = RepoModel().get_repos_for_root(root=root, traverse=traverse)
271 else:
271 else:
272 repos = RepoModel().get_repos_for_root(root=parent)
272 repos = RepoModel().get_repos_for_root(root=parent)
273 else:
273 else:
274 if traverse:
274 if traverse:
275 repos = RepoModel().get_all()
275 repos = RepoModel().get_all()
276 else:
276 else:
277 # return just top-level
277 # return just top-level
278 repos = RepoModel().get_repos_for_root(root=None)
278 repos = RepoModel().get_repos_for_root(root=None)
279
279
280 repo_list = RepoList(repos, perm_set=_perms, extra_kwargs=extras)
280 repo_list = RepoList(repos, perm_set=_perms, extra_kwargs=extras)
281 return [repo.get_api_data(include_secrets=include_secrets)
281 return [repo.get_api_data(include_secrets=include_secrets)
282 for repo in repo_list]
282 for repo in repo_list]
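A sketch of `get_repos` restricted to the top level of one root group, with the same hypothetical helper; the group name is a placeholder and the printed keys follow the example output above.

.. code-block:: python

    # Only top-level repositories of the `foo/bar` group, no traversal.
    repos = rc_api('get_repos', root='foo/bar', traverse=False)['result']
    for repo in repos:
        print(repo['repo_name'], repo['repo_type'], repo['private'])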
283
283
284
284
285 @jsonrpc_method()
285 @jsonrpc_method()
286 def get_repo_changeset(request, apiuser, repoid, revision,
286 def get_repo_changeset(request, apiuser, repoid, revision,
287 details=Optional('basic')):
287 details=Optional('basic')):
288 """
288 """
289 Returns information about a changeset.
289 Returns information about a changeset.
290
290
291 Additional parameters define the amount of detail returned by
291 Additional parameters define the amount of detail returned by
292 this function.
292 this function.
293
293
294 This command can only be run using an |authtoken| with admin rights,
294 This command can only be run using an |authtoken| with admin rights,
295 or users with at least read rights to the |repo|.
295 or users with at least read rights to the |repo|.
296
296
297 :param apiuser: This is filled automatically from the |authtoken|.
297 :param apiuser: This is filled automatically from the |authtoken|.
298 :type apiuser: AuthUser
298 :type apiuser: AuthUser
299 :param repoid: The repository name or repository id
299 :param repoid: The repository name or repository id
300 :type repoid: str or int
300 :type repoid: str or int
301 :param revision: revision for which listing should be done
301 :param revision: revision for which listing should be done
302 :type revision: str
302 :type revision: str
303 :param details: details can be 'basic', 'extended' or 'full'; 'full' gives
303 :param details: details can be 'basic', 'extended' or 'full'; 'full' gives
304 extra details like the diff itself and the number of changed files.
304 extra details like the diff itself and the number of changed files.
305 :type details: Optional(str)
305 :type details: Optional(str)
306
306
307 """
307 """
308 repo = get_repo_or_error(repoid)
308 repo = get_repo_or_error(repoid)
309 if not has_superadmin_permission(apiuser):
309 if not has_superadmin_permission(apiuser):
310 _perms = ('repository.admin', 'repository.write', 'repository.read',)
310 _perms = ('repository.admin', 'repository.write', 'repository.read',)
311 validate_repo_permissions(apiuser, repoid, repo, _perms)
311 validate_repo_permissions(apiuser, repoid, repo, _perms)
312
312
313 changes_details = Optional.extract(details)
313 changes_details = Optional.extract(details)
314 _changes_details_types = ['basic', 'extended', 'full']
314 _changes_details_types = ['basic', 'extended', 'full']
315 if changes_details not in _changes_details_types:
315 if changes_details not in _changes_details_types:
316 raise JSONRPCError(
316 raise JSONRPCError(
317 'ret_type must be one of %s' % (
317 'ret_type must be one of %s' % (
318 ','.join(_changes_details_types)))
318 ','.join(_changes_details_types)))
319
319
320 pre_load = ['author', 'branch', 'date', 'message', 'parents',
320 pre_load = ['author', 'branch', 'date', 'message', 'parents',
321 'status', '_commit', '_file_paths']
321 'status', '_commit', '_file_paths']
322
322
323 try:
323 try:
324 cs = repo.get_commit(commit_id=revision, pre_load=pre_load)
324 cs = repo.get_commit(commit_id=revision, pre_load=pre_load)
325 except TypeError as e:
325 except TypeError as e:
326 raise JSONRPCError(safe_str(e))
326 raise JSONRPCError(safe_str(e))
327 _cs_json = cs.__json__()
327 _cs_json = cs.__json__()
328 _cs_json['diff'] = build_commit_data(cs, changes_details)
328 _cs_json['diff'] = build_commit_data(cs, changes_details)
329 if changes_details == 'full':
329 if changes_details == 'full':
330 _cs_json['refs'] = cs._get_refs()
330 _cs_json['refs'] = cs._get_refs()
331 return _cs_json
331 return _cs_json
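A sketch of `get_repo_changeset` requesting full details, which, per the code above, also attaches `diff` and `refs` to the returned commit. Identifiers are placeholders and field names follow the commit examples shown in `get_repo`'s docstring, so they may differ slightly.

.. code-block:: python

    # Request a single commit with full details (diff and refs included).
    commit = rc_api(
        'get_repo_changeset',
        repoid='user-group/repo-name',
        revision='tip',
        details='full',
    )['result']
    print(commit['raw_id'], commit['message'])
    print(sorted(commit['refs']))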
332
332
333
333
334 @jsonrpc_method()
334 @jsonrpc_method()
335 def get_repo_changesets(request, apiuser, repoid, start_rev, limit,
335 def get_repo_changesets(request, apiuser, repoid, start_rev, limit,
336 details=Optional('basic')):
336 details=Optional('basic')):
337 """
337 """
338 Returns a set of commits limited by the number starting
338 Returns a set of commits limited by the number starting
339 from the `start_rev` option.
339 from the `start_rev` option.
340
340
341 Additional parameters define the amount of detail returned by this
341 Additional parameters define the amount of detail returned by this
342 function.
342 function.
343
343
344 This command can only be run using an |authtoken| with admin rights,
344 This command can only be run using an |authtoken| with admin rights,
345 or users with at least read rights to |repos|.
345 or users with at least read rights to |repos|.
346
346
347 :param apiuser: This is filled automatically from the |authtoken|.
347 :param apiuser: This is filled automatically from the |authtoken|.
348 :type apiuser: AuthUser
348 :type apiuser: AuthUser
349 :param repoid: The repository name or repository ID.
349 :param repoid: The repository name or repository ID.
350 :type repoid: str or int
350 :type repoid: str or int
351 :param start_rev: The starting revision from where to get changesets.
351 :param start_rev: The starting revision from where to get changesets.
352 :type start_rev: str
352 :type start_rev: str
353 :param limit: Limit the number of commits to this amount
353 :param limit: Limit the number of commits to this amount
354 :type limit: str or int
354 :type limit: str or int
355 :param details: Set the level of detail returned. Valid options are:
355 :param details: Set the level of detail returned. Valid options are:
356 ``basic``, ``extended`` and ``full``.
356 ``basic``, ``extended`` and ``full``.
357 :type details: Optional(str)
357 :type details: Optional(str)
358
358
359 .. note::
359 .. note::
360
360
361 Setting the parameter `details` to the value ``full`` is extensive
361 Setting the parameter `details` to the value ``full`` is extensive
362 and returns details like the diff itself, and the number
362 and returns details like the diff itself, and the number
363 of changed files.
363 of changed files.
364
364
365 """
365 """
366 repo = get_repo_or_error(repoid)
366 repo = get_repo_or_error(repoid)
367 if not has_superadmin_permission(apiuser):
367 if not has_superadmin_permission(apiuser):
368 _perms = ('repository.admin', 'repository.write', 'repository.read',)
368 _perms = ('repository.admin', 'repository.write', 'repository.read',)
369 validate_repo_permissions(apiuser, repoid, repo, _perms)
369 validate_repo_permissions(apiuser, repoid, repo, _perms)
370
370
371 changes_details = Optional.extract(details)
371 changes_details = Optional.extract(details)
372 _changes_details_types = ['basic', 'extended', 'full']
372 _changes_details_types = ['basic', 'extended', 'full']
373 if changes_details not in _changes_details_types:
373 if changes_details not in _changes_details_types:
374 raise JSONRPCError(
374 raise JSONRPCError(
375 'ret_type must be one of %s' % (
375 'ret_type must be one of %s' % (
376 ','.join(_changes_details_types)))
376 ','.join(_changes_details_types)))
377
377
378 limit = int(limit)
378 limit = int(limit)
379 pre_load = ['author', 'branch', 'date', 'message', 'parents',
379 pre_load = ['author', 'branch', 'date', 'message', 'parents',
380 'status', '_commit', '_file_paths']
380 'status', '_commit', '_file_paths']
381
381
382 vcs_repo = repo.scm_instance()
382 vcs_repo = repo.scm_instance()
383 # SVN needs a special case to distinguish its index and commit id
383 # SVN needs a special case to distinguish its index and commit id
384 if vcs_repo and vcs_repo.alias == 'svn' and (start_rev == '0'):
384 if vcs_repo and vcs_repo.alias == 'svn' and (start_rev == '0'):
385 start_rev = vcs_repo.commit_ids[0]
385 start_rev = vcs_repo.commit_ids[0]
386
386
387 try:
387 try:
388 commits = vcs_repo.get_commits(
388 commits = vcs_repo.get_commits(
389 start_id=start_rev, pre_load=pre_load, translate_tags=False)
389 start_id=start_rev, pre_load=pre_load, translate_tags=False)
390 except TypeError as e:
390 except TypeError as e:
391 raise JSONRPCError(safe_str(e))
391 raise JSONRPCError(safe_str(e))
392 except Exception:
392 except Exception:
393 log.exception('Fetching of commits failed')
393 log.exception('Fetching of commits failed')
394 raise JSONRPCError('Error occurred during commit fetching')
394 raise JSONRPCError('Error occurred during commit fetching')
395
395
396 ret = []
396 ret = []
397 for cnt, commit in enumerate(commits):
397 for cnt, commit in enumerate(commits):
398 if cnt >= limit != -1:
398 if cnt >= limit != -1:
399 break
399 break
400 _cs_json = commit.__json__()
400 _cs_json = commit.__json__()
401 _cs_json['diff'] = build_commit_data(commit, changes_details)
401 _cs_json['diff'] = build_commit_data(commit, changes_details)
402 if changes_details == 'full':
402 if changes_details == 'full':
403 _cs_json['refs'] = {
403 _cs_json['refs'] = {
404 'branches': [commit.branch],
404 'branches': [commit.branch],
405 'bookmarks': getattr(commit, 'bookmarks', []),
405 'bookmarks': getattr(commit, 'bookmarks', []),
406 'tags': commit.tags
406 'tags': commit.tags
407 }
407 }
408 ret.append(_cs_json)
408 ret.append(_cs_json)
409 return ret
409 return ret
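A sketch of `get_repo_changesets` with the hypothetical helper; per the loop above, `limit=-1` disables the cap, and for SVN a `start_rev` of '0' is translated to the first commit id. Field names follow the commit examples elsewhere in this file and may differ slightly.

.. code-block:: python

    # Ten commits starting at a given revision; limit=-1 would disable the cap.
    commits = rc_api(
        'get_repo_changesets',
        repoid='user-group/repo-name',
        start_rev='0',
        limit=10,
        details='extended',
    )['result']
    for commit in commits:
        print(commit['raw_id'][:12], commit['message'].splitlines()[0])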
410
410
411
411
412 @jsonrpc_method()
412 @jsonrpc_method()
413 def get_repo_nodes(request, apiuser, repoid, revision, root_path,
413 def get_repo_nodes(request, apiuser, repoid, revision, root_path,
414 ret_type=Optional('all'), details=Optional('basic'),
414 ret_type=Optional('all'), details=Optional('basic'),
415 max_file_bytes=Optional(None)):
415 max_file_bytes=Optional(None)):
416 """
416 """
417 Returns a list of nodes and children in a flat list for a given
417 Returns a list of nodes and children in a flat list for a given
418 path at given revision.
418 path at given revision.
419
419
420 It's possible to specify ret_type to show only `files` or `dirs`.
420 It's possible to specify ret_type to show only `files` or `dirs`.
421
421
422 This command can only be run using an |authtoken| with admin rights,
422 This command can only be run using an |authtoken| with admin rights,
423 or users with at least read rights to |repos|.
423 or users with at least read rights to |repos|.
424
424
425 :param apiuser: This is filled automatically from the |authtoken|.
425 :param apiuser: This is filled automatically from the |authtoken|.
426 :type apiuser: AuthUser
426 :type apiuser: AuthUser
427 :param repoid: The repository name or repository ID.
427 :param repoid: The repository name or repository ID.
428 :type repoid: str or int
428 :type repoid: str or int
429 :param revision: The revision for which listing should be done.
429 :param revision: The revision for which listing should be done.
430 :type revision: str
430 :type revision: str
431 :param root_path: The path from which to start displaying.
431 :param root_path: The path from which to start displaying.
432 :type root_path: str
432 :type root_path: str
433 :param ret_type: Set the return type. Valid options are
433 :param ret_type: Set the return type. Valid options are
434 ``all`` (default), ``files`` and ``dirs``.
434 ``all`` (default), ``files`` and ``dirs``.
435 :type ret_type: Optional(str)
435 :type ret_type: Optional(str)
436 :param details: Returns extended information about nodes, such as
436 :param details: Returns extended information about nodes, such as
437 md5, binary, and/or content.
437 md5, binary, and/or content.
438 The valid options are ``basic`` and ``full``.
438 The valid options are ``basic`` and ``full``.
439 :type details: Optional(str)
439 :type details: Optional(str)
440 :param max_file_bytes: Only return file content for files under this size in bytes
440 :param max_file_bytes: Only return file content for files under this size in bytes
441 :type max_file_bytes: Optional(int)
441 :type max_file_bytes: Optional(int)
442
442
443 Example output:
443 Example output:
444
444
445 .. code-block:: bash
445 .. code-block:: bash
446
446
447 id : <id_given_in_input>
447 id : <id_given_in_input>
448 result: [
448 result: [
449 {
449 {
450 "binary": false,
450 "binary": false,
451 "content": "File line",
451 "content": "File line",
452 "extension": "md",
452 "extension": "md",
453 "lines": 2,
453 "lines": 2,
454 "md5": "059fa5d29b19c0657e384749480f6422",
454 "md5": "059fa5d29b19c0657e384749480f6422",
455 "mimetype": "text/x-minidsrc",
455 "mimetype": "text/x-minidsrc",
456 "name": "file.md",
456 "name": "file.md",
457 "size": 580,
457 "size": 580,
458 "type": "file"
458 "type": "file"
459 },
459 },
460 ...
460 ...
461 ]
461 ]
462 error: null
462 error: null
463 """
463 """
464
464
465 repo = get_repo_or_error(repoid)
465 repo = get_repo_or_error(repoid)
466 if not has_superadmin_permission(apiuser):
466 if not has_superadmin_permission(apiuser):
467 _perms = ('repository.admin', 'repository.write', 'repository.read',)
467 _perms = ('repository.admin', 'repository.write', 'repository.read',)
468 validate_repo_permissions(apiuser, repoid, repo, _perms)
468 validate_repo_permissions(apiuser, repoid, repo, _perms)
469
469
470 ret_type = Optional.extract(ret_type)
470 ret_type = Optional.extract(ret_type)
471 details = Optional.extract(details)
471 details = Optional.extract(details)
472 _extended_types = ['basic', 'full']
472 _extended_types = ['basic', 'full']
473 if details not in _extended_types:
473 if details not in _extended_types:
474 raise JSONRPCError('ret_type must be one of %s' % (','.join(_extended_types)))
474 raise JSONRPCError('ret_type must be one of %s' % (','.join(_extended_types)))
475 extended_info = False
475 extended_info = False
476 content = False
476 content = False
477 if details == 'basic':
477 if details == 'basic':
478 extended_info = True
478 extended_info = True
479
479
480 if details == 'full':
480 if details == 'full':
481 extended_info = content = True
481 extended_info = content = True
482
482
483 _map = {}
483 _map = {}
484 try:
484 try:
485 # check if repo is not empty by any chance, skip quicker if it is.
485 # check if repo is not empty by any chance, skip quicker if it is.
486 _scm = repo.scm_instance()
486 _scm = repo.scm_instance()
487 if _scm.is_empty():
487 if _scm.is_empty():
488 return []
488 return []
489
489
490 _d, _f = ScmModel().get_nodes(
490 _d, _f = ScmModel().get_nodes(
491 repo, revision, root_path, flat=False,
491 repo, revision, root_path, flat=False,
492 extended_info=extended_info, content=content,
492 extended_info=extended_info, content=content,
493 max_file_bytes=max_file_bytes)
493 max_file_bytes=max_file_bytes)
494 _map = {
494 _map = {
495 'all': _d + _f,
495 'all': _d + _f,
496 'files': _f,
496 'files': _f,
497 'dirs': _d,
497 'dirs': _d,
498 }
498 }
499 return _map[ret_type]
499 return _map[ret_type]
500 except KeyError:
500 except KeyError:
501 raise JSONRPCError(
501 raise JSONRPCError(
502 'ret_type must be one of %s' % (','.join(sorted(_map.keys()))))
502 'ret_type must be one of %s' % (','.join(sorted(_map.keys()))))
503 except Exception:
503 except Exception:
504 log.exception("Exception occurred while trying to get repo nodes")
504 log.exception("Exception occurred while trying to get repo nodes")
505 raise JSONRPCError(
505 raise JSONRPCError(
506 'failed to get repo: `%s` nodes' % repo.repo_name
506 'failed to get repo: `%s` nodes' % repo.repo_name
507 )
507 )
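A sketch of `get_repo_nodes` listing only files under one directory; the printed keys (`name`, `size`, `mimetype`) come from the example output above, while the repository and path are placeholders.

.. code-block:: python

    # List only files under docs/ at a given revision, with basic metadata.
    files = rc_api(
        'get_repo_nodes',
        repoid='user-group/repo-name',
        revision='tip',
        root_path='docs',
        ret_type='files',
        details='basic',
    )['result']
    for node in files:
        print(node['name'], node['size'], node['mimetype'])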
508
508
509
509
510 @jsonrpc_method()
510 @jsonrpc_method()
511 def get_repo_file(request, apiuser, repoid, commit_id, file_path,
511 def get_repo_file(request, apiuser, repoid, commit_id, file_path,
512 max_file_bytes=Optional(None), details=Optional('basic'),
512 max_file_bytes=Optional(None), details=Optional('basic'),
513 cache=Optional(True)):
513 cache=Optional(True)):
514 """
514 """
515 Returns a single file from repository at given revision.
515 Returns a single file from repository at given revision.
516
516
517 This command can only be run using an |authtoken| with admin rights,
517 This command can only be run using an |authtoken| with admin rights,
518 or users with at least read rights to |repos|.
518 or users with at least read rights to |repos|.
519
519
520 :param apiuser: This is filled automatically from the |authtoken|.
520 :param apiuser: This is filled automatically from the |authtoken|.
521 :type apiuser: AuthUser
521 :type apiuser: AuthUser
522 :param repoid: The repository name or repository ID.
522 :param repoid: The repository name or repository ID.
523 :type repoid: str or int
523 :type repoid: str or int
524 :param commit_id: The commit ID of the revision to fetch the file from.
524 :param commit_id: The commit ID of the revision to fetch the file from.
525 :type commit_id: str
525 :type commit_id: str
526 :param file_path: The path of the file to return.
526 :param file_path: The path of the file to return.
527 :type file_path: str
527 :type file_path: str
528 :param details: Returns a different set of information about the file.
528 :param details: Returns a different set of information about the file.
529 The valid options are ``minimal``, ``basic`` and ``full``.
529 The valid options are ``minimal``, ``basic`` and ``full``.
530 :type details: Optional(str)
530 :type details: Optional(str)
531 :param max_file_bytes: Only return file content for files under this size in bytes
531 :param max_file_bytes: Only return file content for files under this size in bytes
532 :type max_file_bytes: Optional(int)
532 :type max_file_bytes: Optional(int)
533 :param cache: Use internal caches for fetching files. If disabled fetching
533 :param cache: Use internal caches for fetching files. If disabled fetching
534 files is slower but more memory efficient
534 files is slower but more memory efficient
535 :type cache: Optional(bool)
535 :type cache: Optional(bool)
536
536
537 Example output:
537 Example output:
538
538
539 .. code-block:: bash
539 .. code-block:: bash
540
540
541 id : <id_given_in_input>
541 id : <id_given_in_input>
542 result: {
542 result: {
543 "binary": false,
543 "binary": false,
544 "extension": "py",
544 "extension": "py",
545 "lines": 35,
545 "lines": 35,
546 "content": "....",
546 "content": "....",
547 "md5": "76318336366b0f17ee249e11b0c99c41",
547 "md5": "76318336366b0f17ee249e11b0c99c41",
548 "mimetype": "text/x-python",
548 "mimetype": "text/x-python",
549 "name": "python.py",
549 "name": "python.py",
550 "size": 817,
550 "size": 817,
551 "type": "file",
551 "type": "file",
552 }
552 }
553 error: null
553 error: null
554 """
554 """
555
555
556 repo = get_repo_or_error(repoid)
556 repo = get_repo_or_error(repoid)
557 if not has_superadmin_permission(apiuser):
557 if not has_superadmin_permission(apiuser):
558 _perms = ('repository.admin', 'repository.write', 'repository.read',)
558 _perms = ('repository.admin', 'repository.write', 'repository.read',)
559 validate_repo_permissions(apiuser, repoid, repo, _perms)
559 validate_repo_permissions(apiuser, repoid, repo, _perms)
560
560
561 cache = Optional.extract(cache, binary=True)
561 cache = Optional.extract(cache, binary=True)
562 details = Optional.extract(details)
562 details = Optional.extract(details)
563 _extended_types = ['minimal', 'minimal+search', 'basic', 'full']
563 _extended_types = ['minimal', 'minimal+search', 'basic', 'full']
564 if details not in _extended_types:
564 if details not in _extended_types:
565 raise JSONRPCError(
565 raise JSONRPCError(
566 'ret_type must be one of %s, got %s' % (','.join(_extended_types), details))
566 'ret_type must be one of %s, got %s' % (','.join(_extended_types), details))
567 extended_info = False
567 extended_info = False
568 content = False
568 content = False
569
569
570 if details == 'minimal':
570 if details == 'minimal':
571 extended_info = False
571 extended_info = False
572
572
573 elif details == 'basic':
573 elif details == 'basic':
574 extended_info = True
574 extended_info = True
575
575
576 elif details == 'full':
576 elif details == 'full':
577 extended_info = content = True
577 extended_info = content = True
578
578
579 file_path = safe_unicode(file_path)
579 file_path = safe_unicode(file_path)
580 try:
580 try:
581 # check if repo is not empty by any chance, skip quicker if it is.
581 # check if repo is not empty by any chance, skip quicker if it is.
582 _scm = repo.scm_instance()
582 _scm = repo.scm_instance()
583 if _scm.is_empty():
583 if _scm.is_empty():
584 return None
584 return None
585
585
586 node = ScmModel().get_node(
586 node = ScmModel().get_node(
587 repo, commit_id, file_path, extended_info=extended_info,
587 repo, commit_id, file_path, extended_info=extended_info,
588 content=content, max_file_bytes=max_file_bytes, cache=cache)
588 content=content, max_file_bytes=max_file_bytes, cache=cache)
589 except NodeDoesNotExistError:
589 except NodeDoesNotExistError:
590 raise JSONRPCError(u'There is no file in repo: `{}` at path `{}` for commit: `{}`'.format(
590 raise JSONRPCError(u'There is no file in repo: `{}` at path `{}` for commit: `{}`'.format(
591 repo.repo_name, file_path, commit_id))
591 repo.repo_name, file_path, commit_id))
592 except Exception:
592 except Exception:
593 log.exception(u"Exception occurred while trying to get repo %s file",
593 log.exception(u"Exception occurred while trying to get repo %s file",
594 repo.repo_name)
594 repo.repo_name)
595 raise JSONRPCError(u'failed to get repo: `{}` file at path {}'.format(
595 raise JSONRPCError(u'failed to get repo: `{}` file at path {}'.format(
596 repo.repo_name, file_path))
596 repo.repo_name, file_path))
597
597
598 return node
598 return node
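A sketch of `get_repo_file` fetching content with a size cap; the keys follow the example output above, and the 1 MB limit is an arbitrary illustration.

.. code-block:: python

    # Fetch a single file with content, skipping anything above ~1 MB.
    node = rc_api(
        'get_repo_file',
        repoid='user-group/repo-name',
        commit_id='tip',
        file_path='python.py',
        details='full',
        max_file_bytes=1024 * 1024,
        cache=True,
    )['result']
    if node and not node['binary']:
        print(node['content'])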
599
599
600
600
601 @jsonrpc_method()
601 @jsonrpc_method()
602 def get_repo_fts_tree(request, apiuser, repoid, commit_id, root_path):
602 def get_repo_fts_tree(request, apiuser, repoid, commit_id, root_path):
603 """
603 """
604 Returns a list of tree nodes for path at given revision. This api is built
604 Returns a list of tree nodes for path at given revision. This api is built
605 strictly for usage in full text search building, and shouldn't be consumed otherwise.
605 strictly for usage in full text search building, and shouldn't be consumed otherwise.
606
606
607 This command can only be run using an |authtoken| with admin rights,
607 This command can only be run using an |authtoken| with admin rights,
608 or users with at least read rights to |repos|.
608 or users with at least read rights to |repos|.
609
609
610 """
610 """
611
611
612 repo = get_repo_or_error(repoid)
612 repo = get_repo_or_error(repoid)
613 if not has_superadmin_permission(apiuser):
613 if not has_superadmin_permission(apiuser):
614 _perms = ('repository.admin', 'repository.write', 'repository.read',)
614 _perms = ('repository.admin', 'repository.write', 'repository.read',)
615 validate_repo_permissions(apiuser, repoid, repo, _perms)
615 validate_repo_permissions(apiuser, repoid, repo, _perms)
616
616
617 repo_id = repo.repo_id
617 repo_id = repo.repo_id
618 cache_seconds = safe_int(rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time'))
618 cache_seconds = safe_int(rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time'))
619 cache_on = cache_seconds > 0
619 cache_on = cache_seconds > 0
620
620
621 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
621 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
622 region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
622 region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
623
623
624 def compute_fts_tree(cache_ver, repo_id, commit_id, root_path):
624 def compute_fts_tree(cache_ver, repo_id, commit_id, root_path):
625 return ScmModel().get_fts_data(repo_id, commit_id, root_path)
625 return ScmModel().get_fts_data(repo_id, commit_id, root_path)
626
626
627 try:
627 try:
628 # check if repo is not empty by any chance, skip quicker if it is.
628 # check if repo is not empty by any chance, skip quicker if it is.
629 _scm = repo.scm_instance()
629 _scm = repo.scm_instance()
630 if _scm.is_empty():
630 if _scm.is_empty():
631 return []
631 return []
632 except RepositoryError:
632 except RepositoryError:
633 log.exception("Exception occurred while trying to get repo nodes")
633 log.exception("Exception occurred while trying to get repo nodes")
634 raise JSONRPCError('failed to get repo: `%s` nodes' % repo.repo_name)
634 raise JSONRPCError('failed to get repo: `%s` nodes' % repo.repo_name)
635
635
636 try:
636 try:
637 # we need to resolve commit_id to a FULL sha for cache to work correctly.
637 # we need to resolve commit_id to a FULL sha for cache to work correctly.
638 # sending 'master' is a pointer that needs to be translated to current commit.
638 # sending 'master' is a pointer that needs to be translated to current commit.
639 commit_id = _scm.get_commit(commit_id=commit_id).raw_id
639 commit_id = _scm.get_commit(commit_id=commit_id).raw_id
640 log.debug(
640 log.debug(
641 'Computing FTS REPO TREE for repo_id %s commit_id `%s` '
641 'Computing FTS REPO TREE for repo_id %s commit_id `%s` '
642 'with caching: %s[TTL: %ss]' % (
642 'with caching: %s[TTL: %ss]' % (
643 repo_id, commit_id, cache_on, cache_seconds or 0))
643 repo_id, commit_id, cache_on, cache_seconds or 0))
644
644
645 tree_files = compute_fts_tree(rc_cache.FILE_TREE_CACHE_VER, repo_id, commit_id, root_path)
645 tree_files = compute_fts_tree(rc_cache.FILE_TREE_CACHE_VER, repo_id, commit_id, root_path)
646 return tree_files
646 return tree_files
647
647
648 except Exception:
648 except Exception:
649 log.exception("Exception occurred while trying to get repo nodes")
649 log.exception("Exception occurred while trying to get repo nodes")
650 raise JSONRPCError('failed to get repo: `%s` nodes' % repo.repo_name)
650 raise JSONRPCError('failed to get repo: `%s` nodes' % repo.repo_name)
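The docstring above flags `get_repo_fts_tree` as a helper built for full-text-search indexing; a call would nonetheless look like the sketch below, with the usual hypothetical helper and placeholder arguments. The shape of the returned data isn't documented here, so only its length is printed.

.. code-block:: python

    # Intended for full-text-search index building; placeholder arguments.
    tree = rc_api(
        'get_repo_fts_tree',
        repoid='user-group/repo-name',
        commit_id='tip',
        root_path='/',
    )['result']
    print(len(tree))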
651
651
652
652
653 @jsonrpc_method()
653 @jsonrpc_method()
654 def get_repo_refs(request, apiuser, repoid):
654 def get_repo_refs(request, apiuser, repoid):
655 """
655 """
656 Returns a dictionary of current references. It returns
656 Returns a dictionary of current references. It returns
657 bookmarks, branches, closed_branches, and tags for given repository
657 bookmarks, branches, closed_branches, and tags for given repository
658
658
659 Each ref type maps ref names to commit IDs, as shown in the example below.
659 Each ref type maps ref names to commit IDs, as shown in the example below.
660
660
661 This command can only be run using an |authtoken| with admin rights,
661 This command can only be run using an |authtoken| with admin rights,
662 or users with at least read rights to |repos|.
662 or users with at least read rights to |repos|.
663
663
664 :param apiuser: This is filled automatically from the |authtoken|.
664 :param apiuser: This is filled automatically from the |authtoken|.
665 :type apiuser: AuthUser
665 :type apiuser: AuthUser
666 :param repoid: The repository name or repository ID.
666 :param repoid: The repository name or repository ID.
667 :type repoid: str or int
667 :type repoid: str or int
668
668
669 Example output:
669 Example output:
670
670
671 .. code-block:: bash
671 .. code-block:: bash
672
672
673 id : <id_given_in_input>
673 id : <id_given_in_input>
674 "result": {
674 "result": {
675 "bookmarks": {
675 "bookmarks": {
676 "dev": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
676 "dev": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
677 "master": "367f590445081d8ec8c2ea0456e73ae1f1c3d6cf"
677 "master": "367f590445081d8ec8c2ea0456e73ae1f1c3d6cf"
678 },
678 },
679 "branches": {
679 "branches": {
680 "default": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
680 "default": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
681 "stable": "367f590445081d8ec8c2ea0456e73ae1f1c3d6cf"
681 "stable": "367f590445081d8ec8c2ea0456e73ae1f1c3d6cf"
682 },
682 },
683 "branches_closed": {},
683 "branches_closed": {},
684 "tags": {
684 "tags": {
685 "tip": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
685 "tip": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
686 "v4.4.0": "1232313f9e6adac5ce5399c2a891dc1e72b79022",
686 "v4.4.0": "1232313f9e6adac5ce5399c2a891dc1e72b79022",
687 "v4.4.1": "cbb9f1d329ae5768379cdec55a62ebdd546c4e27",
687 "v4.4.1": "cbb9f1d329ae5768379cdec55a62ebdd546c4e27",
688 "v4.4.2": "24ffe44a27fcd1c5b6936144e176b9f6dd2f3a17",
688 "v4.4.2": "24ffe44a27fcd1c5b6936144e176b9f6dd2f3a17",
689 }
689 }
690 }
690 }
691 error: null
691 error: null
692 """
692 """
693
693
694 repo = get_repo_or_error(repoid)
694 repo = get_repo_or_error(repoid)
695 if not has_superadmin_permission(apiuser):
695 if not has_superadmin_permission(apiuser):
696 _perms = ('repository.admin', 'repository.write', 'repository.read',)
696 _perms = ('repository.admin', 'repository.write', 'repository.read',)
697 validate_repo_permissions(apiuser, repoid, repo, _perms)
697 validate_repo_permissions(apiuser, repoid, repo, _perms)
698
698
699 try:
699 try:
700 # check if repo is not empty by any chance, skip quicker if it is.
700 # check if repo is not empty by any chance, skip quicker if it is.
701 vcs_instance = repo.scm_instance()
701 vcs_instance = repo.scm_instance()
702 refs = vcs_instance.refs()
702 refs = vcs_instance.refs()
703 return refs
703 return refs
704 except Exception:
704 except Exception:
705 log.exception("Exception occurred while trying to get repo refs")
705 log.exception("Exception occurred while trying to get repo refs")
706 raise JSONRPCError(
706 raise JSONRPCError(
707 'failed to get repo: `%s` references' % repo.repo_name
707 'failed to get repo: `%s` references' % repo.repo_name
708 )
708 )
709
709
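
# Example (illustrative sketch only): one way a client might call
# `get_repo_refs` over the JSON-RPC API. The endpoint URL, auth token and
# repository name below are placeholders, not values defined in this module.
#
#   import requests
#   requests.post('https://rhodecode.example.com/_admin/api', json={
#       'id': 1, 'auth_token': '<auth_token>',
#       'method': 'get_repo_refs', 'args': {'repoid': 'my-repo'}})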


@jsonrpc_method()
def create_repo(
        request, apiuser, repo_name, repo_type,
        owner=Optional(OAttr('apiuser')),
        description=Optional(''),
        private=Optional(False),
        clone_uri=Optional(None),
        push_uri=Optional(None),
        landing_rev=Optional(None),
        enable_statistics=Optional(False),
        enable_locking=Optional(False),
        enable_downloads=Optional(False),
        copy_permissions=Optional(False)):
    """
    Creates a repository.

    * If the repository name contains "/", the repository will be created
      inside a repository group or nested repository groups.

    For example, "foo/bar/repo1" will create a |repo| called "repo1" inside
    the group "foo/bar". You have to have permissions to access and write to
    the last repository group ("bar" in this example).

    This command can only be run using an |authtoken| with at least
    permissions to create repositories, or write permissions to
    parent repository groups.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repo_name: Set the repository name.
    :type repo_name: str
    :param repo_type: Set the repository type; 'hg', 'git', or 'svn'.
    :type repo_type: str
    :param owner: user_id or username
    :type owner: Optional(str)
    :param description: Set the repository description.
    :type description: Optional(str)
    :param private: set repository as private
    :type private: bool
    :param clone_uri: set clone_uri
    :type clone_uri: str
    :param push_uri: set push_uri
    :type push_uri: str
    :param landing_rev: <rev_type>:<rev>, e.g. branch:default, book:dev, rev:abcd
    :type landing_rev: str
    :param enable_locking:
    :type enable_locking: bool
    :param enable_downloads:
    :type enable_downloads: bool
    :param enable_statistics:
    :type enable_statistics: bool
    :param copy_permissions: Copy permissions from the group in which the
        repository is being created.
    :type copy_permissions: bool

    Example output:

    .. code-block:: bash

      id : <id_given_in_input>
      result: {
        "msg": "Created new repository `<reponame>`",
        "success": true,
        "task": "<celery task id or None if done sync>"
      }
      error: null

    Example error output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : null
      error : {
        'failed to create repository `<repo_name>`'
      }

    """

    owner = validate_set_owner_permissions(apiuser, owner)

    description = Optional.extract(description)
    copy_permissions = Optional.extract(copy_permissions)
    clone_uri = Optional.extract(clone_uri)
    push_uri = Optional.extract(push_uri)

    defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
    if isinstance(private, Optional):
        private = defs.get('repo_private') or Optional.extract(private)
    if isinstance(repo_type, Optional):
        repo_type = defs.get('repo_type')
    if isinstance(enable_statistics, Optional):
        enable_statistics = defs.get('repo_enable_statistics')
    if isinstance(enable_locking, Optional):
        enable_locking = defs.get('repo_enable_locking')
    if isinstance(enable_downloads, Optional):
        enable_downloads = defs.get('repo_enable_downloads')

    landing_ref, _label = ScmModel.backend_landing_ref(repo_type)
    ref_choices, _labels = ScmModel().get_repo_landing_revs(request.translate)
    ref_choices = list(set(ref_choices + [landing_ref]))

    landing_commit_ref = Optional.extract(landing_rev) or landing_ref

    schema = repo_schema.RepoSchema().bind(
        repo_type_options=rhodecode.BACKENDS.keys(),
        repo_ref_options=ref_choices,
        repo_type=repo_type,
        # user caller
        user=apiuser)

    try:
        schema_data = schema.deserialize(dict(
            repo_name=repo_name,
            repo_type=repo_type,
            repo_owner=owner.username,
            repo_description=description,
            repo_landing_commit_ref=landing_commit_ref,
            repo_clone_uri=clone_uri,
            repo_push_uri=push_uri,
            repo_private=private,
            repo_copy_permissions=copy_permissions,
            repo_enable_statistics=enable_statistics,
            repo_enable_downloads=enable_downloads,
            repo_enable_locking=enable_locking))
    except validation_schema.Invalid as err:
        raise JSONRPCValidationError(colander_exc=err)

    try:
        data = {
            'owner': owner,
            'repo_name': schema_data['repo_group']['repo_name_without_group'],
            'repo_name_full': schema_data['repo_name'],
            'repo_group': schema_data['repo_group']['repo_group_id'],
            'repo_type': schema_data['repo_type'],
            'repo_description': schema_data['repo_description'],
            'repo_private': schema_data['repo_private'],
            'clone_uri': schema_data['repo_clone_uri'],
            'push_uri': schema_data['repo_push_uri'],
            'repo_landing_rev': schema_data['repo_landing_commit_ref'],
            'enable_statistics': schema_data['repo_enable_statistics'],
            'enable_locking': schema_data['repo_enable_locking'],
            'enable_downloads': schema_data['repo_enable_downloads'],
            'repo_copy_permissions': schema_data['repo_copy_permissions'],
        }

        task = RepoModel().create(form_data=data, cur_user=owner.user_id)
        task_id = get_task_id(task)
        # no commit, it's done in RepoModel, or async via celery
        return {
            'msg': "Created new repository `%s`" % (schema_data['repo_name'],),
            'success': True,  # cannot return the repo data here since fork
                              # can be done async
            'task': task_id
        }
    except Exception:
        log.exception(
            u"Exception while trying to create the repository %s",
            schema_data['repo_name'])
        raise JSONRPCError(
            'failed to create repository `%s`' % (schema_data['repo_name'],))
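
# Example (illustrative sketch only): a possible client-side call to
# `create_repo` that creates "repo1" inside the existing group "foo/bar".
# The endpoint URL, token and repository names are placeholders.
#
#   import requests
#   requests.post('https://rhodecode.example.com/_admin/api', json={
#       'id': 1, 'auth_token': '<auth_token>', 'method': 'create_repo',
#       'args': {'repo_name': 'foo/bar/repo1', 'repo_type': 'hg',
#                'description': 'demo repository', 'private': True}})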


@jsonrpc_method()
def add_field_to_repo(request, apiuser, repoid, key, label=Optional(''),
                      description=Optional('')):
    """
    Adds an extra field to a repository.

    This command can only be run using an |authtoken| with at least
    admin permissions to the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository id.
    :type repoid: str or int
    :param key: Create a unique field key for this repository.
    :type key: str
    :param label:
    :type label: Optional(str)
    :param description:
    :type description: Optional(str)
    """
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    label = Optional.extract(label) or key
    description = Optional.extract(description)

    field = RepositoryField.get_by_key_name(key, repo)
    if field:
        raise JSONRPCError('Field with key '
                           '`%s` exists for repo `%s`' % (key, repoid))

    try:
        RepoModel().add_repo_field(repo, key, field_label=label,
                                   field_desc=description)
        Session().commit()
        return {
            'msg': "Added new repository field `%s`" % (key,),
            'success': True,
        }
    except Exception:
        log.exception("Exception occurred while trying to add field to repo")
        raise JSONRPCError(
            'failed to create new field for repository `%s`' % (repoid,))


@jsonrpc_method()
def remove_field_from_repo(request, apiuser, repoid, key):
    """
    Removes an extra field from a repository.

    This command can only be run using an |authtoken| with at least
    admin permissions to the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param key: Set the unique field key for this repository.
    :type key: str
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    field = RepositoryField.get_by_key_name(key, repo)
    if not field:
        raise JSONRPCError('Field with key `%s` does not '
                           'exist for repo `%s`' % (key, repoid))

    try:
        RepoModel().delete_repo_field(repo, field_key=key)
        Session().commit()
        return {
            'msg': "Deleted repository field `%s`" % (key,),
            'success': True,
        }
    except Exception:
        log.exception(
            "Exception occurred while trying to delete field from repo")
        raise JSONRPCError(
            'failed to delete field for repository `%s`' % (repoid,))
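
# Example (illustrative sketch only): adding an extra field and removing it
# again via the API; the repository name and field key are placeholders.
#
#   args = {'repoid': 'my-repo', 'key': 'release_notes', 'label': 'Release notes'}
#   # method 'add_field_to_repo' with the args above creates the field;
#   # method 'remove_field_from_repo' with {'repoid', 'key'} deletes it again.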


@jsonrpc_method()
def update_repo(
        request, apiuser, repoid, repo_name=Optional(None),
        owner=Optional(OAttr('apiuser')), description=Optional(''),
        private=Optional(False),
        clone_uri=Optional(None), push_uri=Optional(None),
        landing_rev=Optional(None), fork_of=Optional(None),
        enable_statistics=Optional(False),
        enable_locking=Optional(False),
        enable_downloads=Optional(False), fields=Optional('')):
    """
    Updates a repository with the given information.

    This command can only be run using an |authtoken| with at least
    admin permissions to the |repo|.

    * If the repository name contains "/", the repository will be updated
      accordingly, placing it inside a repository group or nested repository
      groups.

    For example, repoid=repo-test name="foo/bar/repo-test" will update the
    |repo| called "repo-test" and place it inside group "foo/bar".
    You have to have permissions to access and write to the last repository
    group ("bar" in this example).

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: repository name or repository ID.
    :type repoid: str or int
    :param repo_name: Update the |repo| name, including the
        repository group it's in.
    :type repo_name: str
    :param owner: Set the |repo| owner.
    :type owner: str
    :param fork_of: Set the |repo| as fork of another |repo|.
    :type fork_of: str
    :param description: Update the |repo| description.
    :type description: str
    :param private: Set the |repo| as private. (True | False)
    :type private: bool
    :param clone_uri: Update the |repo| clone URI.
    :type clone_uri: str
    :param landing_rev: Set the |repo| landing revision, e.g. branch:default,
        book:dev, rev:abcd
    :type landing_rev: str
    :param enable_statistics: Enable statistics on the |repo|, (True | False).
    :type enable_statistics: bool
    :param enable_locking: Enable |repo| locking.
    :type enable_locking: bool
    :param enable_downloads: Enable downloads from the |repo|, (True | False).
    :type enable_downloads: bool
    :param fields: Add extra fields to the |repo|. Use the following
        example format: ``field_key=field_val,field_key2=fieldval2``.
        Escape ', ' with \,
    :type fields: str
    """

    repo = get_repo_or_error(repoid)

    include_secrets = False
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)
    else:
        include_secrets = True

    updates = dict(
        repo_name=repo_name
        if not isinstance(repo_name, Optional) else repo.repo_name,

        fork_id=fork_of
        if not isinstance(fork_of, Optional) else repo.fork.repo_name if repo.fork else None,

        user=owner
        if not isinstance(owner, Optional) else repo.user.username,

        repo_description=description
        if not isinstance(description, Optional) else repo.description,

        repo_private=private
        if not isinstance(private, Optional) else repo.private,

        clone_uri=clone_uri
        if not isinstance(clone_uri, Optional) else repo.clone_uri,

        push_uri=push_uri
        if not isinstance(push_uri, Optional) else repo.push_uri,

        repo_landing_rev=landing_rev
        if not isinstance(landing_rev, Optional) else repo._landing_revision,

        repo_enable_statistics=enable_statistics
        if not isinstance(enable_statistics, Optional) else repo.enable_statistics,

        repo_enable_locking=enable_locking
        if not isinstance(enable_locking, Optional) else repo.enable_locking,

        repo_enable_downloads=enable_downloads
        if not isinstance(enable_downloads, Optional) else repo.enable_downloads)

    landing_ref, _label = ScmModel.backend_landing_ref(repo.repo_type)
    ref_choices, _labels = ScmModel().get_repo_landing_revs(
        request.translate, repo=repo)
    ref_choices = list(set(ref_choices + [landing_ref]))

    old_values = repo.get_api_data()
    repo_type = repo.repo_type
    schema = repo_schema.RepoSchema().bind(
        repo_type_options=rhodecode.BACKENDS.keys(),
        repo_ref_options=ref_choices,
        repo_type=repo_type,
        # user caller
        user=apiuser,
        old_values=old_values)
    try:
        schema_data = schema.deserialize(dict(
            # we save old value, users cannot change type
            repo_type=repo_type,

            repo_name=updates['repo_name'],
            repo_owner=updates['user'],
            repo_description=updates['repo_description'],
            repo_clone_uri=updates['clone_uri'],
            repo_push_uri=updates['push_uri'],
            repo_fork_of=updates['fork_id'],
            repo_private=updates['repo_private'],
            repo_landing_commit_ref=updates['repo_landing_rev'],
            repo_enable_statistics=updates['repo_enable_statistics'],
            repo_enable_downloads=updates['repo_enable_downloads'],
            repo_enable_locking=updates['repo_enable_locking']))
    except validation_schema.Invalid as err:
        raise JSONRPCValidationError(colander_exc=err)

    # save validated data back into the updates dict
    validated_updates = dict(
        repo_name=schema_data['repo_group']['repo_name_without_group'],
        repo_group=schema_data['repo_group']['repo_group_id'],

        user=schema_data['repo_owner'],
        repo_description=schema_data['repo_description'],
        repo_private=schema_data['repo_private'],
        clone_uri=schema_data['repo_clone_uri'],
        push_uri=schema_data['repo_push_uri'],
        repo_landing_rev=schema_data['repo_landing_commit_ref'],
        repo_enable_statistics=schema_data['repo_enable_statistics'],
        repo_enable_locking=schema_data['repo_enable_locking'],
        repo_enable_downloads=schema_data['repo_enable_downloads'],
    )

    if schema_data['repo_fork_of']:
        fork_repo = get_repo_or_error(schema_data['repo_fork_of'])
        validated_updates['fork_id'] = fork_repo.repo_id

    # extra fields
    fields = parse_args(Optional.extract(fields), key_prefix='ex_')
    if fields:
        validated_updates.update(fields)

    try:
        RepoModel().update(repo, **validated_updates)
        audit_logger.store_api(
            'repo.edit', action_data={'old_data': old_values},
            user=apiuser, repo=repo)
        Session().commit()
        return {
            'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo.repo_name),
            'repository': repo.get_api_data(include_secrets=include_secrets)
        }
    except Exception:
        log.exception(
            u"Exception while trying to update the repository %s",
            repoid)
        raise JSONRPCError('failed to update repo `%s`' % repoid)
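
# Example (illustrative sketch only): a possible `update_repo` call that moves
# a repository into a group and sets extra fields using the documented
# ``field_key=field_val`` format. All values below are placeholders.
#
#   args = {'repoid': 'repo-test',
#           'repo_name': 'foo/bar/repo-test',
#           'fields': 'release_notes=see CHANGES,ticket=123'}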


@jsonrpc_method()
def fork_repo(request, apiuser, repoid, fork_name,
              owner=Optional(OAttr('apiuser')),
              description=Optional(''),
              private=Optional(False),
              clone_uri=Optional(None),
              landing_rev=Optional(None),
              copy_permissions=Optional(False)):
    """
    Creates a fork of the specified |repo|.

    * If the fork_name contains "/", the fork will be created inside
      a repository group or nested repository groups.

    For example, "foo/bar/fork-repo" will create a fork called "fork-repo"
    inside the group "foo/bar". You have to have permissions to access and
    write to the last repository group ("bar" in this example).

    This command can only be run using an |authtoken| with at least read
    permissions on the |repo| being forked, and fork creation permission for
    the user.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set repository name or repository ID.
    :type repoid: str or int
    :param fork_name: Set the fork name, including its repository group
        membership.
    :type fork_name: str
    :param owner: Set the fork owner.
    :type owner: str
    :param description: Set the fork description.
    :type description: str
    :param copy_permissions: Copy permissions from parent |repo|. The
        default is False.
    :type copy_permissions: bool
    :param private: Make the fork private. The default is False.
    :type private: bool
    :param landing_rev: Set the landing revision, e.g. branch:default,
        book:dev, rev:abcd

    Example input:

    .. code-block:: bash

      id : <id_for_response>
      api_key : "<api_key>"
      args: {
        "repoid" : "<reponame or repo_id>",
        "fork_name": "<forkname>",
        "owner": "<username or user_id = Optional(=apiuser)>",
        "description": "<description>",
        "copy_permissions": "<bool>",
        "private": "<bool>",
        "landing_rev": "<landing_rev>"
      }

    Example output:

    .. code-block:: bash

      id : <id_given_in_input>
      result: {
        "msg": "Created fork of `<reponame>` as `<forkname>`",
        "success": true,
        "task": "<celery task id or None if done sync>"
      }
      error: null

    """

    repo = get_repo_or_error(repoid)
    repo_name = repo.repo_name

    if not has_superadmin_permission(apiuser):
        # check if we have at least read permission for
        # this repo that we fork !
        _perms = ('repository.admin', 'repository.write', 'repository.read')
        validate_repo_permissions(apiuser, repoid, repo, _perms)

        # check if the regular user has at least fork permissions as well
        if not HasPermissionAnyApi('hg.fork.repository')(user=apiuser):
            raise JSONRPCForbidden()

    # check if user can set owner parameter
    owner = validate_set_owner_permissions(apiuser, owner)

    description = Optional.extract(description)
    copy_permissions = Optional.extract(copy_permissions)
    clone_uri = Optional.extract(clone_uri)

    landing_ref, _label = ScmModel.backend_landing_ref(repo.repo_type)
    ref_choices, _labels = ScmModel().get_repo_landing_revs(request.translate)
    ref_choices = list(set(ref_choices + [landing_ref]))
    landing_commit_ref = Optional.extract(landing_rev) or landing_ref

    private = Optional.extract(private)

    schema = repo_schema.RepoSchema().bind(
        repo_type_options=rhodecode.BACKENDS.keys(),
        repo_ref_options=ref_choices,
        repo_type=repo.repo_type,
        # user caller
        user=apiuser)

    try:
        schema_data = schema.deserialize(dict(
            repo_name=fork_name,
            repo_type=repo.repo_type,
            repo_owner=owner.username,
            repo_description=description,
            repo_landing_commit_ref=landing_commit_ref,
            repo_clone_uri=clone_uri,
            repo_private=private,
            repo_copy_permissions=copy_permissions))
    except validation_schema.Invalid as err:
        raise JSONRPCValidationError(colander_exc=err)

    try:
        data = {
            'fork_parent_id': repo.repo_id,

            'repo_name': schema_data['repo_group']['repo_name_without_group'],
            'repo_name_full': schema_data['repo_name'],
            'repo_group': schema_data['repo_group']['repo_group_id'],
            'repo_type': schema_data['repo_type'],
            'description': schema_data['repo_description'],
            'private': schema_data['repo_private'],
            'copy_permissions': schema_data['repo_copy_permissions'],
            'landing_rev': schema_data['repo_landing_commit_ref'],
        }

        task = RepoModel().create_fork(data, cur_user=owner.user_id)
        # no commit, it's done in RepoModel, or async via celery
        task_id = get_task_id(task)

        return {
            'msg': 'Created fork of `%s` as `%s`' % (
                repo.repo_name, schema_data['repo_name']),
            'success': True,  # cannot return the repo data here since fork
                              # can be done async
            'task': task_id
        }
    except Exception:
        log.exception(
            u"Exception while trying to create fork %s",
            schema_data['repo_name'])
        raise JSONRPCError(
            'failed to fork repository `%s` as `%s`' % (
                repo_name, schema_data['repo_name']))
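
# Example (illustrative sketch only): forking "my-repo" into the group
# "foo/bar"; the returned 'task' id can be polled when the fork is created
# asynchronously via celery. All names below are placeholders.
#
#   args = {'repoid': 'my-repo', 'fork_name': 'foo/bar/my-repo-fork',
#           'copy_permissions': False, 'private': True}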


@jsonrpc_method()
def delete_repo(request, apiuser, repoid, forks=Optional('')):
    """
    Deletes a repository.

    * When the `forks` parameter is set, it is possible to detach or delete
      forks of the deleted repository.

    This command can only be run using an |authtoken| with admin
    permissions on the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param forks: Set to `detach` or `delete` forks from the |repo|.
    :type forks: Optional(str)

    Example output:

    .. code-block:: bash

      id : <id_given_in_input>
      result: {
        "msg": "Deleted repository `<reponame>`",
        "success": true
      }
      error: null
    """

    repo = get_repo_or_error(repoid)
    repo_name = repo.repo_name
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    try:
        handle_forks = Optional.extract(forks)
        _forks_msg = ''
        _forks = [f for f in repo.forks]
        if handle_forks == 'detach':
            _forks_msg = ' ' + 'Detached %s forks' % len(_forks)
        elif handle_forks == 'delete':
            _forks_msg = ' ' + 'Deleted %s forks' % len(_forks)
        elif _forks:
            raise JSONRPCError(
                'Cannot delete `%s` it still contains attached forks' %
                (repo.repo_name,)
            )
        old_data = repo.get_api_data()
        RepoModel().delete(repo, forks=forks)

        repo = audit_logger.RepoWrap(repo_id=None,
                                     repo_name=repo.repo_name)

        audit_logger.store_api(
            'repo.delete', action_data={'old_data': old_data},
            user=apiuser, repo=repo)

        ScmModel().mark_for_invalidation(repo_name, delete=True)
        Session().commit()
        return {
            'msg': 'Deleted repository `%s`%s' % (repo_name, _forks_msg),
            'success': True
        }
    except Exception:
        log.exception("Exception occurred while trying to delete repo")
        raise JSONRPCError(
            'failed to delete repository `%s`' % (repo_name,)
        )
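
# Example (illustrative sketch only): deleting a repository while detaching
# its forks instead of failing on them; the repository name is a placeholder.
#
#   args = {'repoid': 'my-repo', 'forks': 'detach'}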


#TODO: marcink, change name ?
@jsonrpc_method()
def invalidate_cache(request, apiuser, repoid, delete_keys=Optional(False)):
    """
    Invalidates the cache for the specified repository.

    This command can only be run using an |authtoken| with admin or write
    rights to the specified repository.

    This command takes the following options:

    :param apiuser: This is filled automatically from |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Sets the repository name or repository ID.
    :type repoid: str or int
    :param delete_keys: This deletes the invalidated keys instead of
        just flagging them.
    :type delete_keys: Optional(``True`` | ``False``)

    Example output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : {
        'msg': Cache for repository `<repository name>` was invalidated,
        'repository': <repository name>
      }
      error : null

    Example error output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : null
      error : {
        'Error occurred during cache invalidation action'
      }

    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin', 'repository.write',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    delete = Optional.extract(delete_keys)
    try:
        ScmModel().mark_for_invalidation(repo.repo_name, delete=delete)
        return {
            'msg': 'Cache for repository `%s` was invalidated' % (repoid,),
            'repository': repo.repo_name
        }
    except Exception:
        log.exception(
            "Exception occurred while trying to invalidate repo cache")
        raise JSONRPCError(
            'Error occurred during cache invalidation action'
        )
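
# Example (illustrative sketch only): invalidating the cache and deleting the
# invalidated keys rather than only flagging them; the name is a placeholder.
#
#   args = {'repoid': 'my-repo', 'delete_keys': True}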


#TODO: marcink, change name ?
@jsonrpc_method()
def lock(request, apiuser, repoid, locked=Optional(None),
         userid=Optional(OAttr('apiuser'))):
    """
    Sets the lock state of the specified |repo| by the given user.
    For more information, see :ref:`repo-locking`.

    * If the ``userid`` option is not set, the repository is locked to the
      user who called the method.
    * If the ``locked`` parameter is not set, the current lock state of the
      repository is displayed.

    This command can only be run using an |authtoken| with admin or write
    rights to the specified repository.

    This command takes the following options:

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Sets the repository name or repository ID.
    :type repoid: str or int
    :param locked: Sets the lock state.
    :type locked: Optional(``True`` | ``False``)
    :param userid: Set the repository lock to this user.
    :type userid: Optional(str or int)

    Example output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : {
        'repo': '<reponame>',
        'locked': <bool: lock state>,
        'locked_since': <int: lock timestamp>,
        'locked_by': <username of person who made the lock>,
        'lock_reason': <str: reason for locking>,
        'lock_state_changed': <bool: True if lock state has been changed in this request>,
        'msg': 'Repo `<reponame>` locked by `<username>` on <timestamp>.'
        or
        'msg': 'Repo `<repository name>` not locked.'
        or
        'msg': 'User `<user name>` set lock state for repo `<repository name>` to `<new lock state>`'
      }
      error : null

    Example error output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : null
      error : {
        'Error occurred locking repository `<reponame>`'
      }
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        # check if we have at least write permission for this repo !
        _perms = ('repository.admin', 'repository.write',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    # make sure a normal user does not pass someone else's userid,
    # they are not allowed to do that
    if not isinstance(userid, Optional) and userid != apiuser.user_id:
        raise JSONRPCError('userid is not the same as your user')

    if isinstance(userid, Optional):
        userid = apiuser.user_id

    user = get_user_or_error(userid)

    if isinstance(locked, Optional):
        lockobj = repo.locked

        if lockobj[0] is None:
            _d = {
                'repo': repo.repo_name,
                'locked': False,
                'locked_since': None,
                'locked_by': None,
                'lock_reason': None,
                'lock_state_changed': False,
                'msg': 'Repo `%s` not locked.' % repo.repo_name
            }
            return _d
        else:
            _user_id, _time, _reason = lockobj
            # resolve the user who actually holds the lock, not the caller
            lock_user = get_user_or_error(_user_id)
            _d = {
                'repo': repo.repo_name,
                'locked': True,
                'locked_since': _time,
                'locked_by': lock_user.username,
                'lock_reason': _reason,
                'lock_state_changed': False,
                'msg': ('Repo `%s` locked by `%s` on `%s`.'
                        % (repo.repo_name, lock_user.username,
                           json.dumps(time_to_datetime(_time))))
            }
            return _d

    # force locked state through a flag
    else:
        locked = str2bool(locked)
        lock_reason = Repository.LOCK_API
        try:
            if locked:
                lock_time = time.time()
                Repository.lock(repo, user.user_id, lock_time, lock_reason)
            else:
                lock_time = None
                Repository.unlock(repo)
            _d = {
                'repo': repo.repo_name,
                'locked': locked,
                'locked_since': lock_time,
                'locked_by': user.username,
                'lock_reason': lock_reason,
                'lock_state_changed': True,
                'msg': ('User `%s` set lock state for repo `%s` to `%s`'
                        % (user.username, repo.repo_name, locked))
            }
            return _d
        except Exception:
            log.exception(
                "Exception occurred while trying to lock repository")
            raise JSONRPCError(
                'Error occurred locking repository `%s`' % repo.repo_name
            )
1553 @jsonrpc_method()
1553 @jsonrpc_method()
1554 def comment_commit(
1554 def comment_commit(
1555 request, apiuser, repoid, commit_id, message, status=Optional(None),
1555 request, apiuser, repoid, commit_id, message, status=Optional(None),
1556 comment_type=Optional(ChangesetComment.COMMENT_TYPE_NOTE),
1556 comment_type=Optional(ChangesetComment.COMMENT_TYPE_NOTE),
1557 resolves_comment_id=Optional(None), extra_recipients=Optional([]),
1557 resolves_comment_id=Optional(None), extra_recipients=Optional([]),
1558 userid=Optional(OAttr('apiuser')), send_email=Optional(True)):
1558 userid=Optional(OAttr('apiuser')), send_email=Optional(True)):
1559 """
1559 """
1560 Set a commit comment, and optionally change the status of the commit.
1560 Set a commit comment, and optionally change the status of the commit.
1561
1561
1562 :param apiuser: This is filled automatically from the |authtoken|.
1562 :param apiuser: This is filled automatically from the |authtoken|.
1563 :type apiuser: AuthUser
1563 :type apiuser: AuthUser
1564 :param repoid: Set the repository name or repository ID.
1564 :param repoid: Set the repository name or repository ID.
1565 :type repoid: str or int
1565 :type repoid: str or int
1566 :param commit_id: Specify the commit_id for which to set a comment.
1566 :param commit_id: Specify the commit_id for which to set a comment.
1567 :type commit_id: str
1567 :type commit_id: str
1568 :param message: The comment text.
1568 :param message: The comment text.
1569 :type message: str
1569 :type message: str
1570 :param status: (**Optional**) status of commit, one of: 'not_reviewed',
1570 :param status: (**Optional**) status of commit, one of: 'not_reviewed',
1571 'approved', 'rejected', 'under_review'
1571 'approved', 'rejected', 'under_review'
1572 :type status: str
1572 :type status: str
1573 :param comment_type: Comment type, one of: 'note', 'todo'
1573 :param comment_type: Comment type, one of: 'note', 'todo'
1574 :type comment_type: Optional(str), default: 'note'
1574 :type comment_type: Optional(str), default: 'note'
1575 :param resolves_comment_id: id of comment which this one will resolve
1575 :param resolves_comment_id: id of comment which this one will resolve
1576 :type resolves_comment_id: Optional(int)
1576 :type resolves_comment_id: Optional(int)
1577 :param extra_recipients: list of user ids or usernames to add
1577 :param extra_recipients: list of user ids or usernames to add
1578 notifications for this comment. Acts like a CC for notification
1578 notifications for this comment. Acts like a CC for notification
1579 :type extra_recipients: Optional(list)
1579 :type extra_recipients: Optional(list)
1580 :param userid: Set the user name of the comment creator.
1580 :param userid: Set the user name of the comment creator.
1581 :type userid: Optional(str or int)
1581 :type userid: Optional(str or int)
1582 :param send_email: Define if this comment should also send email notification
1582 :param send_email: Define if this comment should also send email notification
1583 :type send_email: Optional(bool)
1583 :type send_email: Optional(bool)
1584
1584
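Example request (an illustrative sketch only; the endpoint URL, token and
argument values below are placeholders and assume the standard JSON-RPC
API endpoint of a RhodeCode instance):

.. code-block:: python

    import requests

    # JSON-RPC payload for `comment_commit`; adjust values to your setup
    payload = {
        'id': 1,
        'auth_token': '<auth_token>',
        'method': 'comment_commit',
        'args': {
            'repoid': 'my-repo',
            'commit_id': '<commit_sha>',
            'message': 'Looks good to me',
            'status': 'approved',
        },
    }
    response = requests.post(
        'https://rhodecode.example.com/_admin/api', json=payload)
    print(response.json())
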
1585 Example output:
1585 Example output:
1586
1586
1587 .. code-block:: bash
1587 .. code-block:: bash
1588
1588
1589 {
1589 {
1590 "id" : <id_given_in_input>,
1590 "id" : <id_given_in_input>,
1591 "result" : {
1591 "result" : {
1592 "msg": "Commented on commit `<commit_id>` for repository `<repoid>`",
1592 "msg": "Commented on commit `<commit_id>` for repository `<repoid>`",
1593 "status_change": null or <status>,
1593 "status_change": null or <status>,
1594 "success": true
1594 "success": true
1595 },
1595 },
1596 "error" : null
1596 "error" : null
1597 }
1597 }
1598
1598
1599 """
1599 """
1600 _ = request.translate
1600 _ = request.translate
1601
1601
1602 repo = get_repo_or_error(repoid)
1602 repo = get_repo_or_error(repoid)
1603 if not has_superadmin_permission(apiuser):
1603 if not has_superadmin_permission(apiuser):
1604 _perms = ('repository.read', 'repository.write', 'repository.admin')
1604 _perms = ('repository.read', 'repository.write', 'repository.admin')
1605 validate_repo_permissions(apiuser, repoid, repo, _perms)
1605 validate_repo_permissions(apiuser, repoid, repo, _perms)
1606 db_repo_name = repo.repo_name
1606 db_repo_name = repo.repo_name
1607
1607
1608 try:
1608 try:
1609 commit = repo.scm_instance().get_commit(commit_id=commit_id)
1609 commit = repo.scm_instance().get_commit(commit_id=commit_id)
1610 commit_id = commit.raw_id
1610 commit_id = commit.raw_id
1611 except Exception as e:
1611 except Exception as e:
1612 log.exception('Failed to fetch commit')
1612 log.exception('Failed to fetch commit')
1613 raise JSONRPCError(safe_str(e))
1613 raise JSONRPCError(safe_str(e))
1614
1614
1615 if isinstance(userid, Optional):
1615 if isinstance(userid, Optional):
1616 userid = apiuser.user_id
1616 userid = apiuser.user_id
1617
1617
1618 user = get_user_or_error(userid)
1618 user = get_user_or_error(userid)
1619 status = Optional.extract(status)
1619 status = Optional.extract(status)
1620 comment_type = Optional.extract(comment_type)
1620 comment_type = Optional.extract(comment_type)
1621 resolves_comment_id = Optional.extract(resolves_comment_id)
1621 resolves_comment_id = Optional.extract(resolves_comment_id)
1622 extra_recipients = Optional.extract(extra_recipients)
1622 extra_recipients = Optional.extract(extra_recipients)
1623 send_email = Optional.extract(send_email, binary=True)
1623 send_email = Optional.extract(send_email, binary=True)
1624
1624
1625 allowed_statuses = [x[0] for x in ChangesetStatus.STATUSES]
1625 allowed_statuses = [x[0] for x in ChangesetStatus.STATUSES]
1626 if status and status not in allowed_statuses:
1626 if status and status not in allowed_statuses:
1627 raise JSONRPCError('Bad status, must be one '
1627 raise JSONRPCError('Bad status, must be one '
1628 'of %s, got %s' % (allowed_statuses, status,))
1628 'of %s, got %s' % (allowed_statuses, status,))
1629
1629
1630 if resolves_comment_id:
1630 if resolves_comment_id:
1631 comment = ChangesetComment.get(resolves_comment_id)
1631 comment = ChangesetComment.get(resolves_comment_id)
1632 if not comment:
1632 if not comment:
1633 raise JSONRPCError(
1633 raise JSONRPCError(
1634 'Invalid resolves_comment_id `%s` for this commit.'
1634 'Invalid resolves_comment_id `%s` for this commit.'
1635 % resolves_comment_id)
1635 % resolves_comment_id)
1636 if comment.comment_type != ChangesetComment.COMMENT_TYPE_TODO:
1636 if comment.comment_type != ChangesetComment.COMMENT_TYPE_TODO:
1637 raise JSONRPCError(
1637 raise JSONRPCError(
1638 'Comment `%s` is wrong type for setting status to resolved.'
1638 'Comment `%s` is wrong type for setting status to resolved.'
1639 % resolves_comment_id)
1639 % resolves_comment_id)
1640
1640
1641 try:
1641 try:
1642 rc_config = SettingsModel().get_all_settings()
1642 rc_config = SettingsModel().get_all_settings()
1643 renderer = rc_config.get('rhodecode_markup_renderer', 'rst')
1643 renderer = rc_config.get('rhodecode_markup_renderer', 'rst')
1644 status_change_label = ChangesetStatus.get_status_lbl(status)
1644 status_change_label = ChangesetStatus.get_status_lbl(status)
1645 comment = CommentsModel().create(
1645 comment = CommentsModel().create(
1646 message, repo, user, commit_id=commit_id,
1646 message, repo, user, commit_id=commit_id,
1647 status_change=status_change_label,
1647 status_change=status_change_label,
1648 status_change_type=status,
1648 status_change_type=status,
1649 renderer=renderer,
1649 renderer=renderer,
1650 comment_type=comment_type,
1650 comment_type=comment_type,
1651 resolves_comment_id=resolves_comment_id,
1651 resolves_comment_id=resolves_comment_id,
1652 auth_user=apiuser,
1652 auth_user=apiuser,
1653 extra_recipients=extra_recipients,
1653 extra_recipients=extra_recipients,
1654 send_email=send_email
1654 send_email=send_email
1655 )
1655 )
1656 is_inline = bool(comment.f_path and comment.line_no)
1656 is_inline = comment.is_inline
1657
1657
1658 if status:
1658 if status:
1659 # also do a status change
1659 # also do a status change
1660 try:
1660 try:
1661 ChangesetStatusModel().set_status(
1661 ChangesetStatusModel().set_status(
1662 repo, status, user, comment, revision=commit_id,
1662 repo, status, user, comment, revision=commit_id,
1663 dont_allow_on_closed_pull_request=True
1663 dont_allow_on_closed_pull_request=True
1664 )
1664 )
1665 except StatusChangeOnClosedPullRequestError:
1665 except StatusChangeOnClosedPullRequestError:
1666 log.exception(
1666 log.exception(
1667 "Exception occurred while trying to change repo commit status")
1667 "Exception occurred while trying to change repo commit status")
1668 msg = ('Changing status on a commit associated with '
1668 msg = ('Changing status on a commit associated with '
1669 'a closed pull request is not allowed')
1669 'a closed pull request is not allowed')
1670 raise JSONRPCError(msg)
1670 raise JSONRPCError(msg)
1671
1671
1672 CommentsModel().trigger_commit_comment_hook(
1672 CommentsModel().trigger_commit_comment_hook(
1673 repo, apiuser, 'create',
1673 repo, apiuser, 'create',
1674 data={'comment': comment, 'commit': commit})
1674 data={'comment': comment, 'commit': commit})
1675
1675
1676 Session().commit()
1676 Session().commit()
1677
1677
1678 comment_broadcast_channel = channelstream.comment_channel(
1678 comment_broadcast_channel = channelstream.comment_channel(
1679 db_repo_name, commit_obj=commit)
1679 db_repo_name, commit_obj=commit)
1680
1680
1681 comment_data = {'comment': comment, 'comment_id': comment.comment_id}
1681 comment_data = {'comment': comment, 'comment_id': comment.comment_id}
1682 comment_type = 'inline' if is_inline else 'general'
1682 comment_type = 'inline' if is_inline else 'general'
1683 channelstream.comment_channelstream_push(
1683 channelstream.comment_channelstream_push(
1684 request, comment_broadcast_channel, apiuser,
1684 request, comment_broadcast_channel, apiuser,
1685 _('posted a new {} comment').format(comment_type),
1685 _('posted a new {} comment').format(comment_type),
1686 comment_data=comment_data)
1686 comment_data=comment_data)
1687
1687
1688 return {
1688 return {
1689 'msg': (
1689 'msg': (
1690 'Commented on commit `%s` for repository `%s`' % (
1690 'Commented on commit `%s` for repository `%s`' % (
1691 comment.revision, repo.repo_name)),
1691 comment.revision, repo.repo_name)),
1692 'status_change': status,
1692 'status_change': status,
1693 'success': True,
1693 'success': True,
1694 }
1694 }
1695 except JSONRPCError:
1695 except JSONRPCError:
1696 # catch any inside errors, and re-raise them to prevent from
1696 # catch any inside errors, and re-raise them to prevent from
1697 # below global catch to silence them
1697 # below global catch to silence them
1698 raise
1698 raise
1699 except Exception:
1699 except Exception:
1700 log.exception("Exception occurred while trying to comment on commit")
1700 log.exception("Exception occurred while trying to comment on commit")
1701 raise JSONRPCError(
1701 raise JSONRPCError(
1702 'failed to set comment on repository `%s`' % (repo.repo_name,)
1702 'failed to set comment on repository `%s`' % (repo.repo_name,)
1703 )
1703 )
1704
1704
1705
1705
1706 @jsonrpc_method()
1706 @jsonrpc_method()
1707 def get_repo_comments(request, apiuser, repoid,
1707 def get_repo_comments(request, apiuser, repoid,
1708 commit_id=Optional(None), comment_type=Optional(None),
1708 commit_id=Optional(None), comment_type=Optional(None),
1709 userid=Optional(None)):
1709 userid=Optional(None)):
1710 """
1710 """
1711 Get all comments for a repository
1711 Get all comments for a repository
1712
1712
1713 :param apiuser: This is filled automatically from the |authtoken|.
1713 :param apiuser: This is filled automatically from the |authtoken|.
1714 :type apiuser: AuthUser
1714 :type apiuser: AuthUser
1715 :param repoid: Set the repository name or repository ID.
1715 :param repoid: Set the repository name or repository ID.
1716 :type repoid: str or int
1716 :type repoid: str or int
1717 :param commit_id: Optionally filter the comments by the commit_id
1717 :param commit_id: Optionally filter the comments by the commit_id
1718 :type commit_id: Optional(str), default: None
1718 :type commit_id: Optional(str), default: None
1719 :param comment_type: Optionally filter the comments by the comment_type
1719 :param comment_type: Optionally filter the comments by the comment_type
1720 one of: 'note', 'todo'
1720 one of: 'note', 'todo'
1721 :type comment_type: Optional(str), default: None
1721 :type comment_type: Optional(str), default: None
1722 :param userid: Optionally filter the comments by the author of comment
1722 :param userid: Optionally filter the comments by the author of comment
1723 :type userid: Optional(str or int), Default: None
1723 :type userid: Optional(str or int), Default: None
1724
1724
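Example args (an illustrative sketch; the repository name and filter
values are placeholders):

.. code-block:: python

    # args mapping for a `get_repo_comments` JSON-RPC call
    args = {
        'repoid': 'my-repo',
        'comment_type': 'todo',    # only TODO comments
        'userid': 'john-doe',      # only comments made by this user
    }
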
1725 Example output:
1725 Example output:
1726
1726
1727 .. code-block:: bash
1727 .. code-block:: bash
1728
1728
1729 {
1729 {
1730 "id" : <id_given_in_input>,
1730 "id" : <id_given_in_input>,
1731 "result" : [
1731 "result" : [
1732 {
1732 {
1733 "comment_author": <USER_DETAILS>,
1733 "comment_author": <USER_DETAILS>,
1734 "comment_created_on": "2017-02-01T14:38:16.309",
1734 "comment_created_on": "2017-02-01T14:38:16.309",
1735 "comment_f_path": "file.txt",
1735 "comment_f_path": "file.txt",
1736 "comment_id": 282,
1736 "comment_id": 282,
1737 "comment_lineno": "n1",
1737 "comment_lineno": "n1",
1738 "comment_resolved_by": null,
1738 "comment_resolved_by": null,
1739 "comment_status": [],
1739 "comment_status": [],
1740 "comment_text": "This file needs a header",
1740 "comment_text": "This file needs a header",
1741 "comment_type": "todo",
1741 "comment_type": "todo",
1742 "comment_last_version: 0
1742 "comment_last_version: 0
1743 }
1743 }
1744 ],
1744 ],
1745 "error" : null
1745 "error" : null
1746 }
1746 }
1747
1747
1748 """
1748 """
1749 repo = get_repo_or_error(repoid)
1749 repo = get_repo_or_error(repoid)
1750 if not has_superadmin_permission(apiuser):
1750 if not has_superadmin_permission(apiuser):
1751 _perms = ('repository.read', 'repository.write', 'repository.admin')
1751 _perms = ('repository.read', 'repository.write', 'repository.admin')
1752 validate_repo_permissions(apiuser, repoid, repo, _perms)
1752 validate_repo_permissions(apiuser, repoid, repo, _perms)
1753
1753
1754 commit_id = Optional.extract(commit_id)
1754 commit_id = Optional.extract(commit_id)
1755
1755
1756 userid = Optional.extract(userid)
1756 userid = Optional.extract(userid)
1757 if userid:
1757 if userid:
1758 user = get_user_or_error(userid)
1758 user = get_user_or_error(userid)
1759 else:
1759 else:
1760 user = None
1760 user = None
1761
1761
1762 comment_type = Optional.extract(comment_type)
1762 comment_type = Optional.extract(comment_type)
1763 if comment_type and comment_type not in ChangesetComment.COMMENT_TYPES:
1763 if comment_type and comment_type not in ChangesetComment.COMMENT_TYPES:
1764 raise JSONRPCError(
1764 raise JSONRPCError(
1765 'comment_type must be one of `{}` got {}'.format(
1765 'comment_type must be one of `{}` got {}'.format(
1766 ChangesetComment.COMMENT_TYPES, comment_type)
1766 ChangesetComment.COMMENT_TYPES, comment_type)
1767 )
1767 )
1768
1768
1769 comments = CommentsModel().get_repository_comments(
1769 comments = CommentsModel().get_repository_comments(
1770 repo=repo, comment_type=comment_type, user=user, commit_id=commit_id)
1770 repo=repo, comment_type=comment_type, user=user, commit_id=commit_id)
1771 return comments
1771 return comments
1772
1772
1773
1773
1774 @jsonrpc_method()
1774 @jsonrpc_method()
1775 def get_comment(request, apiuser, comment_id):
1775 def get_comment(request, apiuser, comment_id):
1776 """
1776 """
1777 Get single comment from repository or pull_request
1777 Get single comment from repository or pull_request
1778
1778
1779 :param apiuser: This is filled automatically from the |authtoken|.
1779 :param apiuser: This is filled automatically from the |authtoken|.
1780 :type apiuser: AuthUser
1780 :type apiuser: AuthUser
1781 :param comment_id: comment id found in the URL of comment
1781 :param comment_id: comment id found in the URL of comment
1782 :type comment_id: str or int
1782 :type comment_id: str or int
1783
1783
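Example args (an illustrative sketch; the comment id is a placeholder):

.. code-block:: python

    # args mapping for a `get_comment` JSON-RPC call
    args = {
        'comment_id': 282,
    }
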
1784 Example output:
1784 Example output:
1785
1785
1786 .. code-block:: bash
1786 .. code-block:: bash
1787
1787
1788 {
1788 {
1789 "id" : <id_given_in_input>,
1789 "id" : <id_given_in_input>,
1790 "result" : {
1790 "result" : {
1791 "comment_author": <USER_DETAILS>,
1791 "comment_author": <USER_DETAILS>,
1792 "comment_created_on": "2017-02-01T14:38:16.309",
1792 "comment_created_on": "2017-02-01T14:38:16.309",
1793 "comment_f_path": "file.txt",
1793 "comment_f_path": "file.txt",
1794 "comment_id": 282,
1794 "comment_id": 282,
1795 "comment_lineno": "n1",
1795 "comment_lineno": "n1",
1796 "comment_resolved_by": null,
1796 "comment_resolved_by": null,
1797 "comment_status": [],
1797 "comment_status": [],
1798 "comment_text": "This file needs a header",
1798 "comment_text": "This file needs a header",
1799 "comment_type": "todo",
1799 "comment_type": "todo",
1800 "comment_last_version: 0
1800 "comment_last_version: 0
1801 },
1801 },
1802 "error" : null
1802 "error" : null
1803 }
1803 }
1804
1804
1805 """
1805 """
1806
1806
1807 comment = ChangesetComment.get(comment_id)
1807 comment = ChangesetComment.get(comment_id)
1808 if not comment:
1808 if not comment:
1809 raise JSONRPCError('comment `%s` does not exist' % (comment_id,))
1809 raise JSONRPCError('comment `%s` does not exist' % (comment_id,))
1810
1810
1811 perms = ('repository.read', 'repository.write', 'repository.admin')
1811 perms = ('repository.read', 'repository.write', 'repository.admin')
1812 has_comment_perm = HasRepoPermissionAnyApi(*perms)\
1812 has_comment_perm = HasRepoPermissionAnyApi(*perms)\
1813 (user=apiuser, repo_name=comment.repo.repo_name)
1813 (user=apiuser, repo_name=comment.repo.repo_name)
1814
1814
1815 if not has_comment_perm:
1815 if not has_comment_perm:
1816 raise JSONRPCError('comment `%s` does not exist' % (comment_id,))
1816 raise JSONRPCError('comment `%s` does not exist' % (comment_id,))
1817
1817
1818 return comment
1818 return comment
1819
1819
1820
1820
1821 @jsonrpc_method()
1821 @jsonrpc_method()
1822 def edit_comment(request, apiuser, message, comment_id, version,
1822 def edit_comment(request, apiuser, message, comment_id, version,
1823 userid=Optional(OAttr('apiuser'))):
1823 userid=Optional(OAttr('apiuser'))):
1824 """
1824 """
1825 Edit a comment on a pull request or commit, specified by the
1825 Edit a comment on a pull request or commit, specified by the
1826 `comment_id` and `version`. The initial version is 0.
1826 `comment_id` and `version`. The initial version is 0.
1827
1827
1828 :param apiuser: This is filled automatically from the |authtoken|.
1828 :param apiuser: This is filled automatically from the |authtoken|.
1829 :type apiuser: AuthUser
1829 :type apiuser: AuthUser
1830 :param comment_id: Specify the comment_id for editing
1830 :param comment_id: Specify the comment_id for editing
1831 :type comment_id: int
1831 :type comment_id: int
1832 :param version: version of the comment that will be created, starts from 0
1832 :param version: version of the comment that will be created, starts from 0
1833 :type version: int
1833 :type version: int
1834 :param message: The text content of the comment.
1834 :param message: The text content of the comment.
1835 :type message: str
1835 :type message: str
1836 :param userid: Comment on the pull request as this user
1836 :param userid: Comment on the pull request as this user
1837 :type userid: Optional(str or int)
1837 :type userid: Optional(str or int)
1838
1838
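Example args (an illustrative sketch; the comment id and message are
placeholders, and `version` must match the current version of the comment):

.. code-block:: python

    # args mapping for an `edit_comment` JSON-RPC call
    args = {
        'comment_id': 282,
        'version': 0,          # first edit of a fresh comment
        'message': 'Updated comment text',
    }
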
1839 Example output:
1839 Example output:
1840
1840
1841 .. code-block:: bash
1841 .. code-block:: bash
1842
1842
1843 id : <id_given_in_input>
1843 id : <id_given_in_input>
1844 result : {
1844 result : {
1845 "comment": "<comment data>",
1845 "comment": "<comment data>",
1846 "version": "<Integer>",
1846 "version": "<Integer>",
1847 },
1847 },
1848 error : null
1848 error : null
1849 """
1849 """
1850
1850
1851 auth_user = apiuser
1851 auth_user = apiuser
1852 comment = ChangesetComment.get(comment_id)
1852 comment = ChangesetComment.get(comment_id)
1853 if not comment:
1853 if not comment:
1854 raise JSONRPCError('comment `%s` does not exist' % (comment_id,))
1854 raise JSONRPCError('comment `%s` does not exist' % (comment_id,))
1855
1855
1856 is_super_admin = has_superadmin_permission(apiuser)
1856 is_super_admin = has_superadmin_permission(apiuser)
1857 is_repo_admin = HasRepoPermissionAnyApi('repository.admin')\
1857 is_repo_admin = HasRepoPermissionAnyApi('repository.admin')\
1858 (user=apiuser, repo_name=comment.repo.repo_name)
1858 (user=apiuser, repo_name=comment.repo.repo_name)
1859
1859
1860 if not isinstance(userid, Optional):
1860 if not isinstance(userid, Optional):
1861 if is_super_admin or is_repo_admin:
1861 if is_super_admin or is_repo_admin:
1862 apiuser = get_user_or_error(userid)
1862 apiuser = get_user_or_error(userid)
1863 auth_user = apiuser.AuthUser()
1863 auth_user = apiuser.AuthUser()
1864 else:
1864 else:
1865 raise JSONRPCError('userid is not the same as your user')
1865 raise JSONRPCError('userid is not the same as your user')
1866
1866
1867 comment_author = comment.author.user_id == auth_user.user_id
1867 comment_author = comment.author.user_id == auth_user.user_id
1868 if not (comment.immutable is False and (is_super_admin or is_repo_admin) or comment_author):
1868 if not (comment.immutable is False and (is_super_admin or is_repo_admin) or comment_author):
1869 raise JSONRPCError("you don't have access to edit this comment")
1869 raise JSONRPCError("you don't have access to edit this comment")
1870
1870
1871 try:
1871 try:
1872 comment_history = CommentsModel().edit(
1872 comment_history = CommentsModel().edit(
1873 comment_id=comment_id,
1873 comment_id=comment_id,
1874 text=message,
1874 text=message,
1875 auth_user=auth_user,
1875 auth_user=auth_user,
1876 version=version,
1876 version=version,
1877 )
1877 )
1878 Session().commit()
1878 Session().commit()
1879 except CommentVersionMismatch:
1879 except CommentVersionMismatch:
1880 raise JSONRPCError(
1880 raise JSONRPCError(
1881 'comment ({}) version ({}) mismatch'.format(comment_id, version)
1881 'comment ({}) version ({}) mismatch'.format(comment_id, version)
1882 )
1882 )
1883 if not comment_history and not message:
1883 if not comment_history and not message:
1884 raise JSONRPCError(
1884 raise JSONRPCError(
1885 "comment ({}) can't be changed with empty string".format(comment_id)
1885 "comment ({}) can't be changed with empty string".format(comment_id)
1886 )
1886 )
1887
1887
1888 if comment.pull_request:
1888 if comment.pull_request:
1889 pull_request = comment.pull_request
1889 pull_request = comment.pull_request
1890 PullRequestModel().trigger_pull_request_hook(
1890 PullRequestModel().trigger_pull_request_hook(
1891 pull_request, apiuser, 'comment_edit',
1891 pull_request, apiuser, 'comment_edit',
1892 data={'comment': comment})
1892 data={'comment': comment})
1893 else:
1893 else:
1894 db_repo = comment.repo
1894 db_repo = comment.repo
1895 commit_id = comment.revision
1895 commit_id = comment.revision
1896 commit = db_repo.get_commit(commit_id)
1896 commit = db_repo.get_commit(commit_id)
1897 CommentsModel().trigger_commit_comment_hook(
1897 CommentsModel().trigger_commit_comment_hook(
1898 db_repo, apiuser, 'edit',
1898 db_repo, apiuser, 'edit',
1899 data={'comment': comment, 'commit': commit})
1899 data={'comment': comment, 'commit': commit})
1900
1900
1901 data = {
1901 data = {
1902 'comment': comment,
1902 'comment': comment,
1903 'version': comment_history.version if comment_history else None,
1903 'version': comment_history.version if comment_history else None,
1904 }
1904 }
1905 return data
1905 return data
1906
1906
1907
1907
1908 # TODO(marcink): write this with all required logic for deleting comments in PRs or commits
1908 # TODO(marcink): write this with all required logic for deleting comments in PRs or commits
1909 # @jsonrpc_method()
1909 # @jsonrpc_method()
1910 # def delete_comment(request, apiuser, comment_id):
1910 # def delete_comment(request, apiuser, comment_id):
1911 # auth_user = apiuser
1911 # auth_user = apiuser
1912 #
1912 #
1913 # comment = ChangesetComment.get(comment_id)
1913 # comment = ChangesetComment.get(comment_id)
1914 # if not comment:
1914 # if not comment:
1915 # raise JSONRPCError('comment `%s` does not exist' % (comment_id,))
1915 # raise JSONRPCError('comment `%s` does not exist' % (comment_id,))
1916 #
1916 #
1917 # is_super_admin = has_superadmin_permission(apiuser)
1917 # is_super_admin = has_superadmin_permission(apiuser)
1918 # is_repo_admin = HasRepoPermissionAnyApi('repository.admin')\
1918 # is_repo_admin = HasRepoPermissionAnyApi('repository.admin')\
1919 # (user=apiuser, repo_name=comment.repo.repo_name)
1919 # (user=apiuser, repo_name=comment.repo.repo_name)
1920 #
1920 #
1921 # comment_author = comment.author.user_id == auth_user.user_id
1921 # comment_author = comment.author.user_id == auth_user.user_id
1922 # if not (comment.immutable is False and (is_super_admin or is_repo_admin) or comment_author):
1922 # if not (comment.immutable is False and (is_super_admin or is_repo_admin) or comment_author):
1923 # raise JSONRPCError("you don't have access to edit this comment")
1923 # raise JSONRPCError("you don't have access to edit this comment")
1924
1924
1925 @jsonrpc_method()
1925 @jsonrpc_method()
1926 def grant_user_permission(request, apiuser, repoid, userid, perm):
1926 def grant_user_permission(request, apiuser, repoid, userid, perm):
1927 """
1927 """
1928 Grant permissions for the specified user on the given repository,
1928 Grant permissions for the specified user on the given repository,
1929 or update existing permissions if found.
1929 or update existing permissions if found.
1930
1930
1931 This command can only be run using an |authtoken| with admin
1931 This command can only be run using an |authtoken| with admin
1932 permissions on the |repo|.
1932 permissions on the |repo|.
1933
1933
1934 :param apiuser: This is filled automatically from the |authtoken|.
1934 :param apiuser: This is filled automatically from the |authtoken|.
1935 :type apiuser: AuthUser
1935 :type apiuser: AuthUser
1936 :param repoid: Set the repository name or repository ID.
1936 :param repoid: Set the repository name or repository ID.
1937 :type repoid: str or int
1937 :type repoid: str or int
1938 :param userid: Set the user name.
1938 :param userid: Set the user name.
1939 :type userid: str
1939 :type userid: str
1940 :param perm: Set the user permissions, using the following format
1940 :param perm: Set the user permissions, using the following format
1941 ``(repository.(none|read|write|admin))``
1941 ``(repository.(none|read|write|admin))``
1942 :type perm: str
1942 :type perm: str
1943
1943
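Example args (an illustrative sketch; the repository, user and permission
values are placeholders):

.. code-block:: python

    # args mapping for a `grant_user_permission` JSON-RPC call
    args = {
        'repoid': 'my-repo',
        'userid': 'john-doe',
        'perm': 'repository.write',
    }
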
1944 Example output:
1944 Example output:
1945
1945
1946 .. code-block:: bash
1946 .. code-block:: bash
1947
1947
1948 id : <id_given_in_input>
1948 id : <id_given_in_input>
1949 result: {
1949 result: {
1950 "msg" : "Granted perm: `<perm>` for user: `<username>` in repo: `<reponame>`",
1950 "msg" : "Granted perm: `<perm>` for user: `<username>` in repo: `<reponame>`",
1951 "success": true
1951 "success": true
1952 }
1952 }
1953 error: null
1953 error: null
1954 """
1954 """
1955
1955
1956 repo = get_repo_or_error(repoid)
1956 repo = get_repo_or_error(repoid)
1957 user = get_user_or_error(userid)
1957 user = get_user_or_error(userid)
1958 perm = get_perm_or_error(perm)
1958 perm = get_perm_or_error(perm)
1959 if not has_superadmin_permission(apiuser):
1959 if not has_superadmin_permission(apiuser):
1960 _perms = ('repository.admin',)
1960 _perms = ('repository.admin',)
1961 validate_repo_permissions(apiuser, repoid, repo, _perms)
1961 validate_repo_permissions(apiuser, repoid, repo, _perms)
1962
1962
1963 perm_additions = [[user.user_id, perm.permission_name, "user"]]
1963 perm_additions = [[user.user_id, perm.permission_name, "user"]]
1964 try:
1964 try:
1965 changes = RepoModel().update_permissions(
1965 changes = RepoModel().update_permissions(
1966 repo=repo, perm_additions=perm_additions, cur_user=apiuser)
1966 repo=repo, perm_additions=perm_additions, cur_user=apiuser)
1967
1967
1968 action_data = {
1968 action_data = {
1969 'added': changes['added'],
1969 'added': changes['added'],
1970 'updated': changes['updated'],
1970 'updated': changes['updated'],
1971 'deleted': changes['deleted'],
1971 'deleted': changes['deleted'],
1972 }
1972 }
1973 audit_logger.store_api(
1973 audit_logger.store_api(
1974 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
1974 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
1975 Session().commit()
1975 Session().commit()
1976 PermissionModel().flush_user_permission_caches(changes)
1976 PermissionModel().flush_user_permission_caches(changes)
1977
1977
1978 return {
1978 return {
1979 'msg': 'Granted perm: `%s` for user: `%s` in repo: `%s`' % (
1979 'msg': 'Granted perm: `%s` for user: `%s` in repo: `%s`' % (
1980 perm.permission_name, user.username, repo.repo_name
1980 perm.permission_name, user.username, repo.repo_name
1981 ),
1981 ),
1982 'success': True
1982 'success': True
1983 }
1983 }
1984 except Exception:
1984 except Exception:
1985 log.exception("Exception occurred while trying edit permissions for repo")
1985 log.exception("Exception occurred while trying edit permissions for repo")
1986 raise JSONRPCError(
1986 raise JSONRPCError(
1987 'failed to edit permission for user: `%s` in repo: `%s`' % (
1987 'failed to edit permission for user: `%s` in repo: `%s`' % (
1988 userid, repoid
1988 userid, repoid
1989 )
1989 )
1990 )
1990 )
1991
1991
1992
1992
1993 @jsonrpc_method()
1993 @jsonrpc_method()
1994 def revoke_user_permission(request, apiuser, repoid, userid):
1994 def revoke_user_permission(request, apiuser, repoid, userid):
1995 """
1995 """
1996 Revoke permission for a user on the specified repository.
1996 Revoke permission for a user on the specified repository.
1997
1997
1998 This command can only be run using an |authtoken| with admin
1998 This command can only be run using an |authtoken| with admin
1999 permissions on the |repo|.
1999 permissions on the |repo|.
2000
2000
2001 :param apiuser: This is filled automatically from the |authtoken|.
2001 :param apiuser: This is filled automatically from the |authtoken|.
2002 :type apiuser: AuthUser
2002 :type apiuser: AuthUser
2003 :param repoid: Set the repository name or repository ID.
2003 :param repoid: Set the repository name or repository ID.
2004 :type repoid: str or int
2004 :type repoid: str or int
2005 :param userid: Set the user name of revoked user.
2005 :param userid: Set the user name of revoked user.
2006 :type userid: str or int
2006 :type userid: str or int
2007
2007
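Example args (an illustrative sketch; the repository and user values are
placeholders):

.. code-block:: python

    # args mapping for a `revoke_user_permission` JSON-RPC call
    args = {
        'repoid': 'my-repo',
        'userid': 'john-doe',
    }
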
2008 Example output:
2008 Example output:
2009
2009
2010 .. code-block:: bash
2010 .. code-block:: bash
2011
2011
2012 id : <id_given_in_input>
2012 id : <id_given_in_input>
2013 result: {
2013 result: {
2014 "msg" : "Revoked perm for user: `<username>` in repo: `<reponame>`",
2014 "msg" : "Revoked perm for user: `<username>` in repo: `<reponame>`",
2015 "success": true
2015 "success": true
2016 }
2016 }
2017 error: null
2017 error: null
2018 """
2018 """
2019
2019
2020 repo = get_repo_or_error(repoid)
2020 repo = get_repo_or_error(repoid)
2021 user = get_user_or_error(userid)
2021 user = get_user_or_error(userid)
2022 if not has_superadmin_permission(apiuser):
2022 if not has_superadmin_permission(apiuser):
2023 _perms = ('repository.admin',)
2023 _perms = ('repository.admin',)
2024 validate_repo_permissions(apiuser, repoid, repo, _perms)
2024 validate_repo_permissions(apiuser, repoid, repo, _perms)
2025
2025
2026 perm_deletions = [[user.user_id, None, "user"]]
2026 perm_deletions = [[user.user_id, None, "user"]]
2027 try:
2027 try:
2028 changes = RepoModel().update_permissions(
2028 changes = RepoModel().update_permissions(
2029 repo=repo, perm_deletions=perm_deletions, cur_user=apiuser)
2029 repo=repo, perm_deletions=perm_deletions, cur_user=apiuser)
2030
2030
2031 action_data = {
2031 action_data = {
2032 'added': changes['added'],
2032 'added': changes['added'],
2033 'updated': changes['updated'],
2033 'updated': changes['updated'],
2034 'deleted': changes['deleted'],
2034 'deleted': changes['deleted'],
2035 }
2035 }
2036 audit_logger.store_api(
2036 audit_logger.store_api(
2037 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
2037 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
2038 Session().commit()
2038 Session().commit()
2039 PermissionModel().flush_user_permission_caches(changes)
2039 PermissionModel().flush_user_permission_caches(changes)
2040
2040
2041 return {
2041 return {
2042 'msg': 'Revoked perm for user: `%s` in repo: `%s`' % (
2042 'msg': 'Revoked perm for user: `%s` in repo: `%s`' % (
2043 user.username, repo.repo_name
2043 user.username, repo.repo_name
2044 ),
2044 ),
2045 'success': True
2045 'success': True
2046 }
2046 }
2047 except Exception:
2047 except Exception:
2048 log.exception("Exception occurred while trying revoke permissions to repo")
2048 log.exception("Exception occurred while trying revoke permissions to repo")
2049 raise JSONRPCError(
2049 raise JSONRPCError(
2050 'failed to edit permission for user: `%s` in repo: `%s`' % (
2050 'failed to edit permission for user: `%s` in repo: `%s`' % (
2051 userid, repoid
2051 userid, repoid
2052 )
2052 )
2053 )
2053 )
2054
2054
2055
2055
2056 @jsonrpc_method()
2056 @jsonrpc_method()
2057 def grant_user_group_permission(request, apiuser, repoid, usergroupid, perm):
2057 def grant_user_group_permission(request, apiuser, repoid, usergroupid, perm):
2058 """
2058 """
2059 Grant permission for a user group on the specified repository,
2059 Grant permission for a user group on the specified repository,
2060 or update existing permissions.
2060 or update existing permissions.
2061
2061
2062 This command can only be run using an |authtoken| with admin
2062 This command can only be run using an |authtoken| with admin
2063 permissions on the |repo|.
2063 permissions on the |repo|.
2064
2064
2065 :param apiuser: This is filled automatically from the |authtoken|.
2065 :param apiuser: This is filled automatically from the |authtoken|.
2066 :type apiuser: AuthUser
2066 :type apiuser: AuthUser
2067 :param repoid: Set the repository name or repository ID.
2067 :param repoid: Set the repository name or repository ID.
2068 :type repoid: str or int
2068 :type repoid: str or int
2069 :param usergroupid: Specify the ID of the user group.
2069 :param usergroupid: Specify the ID of the user group.
2070 :type usergroupid: str or int
2070 :type usergroupid: str or int
2071 :param perm: Set the user group permissions using the following
2071 :param perm: Set the user group permissions using the following
2072 format: (repository.(none|read|write|admin))
2072 format: (repository.(none|read|write|admin))
2073 :type perm: str
2073 :type perm: str
2074
2074
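Example args (an illustrative sketch; the repository, user group and
permission values are placeholders):

.. code-block:: python

    # args mapping for a `grant_user_group_permission` JSON-RPC call
    args = {
        'repoid': 'my-repo',
        'usergroupid': 'my-user-group',
        'perm': 'repository.read',
    }
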
2075 Example output:
2075 Example output:
2076
2076
2077 .. code-block:: bash
2077 .. code-block:: bash
2078
2078
2079 id : <id_given_in_input>
2079 id : <id_given_in_input>
2080 result : {
2080 result : {
2081 "msg" : "Granted perm: `<perm>` for group: `<usersgroupname>` in repo: `<reponame>`",
2081 "msg" : "Granted perm: `<perm>` for group: `<usersgroupname>` in repo: `<reponame>`",
2082 "success": true
2082 "success": true
2083
2083
2084 }
2084 }
2085 error : null
2085 error : null
2086
2086
2087 Example error output:
2087 Example error output:
2088
2088
2089 .. code-block:: bash
2089 .. code-block:: bash
2090
2090
2091 id : <id_given_in_input>
2091 id : <id_given_in_input>
2092 result : null
2092 result : null
2093 error : {
2093 error : {
2094 "failed to edit permission for user group: `<usergroup>` in repo `<repo>`'
2094 "failed to edit permission for user group: `<usergroup>` in repo `<repo>`'
2095 }
2095 }
2096
2096
2097 """
2097 """
2098
2098
2099 repo = get_repo_or_error(repoid)
2099 repo = get_repo_or_error(repoid)
2100 perm = get_perm_or_error(perm)
2100 perm = get_perm_or_error(perm)
2101 if not has_superadmin_permission(apiuser):
2101 if not has_superadmin_permission(apiuser):
2102 _perms = ('repository.admin',)
2102 _perms = ('repository.admin',)
2103 validate_repo_permissions(apiuser, repoid, repo, _perms)
2103 validate_repo_permissions(apiuser, repoid, repo, _perms)
2104
2104
2105 user_group = get_user_group_or_error(usergroupid)
2105 user_group = get_user_group_or_error(usergroupid)
2106 if not has_superadmin_permission(apiuser):
2106 if not has_superadmin_permission(apiuser):
2107 # check if we have at least read permission for this user group !
2107 # check if we have at least read permission for this user group !
2108 _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',)
2108 _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',)
2109 if not HasUserGroupPermissionAnyApi(*_perms)(
2109 if not HasUserGroupPermissionAnyApi(*_perms)(
2110 user=apiuser, user_group_name=user_group.users_group_name):
2110 user=apiuser, user_group_name=user_group.users_group_name):
2111 raise JSONRPCError(
2111 raise JSONRPCError(
2112 'user group `%s` does not exist' % (usergroupid,))
2112 'user group `%s` does not exist' % (usergroupid,))
2113
2113
2114 perm_additions = [[user_group.users_group_id, perm.permission_name, "user_group"]]
2114 perm_additions = [[user_group.users_group_id, perm.permission_name, "user_group"]]
2115 try:
2115 try:
2116 changes = RepoModel().update_permissions(
2116 changes = RepoModel().update_permissions(
2117 repo=repo, perm_additions=perm_additions, cur_user=apiuser)
2117 repo=repo, perm_additions=perm_additions, cur_user=apiuser)
2118 action_data = {
2118 action_data = {
2119 'added': changes['added'],
2119 'added': changes['added'],
2120 'updated': changes['updated'],
2120 'updated': changes['updated'],
2121 'deleted': changes['deleted'],
2121 'deleted': changes['deleted'],
2122 }
2122 }
2123 audit_logger.store_api(
2123 audit_logger.store_api(
2124 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
2124 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
2125 Session().commit()
2125 Session().commit()
2126 PermissionModel().flush_user_permission_caches(changes)
2126 PermissionModel().flush_user_permission_caches(changes)
2127
2127
2128 return {
2128 return {
2129 'msg': 'Granted perm: `%s` for user group: `%s` in '
2129 'msg': 'Granted perm: `%s` for user group: `%s` in '
2130 'repo: `%s`' % (
2130 'repo: `%s`' % (
2131 perm.permission_name, user_group.users_group_name,
2131 perm.permission_name, user_group.users_group_name,
2132 repo.repo_name
2132 repo.repo_name
2133 ),
2133 ),
2134 'success': True
2134 'success': True
2135 }
2135 }
2136 except Exception:
2136 except Exception:
2137 log.exception(
2137 log.exception(
2138 "Exception occurred while trying change permission on repo")
2138 "Exception occurred while trying change permission on repo")
2139 raise JSONRPCError(
2139 raise JSONRPCError(
2140 'failed to edit permission for user group: `%s` in '
2140 'failed to edit permission for user group: `%s` in '
2141 'repo: `%s`' % (
2141 'repo: `%s`' % (
2142 usergroupid, repo.repo_name
2142 usergroupid, repo.repo_name
2143 )
2143 )
2144 )
2144 )
2145
2145
2146
2146
2147 @jsonrpc_method()
2147 @jsonrpc_method()
2148 def revoke_user_group_permission(request, apiuser, repoid, usergroupid):
2148 def revoke_user_group_permission(request, apiuser, repoid, usergroupid):
2149 """
2149 """
2150 Revoke the permissions of a user group on a given repository.
2150 Revoke the permissions of a user group on a given repository.
2151
2151
2152 This command can only be run using an |authtoken| with admin
2152 This command can only be run using an |authtoken| with admin
2153 permissions on the |repo|.
2153 permissions on the |repo|.
2154
2154
2155 :param apiuser: This is filled automatically from the |authtoken|.
2155 :param apiuser: This is filled automatically from the |authtoken|.
2156 :type apiuser: AuthUser
2156 :type apiuser: AuthUser
2157 :param repoid: Set the repository name or repository ID.
2157 :param repoid: Set the repository name or repository ID.
2158 :type repoid: str or int
2158 :type repoid: str or int
2159 :param usergroupid: Specify the user group ID.
2159 :param usergroupid: Specify the user group ID.
2160 :type usergroupid: str or int
2160 :type usergroupid: str or int
2161
2161
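Example args (an illustrative sketch; the repository and user group values
are placeholders):

.. code-block:: python

    # args mapping for a `revoke_user_group_permission` JSON-RPC call
    args = {
        'repoid': 'my-repo',
        'usergroupid': 'my-user-group',
    }
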
2162 Example output:
2162 Example output:
2163
2163
2164 .. code-block:: bash
2164 .. code-block:: bash
2165
2165
2166 id : <id_given_in_input>
2166 id : <id_given_in_input>
2167 result: {
2167 result: {
2168 "msg" : "Revoked perm for group: `<usersgroupname>` in repo: `<reponame>`",
2168 "msg" : "Revoked perm for group: `<usersgroupname>` in repo: `<reponame>`",
2169 "success": true
2169 "success": true
2170 }
2170 }
2171 error: null
2171 error: null
2172 """
2172 """
2173
2173
2174 repo = get_repo_or_error(repoid)
2174 repo = get_repo_or_error(repoid)
2175 if not has_superadmin_permission(apiuser):
2175 if not has_superadmin_permission(apiuser):
2176 _perms = ('repository.admin',)
2176 _perms = ('repository.admin',)
2177 validate_repo_permissions(apiuser, repoid, repo, _perms)
2177 validate_repo_permissions(apiuser, repoid, repo, _perms)
2178
2178
2179 user_group = get_user_group_or_error(usergroupid)
2179 user_group = get_user_group_or_error(usergroupid)
2180 if not has_superadmin_permission(apiuser):
2180 if not has_superadmin_permission(apiuser):
2181 # check if we have at least read permission for this user group !
2181 # check if we have at least read permission for this user group !
2182 _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',)
2182 _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',)
2183 if not HasUserGroupPermissionAnyApi(*_perms)(
2183 if not HasUserGroupPermissionAnyApi(*_perms)(
2184 user=apiuser, user_group_name=user_group.users_group_name):
2184 user=apiuser, user_group_name=user_group.users_group_name):
2185 raise JSONRPCError(
2185 raise JSONRPCError(
2186 'user group `%s` does not exist' % (usergroupid,))
2186 'user group `%s` does not exist' % (usergroupid,))
2187
2187
2188 perm_deletions = [[user_group.users_group_id, None, "user_group"]]
2188 perm_deletions = [[user_group.users_group_id, None, "user_group"]]
2189 try:
2189 try:
2190 changes = RepoModel().update_permissions(
2190 changes = RepoModel().update_permissions(
2191 repo=repo, perm_deletions=perm_deletions, cur_user=apiuser)
2191 repo=repo, perm_deletions=perm_deletions, cur_user=apiuser)
2192 action_data = {
2192 action_data = {
2193 'added': changes['added'],
2193 'added': changes['added'],
2194 'updated': changes['updated'],
2194 'updated': changes['updated'],
2195 'deleted': changes['deleted'],
2195 'deleted': changes['deleted'],
2196 }
2196 }
2197 audit_logger.store_api(
2197 audit_logger.store_api(
2198 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
2198 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
2199 Session().commit()
2199 Session().commit()
2200 PermissionModel().flush_user_permission_caches(changes)
2200 PermissionModel().flush_user_permission_caches(changes)
2201
2201
2202 return {
2202 return {
2203 'msg': 'Revoked perm for user group: `%s` in repo: `%s`' % (
2203 'msg': 'Revoked perm for user group: `%s` in repo: `%s`' % (
2204 user_group.users_group_name, repo.repo_name
2204 user_group.users_group_name, repo.repo_name
2205 ),
2205 ),
2206 'success': True
2206 'success': True
2207 }
2207 }
2208 except Exception:
2208 except Exception:
2209 log.exception("Exception occurred while trying revoke "
2209 log.exception("Exception occurred while trying revoke "
2210 "user group permission on repo")
2210 "user group permission on repo")
2211 raise JSONRPCError(
2211 raise JSONRPCError(
2212 'failed to edit permission for user group: `%s` in '
2212 'failed to edit permission for user group: `%s` in '
2213 'repo: `%s`' % (
2213 'repo: `%s`' % (
2214 user_group.users_group_name, repo.repo_name
2214 user_group.users_group_name, repo.repo_name
2215 )
2215 )
2216 )
2216 )
2217
2217
2218
2218
2219 @jsonrpc_method()
2219 @jsonrpc_method()
2220 def pull(request, apiuser, repoid, remote_uri=Optional(None)):
2220 def pull(request, apiuser, repoid, remote_uri=Optional(None)):
2221 """
2221 """
2222 Triggers a pull on the given repository from a remote location. You
2222 Triggers a pull on the given repository from a remote location. You
2223 can use this to keep a repository that mirrors a remote location up to date.
2223 can use this to keep a repository that mirrors a remote location up to date.
2224
2224
2225 This command can only be run using an |authtoken| with admin
2225 This command can only be run using an |authtoken| with admin
2226 rights to the specified repository. For more information,
2226 rights to the specified repository. For more information,
2227 see :ref:`config-token-ref`.
2227 see :ref:`config-token-ref`.
2228
2228
2229 This command takes the following options:
2229 This command takes the following options:
2230
2230
2231 :param apiuser: This is filled automatically from the |authtoken|.
2231 :param apiuser: This is filled automatically from the |authtoken|.
2232 :type apiuser: AuthUser
2232 :type apiuser: AuthUser
2233 :param repoid: The repository name or repository ID.
2233 :param repoid: The repository name or repository ID.
2234 :type repoid: str or int
2234 :type repoid: str or int
2235 :param remote_uri: Optional remote URI to pass in for pull
2235 :param remote_uri: Optional remote URI to pass in for pull
2236 :type remote_uri: str
2236 :type remote_uri: str
2237
2237
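Example args (an illustrative sketch; the repository name and remote URI
are placeholders, and `remote_uri` may be omitted to use the stored clone URI):

.. code-block:: python

    # args mapping for a `pull` JSON-RPC call
    args = {
        'repoid': 'my-repo',
        'remote_uri': 'https://upstream.example.com/my-repo',
    }
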
2238 Example output:
2238 Example output:
2239
2239
2240 .. code-block:: bash
2240 .. code-block:: bash
2241
2241
2242 id : <id_given_in_input>
2242 id : <id_given_in_input>
2243 result : {
2243 result : {
2244 "msg": "Pulled from url `<remote_url>` on repo `<repository name>`"
2244 "msg": "Pulled from url `<remote_url>` on repo `<repository name>`"
2245 "repository": "<repository name>"
2245 "repository": "<repository name>"
2246 }
2246 }
2247 error : null
2247 error : null
2248
2248
2249 Example error output:
2249 Example error output:
2250
2250
2251 .. code-block:: bash
2251 .. code-block:: bash
2252
2252
2253 id : <id_given_in_input>
2253 id : <id_given_in_input>
2254 result : null
2254 result : null
2255 error : {
2255 error : {
2256 "Unable to push changes from `<remote_url>`"
2256 "Unable to push changes from `<remote_url>`"
2257 }
2257 }
2258
2258
2259 """
2259 """
2260
2260
2261 repo = get_repo_or_error(repoid)
2261 repo = get_repo_or_error(repoid)
2262 remote_uri = Optional.extract(remote_uri)
2262 remote_uri = Optional.extract(remote_uri)
2263 remote_uri_display = remote_uri or repo.clone_uri_hidden
2263 remote_uri_display = remote_uri or repo.clone_uri_hidden
2264 if not has_superadmin_permission(apiuser):
2264 if not has_superadmin_permission(apiuser):
2265 _perms = ('repository.admin',)
2265 _perms = ('repository.admin',)
2266 validate_repo_permissions(apiuser, repoid, repo, _perms)
2266 validate_repo_permissions(apiuser, repoid, repo, _perms)
2267
2267
2268 try:
2268 try:
2269 ScmModel().pull_changes(
2269 ScmModel().pull_changes(
2270 repo.repo_name, apiuser.username, remote_uri=remote_uri)
2270 repo.repo_name, apiuser.username, remote_uri=remote_uri)
2271 return {
2271 return {
2272 'msg': 'Pulled from url `%s` on repo `%s`' % (
2272 'msg': 'Pulled from url `%s` on repo `%s`' % (
2273 remote_uri_display, repo.repo_name),
2273 remote_uri_display, repo.repo_name),
2274 'repository': repo.repo_name
2274 'repository': repo.repo_name
2275 }
2275 }
2276 except Exception:
2276 except Exception:
2277 log.exception("Exception occurred while trying to "
2277 log.exception("Exception occurred while trying to "
2278 "pull changes from remote location")
2278 "pull changes from remote location")
2279 raise JSONRPCError(
2279 raise JSONRPCError(
2280 'Unable to pull changes from `%s`' % remote_uri_display
2280 'Unable to pull changes from `%s`' % remote_uri_display
2281 )
2281 )
2282
2282
2283
2283
2284 @jsonrpc_method()
2284 @jsonrpc_method()
2285 def strip(request, apiuser, repoid, revision, branch):
2285 def strip(request, apiuser, repoid, revision, branch):
2286 """
2286 """
2287 Strips the given revision from the specified repository.
2287 Strips the given revision from the specified repository.
2288
2288
2289 * This will remove the revision and all of its descendants.
2289 * This will remove the revision and all of its descendants.
2290
2290
2291 This command can only be run using an |authtoken| with admin rights to
2291 This command can only be run using an |authtoken| with admin rights to
2292 the specified repository.
2292 the specified repository.
2293
2293
2294 This command takes the following options:
2294 This command takes the following options:
2295
2295
2296 :param apiuser: This is filled automatically from the |authtoken|.
2296 :param apiuser: This is filled automatically from the |authtoken|.
2297 :type apiuser: AuthUser
2297 :type apiuser: AuthUser
2298 :param repoid: The repository name or repository ID.
2298 :param repoid: The repository name or repository ID.
2299 :type repoid: str or int
2299 :type repoid: str or int
2300 :param revision: The revision you wish to strip.
2300 :param revision: The revision you wish to strip.
2301 :type revision: str
2301 :type revision: str
2302 :param branch: The branch from which to strip the revision.
2302 :param branch: The branch from which to strip the revision.
2303 :type branch: str
2303 :type branch: str
2304
2304
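Example args (an illustrative sketch; the repository, commit hash and
branch name are placeholders):

.. code-block:: python

    # args mapping for a `strip` JSON-RPC call
    args = {
        'repoid': 'my-repo',
        'revision': '<40-character commit hash>',
        'branch': 'default',
    }
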
2305 Example output:
2305 Example output:
2306
2306
2307 .. code-block:: bash
2307 .. code-block:: bash
2308
2308
2309 id : <id_given_in_input>
2309 id : <id_given_in_input>
2310 result : {
2310 result : {
2311 "msg": "'Stripped commit <commit_hash> from repo `<repository name>`'"
2311 "msg": "'Stripped commit <commit_hash> from repo `<repository name>`'"
2312 "repository": "<repository name>"
2312 "repository": "<repository name>"
2313 }
2313 }
2314 error : null
2314 error : null
2315
2315
2316 Example error output:
2316 Example error output:
2317
2317
2318 .. code-block:: bash
2318 .. code-block:: bash
2319
2319
2320 id : <id_given_in_input>
2320 id : <id_given_in_input>
2321 result : null
2321 result : null
2322 error : {
2322 error : {
2323 "Unable to strip commit <commit_hash> from repo `<repository name>`"
2323 "Unable to strip commit <commit_hash> from repo `<repository name>`"
2324 }
2324 }
2325
2325
2326 """
2326 """
2327
2327
2328 repo = get_repo_or_error(repoid)
2328 repo = get_repo_or_error(repoid)
2329 if not has_superadmin_permission(apiuser):
2329 if not has_superadmin_permission(apiuser):
2330 _perms = ('repository.admin',)
2330 _perms = ('repository.admin',)
2331 validate_repo_permissions(apiuser, repoid, repo, _perms)
2331 validate_repo_permissions(apiuser, repoid, repo, _perms)
2332
2332
2333 try:
2333 try:
2334 ScmModel().strip(repo, revision, branch)
2334 ScmModel().strip(repo, revision, branch)
2335 audit_logger.store_api(
2335 audit_logger.store_api(
2336 'repo.commit.strip', action_data={'commit_id': revision},
2336 'repo.commit.strip', action_data={'commit_id': revision},
2337 repo=repo,
2337 repo=repo,
2338 user=apiuser, commit=True)
2338 user=apiuser, commit=True)
2339
2339
2340 return {
2340 return {
2341 'msg': 'Stripped commit %s from repo `%s`' % (
2341 'msg': 'Stripped commit %s from repo `%s`' % (
2342 revision, repo.repo_name),
2342 revision, repo.repo_name),
2343 'repository': repo.repo_name
2343 'repository': repo.repo_name
2344 }
2344 }
2345 except Exception:
2345 except Exception:
2346 log.exception("Exception while trying to strip")
2346 log.exception("Exception while trying to strip")
2347 raise JSONRPCError(
2347 raise JSONRPCError(
2348 'Unable to strip commit %s from repo `%s`' % (
2348 'Unable to strip commit %s from repo `%s`' % (
2349 revision, repo.repo_name)
2349 revision, repo.repo_name)
2350 )
2350 )
2351
2351
2352
2352
2353 @jsonrpc_method()
2353 @jsonrpc_method()
2354 def get_repo_settings(request, apiuser, repoid, key=Optional(None)):
2354 def get_repo_settings(request, apiuser, repoid, key=Optional(None)):
2355 """
2355 """
2356 Returns all settings for a repository. If a key is given, only the
2356 Returns all settings for a repository. If a key is given, only the
2357 setting identified by that key is returned, or null if it does not exist.
2357 setting identified by that key is returned, or null if it does not exist.
2358
2358
2359 :param apiuser: This is filled automatically from the |authtoken|.
2359 :param apiuser: This is filled automatically from the |authtoken|.
2360 :type apiuser: AuthUser
2360 :type apiuser: AuthUser
2361 :param repoid: The repository name or repository id.
2361 :param repoid: The repository name or repository id.
2362 :type repoid: str or int
2362 :type repoid: str or int
2363 :param key: Key of the setting to return.
2363 :param key: Key of the setting to return.
2364 :type key: Optional(str)
2364 :type key: Optional(str)
2365
2365
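Example args (an illustrative sketch; the repository name and setting key
are placeholders, and `key` may be omitted to fetch all settings):

.. code-block:: python

    # args mapping for a `get_repo_settings` JSON-RPC call
    args = {
        'repoid': 'my-repo',
        'key': 'rhodecode_pr_merge_enabled',
    }
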
2366 Example output:
2366 Example output:
2367
2367
2368 .. code-block:: bash
2368 .. code-block:: bash
2369
2369
2370 {
2370 {
2371 "error": null,
2371 "error": null,
2372 "id": 237,
2372 "id": 237,
2373 "result": {
2373 "result": {
2374 "extensions_largefiles": true,
2374 "extensions_largefiles": true,
2375 "extensions_evolve": true,
2375 "extensions_evolve": true,
2376 "hooks_changegroup_push_logger": true,
2376 "hooks_changegroup_push_logger": true,
2377 "hooks_changegroup_repo_size": false,
2377 "hooks_changegroup_repo_size": false,
2378 "hooks_outgoing_pull_logger": true,
2378 "hooks_outgoing_pull_logger": true,
2379 "phases_publish": "True",
2379 "phases_publish": "True",
2380 "rhodecode_hg_use_rebase_for_merging": true,
2380 "rhodecode_hg_use_rebase_for_merging": true,
2381 "rhodecode_pr_merge_enabled": true,
2381 "rhodecode_pr_merge_enabled": true,
2382 "rhodecode_use_outdated_comments": true
2382 "rhodecode_use_outdated_comments": true
2383 }
2383 }
2384 }
2384 }
2385 """
2385 """
2386
2386
2387 # Restrict access to this api method to super-admins, and repo admins only.
2387 # Restrict access to this api method to super-admins, and repo admins only.
2388 repo = get_repo_or_error(repoid)
2388 repo = get_repo_or_error(repoid)
2389 if not has_superadmin_permission(apiuser):
2389 if not has_superadmin_permission(apiuser):
2390 _perms = ('repository.admin',)
2390 _perms = ('repository.admin',)
2391 validate_repo_permissions(apiuser, repoid, repo, _perms)
2391 validate_repo_permissions(apiuser, repoid, repo, _perms)
2392
2392
2393 try:
2393 try:
2394 settings_model = VcsSettingsModel(repo=repo)
2394 settings_model = VcsSettingsModel(repo=repo)
2395 settings = settings_model.get_global_settings()
2395 settings = settings_model.get_global_settings()
2396 settings.update(settings_model.get_repo_settings())
2396 settings.update(settings_model.get_repo_settings())
2397
2397
2398 # If only a single setting is requested fetch it from all settings.
2398 # If only a single setting is requested fetch it from all settings.
2399 key = Optional.extract(key)
2399 key = Optional.extract(key)
2400 if key is not None:
2400 if key is not None:
2401 settings = settings.get(key, None)
2401 settings = settings.get(key, None)
2402 except Exception:
2402 except Exception:
2403 msg = 'Failed to fetch settings for repository `{}`'.format(repoid)
2403 msg = 'Failed to fetch settings for repository `{}`'.format(repoid)
2404 log.exception(msg)
2404 log.exception(msg)
2405 raise JSONRPCError(msg)
2405 raise JSONRPCError(msg)
2406
2406
2407 return settings
2407 return settings
2408
2408
2409
2409
2410 @jsonrpc_method()
2410 @jsonrpc_method()
2411 def set_repo_settings(request, apiuser, repoid, settings):
2411 def set_repo_settings(request, apiuser, repoid, settings):
2412 """
2412 """
2413 Update repository settings. Returns true on success.
2413 Update repository settings. Returns true on success.
2414
2414
2415 :param apiuser: This is filled automatically from the |authtoken|.
2415 :param apiuser: This is filled automatically from the |authtoken|.
2416 :type apiuser: AuthUser
2416 :type apiuser: AuthUser
2417 :param repoid: The repository name or repository id.
2417 :param repoid: The repository name or repository id.
2418 :type repoid: str or int
2418 :type repoid: str or int
2419 :param settings: The new settings for the repository.
2419 :param settings: The new settings for the repository.
2420 :type settings: dict
2420 :type settings: dict
2421
2421
2422 Example output:
2422 Example output:
2423
2423
2424 .. code-block:: bash
2424 .. code-block:: bash
2425
2425
2426 {
2426 {
2427 "error": null,
2427 "error": null,
2428 "id": 237,
2428 "id": 237,
2429 "result": true
2429 "result": true
2430 }
2430 }
2431 """
2431 """
2432 # Restrict access to this API method to super-admins and repo admins only.
2432 # Restrict access to this API method to super-admins and repo admins only.
2433 repo = get_repo_or_error(repoid)
2433 repo = get_repo_or_error(repoid)
2434 if not has_superadmin_permission(apiuser):
2434 if not has_superadmin_permission(apiuser):
2435 _perms = ('repository.admin',)
2435 _perms = ('repository.admin',)
2436 validate_repo_permissions(apiuser, repoid, repo, _perms)
2436 validate_repo_permissions(apiuser, repoid, repo, _perms)
2437
2437
2438 if type(settings) is not dict:
2438 if type(settings) is not dict:
2439 raise JSONRPCError('Settings have to be a JSON Object.')
2439 raise JSONRPCError('Settings have to be a JSON Object.')
2440
2440
2441 try:
2441 try:
2442 settings_model = VcsSettingsModel(repo=repoid)
2442 settings_model = VcsSettingsModel(repo=repoid)
2443
2443
2444 # Merge global, repo and incoming settings.
2444 # Merge global, repo and incoming settings.
2445 new_settings = settings_model.get_global_settings()
2445 new_settings = settings_model.get_global_settings()
2446 new_settings.update(settings_model.get_repo_settings())
2446 new_settings.update(settings_model.get_repo_settings())
2447 new_settings.update(settings)
2447 new_settings.update(settings)
2448
2448
2449 # Update the settings.
2449 # Update the settings.
2450 inherit_global_settings = new_settings.get(
2450 inherit_global_settings = new_settings.get(
2451 'inherit_global_settings', False)
2451 'inherit_global_settings', False)
2452 settings_model.create_or_update_repo_settings(
2452 settings_model.create_or_update_repo_settings(
2453 new_settings, inherit_global_settings=inherit_global_settings)
2453 new_settings, inherit_global_settings=inherit_global_settings)
2454 Session().commit()
2454 Session().commit()
2455 except Exception:
2455 except Exception:
2456 msg = 'Failed to update settings for repository `{}`'.format(repoid)
2456 msg = 'Failed to update settings for repository `{}`'.format(repoid)
2457 log.exception(msg)
2457 log.exception(msg)
2458 raise JSONRPCError(msg)
2458 raise JSONRPCError(msg)
2459
2459
2460 # Indicate success.
2460 # Indicate success.
2461 return True
2461 return True
2462
2462
2463
2463
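A similar hedged sketch for ``set_repo_settings``; because the server merges the incoming dict over the current global and per-repository settings, a client only needs to send the keys it wants to change. The URL, token and setting key below are illustrative placeholders.

.. code-block:: python

    import requests  # assumed client-side dependency

    def update_repo_settings(api_url, auth_token, repoid, settings):
        # `settings` must be a JSON object; the server rejects anything else.
        payload = {
            'id': 1,
            'auth_token': auth_token,
            'method': 'set_repo_settings',
            'args': {'repoid': repoid, 'settings': settings},
        }
        data = requests.post(api_url, json=payload).json()
        if data['error'] is not None:
            raise RuntimeError(data['error'])
        return data['result']  # True on success

    # Example: enable server-side PR merging for one repository (placeholders):
    # update_repo_settings('https://code.example.com/_admin/api', 'secret-token',
    #                      'tests/target_repo', {'rhodecode_pr_merge_enabled': True})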
2464 @jsonrpc_method()
2464 @jsonrpc_method()
2465 def maintenance(request, apiuser, repoid):
2465 def maintenance(request, apiuser, repoid):
2466 """
2466 """
2467 Triggers maintenance on the given repository.
2467 Triggers maintenance on the given repository.
2468
2468
2469 This command can only be run using an |authtoken| with admin
2469 This command can only be run using an |authtoken| with admin
2470 rights to the specified repository. For more information,
2470 rights to the specified repository. For more information,
2471 see :ref:`config-token-ref`.
2471 see :ref:`config-token-ref`.
2472
2472
2473 This command takes the following options:
2473 This command takes the following options:
2474
2474
2475 :param apiuser: This is filled automatically from the |authtoken|.
2475 :param apiuser: This is filled automatically from the |authtoken|.
2476 :type apiuser: AuthUser
2476 :type apiuser: AuthUser
2477 :param repoid: The repository name or repository ID.
2477 :param repoid: The repository name or repository ID.
2478 :type repoid: str or int
2478 :type repoid: str or int
2479
2479
2480 Example output:
2480 Example output:
2481
2481
2482 .. code-block:: bash
2482 .. code-block:: bash
2483
2483
2484 id : <id_given_in_input>
2484 id : <id_given_in_input>
2485 result : {
2485 result : {
2486 "msg": "executed maintenance command",
2486 "msg": "executed maintenance command",
2487 "executed_actions": [
2487 "executed_actions": [
2488 <action_message>, <action_message2>...
2488 <action_message>, <action_message2>...
2489 ],
2489 ],
2490 "repository": "<repository name>"
2490 "repository": "<repository name>"
2491 }
2491 }
2492 error : null
2492 error : null
2493
2493
2494 Example error output:
2494 Example error output:
2495
2495
2496 .. code-block:: bash
2496 .. code-block:: bash
2497
2497
2498 id : <id_given_in_input>
2498 id : <id_given_in_input>
2499 result : null
2499 result : null
2500 error : {
2500 error : {
2501 "Unable to execute maintenance on `<reponame>`"
2501 "Unable to execute maintenance on `<reponame>`"
2502 }
2502 }
2503
2503
2504 """
2504 """
2505
2505
2506 repo = get_repo_or_error(repoid)
2506 repo = get_repo_or_error(repoid)
2507 if not has_superadmin_permission(apiuser):
2507 if not has_superadmin_permission(apiuser):
2508 _perms = ('repository.admin',)
2508 _perms = ('repository.admin',)
2509 validate_repo_permissions(apiuser, repoid, repo, _perms)
2509 validate_repo_permissions(apiuser, repoid, repo, _perms)
2510
2510
2511 try:
2511 try:
2512 maintenance = repo_maintenance.RepoMaintenance()
2512 maintenance = repo_maintenance.RepoMaintenance()
2513 executed_actions = maintenance.execute(repo)
2513 executed_actions = maintenance.execute(repo)
2514
2514
2515 return {
2515 return {
2516 'msg': 'executed maintenance command',
2516 'msg': 'executed maintenance command',
2517 'executed_actions': executed_actions,
2517 'executed_actions': executed_actions,
2518 'repository': repo.repo_name
2518 'repository': repo.repo_name
2519 }
2519 }
2520 except Exception:
2520 except Exception:
2521 log.exception("Exception occurred while trying to run maintenance")
2521 log.exception("Exception occurred while trying to run maintenance")
2522 raise JSONRPCError(
2522 raise JSONRPCError(
2523 'Unable to execute maintenance on `%s`' % repo.repo_name)
2523 'Unable to execute maintenance on `%s`' % repo.repo_name)
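And a short illustrative call to the ``maintenance`` method; as above, the connection details are placeholders, and the shape of the returned dict mirrors the example output in the docstring.

.. code-block:: python

    import requests  # assumed client-side dependency

    def run_maintenance(api_url, auth_token, repoid):
        payload = {
            'id': 1,
            'auth_token': auth_token,
            'method': 'maintenance',
            'args': {'repoid': repoid},
        }
        data = requests.post(api_url, json=payload).json()
        if data['error'] is not None:
            raise RuntimeError(data['error'])
        # result carries 'msg', 'executed_actions' and 'repository'
        return data['result']['executed_actions']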
@@ -1,1658 +1,1661 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 import mock
20 import mock
21 import pytest
21 import pytest
22
22
23 import rhodecode
23 import rhodecode
24 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
24 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
25 from rhodecode.lib.vcs.nodes import FileNode
25 from rhodecode.lib.vcs.nodes import FileNode
26 from rhodecode.lib import helpers as h
26 from rhodecode.lib import helpers as h
27 from rhodecode.model.changeset_status import ChangesetStatusModel
27 from rhodecode.model.changeset_status import ChangesetStatusModel
28 from rhodecode.model.db import (
28 from rhodecode.model.db import (
29 PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment, Repository)
29 PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment, Repository)
30 from rhodecode.model.meta import Session
30 from rhodecode.model.meta import Session
31 from rhodecode.model.pull_request import PullRequestModel
31 from rhodecode.model.pull_request import PullRequestModel
32 from rhodecode.model.user import UserModel
32 from rhodecode.model.user import UserModel
33 from rhodecode.model.comment import CommentsModel
33 from rhodecode.model.comment import CommentsModel
34 from rhodecode.tests import (
34 from rhodecode.tests import (
35 assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN)
35 assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN)
36
36
37
37
38 def route_path(name, params=None, **kwargs):
38 def route_path(name, params=None, **kwargs):
39 import urllib
39 import urllib
40
40
41 base_url = {
41 base_url = {
42 'repo_changelog': '/{repo_name}/changelog',
42 'repo_changelog': '/{repo_name}/changelog',
43 'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}',
43 'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}',
44 'repo_commits': '/{repo_name}/commits',
44 'repo_commits': '/{repo_name}/commits',
45 'repo_commits_file': '/{repo_name}/commits/{commit_id}/{f_path}',
45 'repo_commits_file': '/{repo_name}/commits/{commit_id}/{f_path}',
46 'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}',
46 'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}',
47 'pullrequest_show_all': '/{repo_name}/pull-request',
47 'pullrequest_show_all': '/{repo_name}/pull-request',
48 'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
48 'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
49 'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
49 'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
50 'pullrequest_repo_targets': '/{repo_name}/pull-request/repo-destinations',
50 'pullrequest_repo_targets': '/{repo_name}/pull-request/repo-destinations',
51 'pullrequest_new': '/{repo_name}/pull-request/new',
51 'pullrequest_new': '/{repo_name}/pull-request/new',
52 'pullrequest_create': '/{repo_name}/pull-request/create',
52 'pullrequest_create': '/{repo_name}/pull-request/create',
53 'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update',
53 'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update',
54 'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge',
54 'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge',
55 'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete',
55 'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete',
56 'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment',
56 'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment',
57 'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete',
57 'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete',
58 'pullrequest_comment_edit': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/edit',
58 'pullrequest_comment_edit': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/edit',
59 }[name].format(**kwargs)
59 }[name].format(**kwargs)
60
60
61 if params:
61 if params:
62 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
62 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
63 return base_url
63 return base_url
64
64
65
65
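For readability of the assertions below, two hypothetical ``route_path`` invocations and the URLs they would produce; the repository name and pull request id are made up for the example.

.. code-block:: python

    # Assuming the route_path helper defined above is in scope:
    assert route_path('pullrequest_show',
                      repo_name='some/repo',
                      pull_request_id=7) == '/some/repo/pull-request/7'

    assert route_path('pullrequest_update',
                      params={'foo': 'bar'},
                      repo_name='some/repo',
                      pull_request_id=7) == '/some/repo/pull-request/7/update?foo=bar'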
66 @pytest.mark.usefixtures('app', 'autologin_user')
66 @pytest.mark.usefixtures('app', 'autologin_user')
67 @pytest.mark.backends("git", "hg")
67 @pytest.mark.backends("git", "hg")
68 class TestPullrequestsView(object):
68 class TestPullrequestsView(object):
69
69
70 def test_index(self, backend):
70 def test_index(self, backend):
71 self.app.get(route_path(
71 self.app.get(route_path(
72 'pullrequest_new',
72 'pullrequest_new',
73 repo_name=backend.repo_name))
73 repo_name=backend.repo_name))
74
74
75 def test_option_menu_create_pull_request_exists(self, backend):
75 def test_option_menu_create_pull_request_exists(self, backend):
76 repo_name = backend.repo_name
76 repo_name = backend.repo_name
77 response = self.app.get(h.route_path('repo_summary', repo_name=repo_name))
77 response = self.app.get(h.route_path('repo_summary', repo_name=repo_name))
78
78
79 create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
79 create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
80 'pullrequest_new', repo_name=repo_name)
80 'pullrequest_new', repo_name=repo_name)
81 response.mustcontain(create_pr_link)
81 response.mustcontain(create_pr_link)
82
82
83 def test_create_pr_form_with_raw_commit_id(self, backend):
83 def test_create_pr_form_with_raw_commit_id(self, backend):
84 repo = backend.repo
84 repo = backend.repo
85
85
86 self.app.get(
86 self.app.get(
87 route_path('pullrequest_new', repo_name=repo.repo_name,
87 route_path('pullrequest_new', repo_name=repo.repo_name,
88 commit=repo.get_commit().raw_id),
88 commit=repo.get_commit().raw_id),
89 status=200)
89 status=200)
90
90
91 @pytest.mark.parametrize('pr_merge_enabled', [True, False])
91 @pytest.mark.parametrize('pr_merge_enabled', [True, False])
92 @pytest.mark.parametrize('range_diff', ["0", "1"])
92 @pytest.mark.parametrize('range_diff', ["0", "1"])
93 def test_show(self, pr_util, pr_merge_enabled, range_diff):
93 def test_show(self, pr_util, pr_merge_enabled, range_diff):
94 pull_request = pr_util.create_pull_request(
94 pull_request = pr_util.create_pull_request(
95 mergeable=pr_merge_enabled, enable_notifications=False)
95 mergeable=pr_merge_enabled, enable_notifications=False)
96
96
97 response = self.app.get(route_path(
97 response = self.app.get(route_path(
98 'pullrequest_show',
98 'pullrequest_show',
99 repo_name=pull_request.target_repo.scm_instance().name,
99 repo_name=pull_request.target_repo.scm_instance().name,
100 pull_request_id=pull_request.pull_request_id,
100 pull_request_id=pull_request.pull_request_id,
101 params={'range-diff': range_diff}))
101 params={'range-diff': range_diff}))
102
102
103 for commit_id in pull_request.revisions:
103 for commit_id in pull_request.revisions:
104 response.mustcontain(commit_id)
104 response.mustcontain(commit_id)
105
105
106 response.mustcontain(pull_request.target_ref_parts.type)
106 response.mustcontain(pull_request.target_ref_parts.type)
107 response.mustcontain(pull_request.target_ref_parts.name)
107 response.mustcontain(pull_request.target_ref_parts.name)
108
108
109 response.mustcontain('class="pull-request-merge"')
109 response.mustcontain('class="pull-request-merge"')
110
110
111 if pr_merge_enabled:
111 if pr_merge_enabled:
112 response.mustcontain('Pull request reviewer approval is pending')
112 response.mustcontain('Pull request reviewer approval is pending')
113 else:
113 else:
114 response.mustcontain('Server-side pull request merging is disabled.')
114 response.mustcontain('Server-side pull request merging is disabled.')
115
115
116 if range_diff == "1":
116 if range_diff == "1":
117 response.mustcontain('Turn off: Show the diff as commit range')
117 response.mustcontain('Turn off: Show the diff as commit range')
118
118
119 def test_show_versions_of_pr(self, backend, csrf_token):
119 def test_show_versions_of_pr(self, backend, csrf_token):
120 commits = [
120 commits = [
121 {'message': 'initial-commit',
121 {'message': 'initial-commit',
122 'added': [FileNode('test-file.txt', 'LINE1\n')]},
122 'added': [FileNode('test-file.txt', 'LINE1\n')]},
123
123
124 {'message': 'commit-1',
124 {'message': 'commit-1',
125 'changed': [FileNode('test-file.txt', 'LINE1\nLINE2\n')]},
125 'changed': [FileNode('test-file.txt', 'LINE1\nLINE2\n')]},
126 # Above is the initial version of PR that changes a single line
126 # Above is the initial version of PR that changes a single line
127
127
128 # from now on we'll add 3 more commits, each adding another line
128 # from now on we'll add 3 more commits, each adding another line
129 {'message': 'commit-2',
129 {'message': 'commit-2',
130 'changed': [FileNode('test-file.txt', 'LINE1\nLINE2\nLINE3\n')]},
130 'changed': [FileNode('test-file.txt', 'LINE1\nLINE2\nLINE3\n')]},
131
131
132 {'message': 'commit-3',
132 {'message': 'commit-3',
133 'changed': [FileNode('test-file.txt', 'LINE1\nLINE2\nLINE3\nLINE4\n')]},
133 'changed': [FileNode('test-file.txt', 'LINE1\nLINE2\nLINE3\nLINE4\n')]},
134
134
135 {'message': 'commit-4',
135 {'message': 'commit-4',
136 'changed': [FileNode('test-file.txt', 'LINE1\nLINE2\nLINE3\nLINE4\nLINE5\n')]},
136 'changed': [FileNode('test-file.txt', 'LINE1\nLINE2\nLINE3\nLINE4\nLINE5\n')]},
137 ]
137 ]
138
138
139 commit_ids = backend.create_master_repo(commits)
139 commit_ids = backend.create_master_repo(commits)
140 target = backend.create_repo(heads=['initial-commit'])
140 target = backend.create_repo(heads=['initial-commit'])
141 source = backend.create_repo(heads=['commit-1'])
141 source = backend.create_repo(heads=['commit-1'])
142 source_repo_name = source.repo_name
142 source_repo_name = source.repo_name
143 target_repo_name = target.repo_name
143 target_repo_name = target.repo_name
144
144
145 target_ref = 'branch:{branch}:{commit_id}'.format(
145 target_ref = 'branch:{branch}:{commit_id}'.format(
146 branch=backend.default_branch_name, commit_id=commit_ids['initial-commit'])
146 branch=backend.default_branch_name, commit_id=commit_ids['initial-commit'])
147 source_ref = 'branch:{branch}:{commit_id}'.format(
147 source_ref = 'branch:{branch}:{commit_id}'.format(
148 branch=backend.default_branch_name, commit_id=commit_ids['commit-1'])
148 branch=backend.default_branch_name, commit_id=commit_ids['commit-1'])
149
149
150 response = self.app.post(
150 response = self.app.post(
151 route_path('pullrequest_create', repo_name=source.repo_name),
151 route_path('pullrequest_create', repo_name=source.repo_name),
152 [
152 [
153 ('source_repo', source_repo_name),
153 ('source_repo', source_repo_name),
154 ('source_ref', source_ref),
154 ('source_ref', source_ref),
155 ('target_repo', target_repo_name),
155 ('target_repo', target_repo_name),
156 ('target_ref', target_ref),
156 ('target_ref', target_ref),
157 ('common_ancestor', commit_ids['initial-commit']),
157 ('common_ancestor', commit_ids['initial-commit']),
158 ('pullrequest_title', 'Title'),
158 ('pullrequest_title', 'Title'),
159 ('pullrequest_desc', 'Description'),
159 ('pullrequest_desc', 'Description'),
160 ('description_renderer', 'markdown'),
160 ('description_renderer', 'markdown'),
161 ('__start__', 'review_members:sequence'),
161 ('__start__', 'review_members:sequence'),
162 ('__start__', 'reviewer:mapping'),
162 ('__start__', 'reviewer:mapping'),
163 ('user_id', '1'),
163 ('user_id', '1'),
164 ('__start__', 'reasons:sequence'),
164 ('__start__', 'reasons:sequence'),
165 ('reason', 'Some reason'),
165 ('reason', 'Some reason'),
166 ('__end__', 'reasons:sequence'),
166 ('__end__', 'reasons:sequence'),
167 ('__start__', 'rules:sequence'),
167 ('__start__', 'rules:sequence'),
168 ('__end__', 'rules:sequence'),
168 ('__end__', 'rules:sequence'),
169 ('mandatory', 'False'),
169 ('mandatory', 'False'),
170 ('__end__', 'reviewer:mapping'),
170 ('__end__', 'reviewer:mapping'),
171 ('__end__', 'review_members:sequence'),
171 ('__end__', 'review_members:sequence'),
172 ('__start__', 'revisions:sequence'),
172 ('__start__', 'revisions:sequence'),
173 ('revisions', commit_ids['commit-1']),
173 ('revisions', commit_ids['commit-1']),
174 ('__end__', 'revisions:sequence'),
174 ('__end__', 'revisions:sequence'),
175 ('user', ''),
175 ('user', ''),
176 ('csrf_token', csrf_token),
176 ('csrf_token', csrf_token),
177 ],
177 ],
178 status=302)
178 status=302)
179
179
180 location = response.headers['Location']
180 location = response.headers['Location']
181
181
182 pull_request_id = location.rsplit('/', 1)[1]
182 pull_request_id = location.rsplit('/', 1)[1]
183 assert pull_request_id != 'new'
183 assert pull_request_id != 'new'
184 pull_request = PullRequest.get(int(pull_request_id))
184 pull_request = PullRequest.get(int(pull_request_id))
185
185
186 pull_request_id = pull_request.pull_request_id
186 pull_request_id = pull_request.pull_request_id
187
187
188 # Show initial version of PR
188 # Show initial version of PR
189 response = self.app.get(
189 response = self.app.get(
190 route_path('pullrequest_show',
190 route_path('pullrequest_show',
191 repo_name=target_repo_name,
191 repo_name=target_repo_name,
192 pull_request_id=pull_request_id))
192 pull_request_id=pull_request_id))
193
193
194 response.mustcontain('commit-1')
194 response.mustcontain('commit-1')
195 response.mustcontain(no=['commit-2'])
195 response.mustcontain(no=['commit-2'])
196 response.mustcontain(no=['commit-3'])
196 response.mustcontain(no=['commit-3'])
197 response.mustcontain(no=['commit-4'])
197 response.mustcontain(no=['commit-4'])
198
198
199 response.mustcontain('cb-addition"></span><span>LINE2</span>')
199 response.mustcontain('cb-addition"></span><span>LINE2</span>')
200 response.mustcontain(no=['LINE3'])
200 response.mustcontain(no=['LINE3'])
201 response.mustcontain(no=['LINE4'])
201 response.mustcontain(no=['LINE4'])
202 response.mustcontain(no=['LINE5'])
202 response.mustcontain(no=['LINE5'])
203
203
204 # update PR #1
204 # update PR #1
205 source_repo = Repository.get_by_repo_name(source_repo_name)
205 source_repo = Repository.get_by_repo_name(source_repo_name)
206 backend.pull_heads(source_repo, heads=['commit-2'])
206 backend.pull_heads(source_repo, heads=['commit-2'])
207 response = self.app.post(
207 response = self.app.post(
208 route_path('pullrequest_update',
208 route_path('pullrequest_update',
209 repo_name=target_repo_name, pull_request_id=pull_request_id),
209 repo_name=target_repo_name, pull_request_id=pull_request_id),
210 params={'update_commits': 'true', 'csrf_token': csrf_token})
210 params={'update_commits': 'true', 'csrf_token': csrf_token})
211
211
212 # update PR #2
212 # update PR #2
213 source_repo = Repository.get_by_repo_name(source_repo_name)
213 source_repo = Repository.get_by_repo_name(source_repo_name)
214 backend.pull_heads(source_repo, heads=['commit-3'])
214 backend.pull_heads(source_repo, heads=['commit-3'])
215 response = self.app.post(
215 response = self.app.post(
216 route_path('pullrequest_update',
216 route_path('pullrequest_update',
217 repo_name=target_repo_name, pull_request_id=pull_request_id),
217 repo_name=target_repo_name, pull_request_id=pull_request_id),
218 params={'update_commits': 'true', 'csrf_token': csrf_token})
218 params={'update_commits': 'true', 'csrf_token': csrf_token})
219
219
220 # update PR #3
220 # update PR #3
221 source_repo = Repository.get_by_repo_name(source_repo_name)
221 source_repo = Repository.get_by_repo_name(source_repo_name)
222 backend.pull_heads(source_repo, heads=['commit-4'])
222 backend.pull_heads(source_repo, heads=['commit-4'])
223 response = self.app.post(
223 response = self.app.post(
224 route_path('pullrequest_update',
224 route_path('pullrequest_update',
225 repo_name=target_repo_name, pull_request_id=pull_request_id),
225 repo_name=target_repo_name, pull_request_id=pull_request_id),
226 params={'update_commits': 'true', 'csrf_token': csrf_token})
226 params={'update_commits': 'true', 'csrf_token': csrf_token})
227
227
228 # Show final version !
228 # Show final version !
229 response = self.app.get(
229 response = self.app.get(
230 route_path('pullrequest_show',
230 route_path('pullrequest_show',
231 repo_name=target_repo_name,
231 repo_name=target_repo_name,
232 pull_request_id=pull_request_id))
232 pull_request_id=pull_request_id))
233
233
234 # 3 updates, and the latest == 4
234 # 3 updates, and the latest == 4
235 response.mustcontain('4 versions available for this pull request')
235 response.mustcontain('4 versions available for this pull request')
236 response.mustcontain(no=['rhodecode diff rendering error'])
236 response.mustcontain(no=['rhodecode diff rendering error'])
237
237
238 # the final version must show all 4 commits, and 4 added lines
238 # the final version must show all 4 commits, and 4 added lines
239 response.mustcontain('commit-1')
239 response.mustcontain('commit-1')
240 response.mustcontain('commit-2')
240 response.mustcontain('commit-2')
241 response.mustcontain('commit-3')
241 response.mustcontain('commit-3')
242 response.mustcontain('commit-4')
242 response.mustcontain('commit-4')
243
243
244 response.mustcontain('cb-addition"></span><span>LINE2</span>')
244 response.mustcontain('cb-addition"></span><span>LINE2</span>')
245 response.mustcontain('cb-addition"></span><span>LINE3</span>')
245 response.mustcontain('cb-addition"></span><span>LINE3</span>')
246 response.mustcontain('cb-addition"></span><span>LINE4</span>')
246 response.mustcontain('cb-addition"></span><span>LINE4</span>')
247 response.mustcontain('cb-addition"></span><span>LINE5</span>')
247 response.mustcontain('cb-addition"></span><span>LINE5</span>')
248
248
249 # fetch versions
249 # fetch versions
250 pr = PullRequest.get(pull_request_id)
250 pr = PullRequest.get(pull_request_id)
251 versions = [x.pull_request_version_id for x in pr.versions.all()]
251 versions = [x.pull_request_version_id for x in pr.versions.all()]
252 assert len(versions) == 3
252 assert len(versions) == 3
253
253
254 # show v1,v2,v3,v4
254 # show v1,v2,v3,v4
255 def cb_line(text):
255 def cb_line(text):
256 return 'cb-addition"></span><span>{}</span>'.format(text)
256 return 'cb-addition"></span><span>{}</span>'.format(text)
257
257
258 def cb_context(text):
258 def cb_context(text):
259 return '<span class="cb-code"><span class="cb-action cb-context">' \
259 return '<span class="cb-code"><span class="cb-action cb-context">' \
260 '</span><span>{}</span></span>'.format(text)
260 '</span><span>{}</span></span>'.format(text)
261
261
262 commit_tests = {
262 commit_tests = {
263 # (expected in response, not expected in response)
263 # (expected in response, not expected in response)
264 1: (['commit-1'], ['commit-2', 'commit-3', 'commit-4']),
264 1: (['commit-1'], ['commit-2', 'commit-3', 'commit-4']),
265 2: (['commit-1', 'commit-2'], ['commit-3', 'commit-4']),
265 2: (['commit-1', 'commit-2'], ['commit-3', 'commit-4']),
266 3: (['commit-1', 'commit-2', 'commit-3'], ['commit-4']),
266 3: (['commit-1', 'commit-2', 'commit-3'], ['commit-4']),
267 4: (['commit-1', 'commit-2', 'commit-3', 'commit-4'], []),
267 4: (['commit-1', 'commit-2', 'commit-3', 'commit-4'], []),
268 }
268 }
269 diff_tests = {
269 diff_tests = {
270 1: (['LINE2'], ['LINE3', 'LINE4', 'LINE5']),
270 1: (['LINE2'], ['LINE3', 'LINE4', 'LINE5']),
271 2: (['LINE2', 'LINE3'], ['LINE4', 'LINE5']),
271 2: (['LINE2', 'LINE3'], ['LINE4', 'LINE5']),
272 3: (['LINE2', 'LINE3', 'LINE4'], ['LINE5']),
272 3: (['LINE2', 'LINE3', 'LINE4'], ['LINE5']),
273 4: (['LINE2', 'LINE3', 'LINE4', 'LINE5'], []),
273 4: (['LINE2', 'LINE3', 'LINE4', 'LINE5'], []),
274 }
274 }
275 for idx, ver in enumerate(versions, 1):
275 for idx, ver in enumerate(versions, 1):
276
276
277 response = self.app.get(
277 response = self.app.get(
278 route_path('pullrequest_show',
278 route_path('pullrequest_show',
279 repo_name=target_repo_name,
279 repo_name=target_repo_name,
280 pull_request_id=pull_request_id,
280 pull_request_id=pull_request_id,
281 params={'version': ver}))
281 params={'version': ver}))
282
282
283 response.mustcontain(no=['rhodecode diff rendering error'])
283 response.mustcontain(no=['rhodecode diff rendering error'])
284 response.mustcontain('Showing changes at v{}'.format(idx))
284 response.mustcontain('Showing changes at v{}'.format(idx))
285
285
286 yes, no = commit_tests[idx]
286 yes, no = commit_tests[idx]
287 for y in yes:
287 for y in yes:
288 response.mustcontain(y)
288 response.mustcontain(y)
289 for n in no:
289 for n in no:
290 response.mustcontain(no=n)
290 response.mustcontain(no=n)
291
291
292 yes, no = diff_tests[idx]
292 yes, no = diff_tests[idx]
293 for y in yes:
293 for y in yes:
294 response.mustcontain(cb_line(y))
294 response.mustcontain(cb_line(y))
295 for n in no:
295 for n in no:
296 response.mustcontain(no=n)
296 response.mustcontain(no=n)
297
297
298 # show diff between versions
298 # show diff between versions
299 diff_compare_tests = {
299 diff_compare_tests = {
300 1: (['LINE3'], ['LINE1', 'LINE2']),
300 1: (['LINE3'], ['LINE1', 'LINE2']),
301 2: (['LINE3', 'LINE4'], ['LINE1', 'LINE2']),
301 2: (['LINE3', 'LINE4'], ['LINE1', 'LINE2']),
302 3: (['LINE3', 'LINE4', 'LINE5'], ['LINE1', 'LINE2']),
302 3: (['LINE3', 'LINE4', 'LINE5'], ['LINE1', 'LINE2']),
303 }
303 }
304 for idx, ver in enumerate(versions, 1):
304 for idx, ver in enumerate(versions, 1):
305 adds, context = diff_compare_tests[idx]
305 adds, context = diff_compare_tests[idx]
306
306
307 to_ver = ver+1
307 to_ver = ver+1
308 if idx == 3:
308 if idx == 3:
309 to_ver = 'latest'
309 to_ver = 'latest'
310
310
311 response = self.app.get(
311 response = self.app.get(
312 route_path('pullrequest_show',
312 route_path('pullrequest_show',
313 repo_name=target_repo_name,
313 repo_name=target_repo_name,
314 pull_request_id=pull_request_id,
314 pull_request_id=pull_request_id,
315 params={'from_version': versions[0], 'version': to_ver}))
315 params={'from_version': versions[0], 'version': to_ver}))
316
316
317 response.mustcontain(no=['rhodecode diff rendering error'])
317 response.mustcontain(no=['rhodecode diff rendering error'])
318
318
319 for a in adds:
319 for a in adds:
320 response.mustcontain(cb_line(a))
320 response.mustcontain(cb_line(a))
321 for c in context:
321 for c in context:
322 response.mustcontain(cb_context(c))
322 response.mustcontain(cb_context(c))
323
323
324 # test version v2 -> v3
324 # test version v2 -> v3
325 response = self.app.get(
325 response = self.app.get(
326 route_path('pullrequest_show',
326 route_path('pullrequest_show',
327 repo_name=target_repo_name,
327 repo_name=target_repo_name,
328 pull_request_id=pull_request_id,
328 pull_request_id=pull_request_id,
329 params={'from_version': versions[1], 'version': versions[2]}))
329 params={'from_version': versions[1], 'version': versions[2]}))
330
330
331 response.mustcontain(cb_context('LINE1'))
331 response.mustcontain(cb_context('LINE1'))
332 response.mustcontain(cb_context('LINE2'))
332 response.mustcontain(cb_context('LINE2'))
333 response.mustcontain(cb_context('LINE3'))
333 response.mustcontain(cb_context('LINE3'))
334 response.mustcontain(cb_line('LINE4'))
334 response.mustcontain(cb_line('LINE4'))
335
335
336 def test_close_status_visibility(self, pr_util, user_util, csrf_token):
336 def test_close_status_visibility(self, pr_util, user_util, csrf_token):
337 # Logout
337 # Logout
338 response = self.app.post(
338 response = self.app.post(
339 h.route_path('logout'),
339 h.route_path('logout'),
340 params={'csrf_token': csrf_token})
340 params={'csrf_token': csrf_token})
341 # Login as regular user
341 # Login as regular user
342 response = self.app.post(h.route_path('login'),
342 response = self.app.post(h.route_path('login'),
343 {'username': TEST_USER_REGULAR_LOGIN,
343 {'username': TEST_USER_REGULAR_LOGIN,
344 'password': 'test12'})
344 'password': 'test12'})
345
345
346 pull_request = pr_util.create_pull_request(
346 pull_request = pr_util.create_pull_request(
347 author=TEST_USER_REGULAR_LOGIN)
347 author=TEST_USER_REGULAR_LOGIN)
348
348
349 response = self.app.get(route_path(
349 response = self.app.get(route_path(
350 'pullrequest_show',
350 'pullrequest_show',
351 repo_name=pull_request.target_repo.scm_instance().name,
351 repo_name=pull_request.target_repo.scm_instance().name,
352 pull_request_id=pull_request.pull_request_id))
352 pull_request_id=pull_request.pull_request_id))
353
353
354 response.mustcontain('Server-side pull request merging is disabled.')
354 response.mustcontain('Server-side pull request merging is disabled.')
355
355
356 assert_response = response.assert_response()
356 assert_response = response.assert_response()
357 # for a regular user without merge permissions, we don't see it
357 # for a regular user without merge permissions, we don't see it
358 assert_response.no_element_exists('#close-pull-request-action')
358 assert_response.no_element_exists('#close-pull-request-action')
359
359
360 user_util.grant_user_permission_to_repo(
360 user_util.grant_user_permission_to_repo(
361 pull_request.target_repo,
361 pull_request.target_repo,
362 UserModel().get_by_username(TEST_USER_REGULAR_LOGIN),
362 UserModel().get_by_username(TEST_USER_REGULAR_LOGIN),
363 'repository.write')
363 'repository.write')
364 response = self.app.get(route_path(
364 response = self.app.get(route_path(
365 'pullrequest_show',
365 'pullrequest_show',
366 repo_name=pull_request.target_repo.scm_instance().name,
366 repo_name=pull_request.target_repo.scm_instance().name,
367 pull_request_id=pull_request.pull_request_id))
367 pull_request_id=pull_request.pull_request_id))
368
368
369 response.mustcontain('Server-side pull request merging is disabled.')
369 response.mustcontain('Server-side pull request merging is disabled.')
370
370
371 assert_response = response.assert_response()
371 assert_response = response.assert_response()
372 # now the regular user has merge permissions, so we see the CLOSE button
372 # now the regular user has merge permissions, so we see the CLOSE button
373 assert_response.one_element_exists('#close-pull-request-action')
373 assert_response.one_element_exists('#close-pull-request-action')
374
374
375 def test_show_invalid_commit_id(self, pr_util):
375 def test_show_invalid_commit_id(self, pr_util):
376 # Simulating invalid revisions which will cause a lookup error
376 # Simulating invalid revisions which will cause a lookup error
377 pull_request = pr_util.create_pull_request()
377 pull_request = pr_util.create_pull_request()
378 pull_request.revisions = ['invalid']
378 pull_request.revisions = ['invalid']
379 Session().add(pull_request)
379 Session().add(pull_request)
380 Session().commit()
380 Session().commit()
381
381
382 response = self.app.get(route_path(
382 response = self.app.get(route_path(
383 'pullrequest_show',
383 'pullrequest_show',
384 repo_name=pull_request.target_repo.scm_instance().name,
384 repo_name=pull_request.target_repo.scm_instance().name,
385 pull_request_id=pull_request.pull_request_id))
385 pull_request_id=pull_request.pull_request_id))
386
386
387 for commit_id in pull_request.revisions:
387 for commit_id in pull_request.revisions:
388 response.mustcontain(commit_id)
388 response.mustcontain(commit_id)
389
389
390 def test_show_invalid_source_reference(self, pr_util):
390 def test_show_invalid_source_reference(self, pr_util):
391 pull_request = pr_util.create_pull_request()
391 pull_request = pr_util.create_pull_request()
392 pull_request.source_ref = 'branch:b:invalid'
392 pull_request.source_ref = 'branch:b:invalid'
393 Session().add(pull_request)
393 Session().add(pull_request)
394 Session().commit()
394 Session().commit()
395
395
396 self.app.get(route_path(
396 self.app.get(route_path(
397 'pullrequest_show',
397 'pullrequest_show',
398 repo_name=pull_request.target_repo.scm_instance().name,
398 repo_name=pull_request.target_repo.scm_instance().name,
399 pull_request_id=pull_request.pull_request_id))
399 pull_request_id=pull_request.pull_request_id))
400
400
401 def test_edit_title_description(self, pr_util, csrf_token):
401 def test_edit_title_description(self, pr_util, csrf_token):
402 pull_request = pr_util.create_pull_request()
402 pull_request = pr_util.create_pull_request()
403 pull_request_id = pull_request.pull_request_id
403 pull_request_id = pull_request.pull_request_id
404
404
405 response = self.app.post(
405 response = self.app.post(
406 route_path('pullrequest_update',
406 route_path('pullrequest_update',
407 repo_name=pull_request.target_repo.repo_name,
407 repo_name=pull_request.target_repo.repo_name,
408 pull_request_id=pull_request_id),
408 pull_request_id=pull_request_id),
409 params={
409 params={
410 'edit_pull_request': 'true',
410 'edit_pull_request': 'true',
411 'title': 'New title',
411 'title': 'New title',
412 'description': 'New description',
412 'description': 'New description',
413 'csrf_token': csrf_token})
413 'csrf_token': csrf_token})
414
414
415 assert_session_flash(
415 assert_session_flash(
416 response, u'Pull request title & description updated.',
416 response, u'Pull request title & description updated.',
417 category='success')
417 category='success')
418
418
419 pull_request = PullRequest.get(pull_request_id)
419 pull_request = PullRequest.get(pull_request_id)
420 assert pull_request.title == 'New title'
420 assert pull_request.title == 'New title'
421 assert pull_request.description == 'New description'
421 assert pull_request.description == 'New description'
422
422
423 def test_edit_title_description_closed(self, pr_util, csrf_token):
423 def test_edit_title_description_closed(self, pr_util, csrf_token):
424 pull_request = pr_util.create_pull_request()
424 pull_request = pr_util.create_pull_request()
425 pull_request_id = pull_request.pull_request_id
425 pull_request_id = pull_request.pull_request_id
426 repo_name = pull_request.target_repo.repo_name
426 repo_name = pull_request.target_repo.repo_name
427 pr_util.close()
427 pr_util.close()
428
428
429 response = self.app.post(
429 response = self.app.post(
430 route_path('pullrequest_update',
430 route_path('pullrequest_update',
431 repo_name=repo_name, pull_request_id=pull_request_id),
431 repo_name=repo_name, pull_request_id=pull_request_id),
432 params={
432 params={
433 'edit_pull_request': 'true',
433 'edit_pull_request': 'true',
434 'title': 'New title',
434 'title': 'New title',
435 'description': 'New description',
435 'description': 'New description',
436 'csrf_token': csrf_token}, status=200)
436 'csrf_token': csrf_token}, status=200)
437 assert_session_flash(
437 assert_session_flash(
438 response, u'Cannot update closed pull requests.',
438 response, u'Cannot update closed pull requests.',
439 category='error')
439 category='error')
440
440
441 def test_update_invalid_source_reference(self, pr_util, csrf_token):
441 def test_update_invalid_source_reference(self, pr_util, csrf_token):
442 from rhodecode.lib.vcs.backends.base import UpdateFailureReason
442 from rhodecode.lib.vcs.backends.base import UpdateFailureReason
443
443
444 pull_request = pr_util.create_pull_request()
444 pull_request = pr_util.create_pull_request()
445 pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id'
445 pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id'
446 Session().add(pull_request)
446 Session().add(pull_request)
447 Session().commit()
447 Session().commit()
448
448
449 pull_request_id = pull_request.pull_request_id
449 pull_request_id = pull_request.pull_request_id
450
450
451 response = self.app.post(
451 response = self.app.post(
452 route_path('pullrequest_update',
452 route_path('pullrequest_update',
453 repo_name=pull_request.target_repo.repo_name,
453 repo_name=pull_request.target_repo.repo_name,
454 pull_request_id=pull_request_id),
454 pull_request_id=pull_request_id),
455 params={'update_commits': 'true', 'csrf_token': csrf_token})
455 params={'update_commits': 'true', 'csrf_token': csrf_token})
456
456
457 expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[
457 expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[
458 UpdateFailureReason.MISSING_SOURCE_REF])
458 UpdateFailureReason.MISSING_SOURCE_REF])
459 assert_session_flash(response, expected_msg, category='error')
459 assert_session_flash(response, expected_msg, category='error')
460
460
461 def test_missing_target_reference(self, pr_util, csrf_token):
461 def test_missing_target_reference(self, pr_util, csrf_token):
462 from rhodecode.lib.vcs.backends.base import MergeFailureReason
462 from rhodecode.lib.vcs.backends.base import MergeFailureReason
463 pull_request = pr_util.create_pull_request(
463 pull_request = pr_util.create_pull_request(
464 approved=True, mergeable=True)
464 approved=True, mergeable=True)
465 unicode_reference = u'branch:invalid-branch:invalid-commit-id'
465 unicode_reference = u'branch:invalid-branch:invalid-commit-id'
466 pull_request.target_ref = unicode_reference
466 pull_request.target_ref = unicode_reference
467 Session().add(pull_request)
467 Session().add(pull_request)
468 Session().commit()
468 Session().commit()
469
469
470 pull_request_id = pull_request.pull_request_id
470 pull_request_id = pull_request.pull_request_id
471 pull_request_url = route_path(
471 pull_request_url = route_path(
472 'pullrequest_show',
472 'pullrequest_show',
473 repo_name=pull_request.target_repo.repo_name,
473 repo_name=pull_request.target_repo.repo_name,
474 pull_request_id=pull_request_id)
474 pull_request_id=pull_request_id)
475
475
476 response = self.app.get(pull_request_url)
476 response = self.app.get(pull_request_url)
477 target_ref_id = 'invalid-branch'
477 target_ref_id = 'invalid-branch'
478 merge_resp = MergeResponse(
478 merge_resp = MergeResponse(
479 True, True, '', MergeFailureReason.MISSING_TARGET_REF,
479 True, True, '', MergeFailureReason.MISSING_TARGET_REF,
480 metadata={'target_ref': PullRequest.unicode_to_reference(unicode_reference)})
480 metadata={'target_ref': PullRequest.unicode_to_reference(unicode_reference)})
481 response.assert_response().element_contains(
481 response.assert_response().element_contains(
482 'div[data-role="merge-message"]', merge_resp.merge_status_message)
482 'div[data-role="merge-message"]', merge_resp.merge_status_message)
483
483
484 def test_comment_and_close_pull_request_custom_message_approved(
484 def test_comment_and_close_pull_request_custom_message_approved(
485 self, pr_util, csrf_token, xhr_header):
485 self, pr_util, csrf_token, xhr_header):
486
486
487 pull_request = pr_util.create_pull_request(approved=True)
487 pull_request = pr_util.create_pull_request(approved=True)
488 pull_request_id = pull_request.pull_request_id
488 pull_request_id = pull_request.pull_request_id
489 author = pull_request.user_id
489 author = pull_request.user_id
490 repo = pull_request.target_repo.repo_id
490 repo = pull_request.target_repo.repo_id
491
491
492 self.app.post(
492 self.app.post(
493 route_path('pullrequest_comment_create',
493 route_path('pullrequest_comment_create',
494 repo_name=pull_request.target_repo.scm_instance().name,
494 repo_name=pull_request.target_repo.scm_instance().name,
495 pull_request_id=pull_request_id),
495 pull_request_id=pull_request_id),
496 params={
496 params={
497 'close_pull_request': '1',
497 'close_pull_request': '1',
498 'text': 'Closing a PR',
498 'text': 'Closing a PR',
499 'csrf_token': csrf_token},
499 'csrf_token': csrf_token},
500 extra_environ=xhr_header,)
500 extra_environ=xhr_header,)
501
501
502 journal = UserLog.query()\
502 journal = UserLog.query()\
503 .filter(UserLog.user_id == author)\
503 .filter(UserLog.user_id == author)\
504 .filter(UserLog.repository_id == repo) \
504 .filter(UserLog.repository_id == repo) \
505 .order_by(UserLog.user_log_id.asc()) \
505 .order_by(UserLog.user_log_id.asc()) \
506 .all()
506 .all()
507 assert journal[-1].action == 'repo.pull_request.close'
507 assert journal[-1].action == 'repo.pull_request.close'
508
508
509 pull_request = PullRequest.get(pull_request_id)
509 pull_request = PullRequest.get(pull_request_id)
510 assert pull_request.is_closed()
510 assert pull_request.is_closed()
511
511
512 status = ChangesetStatusModel().get_status(
512 status = ChangesetStatusModel().get_status(
513 pull_request.source_repo, pull_request=pull_request)
513 pull_request.source_repo, pull_request=pull_request)
514 assert status == ChangesetStatus.STATUS_APPROVED
514 assert status == ChangesetStatus.STATUS_APPROVED
515 comments = ChangesetComment().query() \
515 comments = ChangesetComment().query() \
516 .filter(ChangesetComment.pull_request == pull_request) \
516 .filter(ChangesetComment.pull_request == pull_request) \
517 .order_by(ChangesetComment.comment_id.asc())\
517 .order_by(ChangesetComment.comment_id.asc())\
518 .all()
518 .all()
519 assert comments[-1].text == 'Closing a PR'
519 assert comments[-1].text == 'Closing a PR'
520
520
521 def test_comment_force_close_pull_request_rejected(
521 def test_comment_force_close_pull_request_rejected(
522 self, pr_util, csrf_token, xhr_header):
522 self, pr_util, csrf_token, xhr_header):
523 pull_request = pr_util.create_pull_request()
523 pull_request = pr_util.create_pull_request()
524 pull_request_id = pull_request.pull_request_id
524 pull_request_id = pull_request.pull_request_id
525 PullRequestModel().update_reviewers(
525 PullRequestModel().update_reviewers(
526 pull_request_id, [(1, ['reason'], False, []), (2, ['reason2'], False, [])],
526 pull_request_id, [
527 (1, ['reason'], False, 'reviewer', []),
528 (2, ['reason2'], False, 'reviewer', [])],
527 pull_request.author)
529 pull_request.author)
528 author = pull_request.user_id
530 author = pull_request.user_id
529 repo = pull_request.target_repo.repo_id
531 repo = pull_request.target_repo.repo_id
530
532
531 self.app.post(
533 self.app.post(
532 route_path('pullrequest_comment_create',
534 route_path('pullrequest_comment_create',
533 repo_name=pull_request.target_repo.scm_instance().name,
535 repo_name=pull_request.target_repo.scm_instance().name,
534 pull_request_id=pull_request_id),
536 pull_request_id=pull_request_id),
535 params={
537 params={
536 'close_pull_request': '1',
538 'close_pull_request': '1',
537 'csrf_token': csrf_token},
539 'csrf_token': csrf_token},
538 extra_environ=xhr_header)
540 extra_environ=xhr_header)
539
541
540 pull_request = PullRequest.get(pull_request_id)
542 pull_request = PullRequest.get(pull_request_id)
541
543
542 journal = UserLog.query()\
544 journal = UserLog.query()\
543 .filter(UserLog.user_id == author, UserLog.repository_id == repo) \
545 .filter(UserLog.user_id == author, UserLog.repository_id == repo) \
544 .order_by(UserLog.user_log_id.asc()) \
546 .order_by(UserLog.user_log_id.asc()) \
545 .all()
547 .all()
546 assert journal[-1].action == 'repo.pull_request.close'
548 assert journal[-1].action == 'repo.pull_request.close'
547
549
548 # check only the latest status, not the review status
550 # check only the latest status, not the review status
549 status = ChangesetStatusModel().get_status(
551 status = ChangesetStatusModel().get_status(
550 pull_request.source_repo, pull_request=pull_request)
552 pull_request.source_repo, pull_request=pull_request)
551 assert status == ChangesetStatus.STATUS_REJECTED
553 assert status == ChangesetStatus.STATUS_REJECTED
552
554
553 def test_comment_and_close_pull_request(
555 def test_comment_and_close_pull_request(
554 self, pr_util, csrf_token, xhr_header):
556 self, pr_util, csrf_token, xhr_header):
555 pull_request = pr_util.create_pull_request()
557 pull_request = pr_util.create_pull_request()
556 pull_request_id = pull_request.pull_request_id
558 pull_request_id = pull_request.pull_request_id
557
559
558 response = self.app.post(
560 response = self.app.post(
559 route_path('pullrequest_comment_create',
561 route_path('pullrequest_comment_create',
560 repo_name=pull_request.target_repo.scm_instance().name,
562 repo_name=pull_request.target_repo.scm_instance().name,
561 pull_request_id=pull_request.pull_request_id),
563 pull_request_id=pull_request.pull_request_id),
562 params={
564 params={
563 'close_pull_request': 'true',
565 'close_pull_request': 'true',
564 'csrf_token': csrf_token},
566 'csrf_token': csrf_token},
565 extra_environ=xhr_header)
567 extra_environ=xhr_header)
566
568
567 assert response.json
569 assert response.json
568
570
569 pull_request = PullRequest.get(pull_request_id)
571 pull_request = PullRequest.get(pull_request_id)
570 assert pull_request.is_closed()
572 assert pull_request.is_closed()
571
573
572 # check only the latest status, not the review status
574 # check only the latest status, not the review status
573 status = ChangesetStatusModel().get_status(
575 status = ChangesetStatusModel().get_status(
574 pull_request.source_repo, pull_request=pull_request)
576 pull_request.source_repo, pull_request=pull_request)
575 assert status == ChangesetStatus.STATUS_REJECTED
577 assert status == ChangesetStatus.STATUS_REJECTED
576
578
577 def test_comment_and_close_pull_request_try_edit_comment(
579 def test_comment_and_close_pull_request_try_edit_comment(
578 self, pr_util, csrf_token, xhr_header
580 self, pr_util, csrf_token, xhr_header
579 ):
581 ):
580 pull_request = pr_util.create_pull_request()
582 pull_request = pr_util.create_pull_request()
581 pull_request_id = pull_request.pull_request_id
583 pull_request_id = pull_request.pull_request_id
582 target_scm = pull_request.target_repo.scm_instance()
584 target_scm = pull_request.target_repo.scm_instance()
583 target_scm_name = target_scm.name
585 target_scm_name = target_scm.name
584
586
585 response = self.app.post(
587 response = self.app.post(
586 route_path(
588 route_path(
587 'pullrequest_comment_create',
589 'pullrequest_comment_create',
588 repo_name=target_scm_name,
590 repo_name=target_scm_name,
589 pull_request_id=pull_request_id,
591 pull_request_id=pull_request_id,
590 ),
592 ),
591 params={
593 params={
592 'close_pull_request': 'true',
594 'close_pull_request': 'true',
593 'csrf_token': csrf_token,
595 'csrf_token': csrf_token,
594 },
596 },
595 extra_environ=xhr_header)
597 extra_environ=xhr_header)
596
598
597 assert response.json
599 assert response.json
598
600
599 pull_request = PullRequest.get(pull_request_id)
601 pull_request = PullRequest.get(pull_request_id)
600 target_scm = pull_request.target_repo.scm_instance()
602 target_scm = pull_request.target_repo.scm_instance()
601 target_scm_name = target_scm.name
603 target_scm_name = target_scm.name
602 assert pull_request.is_closed()
604 assert pull_request.is_closed()
603
605
604 # check only the latest status, not the review status
606 # check only the latest status, not the review status
605 status = ChangesetStatusModel().get_status(
607 status = ChangesetStatusModel().get_status(
606 pull_request.source_repo, pull_request=pull_request)
608 pull_request.source_repo, pull_request=pull_request)
607 assert status == ChangesetStatus.STATUS_REJECTED
609 assert status == ChangesetStatus.STATUS_REJECTED
608
610
609 comment_id = response.json.get('comment_id', None)
611 comment_id = response.json.get('comment_id', None)
610 test_text = 'test'
612 test_text = 'test'
611 response = self.app.post(
613 response = self.app.post(
612 route_path(
614 route_path(
613 'pullrequest_comment_edit',
615 'pullrequest_comment_edit',
614 repo_name=target_scm_name,
616 repo_name=target_scm_name,
615 pull_request_id=pull_request_id,
617 pull_request_id=pull_request_id,
616 comment_id=comment_id,
618 comment_id=comment_id,
617 ),
619 ),
618 extra_environ=xhr_header,
620 extra_environ=xhr_header,
619 params={
621 params={
620 'csrf_token': csrf_token,
622 'csrf_token': csrf_token,
621 'text': test_text,
623 'text': test_text,
622 },
624 },
623 status=403,
625 status=403,
624 )
626 )
625 assert response.status_int == 403
627 assert response.status_int == 403
626
628
627 def test_comment_and_comment_edit(self, pr_util, csrf_token, xhr_header):
629 def test_comment_and_comment_edit(self, pr_util, csrf_token, xhr_header):
628 pull_request = pr_util.create_pull_request()
630 pull_request = pr_util.create_pull_request()
629 target_scm = pull_request.target_repo.scm_instance()
631 target_scm = pull_request.target_repo.scm_instance()
630 target_scm_name = target_scm.name
632 target_scm_name = target_scm.name
631
633
632 response = self.app.post(
634 response = self.app.post(
633 route_path(
635 route_path(
634 'pullrequest_comment_create',
636 'pullrequest_comment_create',
635 repo_name=target_scm_name,
637 repo_name=target_scm_name,
636 pull_request_id=pull_request.pull_request_id),
638 pull_request_id=pull_request.pull_request_id),
637 params={
639 params={
638 'csrf_token': csrf_token,
640 'csrf_token': csrf_token,
639 'text': 'init',
641 'text': 'init',
640 },
642 },
641 extra_environ=xhr_header,
643 extra_environ=xhr_header,
642 )
644 )
643 assert response.json
645 assert response.json
644
646
645 comment_id = response.json.get('comment_id', None)
647 comment_id = response.json.get('comment_id', None)
646 assert comment_id
648 assert comment_id
647 test_text = 'test'
649 test_text = 'test'
648 self.app.post(
650 self.app.post(
649 route_path(
651 route_path(
650 'pullrequest_comment_edit',
652 'pullrequest_comment_edit',
651 repo_name=target_scm_name,
653 repo_name=target_scm_name,
652 pull_request_id=pull_request.pull_request_id,
654 pull_request_id=pull_request.pull_request_id,
653 comment_id=comment_id,
655 comment_id=comment_id,
654 ),
656 ),
655 extra_environ=xhr_header,
657 extra_environ=xhr_header,
656 params={
658 params={
657 'csrf_token': csrf_token,
659 'csrf_token': csrf_token,
658 'text': test_text,
660 'text': test_text,
659 'version': '0',
661 'version': '0',
660 },
662 },
661
663
662 )
664 )
663 text_form_db = ChangesetComment.query().filter(
665 text_form_db = ChangesetComment.query().filter(
664 ChangesetComment.comment_id == comment_id).first().text
666 ChangesetComment.comment_id == comment_id).first().text
665 assert test_text == text_form_db
667 assert test_text == text_form_db
666
668
667 def test_comment_and_comment_edit(self, pr_util, csrf_token, xhr_header):
669 def test_comment_and_comment_edit(self, pr_util, csrf_token, xhr_header):
668 pull_request = pr_util.create_pull_request()
670 pull_request = pr_util.create_pull_request()
669 target_scm = pull_request.target_repo.scm_instance()
671 target_scm = pull_request.target_repo.scm_instance()
670 target_scm_name = target_scm.name
672 target_scm_name = target_scm.name
671
673
672 response = self.app.post(
674 response = self.app.post(
673 route_path(
675 route_path(
674 'pullrequest_comment_create',
676 'pullrequest_comment_create',
675 repo_name=target_scm_name,
677 repo_name=target_scm_name,
676 pull_request_id=pull_request.pull_request_id),
678 pull_request_id=pull_request.pull_request_id),
677 params={
679 params={
678 'csrf_token': csrf_token,
680 'csrf_token': csrf_token,
679 'text': 'init',
681 'text': 'init',
680 },
682 },
681 extra_environ=xhr_header,
683 extra_environ=xhr_header,
682 )
684 )
683 assert response.json
685 assert response.json
684
686
685 comment_id = response.json.get('comment_id', None)
687 comment_id = response.json.get('comment_id', None)
686 assert comment_id
688 assert comment_id
687 test_text = 'init'
689 test_text = 'init'
688 response = self.app.post(
690 response = self.app.post(
689 route_path(
691 route_path(
690 'pullrequest_comment_edit',
692 'pullrequest_comment_edit',
691 repo_name=target_scm_name,
693 repo_name=target_scm_name,
692 pull_request_id=pull_request.pull_request_id,
694 pull_request_id=pull_request.pull_request_id,
693 comment_id=comment_id,
695 comment_id=comment_id,
694 ),
696 ),
695 extra_environ=xhr_header,
697 extra_environ=xhr_header,
696 params={
698 params={
697 'csrf_token': csrf_token,
699 'csrf_token': csrf_token,
698 'text': test_text,
700 'text': test_text,
699 'version': '0',
701 'version': '0',
700 },
702 },
701 status=404,
703 status=404,
702
704
703 )
705 )
704 assert response.status_int == 404
706 assert response.status_int == 404
705
707
706 def test_comment_and_try_edit_already_edited(self, pr_util, csrf_token, xhr_header):
708 def test_comment_and_try_edit_already_edited(self, pr_util, csrf_token, xhr_header):
707 pull_request = pr_util.create_pull_request()
709 pull_request = pr_util.create_pull_request()
708 target_scm = pull_request.target_repo.scm_instance()
710 target_scm = pull_request.target_repo.scm_instance()
709 target_scm_name = target_scm.name
711 target_scm_name = target_scm.name
710
712
711 response = self.app.post(
713 response = self.app.post(
712 route_path(
714 route_path(
713 'pullrequest_comment_create',
715 'pullrequest_comment_create',
714 repo_name=target_scm_name,
716 repo_name=target_scm_name,
715 pull_request_id=pull_request.pull_request_id),
717 pull_request_id=pull_request.pull_request_id),
716 params={
718 params={
717 'csrf_token': csrf_token,
719 'csrf_token': csrf_token,
718 'text': 'init',
720 'text': 'init',
719 },
721 },
720 extra_environ=xhr_header,
722 extra_environ=xhr_header,
721 )
723 )
722 assert response.json
724 assert response.json
723 comment_id = response.json.get('comment_id', None)
725 comment_id = response.json.get('comment_id', None)
724 assert comment_id
726 assert comment_id
725
727
726 test_text = 'test'
728 test_text = 'test'
727 self.app.post(
729 self.app.post(
728 route_path(
730 route_path(
729 'pullrequest_comment_edit',
731 'pullrequest_comment_edit',
730 repo_name=target_scm_name,
732 repo_name=target_scm_name,
731 pull_request_id=pull_request.pull_request_id,
733 pull_request_id=pull_request.pull_request_id,
732 comment_id=comment_id,
734 comment_id=comment_id,
733 ),
735 ),
734 extra_environ=xhr_header,
736 extra_environ=xhr_header,
735 params={
737 params={
736 'csrf_token': csrf_token,
738 'csrf_token': csrf_token,
737 'text': test_text,
739 'text': test_text,
738 'version': '0',
740 'version': '0',
739 },
741 },
740
742
741 )
743 )
742 test_text_v2 = 'test_v2'
744 test_text_v2 = 'test_v2'
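# the second edit still claims version '0', which no longer matches the comment's
# version after the first edit, hence the expected 409 conflict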
743 response = self.app.post(
745 response = self.app.post(
744 route_path(
746 route_path(
745 'pullrequest_comment_edit',
747 'pullrequest_comment_edit',
746 repo_name=target_scm_name,
748 repo_name=target_scm_name,
747 pull_request_id=pull_request.pull_request_id,
749 pull_request_id=pull_request.pull_request_id,
748 comment_id=comment_id,
750 comment_id=comment_id,
749 ),
751 ),
750 extra_environ=xhr_header,
752 extra_environ=xhr_header,
751 params={
753 params={
752 'csrf_token': csrf_token,
754 'csrf_token': csrf_token,
753 'text': test_text_v2,
755 'text': test_text_v2,
754 'version': '0',
756 'version': '0',
755 },
757 },
756 status=409,
758 status=409,
757 )
759 )
758 assert response.status_int == 409
760 assert response.status_int == 409
759
761
760 text_from_db = ChangesetComment.query().filter(
762 text_from_db = ChangesetComment.query().filter(
761 ChangesetComment.comment_id == comment_id).first().text
763 ChangesetComment.comment_id == comment_id).first().text
762
764
763 assert test_text == text_from_db
765 assert test_text == text_from_db
764 assert test_text_v2 != text_from_db
766 assert test_text_v2 != text_from_db
765
767
766 def test_comment_and_comment_edit_permissions_forbidden(
768 def test_comment_and_comment_edit_permissions_forbidden(
767 self, autologin_regular_user, user_regular, user_admin, pr_util,
769 self, autologin_regular_user, user_regular, user_admin, pr_util,
768 csrf_token, xhr_header):
770 csrf_token, xhr_header):
769 pull_request = pr_util.create_pull_request(
771 pull_request = pr_util.create_pull_request(
770 author=user_admin.username, enable_notifications=False)
772 author=user_admin.username, enable_notifications=False)
771 comment = CommentsModel().create(
773 comment = CommentsModel().create(
772 text='test',
774 text='test',
773 repo=pull_request.target_repo.scm_instance().name,
775 repo=pull_request.target_repo.scm_instance().name,
774 user=user_admin,
776 user=user_admin,
775 pull_request=pull_request,
777 pull_request=pull_request,
776 )
778 )
777 response = self.app.post(
779 response = self.app.post(
778 route_path(
780 route_path(
779 'pullrequest_comment_edit',
781 'pullrequest_comment_edit',
780 repo_name=pull_request.target_repo.scm_instance().name,
782 repo_name=pull_request.target_repo.scm_instance().name,
781 pull_request_id=pull_request.pull_request_id,
783 pull_request_id=pull_request.pull_request_id,
782 comment_id=comment.comment_id,
784 comment_id=comment.comment_id,
783 ),
785 ),
784 extra_environ=xhr_header,
786 extra_environ=xhr_header,
785 params={
787 params={
786 'csrf_token': csrf_token,
788 'csrf_token': csrf_token,
787 'text': 'test_text',
789 'text': 'test_text',
788 },
790 },
789 status=403,
791 status=403,
790 )
792 )
791 assert response.status_int == 403
793 assert response.status_int == 403
792
794
793 def test_create_pull_request(self, backend, csrf_token):
795 def test_create_pull_request(self, backend, csrf_token):
794 commits = [
796 commits = [
795 {'message': 'ancestor'},
797 {'message': 'ancestor'},
796 {'message': 'change'},
798 {'message': 'change'},
797 {'message': 'change2'},
799 {'message': 'change2'},
798 ]
800 ]
799 commit_ids = backend.create_master_repo(commits)
801 commit_ids = backend.create_master_repo(commits)
800 target = backend.create_repo(heads=['ancestor'])
802 target = backend.create_repo(heads=['ancestor'])
801 source = backend.create_repo(heads=['change2'])
803 source = backend.create_repo(heads=['change2'])
802
804
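# the form payload below uses __start__/__end__ markers (peppercorn-style
# serialization) to describe the nested reviewers mapping and revisions sequence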
803 response = self.app.post(
805 response = self.app.post(
804 route_path('pullrequest_create', repo_name=source.repo_name),
806 route_path('pullrequest_create', repo_name=source.repo_name),
805 [
807 [
806 ('source_repo', source.repo_name),
808 ('source_repo', source.repo_name),
807 ('source_ref', 'branch:default:' + commit_ids['change2']),
809 ('source_ref', 'branch:default:' + commit_ids['change2']),
808 ('target_repo', target.repo_name),
810 ('target_repo', target.repo_name),
809 ('target_ref', 'branch:default:' + commit_ids['ancestor']),
811 ('target_ref', 'branch:default:' + commit_ids['ancestor']),
810 ('common_ancestor', commit_ids['ancestor']),
812 ('common_ancestor', commit_ids['ancestor']),
811 ('pullrequest_title', 'Title'),
813 ('pullrequest_title', 'Title'),
812 ('pullrequest_desc', 'Description'),
814 ('pullrequest_desc', 'Description'),
813 ('description_renderer', 'markdown'),
815 ('description_renderer', 'markdown'),
814 ('__start__', 'review_members:sequence'),
816 ('__start__', 'review_members:sequence'),
815 ('__start__', 'reviewer:mapping'),
817 ('__start__', 'reviewer:mapping'),
816 ('user_id', '1'),
818 ('user_id', '1'),
817 ('__start__', 'reasons:sequence'),
819 ('__start__', 'reasons:sequence'),
818 ('reason', 'Some reason'),
820 ('reason', 'Some reason'),
819 ('__end__', 'reasons:sequence'),
821 ('__end__', 'reasons:sequence'),
820 ('__start__', 'rules:sequence'),
822 ('__start__', 'rules:sequence'),
821 ('__end__', 'rules:sequence'),
823 ('__end__', 'rules:sequence'),
822 ('mandatory', 'False'),
824 ('mandatory', 'False'),
823 ('__end__', 'reviewer:mapping'),
825 ('__end__', 'reviewer:mapping'),
824 ('__end__', 'review_members:sequence'),
826 ('__end__', 'review_members:sequence'),
825 ('__start__', 'revisions:sequence'),
827 ('__start__', 'revisions:sequence'),
826 ('revisions', commit_ids['change']),
828 ('revisions', commit_ids['change']),
827 ('revisions', commit_ids['change2']),
829 ('revisions', commit_ids['change2']),
828 ('__end__', 'revisions:sequence'),
830 ('__end__', 'revisions:sequence'),
829 ('user', ''),
831 ('user', ''),
830 ('csrf_token', csrf_token),
832 ('csrf_token', csrf_token),
831 ],
833 ],
832 status=302)
834 status=302)
833
835
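# on success the view redirects to the new pull request page; the id is the last
# URL segment and must not be the literal 'new' (the creation form route)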
834 location = response.headers['Location']
836 location = response.headers['Location']
835 pull_request_id = location.rsplit('/', 1)[1]
837 pull_request_id = location.rsplit('/', 1)[1]
836 assert pull_request_id != 'new'
838 assert pull_request_id != 'new'
837 pull_request = PullRequest.get(int(pull_request_id))
839 pull_request = PullRequest.get(int(pull_request_id))
838
840
839 # check that we now have both revisions
841 # check that we now have both revisions
840 assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']]
842 assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']]
841 assert pull_request.source_ref == 'branch:default:' + commit_ids['change2']
843 assert pull_request.source_ref == 'branch:default:' + commit_ids['change2']
842 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
844 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
843 assert pull_request.target_ref == expected_target_ref
845 assert pull_request.target_ref == expected_target_ref
844
846
845 def test_reviewer_notifications(self, backend, csrf_token):
847 def test_reviewer_notifications(self, backend, csrf_token):
846 # We have to go through app.post for this test so that the notifications
848 # We have to go through app.post for this test so that the notifications
847 # are created properly along with the new PR
849 # are created properly along with the new PR
848 commits = [
850 commits = [
849 {'message': 'ancestor',
851 {'message': 'ancestor',
850 'added': [FileNode('file_A', content='content_of_ancestor')]},
852 'added': [FileNode('file_A', content='content_of_ancestor')]},
851 {'message': 'change',
853 {'message': 'change',
852 'added': [FileNode('file_a', content='content_of_change')]},
854 'added': [FileNode('file_a', content='content_of_change')]},
853 {'message': 'change-child'},
855 {'message': 'change-child'},
854 {'message': 'ancestor-child', 'parents': ['ancestor'],
856 {'message': 'ancestor-child', 'parents': ['ancestor'],
855 'added': [
857 'added': [
856 FileNode('file_B', content='content_of_ancestor_child')]},
858 FileNode('file_B', content='content_of_ancestor_child')]},
857 {'message': 'ancestor-child-2'},
859 {'message': 'ancestor-child-2'},
858 ]
860 ]
859 commit_ids = backend.create_master_repo(commits)
861 commit_ids = backend.create_master_repo(commits)
860 target = backend.create_repo(heads=['ancestor-child'])
862 target = backend.create_repo(heads=['ancestor-child'])
861 source = backend.create_repo(heads=['change'])
863 source = backend.create_repo(heads=['change'])
862
864
863 response = self.app.post(
865 response = self.app.post(
864 route_path('pullrequest_create', repo_name=source.repo_name),
866 route_path('pullrequest_create', repo_name=source.repo_name),
865 [
867 [
866 ('source_repo', source.repo_name),
868 ('source_repo', source.repo_name),
867 ('source_ref', 'branch:default:' + commit_ids['change']),
869 ('source_ref', 'branch:default:' + commit_ids['change']),
868 ('target_repo', target.repo_name),
870 ('target_repo', target.repo_name),
869 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
871 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
870 ('common_ancestor', commit_ids['ancestor']),
872 ('common_ancestor', commit_ids['ancestor']),
871 ('pullrequest_title', 'Title'),
873 ('pullrequest_title', 'Title'),
872 ('pullrequest_desc', 'Description'),
874 ('pullrequest_desc', 'Description'),
873 ('description_renderer', 'markdown'),
875 ('description_renderer', 'markdown'),
874 ('__start__', 'review_members:sequence'),
876 ('__start__', 'review_members:sequence'),
875 ('__start__', 'reviewer:mapping'),
877 ('__start__', 'reviewer:mapping'),
876 ('user_id', '2'),
878 ('user_id', '2'),
877 ('__start__', 'reasons:sequence'),
879 ('__start__', 'reasons:sequence'),
878 ('reason', 'Some reason'),
880 ('reason', 'Some reason'),
879 ('__end__', 'reasons:sequence'),
881 ('__end__', 'reasons:sequence'),
880 ('__start__', 'rules:sequence'),
882 ('__start__', 'rules:sequence'),
881 ('__end__', 'rules:sequence'),
883 ('__end__', 'rules:sequence'),
882 ('mandatory', 'False'),
884 ('mandatory', 'False'),
883 ('__end__', 'reviewer:mapping'),
885 ('__end__', 'reviewer:mapping'),
884 ('__end__', 'review_members:sequence'),
886 ('__end__', 'review_members:sequence'),
885 ('__start__', 'revisions:sequence'),
887 ('__start__', 'revisions:sequence'),
886 ('revisions', commit_ids['change']),
888 ('revisions', commit_ids['change']),
887 ('__end__', 'revisions:sequence'),
889 ('__end__', 'revisions:sequence'),
888 ('user', ''),
890 ('user', ''),
889 ('csrf_token', csrf_token),
891 ('csrf_token', csrf_token),
890 ],
892 ],
891 status=302)
893 status=302)
892
894
893 location = response.headers['Location']
895 location = response.headers['Location']
894
896
895 pull_request_id = location.rsplit('/', 1)[1]
897 pull_request_id = location.rsplit('/', 1)[1]
896 assert pull_request_id != 'new'
898 assert pull_request_id != 'new'
897 pull_request = PullRequest.get(int(pull_request_id))
899 pull_request = PullRequest.get(int(pull_request_id))
898
900
899 # Check that a notification was made
901 # Check that a notification was made
900 notifications = Notification.query()\
902 notifications = Notification.query()\
901 .filter(Notification.created_by == pull_request.author.user_id,
903 .filter(Notification.created_by == pull_request.author.user_id,
902 Notification.type_ == Notification.TYPE_PULL_REQUEST,
904 Notification.type_ == Notification.TYPE_PULL_REQUEST,
903 Notification.subject.contains(
905 Notification.subject.contains(
904 "requested a pull request review. !%s" % pull_request_id))
906 "requested a pull request review. !%s" % pull_request_id))
905 assert len(notifications.all()) == 1
907 assert len(notifications.all()) == 1
906
908
907 # Change reviewers and check that a notification was made
909 # Change reviewers and check that a notification was made
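# `notifications` is a lazy query, so the .all() below re-evaluates it and should
# pick up the extra notification produced by the reviewer update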
908 PullRequestModel().update_reviewers(
910 PullRequestModel().update_reviewers(
909 pull_request.pull_request_id, [(1, [], False, [])],
911 pull_request.pull_request_id, [
912 (1, [], False, 'reviewer', [])
913 ],
910 pull_request.author)
914 pull_request.author)
911 assert len(notifications.all()) == 2
915 assert len(notifications.all()) == 2
912
916
913 def test_create_pull_request_stores_ancestor_commit_id(self, backend,
917 def test_create_pull_request_stores_ancestor_commit_id(self, backend, csrf_token):
914 csrf_token):
915 commits = [
918 commits = [
916 {'message': 'ancestor',
919 {'message': 'ancestor',
917 'added': [FileNode('file_A', content='content_of_ancestor')]},
920 'added': [FileNode('file_A', content='content_of_ancestor')]},
918 {'message': 'change',
921 {'message': 'change',
919 'added': [FileNode('file_a', content='content_of_change')]},
922 'added': [FileNode('file_a', content='content_of_change')]},
920 {'message': 'change-child'},
923 {'message': 'change-child'},
921 {'message': 'ancestor-child', 'parents': ['ancestor'],
924 {'message': 'ancestor-child', 'parents': ['ancestor'],
922 'added': [
925 'added': [
923 FileNode('file_B', content='content_of_ancestor_child')]},
926 FileNode('file_B', content='content_of_ancestor_child')]},
924 {'message': 'ancestor-child-2'},
927 {'message': 'ancestor-child-2'},
925 ]
928 ]
926 commit_ids = backend.create_master_repo(commits)
929 commit_ids = backend.create_master_repo(commits)
927 target = backend.create_repo(heads=['ancestor-child'])
930 target = backend.create_repo(heads=['ancestor-child'])
928 source = backend.create_repo(heads=['change'])
931 source = backend.create_repo(heads=['change'])
929
932
930 response = self.app.post(
933 response = self.app.post(
931 route_path('pullrequest_create', repo_name=source.repo_name),
934 route_path('pullrequest_create', repo_name=source.repo_name),
932 [
935 [
933 ('source_repo', source.repo_name),
936 ('source_repo', source.repo_name),
934 ('source_ref', 'branch:default:' + commit_ids['change']),
937 ('source_ref', 'branch:default:' + commit_ids['change']),
935 ('target_repo', target.repo_name),
938 ('target_repo', target.repo_name),
936 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
939 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
937 ('common_ancestor', commit_ids['ancestor']),
940 ('common_ancestor', commit_ids['ancestor']),
938 ('pullrequest_title', 'Title'),
941 ('pullrequest_title', 'Title'),
939 ('pullrequest_desc', 'Description'),
942 ('pullrequest_desc', 'Description'),
940 ('description_renderer', 'markdown'),
943 ('description_renderer', 'markdown'),
941 ('__start__', 'review_members:sequence'),
944 ('__start__', 'review_members:sequence'),
942 ('__start__', 'reviewer:mapping'),
945 ('__start__', 'reviewer:mapping'),
943 ('user_id', '1'),
946 ('user_id', '1'),
944 ('__start__', 'reasons:sequence'),
947 ('__start__', 'reasons:sequence'),
945 ('reason', 'Some reason'),
948 ('reason', 'Some reason'),
946 ('__end__', 'reasons:sequence'),
949 ('__end__', 'reasons:sequence'),
947 ('__start__', 'rules:sequence'),
950 ('__start__', 'rules:sequence'),
948 ('__end__', 'rules:sequence'),
951 ('__end__', 'rules:sequence'),
949 ('mandatory', 'False'),
952 ('mandatory', 'False'),
950 ('__end__', 'reviewer:mapping'),
953 ('__end__', 'reviewer:mapping'),
951 ('__end__', 'review_members:sequence'),
954 ('__end__', 'review_members:sequence'),
952 ('__start__', 'revisions:sequence'),
955 ('__start__', 'revisions:sequence'),
953 ('revisions', commit_ids['change']),
956 ('revisions', commit_ids['change']),
954 ('__end__', 'revisions:sequence'),
957 ('__end__', 'revisions:sequence'),
955 ('user', ''),
958 ('user', ''),
956 ('csrf_token', csrf_token),
959 ('csrf_token', csrf_token),
957 ],
960 ],
958 status=302)
961 status=302)
959
962
960 location = response.headers['Location']
963 location = response.headers['Location']
961
964
962 pull_request_id = location.rsplit('/', 1)[1]
965 pull_request_id = location.rsplit('/', 1)[1]
963 assert pull_request_id != 'new'
966 assert pull_request_id != 'new'
964 pull_request = PullRequest.get(int(pull_request_id))
967 pull_request = PullRequest.get(int(pull_request_id))
965
968
966 # target_ref has to point to the ancestor's commit_id in order to
969 # target_ref has to point to the ancestor's commit_id in order to
967 # show the correct diff
970 # show the correct diff
968 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
971 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
969 assert pull_request.target_ref == expected_target_ref
972 assert pull_request.target_ref == expected_target_ref
970
973
971 # Check generated diff contents
974 # Check generated diff contents
972 response = response.follow()
975 response = response.follow()
973 response.mustcontain(no=['content_of_ancestor'])
976 response.mustcontain(no=['content_of_ancestor'])
974 response.mustcontain(no=['content_of_ancestor-child'])
977 response.mustcontain(no=['content_of_ancestor-child'])
975 response.mustcontain('content_of_change')
978 response.mustcontain('content_of_change')
976
979
977 def test_merge_pull_request_enabled(self, pr_util, csrf_token):
980 def test_merge_pull_request_enabled(self, pr_util, csrf_token):
978 # Clear any previous calls to rcextensions
981 # Clear any previous calls to rcextensions
979 rhodecode.EXTENSIONS.calls.clear()
982 rhodecode.EXTENSIONS.calls.clear()
980
983
981 pull_request = pr_util.create_pull_request(
984 pull_request = pr_util.create_pull_request(
982 approved=True, mergeable=True)
985 approved=True, mergeable=True)
983 pull_request_id = pull_request.pull_request_id
986 pull_request_id = pull_request.pull_request_id
984 repo_name = pull_request.target_repo.scm_instance().name
987 repo_name = pull_request.target_repo.scm_instance().name
985
988
986 url = route_path('pullrequest_merge',
989 url = route_path('pullrequest_merge',
987 repo_name=repo_name,
990 repo_name=repo_name,
988 pull_request_id=pull_request_id)
991 pull_request_id=pull_request_id)
989 response = self.app.post(url, params={'csrf_token': csrf_token}).follow()
992 response = self.app.post(url, params={'csrf_token': csrf_token}).follow()
990
993
991 pull_request = PullRequest.get(pull_request_id)
994 pull_request = PullRequest.get(pull_request_id)
992
995
993 assert response.status_int == 200
996 assert response.status_int == 200
994 assert pull_request.is_closed()
997 assert pull_request.is_closed()
995 assert_pull_request_status(
998 assert_pull_request_status(
996 pull_request, ChangesetStatus.STATUS_APPROVED)
999 pull_request, ChangesetStatus.STATUS_APPROVED)
997
1000
998 # Check the relevant log entries were added
1001 # Check the relevant log entries were added
999 user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(3)
1002 user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(3)
1000 actions = [log.action for log in user_logs]
1003 actions = [log.action for log in user_logs]
1001 pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request)
1004 pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request)
1002 expected_actions = [
1005 expected_actions = [
1003 u'repo.pull_request.close',
1006 u'repo.pull_request.close',
1004 u'repo.pull_request.merge',
1007 u'repo.pull_request.merge',
1005 u'repo.pull_request.comment.create'
1008 u'repo.pull_request.comment.create'
1006 ]
1009 ]
1007 assert actions == expected_actions
1010 assert actions == expected_actions
1008
1011
1009 user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(4)
1012 user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(4)
1010 logs = [log for log in user_logs]
1013 logs = [log for log in user_logs]
1011 assert logs[-1].action == 'user.push'
1014 assert logs[-1].action == 'user.push'
1012 assert logs[-1].action_data['commit_ids'] == pr_commit_ids
1015 assert logs[-1].action_data['commit_ids'] == pr_commit_ids
1013
1016
1014 # Check post_push rcextension was really executed
1017 # Check post_push rcextension was really executed
1015 push_calls = rhodecode.EXTENSIONS.calls['_push_hook']
1018 push_calls = rhodecode.EXTENSIONS.calls['_push_hook']
1016 assert len(push_calls) == 1
1019 assert len(push_calls) == 1
1017 unused_last_call_args, last_call_kwargs = push_calls[0]
1020 unused_last_call_args, last_call_kwargs = push_calls[0]
1018 assert last_call_kwargs['action'] == 'push'
1021 assert last_call_kwargs['action'] == 'push'
1019 assert last_call_kwargs['commit_ids'] == pr_commit_ids
1022 assert last_call_kwargs['commit_ids'] == pr_commit_ids
1020
1023
1021 def test_merge_pull_request_disabled(self, pr_util, csrf_token):
1024 def test_merge_pull_request_disabled(self, pr_util, csrf_token):
1022 pull_request = pr_util.create_pull_request(mergeable=False)
1025 pull_request = pr_util.create_pull_request(mergeable=False)
1023 pull_request_id = pull_request.pull_request_id
1026 pull_request_id = pull_request.pull_request_id
1024 pull_request = PullRequest.get(pull_request_id)
1027 pull_request = PullRequest.get(pull_request_id)
1025
1028
1026 response = self.app.post(
1029 response = self.app.post(
1027 route_path('pullrequest_merge',
1030 route_path('pullrequest_merge',
1028 repo_name=pull_request.target_repo.scm_instance().name,
1031 repo_name=pull_request.target_repo.scm_instance().name,
1029 pull_request_id=pull_request.pull_request_id),
1032 pull_request_id=pull_request.pull_request_id),
1030 params={'csrf_token': csrf_token}).follow()
1033 params={'csrf_token': csrf_token}).follow()
1031
1034
1032 assert response.status_int == 200
1035 assert response.status_int == 200
1033 response.mustcontain(
1036 response.mustcontain(
1034 'Merge is not currently possible because of below failed checks.')
1037 'Merge is not currently possible because of below failed checks.')
1035 response.mustcontain('Server-side pull request merging is disabled.')
1038 response.mustcontain('Server-side pull request merging is disabled.')
1036
1039
1037 @pytest.mark.skip_backends('svn')
1040 @pytest.mark.skip_backends('svn')
1038 def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
1041 def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
1039 pull_request = pr_util.create_pull_request(mergeable=True)
1042 pull_request = pr_util.create_pull_request(mergeable=True)
1040 pull_request_id = pull_request.pull_request_id
1043 pull_request_id = pull_request.pull_request_id
1041 repo_name = pull_request.target_repo.scm_instance().name
1044 repo_name = pull_request.target_repo.scm_instance().name
1042
1045
1043 response = self.app.post(
1046 response = self.app.post(
1044 route_path('pullrequest_merge',
1047 route_path('pullrequest_merge',
1045 repo_name=repo_name, pull_request_id=pull_request_id),
1048 repo_name=repo_name, pull_request_id=pull_request_id),
1046 params={'csrf_token': csrf_token}).follow()
1049 params={'csrf_token': csrf_token}).follow()
1047
1050
1048 assert response.status_int == 200
1051 assert response.status_int == 200
1049
1052
1050 response.mustcontain(
1053 response.mustcontain(
1051 'Merge is not currently possible because of below failed checks.')
1054 'Merge is not currently possible because of below failed checks.')
1052 response.mustcontain('Pull request reviewer approval is pending.')
1055 response.mustcontain('Pull request reviewer approval is pending.')
1053
1056
1054 def test_merge_pull_request_renders_failure_reason(
1057 def test_merge_pull_request_renders_failure_reason(
1055 self, user_regular, csrf_token, pr_util):
1058 self, user_regular, csrf_token, pr_util):
1056 pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
1059 pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
1057 pull_request_id = pull_request.pull_request_id
1060 pull_request_id = pull_request.pull_request_id
1058 repo_name = pull_request.target_repo.scm_instance().name
1061 repo_name = pull_request.target_repo.scm_instance().name
1059
1062
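# the mocks below let the merge checks pass but make the actual merge fail at the
# push step, so the view has a concrete failure reason to flash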
1060 merge_resp = MergeResponse(True, False, 'STUB_COMMIT_ID',
1063 merge_resp = MergeResponse(True, False, 'STUB_COMMIT_ID',
1061 MergeFailureReason.PUSH_FAILED,
1064 MergeFailureReason.PUSH_FAILED,
1062 metadata={'target': 'shadow repo',
1065 metadata={'target': 'shadow repo',
1063 'merge_commit': 'xxx'})
1066 'merge_commit': 'xxx'})
1064 model_patcher = mock.patch.multiple(
1067 model_patcher = mock.patch.multiple(
1065 PullRequestModel,
1068 PullRequestModel,
1066 merge_repo=mock.Mock(return_value=merge_resp),
1069 merge_repo=mock.Mock(return_value=merge_resp),
1067 merge_status=mock.Mock(return_value=(None, True, 'WRONG_MESSAGE')))
1070 merge_status=mock.Mock(return_value=(None, True, 'WRONG_MESSAGE')))
1068
1071
1069 with model_patcher:
1072 with model_patcher:
1070 response = self.app.post(
1073 response = self.app.post(
1071 route_path('pullrequest_merge',
1074 route_path('pullrequest_merge',
1072 repo_name=repo_name,
1075 repo_name=repo_name,
1073 pull_request_id=pull_request_id),
1076 pull_request_id=pull_request_id),
1074 params={'csrf_token': csrf_token}, status=302)
1077 params={'csrf_token': csrf_token}, status=302)
1075
1078
1076 merge_resp = MergeResponse(True, True, '', MergeFailureReason.PUSH_FAILED,
1079 merge_resp = MergeResponse(True, True, '', MergeFailureReason.PUSH_FAILED,
1077 metadata={'target': 'shadow repo',
1080 metadata={'target': 'shadow repo',
1078 'merge_commit': 'xxx'})
1081 'merge_commit': 'xxx'})
1079 assert_session_flash(response, merge_resp.merge_status_message)
1082 assert_session_flash(response, merge_resp.merge_status_message)
1080
1083
1081 def test_update_source_revision(self, backend, csrf_token):
1084 def test_update_source_revision(self, backend, csrf_token):
1082 commits = [
1085 commits = [
1083 {'message': 'ancestor'},
1086 {'message': 'ancestor'},
1084 {'message': 'change'},
1087 {'message': 'change'},
1085 {'message': 'change-2'},
1088 {'message': 'change-2'},
1086 ]
1089 ]
1087 commit_ids = backend.create_master_repo(commits)
1090 commit_ids = backend.create_master_repo(commits)
1088 target = backend.create_repo(heads=['ancestor'])
1091 target = backend.create_repo(heads=['ancestor'])
1089 source = backend.create_repo(heads=['change'])
1092 source = backend.create_repo(heads=['change'])
1090
1093
1091 # create a PR from the 'change' head in source into the 'ancestor' head in target
1094 # create a PR from the 'change' head in source into the 'ancestor' head in target
1092 pull_request = PullRequest()
1095 pull_request = PullRequest()
1093
1096
1094 pull_request.source_repo = source
1097 pull_request.source_repo = source
1095 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
1098 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
1096 branch=backend.default_branch_name, commit_id=commit_ids['change'])
1099 branch=backend.default_branch_name, commit_id=commit_ids['change'])
1097
1100
1098 pull_request.target_repo = target
1101 pull_request.target_repo = target
1099 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
1102 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
1100 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
1103 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
1101
1104
1102 pull_request.revisions = [commit_ids['change']]
1105 pull_request.revisions = [commit_ids['change']]
1103 pull_request.title = u"Test"
1106 pull_request.title = u"Test"
1104 pull_request.description = u"Description"
1107 pull_request.description = u"Description"
1105 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1108 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1106 pull_request.pull_request_state = PullRequest.STATE_CREATED
1109 pull_request.pull_request_state = PullRequest.STATE_CREATED
1107 Session().add(pull_request)
1110 Session().add(pull_request)
1108 Session().commit()
1111 Session().commit()
1109 pull_request_id = pull_request.pull_request_id
1112 pull_request_id = pull_request.pull_request_id
1110
1113
1111 # source has ancestor - change - change-2
1114 # source has ancestor - change - change-2
1112 backend.pull_heads(source, heads=['change-2'])
1115 backend.pull_heads(source, heads=['change-2'])
1113 target_repo_name = target.repo_name
1116 target_repo_name = target.repo_name
1114
1117
1115 # update PR
1118 # update PR
1116 self.app.post(
1119 self.app.post(
1117 route_path('pullrequest_update',
1120 route_path('pullrequest_update',
1118 repo_name=target_repo_name, pull_request_id=pull_request_id),
1121 repo_name=target_repo_name, pull_request_id=pull_request_id),
1119 params={'update_commits': 'true', 'csrf_token': csrf_token})
1122 params={'update_commits': 'true', 'csrf_token': csrf_token})
1120
1123
1121 response = self.app.get(
1124 response = self.app.get(
1122 route_path('pullrequest_show',
1125 route_path('pullrequest_show',
1123 repo_name=target_repo_name,
1126 repo_name=target_repo_name,
1124 pull_request_id=pull_request.pull_request_id))
1127 pull_request_id=pull_request.pull_request_id))
1125
1128
1126 assert response.status_int == 200
1129 assert response.status_int == 200
1127 response.mustcontain('Pull request updated to')
1130 response.mustcontain('Pull request updated to')
1128 response.mustcontain('with 1 added, 0 removed commits.')
1131 response.mustcontain('with 1 added, 0 removed commits.')
1129
1132
1130 # check that we now have both revisions
1133 # check that we now have both revisions
1131 pull_request = PullRequest.get(pull_request_id)
1134 pull_request = PullRequest.get(pull_request_id)
1132 assert pull_request.revisions == [commit_ids['change-2'], commit_ids['change']]
1135 assert pull_request.revisions == [commit_ids['change-2'], commit_ids['change']]
1133
1136
1134 def test_update_target_revision(self, backend, csrf_token):
1137 def test_update_target_revision(self, backend, csrf_token):
1135 commits = [
1138 commits = [
1136 {'message': 'ancestor'},
1139 {'message': 'ancestor'},
1137 {'message': 'change'},
1140 {'message': 'change'},
1138 {'message': 'ancestor-new', 'parents': ['ancestor']},
1141 {'message': 'ancestor-new', 'parents': ['ancestor']},
1139 {'message': 'change-rebased'},
1142 {'message': 'change-rebased'},
1140 ]
1143 ]
1141 commit_ids = backend.create_master_repo(commits)
1144 commit_ids = backend.create_master_repo(commits)
1142 target = backend.create_repo(heads=['ancestor'])
1145 target = backend.create_repo(heads=['ancestor'])
1143 source = backend.create_repo(heads=['change'])
1146 source = backend.create_repo(heads=['change'])
1144
1147
1145 # create a PR from the 'change' head in source into the 'ancestor' head in target
1148 # create a PR from the 'change' head in source into the 'ancestor' head in target
1146 pull_request = PullRequest()
1149 pull_request = PullRequest()
1147
1150
1148 pull_request.source_repo = source
1151 pull_request.source_repo = source
1149 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
1152 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
1150 branch=backend.default_branch_name, commit_id=commit_ids['change'])
1153 branch=backend.default_branch_name, commit_id=commit_ids['change'])
1151
1154
1152 pull_request.target_repo = target
1155 pull_request.target_repo = target
1153 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
1156 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
1154 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
1157 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
1155
1158
1156 pull_request.revisions = [commit_ids['change']]
1159 pull_request.revisions = [commit_ids['change']]
1157 pull_request.title = u"Test"
1160 pull_request.title = u"Test"
1158 pull_request.description = u"Description"
1161 pull_request.description = u"Description"
1159 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1162 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1160 pull_request.pull_request_state = PullRequest.STATE_CREATED
1163 pull_request.pull_request_state = PullRequest.STATE_CREATED
1161
1164
1162 Session().add(pull_request)
1165 Session().add(pull_request)
1163 Session().commit()
1166 Session().commit()
1164 pull_request_id = pull_request.pull_request_id
1167 pull_request_id = pull_request.pull_request_id
1165
1168
1166 # target has ancestor - ancestor-new
1169 # target has ancestor - ancestor-new
1167 # source has ancestor - ancestor-new - change-rebased
1170 # source has ancestor - ancestor-new - change-rebased
1168 backend.pull_heads(target, heads=['ancestor-new'])
1171 backend.pull_heads(target, heads=['ancestor-new'])
1169 backend.pull_heads(source, heads=['change-rebased'])
1172 backend.pull_heads(source, heads=['change-rebased'])
1170 target_repo_name = target.repo_name
1173 target_repo_name = target.repo_name
1171
1174
1172 # update PR
1175 # update PR
1173 url = route_path('pullrequest_update',
1176 url = route_path('pullrequest_update',
1174 repo_name=target_repo_name,
1177 repo_name=target_repo_name,
1175 pull_request_id=pull_request_id)
1178 pull_request_id=pull_request_id)
1176 self.app.post(url,
1179 self.app.post(url,
1177 params={'update_commits': 'true', 'csrf_token': csrf_token},
1180 params={'update_commits': 'true', 'csrf_token': csrf_token},
1178 status=200)
1181 status=200)
1179
1182
1180 # check that the revisions and target reference were updated
1183 # check that the revisions and target reference were updated
1181 pull_request = PullRequest.get(pull_request_id)
1184 pull_request = PullRequest.get(pull_request_id)
1182 assert pull_request.revisions == [commit_ids['change-rebased']]
1185 assert pull_request.revisions == [commit_ids['change-rebased']]
1183 assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
1186 assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
1184 branch=backend.default_branch_name, commit_id=commit_ids['ancestor-new'])
1187 branch=backend.default_branch_name, commit_id=commit_ids['ancestor-new'])
1185
1188
1186 response = self.app.get(
1189 response = self.app.get(
1187 route_path('pullrequest_show',
1190 route_path('pullrequest_show',
1188 repo_name=target_repo_name,
1191 repo_name=target_repo_name,
1189 pull_request_id=pull_request.pull_request_id))
1192 pull_request_id=pull_request.pull_request_id))
1190 assert response.status_int == 200
1193 assert response.status_int == 200
1191 response.mustcontain('Pull request updated to')
1194 response.mustcontain('Pull request updated to')
1192 response.mustcontain('with 1 added, 1 removed commits.')
1195 response.mustcontain('with 1 added, 1 removed commits.')
1193
1196
1194 def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token):
1197 def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token):
1195 backend = backend_git
1198 backend = backend_git
1196 commits = [
1199 commits = [
1197 {'message': 'master-commit-1'},
1200 {'message': 'master-commit-1'},
1198 {'message': 'master-commit-2-change-1'},
1201 {'message': 'master-commit-2-change-1'},
1199 {'message': 'master-commit-3-change-2'},
1202 {'message': 'master-commit-3-change-2'},
1200
1203
1201 {'message': 'feat-commit-1', 'parents': ['master-commit-1']},
1204 {'message': 'feat-commit-1', 'parents': ['master-commit-1']},
1202 {'message': 'feat-commit-2'},
1205 {'message': 'feat-commit-2'},
1203 ]
1206 ]
1204 commit_ids = backend.create_master_repo(commits)
1207 commit_ids = backend.create_master_repo(commits)
1205 target = backend.create_repo(heads=['master-commit-3-change-2'])
1208 target = backend.create_repo(heads=['master-commit-3-change-2'])
1206 source = backend.create_repo(heads=['feat-commit-2'])
1209 source = backend.create_repo(heads=['feat-commit-2'])
1207
1210
1208 # create a PR between the source and target repos
1211 # create a PR between the source and target repos
1209 pull_request = PullRequest()
1212 pull_request = PullRequest()
1210 pull_request.source_repo = source
1213 pull_request.source_repo = source
1211
1214
1212 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
1215 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
1213 branch=backend.default_branch_name,
1216 branch=backend.default_branch_name,
1214 commit_id=commit_ids['master-commit-3-change-2'])
1217 commit_id=commit_ids['master-commit-3-change-2'])
1215
1218
1216 pull_request.target_repo = target
1219 pull_request.target_repo = target
1217 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
1220 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
1218 branch=backend.default_branch_name, commit_id=commit_ids['feat-commit-2'])
1221 branch=backend.default_branch_name, commit_id=commit_ids['feat-commit-2'])
1219
1222
1220 pull_request.revisions = [
1223 pull_request.revisions = [
1221 commit_ids['feat-commit-1'],
1224 commit_ids['feat-commit-1'],
1222 commit_ids['feat-commit-2']
1225 commit_ids['feat-commit-2']
1223 ]
1226 ]
1224 pull_request.title = u"Test"
1227 pull_request.title = u"Test"
1225 pull_request.description = u"Description"
1228 pull_request.description = u"Description"
1226 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1229 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1227 pull_request.pull_request_state = PullRequest.STATE_CREATED
1230 pull_request.pull_request_state = PullRequest.STATE_CREATED
1228 Session().add(pull_request)
1231 Session().add(pull_request)
1229 Session().commit()
1232 Session().commit()
1230 pull_request_id = pull_request.pull_request_id
1233 pull_request_id = pull_request.pull_request_id
1231
1234
1232 # PR is created, now we simulate a force-push into target,
1235 # PR is created, now we simulate a force-push into target,
1233 # that drops the last 2 commits
1236 # that drops the last 2 commits
1234 vcsrepo = target.scm_instance()
1237 vcsrepo = target.scm_instance()
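# hooks are cleared first, presumably so the low-level git reset below does not
# trigger RhodeCode's own hook callbacks while rewriting the target history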
1235 vcsrepo.config.clear_section('hooks')
1238 vcsrepo.config.clear_section('hooks')
1236 vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2'])
1239 vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2'])
1237 target_repo_name = target.repo_name
1240 target_repo_name = target.repo_name
1238
1241
1239 # update PR
1242 # update PR
1240 url = route_path('pullrequest_update',
1243 url = route_path('pullrequest_update',
1241 repo_name=target_repo_name,
1244 repo_name=target_repo_name,
1242 pull_request_id=pull_request_id)
1245 pull_request_id=pull_request_id)
1243 self.app.post(url,
1246 self.app.post(url,
1244 params={'update_commits': 'true', 'csrf_token': csrf_token},
1247 params={'update_commits': 'true', 'csrf_token': csrf_token},
1245 status=200)
1248 status=200)
1246
1249
1247 response = self.app.get(route_path('pullrequest_new', repo_name=target_repo_name))
1250 response = self.app.get(route_path('pullrequest_new', repo_name=target_repo_name))
1248 assert response.status_int == 200
1251 assert response.status_int == 200
1249 response.mustcontain('Pull request updated to')
1252 response.mustcontain('Pull request updated to')
1250 response.mustcontain('with 0 added, 0 removed commits.')
1253 response.mustcontain('with 0 added, 0 removed commits.')
1251
1254
1252 def test_update_of_ancestor_reference(self, backend, csrf_token):
1255 def test_update_of_ancestor_reference(self, backend, csrf_token):
1253 commits = [
1256 commits = [
1254 {'message': 'ancestor'},
1257 {'message': 'ancestor'},
1255 {'message': 'change'},
1258 {'message': 'change'},
1256 {'message': 'change-2'},
1259 {'message': 'change-2'},
1257 {'message': 'ancestor-new', 'parents': ['ancestor']},
1260 {'message': 'ancestor-new', 'parents': ['ancestor']},
1258 {'message': 'change-rebased'},
1261 {'message': 'change-rebased'},
1259 ]
1262 ]
1260 commit_ids = backend.create_master_repo(commits)
1263 commit_ids = backend.create_master_repo(commits)
1261 target = backend.create_repo(heads=['ancestor'])
1264 target = backend.create_repo(heads=['ancestor'])
1262 source = backend.create_repo(heads=['change'])
1265 source = backend.create_repo(heads=['change'])
1263
1266
1264 # create a PR from the 'change' head in source into the 'ancestor' head in target
1267 # create a PR from the 'change' head in source into the 'ancestor' head in target
1265 pull_request = PullRequest()
1268 pull_request = PullRequest()
1266 pull_request.source_repo = source
1269 pull_request.source_repo = source
1267
1270
1268 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
1271 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
1269 branch=backend.default_branch_name, commit_id=commit_ids['change'])
1272 branch=backend.default_branch_name, commit_id=commit_ids['change'])
1270 pull_request.target_repo = target
1273 pull_request.target_repo = target
1271 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
1274 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
1272 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
1275 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
1273 pull_request.revisions = [commit_ids['change']]
1276 pull_request.revisions = [commit_ids['change']]
1274 pull_request.title = u"Test"
1277 pull_request.title = u"Test"
1275 pull_request.description = u"Description"
1278 pull_request.description = u"Description"
1276 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1279 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1277 pull_request.pull_request_state = PullRequest.STATE_CREATED
1280 pull_request.pull_request_state = PullRequest.STATE_CREATED
1278 Session().add(pull_request)
1281 Session().add(pull_request)
1279 Session().commit()
1282 Session().commit()
1280 pull_request_id = pull_request.pull_request_id
1283 pull_request_id = pull_request.pull_request_id
1281
1284
1282 # target has ancestor - ancestor-new
1285 # target has ancestor - ancestor-new
1283 # source has ancestor - ancestor-new - change-rebased
1286 # source has ancestor - ancestor-new - change-rebased
1284 backend.pull_heads(target, heads=['ancestor-new'])
1287 backend.pull_heads(target, heads=['ancestor-new'])
1285 backend.pull_heads(source, heads=['change-rebased'])
1288 backend.pull_heads(source, heads=['change-rebased'])
1286 target_repo_name = target.repo_name
1289 target_repo_name = target.repo_name
1287
1290
1288 # update PR
1291 # update PR
1289 self.app.post(
1292 self.app.post(
1290 route_path('pullrequest_update',
1293 route_path('pullrequest_update',
1291 repo_name=target_repo_name, pull_request_id=pull_request_id),
1294 repo_name=target_repo_name, pull_request_id=pull_request_id),
1292 params={'update_commits': 'true', 'csrf_token': csrf_token},
1295 params={'update_commits': 'true', 'csrf_token': csrf_token},
1293 status=200)
1296 status=200)
1294
1297
1295 # Expect the target reference to be updated correctly
1298 # Expect the target reference to be updated correctly
1296 pull_request = PullRequest.get(pull_request_id)
1299 pull_request = PullRequest.get(pull_request_id)
1297 assert pull_request.revisions == [commit_ids['change-rebased']]
1300 assert pull_request.revisions == [commit_ids['change-rebased']]
1298 expected_target_ref = 'branch:{branch}:{commit_id}'.format(
1301 expected_target_ref = 'branch:{branch}:{commit_id}'.format(
1299 branch=backend.default_branch_name,
1302 branch=backend.default_branch_name,
1300 commit_id=commit_ids['ancestor-new'])
1303 commit_id=commit_ids['ancestor-new'])
1301 assert pull_request.target_ref == expected_target_ref
1304 assert pull_request.target_ref == expected_target_ref
1302
1305
1303 def test_remove_pull_request_branch(self, backend_git, csrf_token):
1306 def test_remove_pull_request_branch(self, backend_git, csrf_token):
1304 branch_name = 'development'
1307 branch_name = 'development'
1305 commits = [
1308 commits = [
1306 {'message': 'initial-commit'},
1309 {'message': 'initial-commit'},
1307 {'message': 'old-feature'},
1310 {'message': 'old-feature'},
1308 {'message': 'new-feature', 'branch': branch_name},
1311 {'message': 'new-feature', 'branch': branch_name},
1309 ]
1312 ]
1310 repo = backend_git.create_repo(commits)
1313 repo = backend_git.create_repo(commits)
1311 repo_name = repo.repo_name
1314 repo_name = repo.repo_name
1312 commit_ids = backend_git.commit_ids
1315 commit_ids = backend_git.commit_ids
1313
1316
1314 pull_request = PullRequest()
1317 pull_request = PullRequest()
1315 pull_request.source_repo = repo
1318 pull_request.source_repo = repo
1316 pull_request.target_repo = repo
1319 pull_request.target_repo = repo
1317 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
1320 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
1318 branch=branch_name, commit_id=commit_ids['new-feature'])
1321 branch=branch_name, commit_id=commit_ids['new-feature'])
1319 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
1322 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
1320 branch=backend_git.default_branch_name, commit_id=commit_ids['old-feature'])
1323 branch=backend_git.default_branch_name, commit_id=commit_ids['old-feature'])
1321 pull_request.revisions = [commit_ids['new-feature']]
1324 pull_request.revisions = [commit_ids['new-feature']]
1322 pull_request.title = u"Test"
1325 pull_request.title = u"Test"
1323 pull_request.description = u"Description"
1326 pull_request.description = u"Description"
1324 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1327 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1325 pull_request.pull_request_state = PullRequest.STATE_CREATED
1328 pull_request.pull_request_state = PullRequest.STATE_CREATED
1326 Session().add(pull_request)
1329 Session().add(pull_request)
1327 Session().commit()
1330 Session().commit()
1328
1331
1329 pull_request_id = pull_request.pull_request_id
1332 pull_request_id = pull_request.pull_request_id
1330
1333
1331 vcs = repo.scm_instance()
1334 vcs = repo.scm_instance()
1332 vcs.remove_ref('refs/heads/{}'.format(branch_name))
1335 vcs.remove_ref('refs/heads/{}'.format(branch_name))
1333 # NOTE(marcink): run GC to ensure the commits are gone
1336 # NOTE(marcink): run GC to ensure the commits are gone
1334 vcs.run_gc()
1337 vcs.run_gc()
1335
1338
1336 response = self.app.get(route_path(
1339 response = self.app.get(route_path(
1337 'pullrequest_show',
1340 'pullrequest_show',
1338 repo_name=repo_name,
1341 repo_name=repo_name,
1339 pull_request_id=pull_request_id))
1342 pull_request_id=pull_request_id))
1340
1343
1341 assert response.status_int == 200
1344 assert response.status_int == 200
1342
1345
1343 response.assert_response().element_contains(
1346 response.assert_response().element_contains(
1344 '#changeset_compare_view_content .alert strong',
1347 '#changeset_compare_view_content .alert strong',
1345 'Missing commits')
1348 'Missing commits')
1346 response.assert_response().element_contains(
1349 response.assert_response().element_contains(
1347 '#changeset_compare_view_content .alert',
1350 '#changeset_compare_view_content .alert',
1348 'This pull request cannot be displayed, because one or more'
1351 'This pull request cannot be displayed, because one or more'
1349 ' commits no longer exist in the source repository.')
1352 ' commits no longer exist in the source repository.')
1350
1353
1351 def test_strip_commits_from_pull_request(
1354 def test_strip_commits_from_pull_request(
1352 self, backend, pr_util, csrf_token):
1355 self, backend, pr_util, csrf_token):
1353 commits = [
1356 commits = [
1354 {'message': 'initial-commit'},
1357 {'message': 'initial-commit'},
1355 {'message': 'old-feature'},
1358 {'message': 'old-feature'},
1356 {'message': 'new-feature', 'parents': ['initial-commit']},
1359 {'message': 'new-feature', 'parents': ['initial-commit']},
1357 ]
1360 ]
1358 pull_request = pr_util.create_pull_request(
1361 pull_request = pr_util.create_pull_request(
1359 commits, target_head='initial-commit', source_head='new-feature',
1362 commits, target_head='initial-commit', source_head='new-feature',
1360 revisions=['new-feature'])
1363 revisions=['new-feature'])
1361
1364
1362 vcs = pr_util.source_repository.scm_instance()
1365 vcs = pr_util.source_repository.scm_instance()
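# the git backend is given an explicit branch_name for strip; the other backends
# strip by commit id alone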
1363 if backend.alias == 'git':
1366 if backend.alias == 'git':
1364 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
1367 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
1365 else:
1368 else:
1366 vcs.strip(pr_util.commit_ids['new-feature'])
1369 vcs.strip(pr_util.commit_ids['new-feature'])
1367
1370
1368 response = self.app.get(route_path(
1371 response = self.app.get(route_path(
1369 'pullrequest_show',
1372 'pullrequest_show',
1370 repo_name=pr_util.target_repository.repo_name,
1373 repo_name=pr_util.target_repository.repo_name,
1371 pull_request_id=pull_request.pull_request_id))
1374 pull_request_id=pull_request.pull_request_id))
1372
1375
1373 assert response.status_int == 200
1376 assert response.status_int == 200
1374
1377
1375 response.assert_response().element_contains(
1378 response.assert_response().element_contains(
1376 '#changeset_compare_view_content .alert strong',
1379 '#changeset_compare_view_content .alert strong',
1377 'Missing commits')
1380 'Missing commits')
1378 response.assert_response().element_contains(
1381 response.assert_response().element_contains(
1379 '#changeset_compare_view_content .alert',
1382 '#changeset_compare_view_content .alert',
1380 'This pull request cannot be displayed, because one or more'
1383 'This pull request cannot be displayed, because one or more'
1381 ' commits no longer exist in the source repository.')
1384 ' commits no longer exist in the source repository.')
1382 response.assert_response().element_contains(
1385 response.assert_response().element_contains(
1383 '#update_commits',
1386 '#update_commits',
1384 'Update commits')
1387 'Update commits')
1385
1388
1386 def test_strip_commits_and_update(
1389 def test_strip_commits_and_update(
1387 self, backend, pr_util, csrf_token):
1390 self, backend, pr_util, csrf_token):
1388 commits = [
1391 commits = [
1389 {'message': 'initial-commit'},
1392 {'message': 'initial-commit'},
1390 {'message': 'old-feature'},
1393 {'message': 'old-feature'},
1391 {'message': 'new-feature', 'parents': ['old-feature']},
1394 {'message': 'new-feature', 'parents': ['old-feature']},
1392 ]
1395 ]
1393 pull_request = pr_util.create_pull_request(
1396 pull_request = pr_util.create_pull_request(
1394 commits, target_head='old-feature', source_head='new-feature',
1397 commits, target_head='old-feature', source_head='new-feature',
1395 revisions=['new-feature'], mergeable=True)
1398 revisions=['new-feature'], mergeable=True)
1396 pr_id = pull_request.pull_request_id
1399 pr_id = pull_request.pull_request_id
1397 target_repo_name = pull_request.target_repo.repo_name
1400 target_repo_name = pull_request.target_repo.repo_name
1398
1401
1399 vcs = pr_util.source_repository.scm_instance()
1402 vcs = pr_util.source_repository.scm_instance()
1400 if backend.alias == 'git':
1403 if backend.alias == 'git':
1401 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
1404 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
1402 else:
1405 else:
1403 vcs.strip(pr_util.commit_ids['new-feature'])
1406 vcs.strip(pr_util.commit_ids['new-feature'])
1404
1407
1405 url = route_path('pullrequest_update',
1408 url = route_path('pullrequest_update',
1406 repo_name=target_repo_name,
1409 repo_name=target_repo_name,
1407 pull_request_id=pr_id)
1410 pull_request_id=pr_id)
1408 response = self.app.post(url,
1411 response = self.app.post(url,
1409 params={'update_commits': 'true',
1412 params={'update_commits': 'true',
1410 'csrf_token': csrf_token})
1413 'csrf_token': csrf_token})
1411
1414
1412 assert response.status_int == 200
1415 assert response.status_int == 200
1413 assert response.body == '{"response": true, "redirect_url": null}'
1416 assert response.body == '{"response": true, "redirect_url": null}'
1414
1417
1415 # Make sure that after update, it won't raise 500 errors
1418 # Make sure that after update, it won't raise 500 errors
1416 response = self.app.get(route_path(
1419 response = self.app.get(route_path(
1417 'pullrequest_show',
1420 'pullrequest_show',
1418 repo_name=target_repo_name,
1421 repo_name=target_repo_name,
1419 pull_request_id=pr_id))
1422 pull_request_id=pr_id))
1420
1423
1421 assert response.status_int == 200
1424 assert response.status_int == 200
1422 response.assert_response().element_contains(
1425 response.assert_response().element_contains(
1423 '#changeset_compare_view_content .alert strong',
1426 '#changeset_compare_view_content .alert strong',
1424 'Missing commits')
1427 'Missing commits')
1425
1428
1426 def test_branch_is_a_link(self, pr_util):
1429 def test_branch_is_a_link(self, pr_util):
1427 pull_request = pr_util.create_pull_request()
1430 pull_request = pr_util.create_pull_request()
1428 pull_request.source_ref = 'branch:origin:1234567890abcdef'
1431 pull_request.source_ref = 'branch:origin:1234567890abcdef'
1429 pull_request.target_ref = 'branch:target:abcdef1234567890'
1432 pull_request.target_ref = 'branch:target:abcdef1234567890'
1430 Session().add(pull_request)
1433 Session().add(pull_request)
1431 Session().commit()
1434 Session().commit()
1432
1435
1433 response = self.app.get(route_path(
1436 response = self.app.get(route_path(
1434 'pullrequest_show',
1437 'pullrequest_show',
1435 repo_name=pull_request.target_repo.scm_instance().name,
1438 repo_name=pull_request.target_repo.scm_instance().name,
1436 pull_request_id=pull_request.pull_request_id))
1439 pull_request_id=pull_request.pull_request_id))
1437 assert response.status_int == 200
1440 assert response.status_int == 200
1438
1441
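# for branch refs the source/target labels are expected to be wrapped in an <a>
# element whose href points at the repo commits view filtered by that branch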
1439 source = response.assert_response().get_element('.pr-source-info')
1442 source = response.assert_response().get_element('.pr-source-info')
1440 source_parent = source.getparent()
1443 source_parent = source.getparent()
1441 assert len(source_parent) == 1
1444 assert len(source_parent) == 1
1442
1445
1443 target = response.assert_response().get_element('.pr-target-info')
1446 target = response.assert_response().get_element('.pr-target-info')
1444 target_parent = target.getparent()
1447 target_parent = target.getparent()
1445 assert len(target_parent) == 1
1448 assert len(target_parent) == 1
1446
1449
1447 expected_origin_link = route_path(
1450 expected_origin_link = route_path(
1448 'repo_commits',
1451 'repo_commits',
1449 repo_name=pull_request.source_repo.scm_instance().name,
1452 repo_name=pull_request.source_repo.scm_instance().name,
1450 params=dict(branch='origin'))
1453 params=dict(branch='origin'))
1451 expected_target_link = route_path(
1454 expected_target_link = route_path(
1452 'repo_commits',
1455 'repo_commits',
1453 repo_name=pull_request.target_repo.scm_instance().name,
1456 repo_name=pull_request.target_repo.scm_instance().name,
1454 params=dict(branch='target'))
1457 params=dict(branch='target'))
1455 assert source_parent.attrib['href'] == expected_origin_link
1458 assert source_parent.attrib['href'] == expected_origin_link
1456 assert target_parent.attrib['href'] == expected_target_link
1459 assert target_parent.attrib['href'] == expected_target_link
1457
1460
1458 def test_bookmark_is_not_a_link(self, pr_util):
1461 def test_bookmark_is_not_a_link(self, pr_util):
1459 pull_request = pr_util.create_pull_request()
1462 pull_request = pr_util.create_pull_request()
1460 pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
1463 pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
1461 pull_request.target_ref = 'bookmark:target:abcdef1234567890'
1464 pull_request.target_ref = 'bookmark:target:abcdef1234567890'
1462 Session().add(pull_request)
1465 Session().add(pull_request)
1463 Session().commit()
1466 Session().commit()
1464
1467
1465 response = self.app.get(route_path(
1468 response = self.app.get(route_path(
1466 'pullrequest_show',
1469 'pullrequest_show',
1467 repo_name=pull_request.target_repo.scm_instance().name,
1470 repo_name=pull_request.target_repo.scm_instance().name,
1468 pull_request_id=pull_request.pull_request_id))
1471 pull_request_id=pull_request.pull_request_id))
1469 assert response.status_int == 200
1472 assert response.status_int == 200
1470
1473
1471 source = response.assert_response().get_element('.pr-source-info')
1474 source = response.assert_response().get_element('.pr-source-info')
1472 assert source.text.strip() == 'bookmark:origin'
1475 assert source.text.strip() == 'bookmark:origin'
1473 assert source.getparent().attrib.get('href') is None
1476 assert source.getparent().attrib.get('href') is None
1474
1477
1475 target = response.assert_response().get_element('.pr-target-info')
1478 target = response.assert_response().get_element('.pr-target-info')
1476 assert target.text.strip() == 'bookmark:target'
1479 assert target.text.strip() == 'bookmark:target'
1477 assert target.getparent().attrib.get('href') is None
1480 assert target.getparent().attrib.get('href') is None
1478
1481
1479 def test_tag_is_not_a_link(self, pr_util):
1482 def test_tag_is_not_a_link(self, pr_util):
1480 pull_request = pr_util.create_pull_request()
1483 pull_request = pr_util.create_pull_request()
1481 pull_request.source_ref = 'tag:origin:1234567890abcdef'
1484 pull_request.source_ref = 'tag:origin:1234567890abcdef'
1482 pull_request.target_ref = 'tag:target:abcdef1234567890'
1485 pull_request.target_ref = 'tag:target:abcdef1234567890'
1483 Session().add(pull_request)
1486 Session().add(pull_request)
1484 Session().commit()
1487 Session().commit()
1485
1488
1486 response = self.app.get(route_path(
1489 response = self.app.get(route_path(
1487 'pullrequest_show',
1490 'pullrequest_show',
1488 repo_name=pull_request.target_repo.scm_instance().name,
1491 repo_name=pull_request.target_repo.scm_instance().name,
1489 pull_request_id=pull_request.pull_request_id))
1492 pull_request_id=pull_request.pull_request_id))
1490 assert response.status_int == 200
1493 assert response.status_int == 200
1491
1494
1492 source = response.assert_response().get_element('.pr-source-info')
1495 source = response.assert_response().get_element('.pr-source-info')
1493 assert source.text.strip() == 'tag:origin'
1496 assert source.text.strip() == 'tag:origin'
1494 assert source.getparent().attrib.get('href') is None
1497 assert source.getparent().attrib.get('href') is None
1495
1498
1496 target = response.assert_response().get_element('.pr-target-info')
1499 target = response.assert_response().get_element('.pr-target-info')
1497 assert target.text.strip() == 'tag:target'
1500 assert target.text.strip() == 'tag:target'
1498 assert target.getparent().attrib.get('href') is None
1501 assert target.getparent().attrib.get('href') is None
1499
1502
1500 @pytest.mark.parametrize('mergeable', [True, False])
1503 @pytest.mark.parametrize('mergeable', [True, False])
1501 def test_shadow_repository_link(
1504 def test_shadow_repository_link(
1502 self, mergeable, pr_util, http_host_only_stub):
1505 self, mergeable, pr_util, http_host_only_stub):
1503 """
1506 """
1504 Check that the pull request summary page displays a link to the shadow
1507 Check that the pull request summary page displays a link to the shadow
1505 repository if the pull request is mergeable. If it is not mergeable
1508 repository if the pull request is mergeable. If it is not mergeable
1506 the link should not be displayed.
1509 the link should not be displayed.
1507 """
1510 """
1508 pull_request = pr_util.create_pull_request(
1511 pull_request = pr_util.create_pull_request(
1509 mergeable=mergeable, enable_notifications=False)
1512 mergeable=mergeable, enable_notifications=False)
1510 target_repo = pull_request.target_repo.scm_instance()
1513 target_repo = pull_request.target_repo.scm_instance()
1511 pr_id = pull_request.pull_request_id
1514 pr_id = pull_request.pull_request_id
1512 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1515 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1513 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1516 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1514
1517
1515 response = self.app.get(route_path(
1518 response = self.app.get(route_path(
1516 'pullrequest_show',
1519 'pullrequest_show',
1517 repo_name=target_repo.name,
1520 repo_name=target_repo.name,
1518 pull_request_id=pr_id))
1521 pull_request_id=pr_id))
1519
1522
1520 if mergeable:
1523 if mergeable:
1521 response.assert_response().element_value_contains(
1524 response.assert_response().element_value_contains(
1522 'input.pr-mergeinfo', shadow_url)
1525 'input.pr-mergeinfo', shadow_url)
1523 response.assert_response().element_value_contains(
1526 response.assert_response().element_value_contains(
1524 'input.pr-mergeinfo ', 'pr-merge')
1527 'input.pr-mergeinfo ', 'pr-merge')
1525 else:
1528 else:
1526 response.assert_response().no_element_exists('.pr-mergeinfo')
1529 response.assert_response().no_element_exists('.pr-mergeinfo')
1527
1530
1528
1531
1529 @pytest.mark.usefixtures('app')
1532 @pytest.mark.usefixtures('app')
1530 @pytest.mark.backends("git", "hg")
1533 @pytest.mark.backends("git", "hg")
1531 class TestPullrequestsControllerDelete(object):
1534 class TestPullrequestsControllerDelete(object):
1532 def test_pull_request_delete_button_permissions_admin(
1535 def test_pull_request_delete_button_permissions_admin(
1533 self, autologin_user, user_admin, pr_util):
1536 self, autologin_user, user_admin, pr_util):
1534 pull_request = pr_util.create_pull_request(
1537 pull_request = pr_util.create_pull_request(
1535 author=user_admin.username, enable_notifications=False)
1538 author=user_admin.username, enable_notifications=False)
1536
1539
1537 response = self.app.get(route_path(
1540 response = self.app.get(route_path(
1538 'pullrequest_show',
1541 'pullrequest_show',
1539 repo_name=pull_request.target_repo.scm_instance().name,
1542 repo_name=pull_request.target_repo.scm_instance().name,
1540 pull_request_id=pull_request.pull_request_id))
1543 pull_request_id=pull_request.pull_request_id))
1541
1544
1542 response.mustcontain('id="delete_pullrequest"')
1545 response.mustcontain('id="delete_pullrequest"')
1543 response.mustcontain('Confirm to delete this pull request')
1546 response.mustcontain('Confirm to delete this pull request')
1544
1547
1545 def test_pull_request_delete_button_permissions_owner(
1548 def test_pull_request_delete_button_permissions_owner(
1546 self, autologin_regular_user, user_regular, pr_util):
1549 self, autologin_regular_user, user_regular, pr_util):
1547 pull_request = pr_util.create_pull_request(
1550 pull_request = pr_util.create_pull_request(
1548 author=user_regular.username, enable_notifications=False)
1551 author=user_regular.username, enable_notifications=False)
1549
1552
1550 response = self.app.get(route_path(
1553 response = self.app.get(route_path(
1551 'pullrequest_show',
1554 'pullrequest_show',
1552 repo_name=pull_request.target_repo.scm_instance().name,
1555 repo_name=pull_request.target_repo.scm_instance().name,
1553 pull_request_id=pull_request.pull_request_id))
1556 pull_request_id=pull_request.pull_request_id))
1554
1557
1555 response.mustcontain('id="delete_pullrequest"')
1558 response.mustcontain('id="delete_pullrequest"')
1556 response.mustcontain('Confirm to delete this pull request')
1559 response.mustcontain('Confirm to delete this pull request')
1557
1560
1558 def test_pull_request_delete_button_permissions_forbidden(
1561 def test_pull_request_delete_button_permissions_forbidden(
1559 self, autologin_regular_user, user_regular, user_admin, pr_util):
1562 self, autologin_regular_user, user_regular, user_admin, pr_util):
1560 pull_request = pr_util.create_pull_request(
1563 pull_request = pr_util.create_pull_request(
1561 author=user_admin.username, enable_notifications=False)
1564 author=user_admin.username, enable_notifications=False)
1562
1565
1563 response = self.app.get(route_path(
1566 response = self.app.get(route_path(
1564 'pullrequest_show',
1567 'pullrequest_show',
1565 repo_name=pull_request.target_repo.scm_instance().name,
1568 repo_name=pull_request.target_repo.scm_instance().name,
1566 pull_request_id=pull_request.pull_request_id))
1569 pull_request_id=pull_request.pull_request_id))
1567 response.mustcontain(no=['id="delete_pullrequest"'])
1570 response.mustcontain(no=['id="delete_pullrequest"'])
1568 response.mustcontain(no=['Confirm to delete this pull request'])
1571 response.mustcontain(no=['Confirm to delete this pull request'])
1569
1572
1570 def test_pull_request_delete_button_permissions_can_update_cannot_delete(
1573 def test_pull_request_delete_button_permissions_can_update_cannot_delete(
1571 self, autologin_regular_user, user_regular, user_admin, pr_util,
1574 self, autologin_regular_user, user_regular, user_admin, pr_util,
1572 user_util):
1575 user_util):
1573
1576
1574 pull_request = pr_util.create_pull_request(
1577 pull_request = pr_util.create_pull_request(
1575 author=user_admin.username, enable_notifications=False)
1578 author=user_admin.username, enable_notifications=False)
1576
1579
1577 user_util.grant_user_permission_to_repo(
1580 user_util.grant_user_permission_to_repo(
1578 pull_request.target_repo, user_regular,
1581 pull_request.target_repo, user_regular,
1579 'repository.write')
1582 'repository.write')
1580
1583
1581 response = self.app.get(route_path(
1584 response = self.app.get(route_path(
1582 'pullrequest_show',
1585 'pullrequest_show',
1583 repo_name=pull_request.target_repo.scm_instance().name,
1586 repo_name=pull_request.target_repo.scm_instance().name,
1584 pull_request_id=pull_request.pull_request_id))
1587 pull_request_id=pull_request.pull_request_id))
1585
1588
1586 response.mustcontain('id="open_edit_pullrequest"')
1589 response.mustcontain('id="open_edit_pullrequest"')
1587 response.mustcontain('id="delete_pullrequest"')
1590 response.mustcontain('id="delete_pullrequest"')
1588 response.mustcontain(no=['Confirm to delete this pull request'])
1591 response.mustcontain(no=['Confirm to delete this pull request'])
1589
1592
1590 def test_delete_comment_returns_404_if_comment_does_not_exist(
1593 def test_delete_comment_returns_404_if_comment_does_not_exist(
1591 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1594 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1592
1595
1593 pull_request = pr_util.create_pull_request(
1596 pull_request = pr_util.create_pull_request(
1594 author=user_admin.username, enable_notifications=False)
1597 author=user_admin.username, enable_notifications=False)
1595
1598
1596 self.app.post(
1599 self.app.post(
1597 route_path(
1600 route_path(
1598 'pullrequest_comment_delete',
1601 'pullrequest_comment_delete',
1599 repo_name=pull_request.target_repo.scm_instance().name,
1602 repo_name=pull_request.target_repo.scm_instance().name,
1600 pull_request_id=pull_request.pull_request_id,
1603 pull_request_id=pull_request.pull_request_id,
1601 comment_id=1024404),
1604 comment_id=1024404),
1602 extra_environ=xhr_header,
1605 extra_environ=xhr_header,
1603 params={'csrf_token': csrf_token},
1606 params={'csrf_token': csrf_token},
1604 status=404
1607 status=404
1605 )
1608 )
1606
1609
1607 def test_delete_comment(
1610 def test_delete_comment(
1608 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1611 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1609
1612
1610 pull_request = pr_util.create_pull_request(
1613 pull_request = pr_util.create_pull_request(
1611 author=user_admin.username, enable_notifications=False)
1614 author=user_admin.username, enable_notifications=False)
1612 comment = pr_util.create_comment()
1615 comment = pr_util.create_comment()
1613 comment_id = comment.comment_id
1616 comment_id = comment.comment_id
1614
1617
1615 response = self.app.post(
1618 response = self.app.post(
1616 route_path(
1619 route_path(
1617 'pullrequest_comment_delete',
1620 'pullrequest_comment_delete',
1618 repo_name=pull_request.target_repo.scm_instance().name,
1621 repo_name=pull_request.target_repo.scm_instance().name,
1619 pull_request_id=pull_request.pull_request_id,
1622 pull_request_id=pull_request.pull_request_id,
1620 comment_id=comment_id),
1623 comment_id=comment_id),
1621 extra_environ=xhr_header,
1624 extra_environ=xhr_header,
1622 params={'csrf_token': csrf_token},
1625 params={'csrf_token': csrf_token},
1623 status=200
1626 status=200
1624 )
1627 )
1625 assert response.body == 'true'
1628 assert response.body == 'true'
1626
1629
1627 @pytest.mark.parametrize('url_type', [
1630 @pytest.mark.parametrize('url_type', [
1628 'pullrequest_new',
1631 'pullrequest_new',
1629 'pullrequest_create',
1632 'pullrequest_create',
1630 'pullrequest_update',
1633 'pullrequest_update',
1631 'pullrequest_merge',
1634 'pullrequest_merge',
1632 ])
1635 ])
1633 def test_pull_request_is_forbidden_on_archived_repo(
1636 def test_pull_request_is_forbidden_on_archived_repo(
1634 self, autologin_user, backend, xhr_header, user_util, url_type):
1637 self, autologin_user, backend, xhr_header, user_util, url_type):
1635
1638
1636 # create a temporary repo
1639 # create a temporary repo
1637 source = user_util.create_repo(repo_type=backend.alias)
1640 source = user_util.create_repo(repo_type=backend.alias)
1638 repo_name = source.repo_name
1641 repo_name = source.repo_name
1639 repo = Repository.get_by_repo_name(repo_name)
1642 repo = Repository.get_by_repo_name(repo_name)
1640 repo.archived = True
1643 repo.archived = True
1641 Session().commit()
1644 Session().commit()
1642
1645
1643 response = self.app.get(
1646 response = self.app.get(
1644 route_path(url_type, repo_name=repo_name, pull_request_id=1), status=302)
1647 route_path(url_type, repo_name=repo_name, pull_request_id=1), status=302)
1645
1648
1646 msg = 'Action not supported for archived repository.'
1649 msg = 'Action not supported for archived repository.'
1647 assert_session_flash(response, msg)
1650 assert_session_flash(response, msg)
1648
1651
1649
1652
1650 def assert_pull_request_status(pull_request, expected_status):
1653 def assert_pull_request_status(pull_request, expected_status):
1651 status = ChangesetStatusModel().calculated_review_status(pull_request=pull_request)
1654 status = ChangesetStatusModel().calculated_review_status(pull_request=pull_request)
1652 assert status == expected_status
1655 assert status == expected_status
1653
1656
1654
1657
1655 @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
1658 @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
1656 @pytest.mark.usefixtures("autologin_user")
1659 @pytest.mark.usefixtures("autologin_user")
def test_forbidden_to_repo_summary_for_svn_repositories(backend_svn, app, route):
    app.get(route_path(route, repo_name=backend_svn.repo_name), status=404)
@@ -1,791 +1,791 b''
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2020 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import logging
import collections

from pyramid.httpexceptions import (
    HTTPNotFound, HTTPBadRequest, HTTPFound, HTTPForbidden, HTTPConflict)
from pyramid.view import view_config
from pyramid.renderers import render
from pyramid.response import Response

from rhodecode.apps._base import RepoAppView
from rhodecode.apps.file_store import utils as store_utils
from rhodecode.apps.file_store.exceptions import FileNotAllowedException, FileOverSizeException

from rhodecode.lib import diffs, codeblocks, channelstream
from rhodecode.lib.auth import (
    LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous, CSRFRequired)
from rhodecode.lib.ext_json import json
from rhodecode.lib.compat import OrderedDict
from rhodecode.lib.diffs import (
    cache_diff, load_cached_diff, diff_cache_exist, get_diff_context,
    get_diff_whitespace_flag)
from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError, CommentVersionMismatch
import rhodecode.lib.helpers as h
from rhodecode.lib.utils2 import safe_unicode, str2bool, StrictAttributeDict
from rhodecode.lib.vcs.backends.base import EmptyCommit
from rhodecode.lib.vcs.exceptions import (
    RepositoryError, CommitDoesNotExistError)
from rhodecode.model.db import ChangesetComment, ChangesetStatus, FileStore, \
    ChangesetCommentHistory
from rhodecode.model.changeset_status import ChangesetStatusModel
from rhodecode.model.comment import CommentsModel
from rhodecode.model.meta import Session
from rhodecode.model.settings import VcsSettingsModel

log = logging.getLogger(__name__)


def _update_with_GET(params, request):
    for k in ['diff1', 'diff2', 'diff']:
        params[k] += request.GET.getall(k)


class RepoCommitsView(RepoAppView):
    def load_default_context(self):
        c = self._get_local_tmpl_context(include_app_defaults=True)
        c.rhodecode_repo = self.rhodecode_vcs_repo

        return c

    def _is_diff_cache_enabled(self, target_repo):
        caching_enabled = self._get_general_setting(
            target_repo, 'rhodecode_diff_cache')
        log.debug('Diff caching enabled: %s', caching_enabled)
        return caching_enabled

    def _commit(self, commit_id_range, method):
        _ = self.request.translate
        c = self.load_default_context()
        c.fulldiff = self.request.GET.get('fulldiff')

        # fetch global flags of ignore ws or context lines
        diff_context = get_diff_context(self.request)
        hide_whitespace_changes = get_diff_whitespace_flag(self.request)

        # diff_limit will cut off the whole diff if the limit is applied
        # otherwise it will just hide the big files from the front-end
        diff_limit = c.visual.cut_off_limit_diff
        file_limit = c.visual.cut_off_limit_file

        # get ranges of commit ids if preset
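        # commit_id_range is either a single commit id or a range in the
        # form '<start_id>...<end_id>'; splitting on '...' keeps at most two ids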
        commit_range = commit_id_range.split('...')[:2]

        try:
            pre_load = ['affected_files', 'author', 'branch', 'date',
                        'message', 'parents']
            if self.rhodecode_vcs_repo.alias == 'hg':
                pre_load += ['hidden', 'obsolete', 'phase']

            if len(commit_range) == 2:
                commits = self.rhodecode_vcs_repo.get_commits(
                    start_id=commit_range[0], end_id=commit_range[1],
                    pre_load=pre_load, translate_tags=False)
                commits = list(commits)
            else:
                commits = [self.rhodecode_vcs_repo.get_commit(
                    commit_id=commit_id_range, pre_load=pre_load)]

            c.commit_ranges = commits
            if not c.commit_ranges:
                raise RepositoryError('The commit range returned an empty result')
        except CommitDoesNotExistError as e:
            msg = _('No such commit exists. Org exception: `{}`').format(e)
            h.flash(msg, category='error')
            raise HTTPNotFound()
        except Exception:
            log.exception("General failure")
            raise HTTPNotFound()
        single_commit = len(c.commit_ranges) == 1

        c.changes = OrderedDict()
        c.lines_added = 0
        c.lines_deleted = 0

        # auto collapse if we have more than limit
        collapse_limit = diffs.DiffProcessor._collapse_commits_over
        c.collapse_all_commits = len(c.commit_ranges) > collapse_limit

        c.commit_statuses = ChangesetStatus.STATUSES
        c.inline_comments = []
        c.files = []

        c.comments = []
        c.unresolved_comments = []
        c.resolved_comments = []

        # Single commit
        if single_commit:
            commit = c.commit_ranges[0]
            c.comments = CommentsModel().get_comments(
                self.db_repo.repo_id,
                revision=commit.raw_id)

            # comments from PR
            statuses = ChangesetStatusModel().get_statuses(
                self.db_repo.repo_id, commit.raw_id,
                with_revisions=True)

            prs = set()
            reviewers = list()
            reviewers_duplicates = set()  # to not have duplicates from multiple votes
            for c_status in statuses:

                # extract associated pull-requests from votes
                if c_status.pull_request:
                    prs.add(c_status.pull_request)

                # extract reviewers
                _user_id = c_status.author.user_id
                if _user_id not in reviewers_duplicates:
                    reviewers.append(
                        StrictAttributeDict({
                            'user': c_status.author,

                            # fake attributed for commit, page that we don't have
                            # but we share the display with PR page
                            'mandatory': False,
                            'reasons': [],
                            'rule_user_group_data': lambda: None
                        })
                    )
                    reviewers_duplicates.add(_user_id)

            c.reviewers_count = len(reviewers)
            c.observers_count = 0

            # from associated statuses, check the pull requests, and
            # show comments from them
            for pr in prs:
                c.comments.extend(pr.comments)

            c.unresolved_comments = CommentsModel()\
                .get_commit_unresolved_todos(commit.raw_id)
            c.resolved_comments = CommentsModel()\
                .get_commit_resolved_todos(commit.raw_id)

            c.inline_comments_flat = CommentsModel()\
                .get_commit_inline_comments(commit.raw_id)

            review_statuses = ChangesetStatusModel().aggregate_votes_by_user(
                statuses, reviewers)

            c.commit_review_status = ChangesetStatus.STATUS_NOT_REVIEWED

            c.commit_set_reviewers_data_json = collections.OrderedDict({'reviewers': []})

            for review_obj, member, reasons, mandatory, status in review_statuses:
                member_reviewer = h.reviewer_as_json(
                    member, reasons=reasons, mandatory=mandatory, role=None,
                    user_group=None
                )

                current_review_status = status[0][1].status if status else ChangesetStatus.STATUS_NOT_REVIEWED
                member_reviewer['review_status'] = current_review_status
                member_reviewer['review_status_label'] = h.commit_status_lbl(current_review_status)
                member_reviewer['allowed_to_update'] = False
                c.commit_set_reviewers_data_json['reviewers'].append(member_reviewer)

            c.commit_set_reviewers_data_json = json.dumps(c.commit_set_reviewers_data_json)

            # NOTE(marcink): this uses the same voting logic as in pull-requests
            c.commit_review_status = ChangesetStatusModel().calculate_status(review_statuses)
            c.commit_broadcast_channel = channelstream.comment_channel(c.repo_name, commit_obj=commit)

        diff = None
        # Iterate over ranges (default commit view is always one commit)
        for commit in c.commit_ranges:
            c.changes[commit.raw_id] = []

            commit2 = commit
            commit1 = commit.first_parent

            if method == 'show':
                inline_comments = CommentsModel().get_inline_comments(
                    self.db_repo.repo_id, revision=commit.raw_id)
                c.inline_cnt = len(CommentsModel().get_inline_comments_as_list(
                    inline_comments))
                c.inline_comments = inline_comments

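                # the cached diff file is keyed on the commit id together with
                # the whitespace, context and full-diff flags, so each view
                # variant gets its own cache entry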
                cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(
                    self.db_repo)
                cache_file_path = diff_cache_exist(
                    cache_path, 'diff', commit.raw_id,
                    hide_whitespace_changes, diff_context, c.fulldiff)

                caching_enabled = self._is_diff_cache_enabled(self.db_repo)
                force_recache = str2bool(self.request.GET.get('force_recache'))

                cached_diff = None
                if caching_enabled:
                    cached_diff = load_cached_diff(cache_file_path)

                has_proper_diff_cache = cached_diff and cached_diff.get('diff')
                if not force_recache and has_proper_diff_cache:
                    diffset = cached_diff['diff']
                else:
                    vcs_diff = self.rhodecode_vcs_repo.get_diff(
                        commit1, commit2,
                        ignore_whitespace=hide_whitespace_changes,
                        context=diff_context)

                    diff_processor = diffs.DiffProcessor(
                        vcs_diff, format='newdiff', diff_limit=diff_limit,
                        file_limit=file_limit, show_full_diff=c.fulldiff)

                    _parsed = diff_processor.prepare()

                    diffset = codeblocks.DiffSet(
                        repo_name=self.db_repo_name,
                        source_node_getter=codeblocks.diffset_node_getter(commit1),
                        target_node_getter=codeblocks.diffset_node_getter(commit2))

                    diffset = self.path_filter.render_patchset_filtered(
                        diffset, _parsed, commit1.raw_id, commit2.raw_id)

                    # save cached diff
                    if caching_enabled:
                        cache_diff(cache_file_path, diffset, None)

                c.limited_diff = diffset.limited_diff
                c.changes[commit.raw_id] = diffset
            else:
                # TODO(marcink): no cache usage here...
                _diff = self.rhodecode_vcs_repo.get_diff(
                    commit1, commit2,
                    ignore_whitespace=hide_whitespace_changes, context=diff_context)
                diff_processor = diffs.DiffProcessor(
                    _diff, format='newdiff', diff_limit=diff_limit,
                    file_limit=file_limit, show_full_diff=c.fulldiff)
                # downloads/raw we only need RAW diff nothing else
                diff = self.path_filter.get_raw_patch(diff_processor)
                c.changes[commit.raw_id] = [None, None, None, None, diff, None, None]

        # sort comments by how they were generated
        c.comments = sorted(c.comments, key=lambda x: x.comment_id)
        c.at_version_num = None

        if len(c.commit_ranges) == 1:
            c.commit = c.commit_ranges[0]
            c.parent_tmpl = ''.join(
                '# Parent %s\n' % x.raw_id for x in c.commit.parents)

        if method == 'download':
            response = Response(diff)
            response.content_type = 'text/plain'
            response.content_disposition = (
                'attachment; filename=%s.diff' % commit_id_range[:12])
            return response
        elif method == 'patch':
            c.diff = safe_unicode(diff)
            patch = render(
                'rhodecode:templates/changeset/patch_changeset.mako',
                self._get_template_context(c), self.request)
            response = Response(patch)
            response.content_type = 'text/plain'
            return response
        elif method == 'raw':
            response = Response(diff)
            response.content_type = 'text/plain'
            return response
        elif method == 'show':
            if len(c.commit_ranges) == 1:
                html = render(
                    'rhodecode:templates/changeset/changeset.mako',
                    self._get_template_context(c), self.request)
                return Response(html)
            else:
                c.ancestor = None
                c.target_repo = self.db_repo
                html = render(
                    'rhodecode:templates/changeset/changeset_range.mako',
                    self._get_template_context(c), self.request)
                return Response(html)

        raise HTTPBadRequest()

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_commit', request_method='GET',
        renderer=None)
    def repo_commit_show(self):
        commit_id = self.request.matchdict['commit_id']
        return self._commit(commit_id, method='show')

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_commit_raw', request_method='GET',
        renderer=None)
    @view_config(
        route_name='repo_commit_raw_deprecated', request_method='GET',
        renderer=None)
    def repo_commit_raw(self):
        commit_id = self.request.matchdict['commit_id']
        return self._commit(commit_id, method='raw')

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_commit_patch', request_method='GET',
        renderer=None)
    def repo_commit_patch(self):
        commit_id = self.request.matchdict['commit_id']
        return self._commit(commit_id, method='patch')

    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_commit_download', request_method='GET',
        renderer=None)
    def repo_commit_download(self):
        commit_id = self.request.matchdict['commit_id']
        return self._commit(commit_id, method='download')

    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @CSRFRequired()
    @view_config(
        route_name='repo_commit_comment_create', request_method='POST',
        renderer='json_ext')
    def repo_commit_comment_create(self):
        _ = self.request.translate
        commit_id = self.request.matchdict['commit_id']

        c = self.load_default_context()
        status = self.request.POST.get('changeset_status', None)
        text = self.request.POST.get('text')
        comment_type = self.request.POST.get('comment_type')
        resolves_comment_id = self.request.POST.get('resolves_comment_id', None)

        if status:
            text = text or (_('Status change %(transition_icon)s %(status)s')
                            % {'transition_icon': '>',
                               'status': ChangesetStatus.get_status_lbl(status)})

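        # a comment posted from a commit-range view may target several commits
        # at once; collect the unique, non-empty commit ids sent by the form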
        multi_commit_ids = []
        for _commit_id in self.request.POST.get('commit_ids', '').split(','):
            if _commit_id not in ['', None, EmptyCommit.raw_id]:
                if _commit_id not in multi_commit_ids:
                    multi_commit_ids.append(_commit_id)

        commit_ids = multi_commit_ids or [commit_id]

        comment = None
        for current_id in filter(None, commit_ids):
            comment = CommentsModel().create(
                text=text,
                repo=self.db_repo.repo_id,
                user=self._rhodecode_db_user.user_id,
                commit_id=current_id,
                f_path=self.request.POST.get('f_path'),
                line_no=self.request.POST.get('line'),
                status_change=(ChangesetStatus.get_status_lbl(status)
                               if status else None),
                status_change_type=status,
                comment_type=comment_type,
                resolves_comment_id=resolves_comment_id,
                auth_user=self._rhodecode_user
            )
            is_inline = comment.is_inline
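            # inline comments are bound to a file path and line number;
            # everything else is treated as a general commit comment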
417
417
418 # get status if set !
418 # get status if set !
419 if status:
419 if status:
420 # if latest status was from pull request and it's closed
420 # if latest status was from pull request and it's closed
421 # disallow changing status !
421 # disallow changing status !
422 # dont_allow_on_closed_pull_request = True !
422 # dont_allow_on_closed_pull_request = True !
423
423
424 try:
424 try:
425 ChangesetStatusModel().set_status(
425 ChangesetStatusModel().set_status(
426 self.db_repo.repo_id,
426 self.db_repo.repo_id,
427 status,
427 status,
428 self._rhodecode_db_user.user_id,
428 self._rhodecode_db_user.user_id,
429 comment,
429 comment,
430 revision=current_id,
430 revision=current_id,
431 dont_allow_on_closed_pull_request=True
431 dont_allow_on_closed_pull_request=True
432 )
432 )
433 except StatusChangeOnClosedPullRequestError:
433 except StatusChangeOnClosedPullRequestError:
434 msg = _('Changing the status of a commit associated with '
434 msg = _('Changing the status of a commit associated with '
435 'a closed pull request is not allowed')
435 'a closed pull request is not allowed')
436 log.exception(msg)
436 log.exception(msg)
437 h.flash(msg, category='warning')
437 h.flash(msg, category='warning')
438 raise HTTPFound(h.route_path(
438 raise HTTPFound(h.route_path(
439 'repo_commit', repo_name=self.db_repo_name,
439 'repo_commit', repo_name=self.db_repo_name,
440 commit_id=current_id))
440 commit_id=current_id))
441
441
442 commit = self.db_repo.get_commit(current_id)
442 commit = self.db_repo.get_commit(current_id)
443 CommentsModel().trigger_commit_comment_hook(
443 CommentsModel().trigger_commit_comment_hook(
444 self.db_repo, self._rhodecode_user, 'create',
444 self.db_repo, self._rhodecode_user, 'create',
445 data={'comment': comment, 'commit': commit})
445 data={'comment': comment, 'commit': commit})
446
446
447 # finalize, commit and redirect
447 # finalize, commit and redirect
448 Session().commit()
448 Session().commit()
449
449
450 data = {
450 data = {
451 'target_id': h.safeid(h.safe_unicode(
451 'target_id': h.safeid(h.safe_unicode(
452 self.request.POST.get('f_path'))),
452 self.request.POST.get('f_path'))),
453 }
453 }
454 if comment:
454 if comment:
455 c.co = comment
455 c.co = comment
456 c.at_version_num = 0
456 c.at_version_num = 0
457 rendered_comment = render(
457 rendered_comment = render(
458 'rhodecode:templates/changeset/changeset_comment_block.mako',
458 'rhodecode:templates/changeset/changeset_comment_block.mako',
459 self._get_template_context(c), self.request)
459 self._get_template_context(c), self.request)
460
460
461 data.update(comment.get_dict())
461 data.update(comment.get_dict())
462 data.update({'rendered_text': rendered_comment})
462 data.update({'rendered_text': rendered_comment})
463
463
464 comment_broadcast_channel = channelstream.comment_channel(
464 comment_broadcast_channel = channelstream.comment_channel(
465 self.db_repo_name, commit_obj=commit)
465 self.db_repo_name, commit_obj=commit)
466
466
467 comment_data = data
467 comment_data = data
468 comment_type = 'inline' if is_inline else 'general'
468 comment_type = 'inline' if is_inline else 'general'
469 channelstream.comment_channelstream_push(
469 channelstream.comment_channelstream_push(
470 self.request, comment_broadcast_channel, self._rhodecode_user,
470 self.request, comment_broadcast_channel, self._rhodecode_user,
471 _('posted a new {} comment').format(comment_type),
471 _('posted a new {} comment').format(comment_type),
472 comment_data=comment_data)
472 comment_data=comment_data)
473
473
474 return data
474 return data
475
475
476 @LoginRequired()
476 @LoginRequired()
477 @NotAnonymous()
477 @NotAnonymous()
478 @HasRepoPermissionAnyDecorator(
478 @HasRepoPermissionAnyDecorator(
479 'repository.read', 'repository.write', 'repository.admin')
479 'repository.read', 'repository.write', 'repository.admin')
480 @CSRFRequired()
480 @CSRFRequired()
481 @view_config(
481 @view_config(
482 route_name='repo_commit_comment_preview', request_method='POST',
482 route_name='repo_commit_comment_preview', request_method='POST',
483 renderer='string', xhr=True)
483 renderer='string', xhr=True)
484 def repo_commit_comment_preview(self):
484 def repo_commit_comment_preview(self):
485 # Technically a CSRF token is not needed as no state changes with this
485 # Technically a CSRF token is not needed as no state changes with this
486 # call. However, as this is a POST is better to have it, so automated
486 # call. However, as this is a POST is better to have it, so automated
487 # tools don't flag it as potential CSRF.
487 # tools don't flag it as potential CSRF.
488 # Post is required because the payload could be bigger than the maximum
488 # Post is required because the payload could be bigger than the maximum
489 # allowed by GET.
489 # allowed by GET.
490
490
491 text = self.request.POST.get('text')
491 text = self.request.POST.get('text')
492 renderer = self.request.POST.get('renderer') or 'rst'
492 renderer = self.request.POST.get('renderer') or 'rst'
493 if text:
493 if text:
494 return h.render(text, renderer=renderer, mentions=True,
494 return h.render(text, renderer=renderer, mentions=True,
495 repo_name=self.db_repo_name)
495 repo_name=self.db_repo_name)
496 return ''
496 return ''
497
497
498 @LoginRequired()
498 @LoginRequired()
499 @HasRepoPermissionAnyDecorator(
499 @HasRepoPermissionAnyDecorator(
500 'repository.read', 'repository.write', 'repository.admin')
500 'repository.read', 'repository.write', 'repository.admin')
501 @CSRFRequired()
501 @CSRFRequired()
502 @view_config(
502 @view_config(
503 route_name='repo_commit_comment_history_view', request_method='POST',
503 route_name='repo_commit_comment_history_view', request_method='POST',
504 renderer='string', xhr=True)
504 renderer='string', xhr=True)
505 def repo_commit_comment_history_view(self):
505 def repo_commit_comment_history_view(self):
506 c = self.load_default_context()
506 c = self.load_default_context()
507
507
508 comment_history_id = self.request.matchdict['comment_history_id']
508 comment_history_id = self.request.matchdict['comment_history_id']
509 comment_history = ChangesetCommentHistory.get_or_404(comment_history_id)
509 comment_history = ChangesetCommentHistory.get_or_404(comment_history_id)
510 is_repo_comment = comment_history.comment.repo.repo_id == self.db_repo.repo_id
510 is_repo_comment = comment_history.comment.repo.repo_id == self.db_repo.repo_id
511
511
512 if is_repo_comment:
512 if is_repo_comment:
513 c.comment_history = comment_history
513 c.comment_history = comment_history
514
514
515 rendered_comment = render(
515 rendered_comment = render(
516 'rhodecode:templates/changeset/comment_history.mako',
516 'rhodecode:templates/changeset/comment_history.mako',
517 self._get_template_context(c)
517 self._get_template_context(c)
518 , self.request)
518 , self.request)
519 return rendered_comment
519 return rendered_comment
520 else:
520 else:
521 log.warning('No permissions for user %s to show comment_history_id: %s',
521 log.warning('No permissions for user %s to show comment_history_id: %s',
522 self._rhodecode_db_user, comment_history_id)
522 self._rhodecode_db_user, comment_history_id)
523 raise HTTPNotFound()
523 raise HTTPNotFound()
524
524
525 @LoginRequired()
525 @LoginRequired()
526 @NotAnonymous()
526 @NotAnonymous()
527 @HasRepoPermissionAnyDecorator(
527 @HasRepoPermissionAnyDecorator(
528 'repository.read', 'repository.write', 'repository.admin')
528 'repository.read', 'repository.write', 'repository.admin')
529 @CSRFRequired()
529 @CSRFRequired()
530 @view_config(
530 @view_config(
531 route_name='repo_commit_comment_attachment_upload', request_method='POST',
531 route_name='repo_commit_comment_attachment_upload', request_method='POST',
532 renderer='json_ext', xhr=True)
532 renderer='json_ext', xhr=True)
533 def repo_commit_comment_attachment_upload(self):
533 def repo_commit_comment_attachment_upload(self):
534 c = self.load_default_context()
534 c = self.load_default_context()
535 upload_key = 'attachment'
535 upload_key = 'attachment'
536
536
537 file_obj = self.request.POST.get(upload_key)
537 file_obj = self.request.POST.get(upload_key)
538
538
539 if file_obj is None:
539 if file_obj is None:
540 self.request.response.status = 400
540 self.request.response.status = 400
541 return {'store_fid': None,
541 return {'store_fid': None,
542 'access_path': None,
542 'access_path': None,
543 'error': '{} data field is missing'.format(upload_key)}
543 'error': '{} data field is missing'.format(upload_key)}
544
544
545 if not hasattr(file_obj, 'filename'):
545 if not hasattr(file_obj, 'filename'):
546 self.request.response.status = 400
546 self.request.response.status = 400
547 return {'store_fid': None,
547 return {'store_fid': None,
548 'access_path': None,
548 'access_path': None,
549 'error': 'filename cannot be read from the data field'}
549 'error': 'filename cannot be read from the data field'}
550
550
551 filename = file_obj.filename
551 filename = file_obj.filename
552 file_display_name = filename
552 file_display_name = filename
553
553
554 metadata = {
554 metadata = {
555 'user_uploaded': {'username': self._rhodecode_user.username,
555 'user_uploaded': {'username': self._rhodecode_user.username,
556 'user_id': self._rhodecode_user.user_id,
556 'user_id': self._rhodecode_user.user_id,
557 'ip': self._rhodecode_user.ip_addr}}
557 'ip': self._rhodecode_user.ip_addr}}
558
558
559 # TODO(marcink): allow .ini configuration for allowed_extensions, and file-size
559 # TODO(marcink): allow .ini configuration for allowed_extensions, and file-size
560 allowed_extensions = [
560 allowed_extensions = [
561 'gif', '.jpeg', '.jpg', '.png', '.docx', '.gz', '.log', '.pdf',
561 'gif', '.jpeg', '.jpg', '.png', '.docx', '.gz', '.log', '.pdf',
562 '.pptx', '.txt', '.xlsx', '.zip']
562 '.pptx', '.txt', '.xlsx', '.zip']
563 max_file_size = 10 * 1024 * 1024 # 10MB, also validated via dropzone.js
563 max_file_size = 10 * 1024 * 1024 # 10MB, also validated via dropzone.js
564
564
565 try:
565 try:
566 storage = store_utils.get_file_storage(self.request.registry.settings)
566 storage = store_utils.get_file_storage(self.request.registry.settings)
567 store_uid, metadata = storage.save_file(
567 store_uid, metadata = storage.save_file(
568 file_obj.file, filename, extra_metadata=metadata,
568 file_obj.file, filename, extra_metadata=metadata,
569 extensions=allowed_extensions, max_filesize=max_file_size)
569 extensions=allowed_extensions, max_filesize=max_file_size)
570 except FileNotAllowedException:
570 except FileNotAllowedException:
571 self.request.response.status = 400
571 self.request.response.status = 400
572 permitted_extensions = ', '.join(allowed_extensions)
572 permitted_extensions = ', '.join(allowed_extensions)
573 error_msg = 'File `{}` is not allowed. ' \
573 error_msg = 'File `{}` is not allowed. ' \
574 'Only following extensions are permitted: {}'.format(
574 'Only following extensions are permitted: {}'.format(
575 filename, permitted_extensions)
575 filename, permitted_extensions)
576 return {'store_fid': None,
576 return {'store_fid': None,
577 'access_path': None,
577 'access_path': None,
578 'error': error_msg}
578 'error': error_msg}
579 except FileOverSizeException:
579 except FileOverSizeException:
580 self.request.response.status = 400
580 self.request.response.status = 400
581 limit_mb = h.format_byte_size_binary(max_file_size)
581 limit_mb = h.format_byte_size_binary(max_file_size)
582 return {'store_fid': None,
582 return {'store_fid': None,
583 'access_path': None,
583 'access_path': None,
584 'error': 'File {} exceeds the allowed limit of {}.'.format(
584 'error': 'File {} exceeds the allowed limit of {}.'.format(
585 filename, limit_mb)}
585 filename, limit_mb)}
586
586
587 try:
587 try:
588 entry = FileStore.create(
588 entry = FileStore.create(
589 file_uid=store_uid, filename=metadata["filename"],
589 file_uid=store_uid, filename=metadata["filename"],
590 file_hash=metadata["sha256"], file_size=metadata["size"],
590 file_hash=metadata["sha256"], file_size=metadata["size"],
591 file_display_name=file_display_name,
591 file_display_name=file_display_name,
592 file_description=u'comment attachment `{}`'.format(safe_unicode(filename)),
592 file_description=u'comment attachment `{}`'.format(safe_unicode(filename)),
593 hidden=True, check_acl=True, user_id=self._rhodecode_user.user_id,
593 hidden=True, check_acl=True, user_id=self._rhodecode_user.user_id,
594 scope_repo_id=self.db_repo.repo_id
594 scope_repo_id=self.db_repo.repo_id
595 )
595 )
596 Session().add(entry)
596 Session().add(entry)
597 Session().commit()
597 Session().commit()
598 log.debug('Stored upload in DB as %s', entry)
598 log.debug('Stored upload in DB as %s', entry)
599 except Exception:
599 except Exception:
600 log.exception('Failed to store file %s', filename)
600 log.exception('Failed to store file %s', filename)
601 self.request.response.status = 400
601 self.request.response.status = 400
602 return {'store_fid': None,
602 return {'store_fid': None,
603 'access_path': None,
603 'access_path': None,
604 'error': 'File {} failed to store in DB.'.format(filename)}
604 'error': 'File {} failed to store in DB.'.format(filename)}
605
605
606 Session().commit()
606 Session().commit()
607
607
608 return {
608 return {
609 'store_fid': store_uid,
609 'store_fid': store_uid,
610 'access_path': h.route_path(
610 'access_path': h.route_path(
611 'download_file', fid=store_uid),
611 'download_file', fid=store_uid),
612 'fqn_access_path': h.route_url(
612 'fqn_access_path': h.route_url(
613 'download_file', fid=store_uid),
613 'download_file', fid=store_uid),
614 'repo_access_path': h.route_path(
614 'repo_access_path': h.route_path(
615 'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid),
615 'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid),
616 'repo_fqn_access_path': h.route_url(
616 'repo_fqn_access_path': h.route_url(
617 'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid),
617 'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid),
618 }
618 }
619
619
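Editor's note: the upload handler above enforces an extension allow-list and a 10MB size cap before handing the file to the storage backend. The sketch below restates that validation as a standalone helper; it is illustrative only, and the names ALLOWED_EXTENSIONS, MAX_FILE_SIZE and validate_upload are assumptions made for the example, not part of the shipped store_utils API.

import os

ALLOWED_EXTENSIONS = {
    '.gif', '.jpeg', '.jpg', '.png', '.docx', '.gz', '.log', '.pdf',
    '.pptx', '.txt', '.xlsx', '.zip'}
MAX_FILE_SIZE = 10 * 1024 * 1024  # 10MB, mirrors the limit used above


def validate_upload(filename, size_bytes):
    # return (ok, error_message) for a prospective attachment upload
    ext = os.path.splitext(filename)[1].lower()
    if ext not in ALLOWED_EXTENSIONS:
        return False, 'File `{}` is not allowed. Only the following extensions ' \
                      'are permitted: {}'.format(
                          filename, ', '.join(sorted(ALLOWED_EXTENSIONS)))
    if size_bytes > MAX_FILE_SIZE:
        return False, 'File {} exceeds the allowed limit of {} bytes.'.format(
            filename, MAX_FILE_SIZE)
    return True, None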
620 @LoginRequired()
620 @LoginRequired()
621 @NotAnonymous()
621 @NotAnonymous()
622 @HasRepoPermissionAnyDecorator(
622 @HasRepoPermissionAnyDecorator(
623 'repository.read', 'repository.write', 'repository.admin')
623 'repository.read', 'repository.write', 'repository.admin')
624 @CSRFRequired()
624 @CSRFRequired()
625 @view_config(
625 @view_config(
626 route_name='repo_commit_comment_delete', request_method='POST',
626 route_name='repo_commit_comment_delete', request_method='POST',
627 renderer='json_ext')
627 renderer='json_ext')
628 def repo_commit_comment_delete(self):
628 def repo_commit_comment_delete(self):
629 commit_id = self.request.matchdict['commit_id']
629 commit_id = self.request.matchdict['commit_id']
630 comment_id = self.request.matchdict['comment_id']
630 comment_id = self.request.matchdict['comment_id']
631
631
632 comment = ChangesetComment.get_or_404(comment_id)
632 comment = ChangesetComment.get_or_404(comment_id)
633 if not comment:
633 if not comment:
634 log.debug('Comment with id:%s not found, skipping', comment_id)
634 log.debug('Comment with id:%s not found, skipping', comment_id)
635 # comment already deleted in another call probably
635 # comment already deleted in another call probably
636 return True
636 return True
637
637
638 if comment.immutable:
638 if comment.immutable:
639 # don't allow deleting comments that are immutable
639 # don't allow deleting comments that are immutable
640 raise HTTPForbidden()
640 raise HTTPForbidden()
641
641
642 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
642 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
643 super_admin = h.HasPermissionAny('hg.admin')()
643 super_admin = h.HasPermissionAny('hg.admin')()
644 comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id)
644 comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id)
645 is_repo_comment = comment.repo.repo_id == self.db_repo.repo_id
645 is_repo_comment = comment.repo.repo_id == self.db_repo.repo_id
646 comment_repo_admin = is_repo_admin and is_repo_comment
646 comment_repo_admin = is_repo_admin and is_repo_comment
647
647
648 if super_admin or comment_owner or comment_repo_admin:
648 if super_admin or comment_owner or comment_repo_admin:
649 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
649 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
650 Session().commit()
650 Session().commit()
651 return True
651 return True
652 else:
652 else:
653 log.warning('No permissions for user %s to delete comment_id: %s',
653 log.warning('No permissions for user %s to delete comment_id: %s',
654 self._rhodecode_db_user, comment_id)
654 self._rhodecode_db_user, comment_id)
655 raise HTTPNotFound()
655 raise HTTPNotFound()
656
656
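Editor's note: the delete view above (and the edit view that follows) gate the operation on the same rule: super admin, comment author, or repo admin of the repository the comment belongs to. A minimal sketch of that rule with plain booleans, assuming the caller has already resolved the individual permission checks:

def can_modify_comment(is_super_admin, is_comment_owner,
                       is_repo_admin, comment_belongs_to_repo):
    # True if the current user may delete or edit the comment
    comment_repo_admin = is_repo_admin and comment_belongs_to_repo
    return is_super_admin or is_comment_owner or comment_repo_admin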
657 @LoginRequired()
657 @LoginRequired()
658 @NotAnonymous()
658 @NotAnonymous()
659 @HasRepoPermissionAnyDecorator(
659 @HasRepoPermissionAnyDecorator(
660 'repository.read', 'repository.write', 'repository.admin')
660 'repository.read', 'repository.write', 'repository.admin')
661 @CSRFRequired()
661 @CSRFRequired()
662 @view_config(
662 @view_config(
663 route_name='repo_commit_comment_edit', request_method='POST',
663 route_name='repo_commit_comment_edit', request_method='POST',
664 renderer='json_ext')
664 renderer='json_ext')
665 def repo_commit_comment_edit(self):
665 def repo_commit_comment_edit(self):
666 self.load_default_context()
666 self.load_default_context()
667
667
668 comment_id = self.request.matchdict['comment_id']
668 comment_id = self.request.matchdict['comment_id']
669 comment = ChangesetComment.get_or_404(comment_id)
669 comment = ChangesetComment.get_or_404(comment_id)
670
670
671 if comment.immutable:
671 if comment.immutable:
672 # don't allow editing comments that are immutable
672 # don't allow editing comments that are immutable
673 raise HTTPForbidden()
673 raise HTTPForbidden()
674
674
675 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
675 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
676 super_admin = h.HasPermissionAny('hg.admin')()
676 super_admin = h.HasPermissionAny('hg.admin')()
677 comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id)
677 comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id)
678 is_repo_comment = comment.repo.repo_id == self.db_repo.repo_id
678 is_repo_comment = comment.repo.repo_id == self.db_repo.repo_id
679 comment_repo_admin = is_repo_admin and is_repo_comment
679 comment_repo_admin = is_repo_admin and is_repo_comment
680
680
681 if super_admin or comment_owner or comment_repo_admin:
681 if super_admin or comment_owner or comment_repo_admin:
682 text = self.request.POST.get('text')
682 text = self.request.POST.get('text')
683 version = self.request.POST.get('version', '')
683 version = self.request.POST.get('version', '')
684 if text == comment.text:
684 if text == comment.text:
685 log.warning(
685 log.warning(
686 'Comment(repo): '
686 'Comment(repo): '
687 'Trying to create new version '
687 'Trying to create new version '
688 'with the same comment body for comment_id: {}'.format(
688 'with the same comment body for comment_id: {}'.format(
689 comment_id,
689 comment_id,
690 )
690 )
691 )
691 )
692 raise HTTPNotFound()
692 raise HTTPNotFound()
693
693
694 if version.isdigit():
694 if version.isdigit():
695 version = int(version)
695 version = int(version)
696 else:
696 else:
697 log.warning(
697 log.warning(
698 'Comment(repo): Wrong version type {} {} '
698 'Comment(repo): Wrong version type {} {} '
699 'for comment {}'.format(
699 'for comment {}'.format(
700 version,
700 version,
701 type(version),
701 type(version),
702 comment_id,
702 comment_id,
703 )
703 )
704 )
704 )
705 raise HTTPNotFound()
705 raise HTTPNotFound()
706
706
707 try:
707 try:
708 comment_history = CommentsModel().edit(
708 comment_history = CommentsModel().edit(
709 comment_id=comment_id,
709 comment_id=comment_id,
710 text=text,
710 text=text,
711 auth_user=self._rhodecode_user,
711 auth_user=self._rhodecode_user,
712 version=version,
712 version=version,
713 )
713 )
714 except CommentVersionMismatch:
714 except CommentVersionMismatch:
715 raise HTTPConflict()
715 raise HTTPConflict()
716
716
717 if not comment_history:
717 if not comment_history:
718 raise HTTPNotFound()
718 raise HTTPNotFound()
719
719
720 commit_id = self.request.matchdict['commit_id']
720 commit_id = self.request.matchdict['commit_id']
721 commit = self.db_repo.get_commit(commit_id)
721 commit = self.db_repo.get_commit(commit_id)
722 CommentsModel().trigger_commit_comment_hook(
722 CommentsModel().trigger_commit_comment_hook(
723 self.db_repo, self._rhodecode_user, 'edit',
723 self.db_repo, self._rhodecode_user, 'edit',
724 data={'comment': comment, 'commit': commit})
724 data={'comment': comment, 'commit': commit})
725
725
726 Session().commit()
726 Session().commit()
727 return {
727 return {
728 'comment_history_id': comment_history.comment_history_id,
728 'comment_history_id': comment_history.comment_history_id,
729 'comment_id': comment.comment_id,
729 'comment_id': comment.comment_id,
730 'comment_version': comment_history.version,
730 'comment_version': comment_history.version,
731 'comment_author_username': comment_history.author.username,
731 'comment_author_username': comment_history.author.username,
732 'comment_author_gravatar': h.gravatar_url(comment_history.author.email, 16),
732 'comment_author_gravatar': h.gravatar_url(comment_history.author.email, 16),
733 'comment_created_on': h.age_component(comment_history.created_on,
733 'comment_created_on': h.age_component(comment_history.created_on,
734 time_is_local=True),
734 time_is_local=True),
735 }
735 }
736 else:
736 else:
737 log.warning('No permissions for user %s to edit comment_id: %s',
737 log.warning('No permissions for user %s to edit comment_id: %s',
738 self._rhodecode_db_user, comment_id)
738 self._rhodecode_db_user, comment_id)
739 raise HTTPNotFound()
739 raise HTTPNotFound()
740
740
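Editor's note: the edit view above forwards the client-supplied version so concurrent edits can be rejected with HTTP 409 (CommentVersionMismatch). The sketch below illustrates that optimistic check in isolation; the real logic lives in CommentsModel().edit() and may differ in detail, so treat the names here as assumptions.

class VersionMismatch(Exception):
    # stand-in for CommentVersionMismatch in this sketch
    pass


def apply_edit(current_version, submitted_version, new_text, save_callback):
    # apply an edit only if the client was editing the latest version
    if submitted_version != current_version:
        # someone saved a newer version in the meantime -> surface a conflict
        raise VersionMismatch()
    return save_callback(new_text, current_version + 1)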
741 @LoginRequired()
741 @LoginRequired()
742 @HasRepoPermissionAnyDecorator(
742 @HasRepoPermissionAnyDecorator(
743 'repository.read', 'repository.write', 'repository.admin')
743 'repository.read', 'repository.write', 'repository.admin')
744 @view_config(
744 @view_config(
745 route_name='repo_commit_data', request_method='GET',
745 route_name='repo_commit_data', request_method='GET',
746 renderer='json_ext', xhr=True)
746 renderer='json_ext', xhr=True)
747 def repo_commit_data(self):
747 def repo_commit_data(self):
748 commit_id = self.request.matchdict['commit_id']
748 commit_id = self.request.matchdict['commit_id']
749 self.load_default_context()
749 self.load_default_context()
750
750
751 try:
751 try:
752 return self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
752 return self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
753 except CommitDoesNotExistError as e:
753 except CommitDoesNotExistError as e:
754 return EmptyCommit(message=str(e))
754 return EmptyCommit(message=str(e))
755
755
756 @LoginRequired()
756 @LoginRequired()
757 @HasRepoPermissionAnyDecorator(
757 @HasRepoPermissionAnyDecorator(
758 'repository.read', 'repository.write', 'repository.admin')
758 'repository.read', 'repository.write', 'repository.admin')
759 @view_config(
759 @view_config(
760 route_name='repo_commit_children', request_method='GET',
760 route_name='repo_commit_children', request_method='GET',
761 renderer='json_ext', xhr=True)
761 renderer='json_ext', xhr=True)
762 def repo_commit_children(self):
762 def repo_commit_children(self):
763 commit_id = self.request.matchdict['commit_id']
763 commit_id = self.request.matchdict['commit_id']
764 self.load_default_context()
764 self.load_default_context()
765
765
766 try:
766 try:
767 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
767 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
768 children = commit.children
768 children = commit.children
769 except CommitDoesNotExistError:
769 except CommitDoesNotExistError:
770 children = []
770 children = []
771
771
772 result = {"results": children}
772 result = {"results": children}
773 return result
773 return result
774
774
775 @LoginRequired()
775 @LoginRequired()
776 @HasRepoPermissionAnyDecorator(
776 @HasRepoPermissionAnyDecorator(
777 'repository.read', 'repository.write', 'repository.admin')
777 'repository.read', 'repository.write', 'repository.admin')
778 @view_config(
778 @view_config(
779 route_name='repo_commit_parents', request_method='GET',
779 route_name='repo_commit_parents', request_method='GET',
780 renderer='json_ext')
780 renderer='json_ext')
781 def repo_commit_parents(self):
781 def repo_commit_parents(self):
782 commit_id = self.request.matchdict['commit_id']
782 commit_id = self.request.matchdict['commit_id']
783 self.load_default_context()
783 self.load_default_context()
784
784
785 try:
785 try:
786 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
786 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
787 parents = commit.parents
787 parents = commit.parents
788 except CommitDoesNotExistError:
788 except CommitDoesNotExistError:
789 parents = []
789 parents = []
790 result = {"results": parents}
790 result = {"results": parents}
791 return result
791 return result
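Editor's note: both XHR endpoints above reply with a small JSON envelope of the form {"results": [...]}. A hypothetical client-side helper that unwraps it; the function name is an assumption made for the example:

import json


def parse_commit_relations(response_body):
    # extract the commit list from a repo_commit_children/parents reply
    payload = json.loads(response_body)
    return payload.get('results', [])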
@@ -1,1813 +1,1816 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2020 RhodeCode GmbH
3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22 import collections
22 import collections
23
23
24 import formencode
24 import formencode
25 import formencode.htmlfill
25 import formencode.htmlfill
26 import peppercorn
26 import peppercorn
27 from pyramid.httpexceptions import (
27 from pyramid.httpexceptions import (
28 HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest, HTTPConflict)
28 HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest, HTTPConflict)
29 from pyramid.view import view_config
29 from pyramid.view import view_config
30 from pyramid.renderers import render
30 from pyramid.renderers import render
31
31
32 from rhodecode.apps._base import RepoAppView, DataGridAppView
32 from rhodecode.apps._base import RepoAppView, DataGridAppView
33
33
34 from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream
34 from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream
35 from rhodecode.lib.base import vcs_operation_context
35 from rhodecode.lib.base import vcs_operation_context
36 from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist
36 from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist
37 from rhodecode.lib.exceptions import CommentVersionMismatch
37 from rhodecode.lib.exceptions import CommentVersionMismatch
38 from rhodecode.lib.ext_json import json
38 from rhodecode.lib.ext_json import json
39 from rhodecode.lib.auth import (
39 from rhodecode.lib.auth import (
40 LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator,
40 LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator,
41 NotAnonymous, CSRFRequired)
41 NotAnonymous, CSRFRequired)
42 from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode, safe_int, aslist
42 from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode, safe_int, aslist
43 from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason, Reference
43 from rhodecode.lib.vcs.backends.base import (
44 EmptyCommit, UpdateFailureReason, unicode_to_reference)
44 from rhodecode.lib.vcs.exceptions import (
45 from rhodecode.lib.vcs.exceptions import (
45 CommitDoesNotExistError, RepositoryRequirementError, EmptyRepositoryError)
46 CommitDoesNotExistError, RepositoryRequirementError, EmptyRepositoryError)
46 from rhodecode.model.changeset_status import ChangesetStatusModel
47 from rhodecode.model.changeset_status import ChangesetStatusModel
47 from rhodecode.model.comment import CommentsModel
48 from rhodecode.model.comment import CommentsModel
48 from rhodecode.model.db import (
49 from rhodecode.model.db import (
49 func, or_, PullRequest, ChangesetComment, ChangesetStatus, Repository,
50 func, or_, PullRequest, ChangesetComment, ChangesetStatus, Repository,
50 PullRequestReviewers)
51 PullRequestReviewers)
51 from rhodecode.model.forms import PullRequestForm
52 from rhodecode.model.forms import PullRequestForm
52 from rhodecode.model.meta import Session
53 from rhodecode.model.meta import Session
53 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
54 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
54 from rhodecode.model.scm import ScmModel
55 from rhodecode.model.scm import ScmModel
55
56
56 log = logging.getLogger(__name__)
57 log = logging.getLogger(__name__)
57
58
58
59
59 class RepoPullRequestsView(RepoAppView, DataGridAppView):
60 class RepoPullRequestsView(RepoAppView, DataGridAppView):
60
61
61 def load_default_context(self):
62 def load_default_context(self):
62 c = self._get_local_tmpl_context(include_app_defaults=True)
63 c = self._get_local_tmpl_context(include_app_defaults=True)
63 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
64 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
64 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
65 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
65 # backward compat., we use a plain renderer for OLD PRs
66 # backward compat., we use a plain renderer for OLD PRs
66 c.renderer = 'plain'
67 c.renderer = 'plain'
67 return c
68 return c
68
69
69 def _get_pull_requests_list(
70 def _get_pull_requests_list(
70 self, repo_name, source, filter_type, opened_by, statuses):
71 self, repo_name, source, filter_type, opened_by, statuses):
71
72
72 draw, start, limit = self._extract_chunk(self.request)
73 draw, start, limit = self._extract_chunk(self.request)
73 search_q, order_by, order_dir = self._extract_ordering(self.request)
74 search_q, order_by, order_dir = self._extract_ordering(self.request)
74 _render = self.request.get_partial_renderer(
75 _render = self.request.get_partial_renderer(
75 'rhodecode:templates/data_table/_dt_elements.mako')
76 'rhodecode:templates/data_table/_dt_elements.mako')
76
77
77 # pagination
78 # pagination
78
79
79 if filter_type == 'awaiting_review':
80 if filter_type == 'awaiting_review':
80 pull_requests = PullRequestModel().get_awaiting_review(
81 pull_requests = PullRequestModel().get_awaiting_review(
81 repo_name, search_q=search_q, source=source, opened_by=opened_by,
82 repo_name, search_q=search_q, source=source, opened_by=opened_by,
82 statuses=statuses, offset=start, length=limit,
83 statuses=statuses, offset=start, length=limit,
83 order_by=order_by, order_dir=order_dir)
84 order_by=order_by, order_dir=order_dir)
84 pull_requests_total_count = PullRequestModel().count_awaiting_review(
85 pull_requests_total_count = PullRequestModel().count_awaiting_review(
85 repo_name, search_q=search_q, source=source, statuses=statuses,
86 repo_name, search_q=search_q, source=source, statuses=statuses,
86 opened_by=opened_by)
87 opened_by=opened_by)
87 elif filter_type == 'awaiting_my_review':
88 elif filter_type == 'awaiting_my_review':
88 pull_requests = PullRequestModel().get_awaiting_my_review(
89 pull_requests = PullRequestModel().get_awaiting_my_review(
89 repo_name, search_q=search_q, source=source, opened_by=opened_by,
90 repo_name, search_q=search_q, source=source, opened_by=opened_by,
90 user_id=self._rhodecode_user.user_id, statuses=statuses,
91 user_id=self._rhodecode_user.user_id, statuses=statuses,
91 offset=start, length=limit, order_by=order_by,
92 offset=start, length=limit, order_by=order_by,
92 order_dir=order_dir)
93 order_dir=order_dir)
93 pull_requests_total_count = PullRequestModel().count_awaiting_my_review(
94 pull_requests_total_count = PullRequestModel().count_awaiting_my_review(
94 repo_name, search_q=search_q, source=source, user_id=self._rhodecode_user.user_id,
95 repo_name, search_q=search_q, source=source, user_id=self._rhodecode_user.user_id,
95 statuses=statuses, opened_by=opened_by)
96 statuses=statuses, opened_by=opened_by)
96 else:
97 else:
97 pull_requests = PullRequestModel().get_all(
98 pull_requests = PullRequestModel().get_all(
98 repo_name, search_q=search_q, source=source, opened_by=opened_by,
99 repo_name, search_q=search_q, source=source, opened_by=opened_by,
99 statuses=statuses, offset=start, length=limit,
100 statuses=statuses, offset=start, length=limit,
100 order_by=order_by, order_dir=order_dir)
101 order_by=order_by, order_dir=order_dir)
101 pull_requests_total_count = PullRequestModel().count_all(
102 pull_requests_total_count = PullRequestModel().count_all(
102 repo_name, search_q=search_q, source=source, statuses=statuses,
103 repo_name, search_q=search_q, source=source, statuses=statuses,
103 opened_by=opened_by)
104 opened_by=opened_by)
104
105
105 data = []
106 data = []
106 comments_model = CommentsModel()
107 comments_model = CommentsModel()
107 for pr in pull_requests:
108 for pr in pull_requests:
108 comments_count = comments_model.get_all_comments(
109 comments_count = comments_model.get_all_comments(
109 self.db_repo.repo_id, pull_request=pr, count_only=True)
110 self.db_repo.repo_id, pull_request=pr, count_only=True)
110
111
111 data.append({
112 data.append({
112 'name': _render('pullrequest_name',
113 'name': _render('pullrequest_name',
113 pr.pull_request_id, pr.pull_request_state,
114 pr.pull_request_id, pr.pull_request_state,
114 pr.work_in_progress, pr.target_repo.repo_name,
115 pr.work_in_progress, pr.target_repo.repo_name,
115 short=True),
116 short=True),
116 'name_raw': pr.pull_request_id,
117 'name_raw': pr.pull_request_id,
117 'status': _render('pullrequest_status',
118 'status': _render('pullrequest_status',
118 pr.calculated_review_status()),
119 pr.calculated_review_status()),
119 'title': _render('pullrequest_title', pr.title, pr.description),
120 'title': _render('pullrequest_title', pr.title, pr.description),
120 'description': h.escape(pr.description),
121 'description': h.escape(pr.description),
121 'updated_on': _render('pullrequest_updated_on',
122 'updated_on': _render('pullrequest_updated_on',
122 h.datetime_to_time(pr.updated_on)),
123 h.datetime_to_time(pr.updated_on)),
123 'updated_on_raw': h.datetime_to_time(pr.updated_on),
124 'updated_on_raw': h.datetime_to_time(pr.updated_on),
124 'created_on': _render('pullrequest_updated_on',
125 'created_on': _render('pullrequest_updated_on',
125 h.datetime_to_time(pr.created_on)),
126 h.datetime_to_time(pr.created_on)),
126 'created_on_raw': h.datetime_to_time(pr.created_on),
127 'created_on_raw': h.datetime_to_time(pr.created_on),
127 'state': pr.pull_request_state,
128 'state': pr.pull_request_state,
128 'author': _render('pullrequest_author',
129 'author': _render('pullrequest_author',
129 pr.author.full_contact, ),
130 pr.author.full_contact, ),
130 'author_raw': pr.author.full_name,
131 'author_raw': pr.author.full_name,
131 'comments': _render('pullrequest_comments', comments_count),
132 'comments': _render('pullrequest_comments', comments_count),
132 'comments_raw': comments_count,
133 'comments_raw': comments_count,
133 'closed': pr.is_closed(),
134 'closed': pr.is_closed(),
134 })
135 })
135
136
136 data = ({
137 data = ({
137 'draw': draw,
138 'draw': draw,
138 'data': data,
139 'data': data,
139 'recordsTotal': pull_requests_total_count,
140 'recordsTotal': pull_requests_total_count,
140 'recordsFiltered': pull_requests_total_count,
141 'recordsFiltered': pull_requests_total_count,
141 })
142 })
142 return data
143 return data
143
144
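Editor's note: the grid helper above returns a DataTables-style server-side paging envelope. A minimal sketch of that payload shape, assuming rows is the already-rendered list of row dicts and total the matching record count:

def datatables_envelope(draw, rows, total):
    # shape expected by the DataTables client for server-side paging
    return {
        'draw': draw,                 # echo of the client's request counter
        'data': rows,                 # rows for the requested page
        'recordsTotal': total,        # records before filtering
        'recordsFiltered': total,     # records after filtering (same here)
    }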
144 @LoginRequired()
145 @LoginRequired()
145 @HasRepoPermissionAnyDecorator(
146 @HasRepoPermissionAnyDecorator(
146 'repository.read', 'repository.write', 'repository.admin')
147 'repository.read', 'repository.write', 'repository.admin')
147 @view_config(
148 @view_config(
148 route_name='pullrequest_show_all', request_method='GET',
149 route_name='pullrequest_show_all', request_method='GET',
149 renderer='rhodecode:templates/pullrequests/pullrequests.mako')
150 renderer='rhodecode:templates/pullrequests/pullrequests.mako')
150 def pull_request_list(self):
151 def pull_request_list(self):
151 c = self.load_default_context()
152 c = self.load_default_context()
152
153
153 req_get = self.request.GET
154 req_get = self.request.GET
154 c.source = str2bool(req_get.get('source'))
155 c.source = str2bool(req_get.get('source'))
155 c.closed = str2bool(req_get.get('closed'))
156 c.closed = str2bool(req_get.get('closed'))
156 c.my = str2bool(req_get.get('my'))
157 c.my = str2bool(req_get.get('my'))
157 c.awaiting_review = str2bool(req_get.get('awaiting_review'))
158 c.awaiting_review = str2bool(req_get.get('awaiting_review'))
158 c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
159 c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
159
160
160 c.active = 'open'
161 c.active = 'open'
161 if c.my:
162 if c.my:
162 c.active = 'my'
163 c.active = 'my'
163 if c.closed:
164 if c.closed:
164 c.active = 'closed'
165 c.active = 'closed'
165 if c.awaiting_review and not c.source:
166 if c.awaiting_review and not c.source:
166 c.active = 'awaiting'
167 c.active = 'awaiting'
167 if c.source and not c.awaiting_review:
168 if c.source and not c.awaiting_review:
168 c.active = 'source'
169 c.active = 'source'
169 if c.awaiting_my_review:
170 if c.awaiting_my_review:
170 c.active = 'awaiting_my'
171 c.active = 'awaiting_my'
171
172
172 return self._get_template_context(c)
173 return self._get_template_context(c)
173
174
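Editor's note: a compact restatement of how the query flags in pull_request_list() above resolve to the active navigation tab; later conditions win, exactly as in the cascade of ifs. Purely illustrative.

def resolve_active_tab(source, closed, my, awaiting_review, awaiting_my_review):
    active = 'open'
    if my:
        active = 'my'
    if closed:
        active = 'closed'
    if awaiting_review and not source:
        active = 'awaiting'
    if source and not awaiting_review:
        active = 'source'
    if awaiting_my_review:
        active = 'awaiting_my'
    return active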
174 @LoginRequired()
175 @LoginRequired()
175 @HasRepoPermissionAnyDecorator(
176 @HasRepoPermissionAnyDecorator(
176 'repository.read', 'repository.write', 'repository.admin')
177 'repository.read', 'repository.write', 'repository.admin')
177 @view_config(
178 @view_config(
178 route_name='pullrequest_show_all_data', request_method='GET',
179 route_name='pullrequest_show_all_data', request_method='GET',
179 renderer='json_ext', xhr=True)
180 renderer='json_ext', xhr=True)
180 def pull_request_list_data(self):
181 def pull_request_list_data(self):
181 self.load_default_context()
182 self.load_default_context()
182
183
183 # additional filters
184 # additional filters
184 req_get = self.request.GET
185 req_get = self.request.GET
185 source = str2bool(req_get.get('source'))
186 source = str2bool(req_get.get('source'))
186 closed = str2bool(req_get.get('closed'))
187 closed = str2bool(req_get.get('closed'))
187 my = str2bool(req_get.get('my'))
188 my = str2bool(req_get.get('my'))
188 awaiting_review = str2bool(req_get.get('awaiting_review'))
189 awaiting_review = str2bool(req_get.get('awaiting_review'))
189 awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
190 awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
190
191
191 filter_type = 'awaiting_review' if awaiting_review \
192 filter_type = 'awaiting_review' if awaiting_review \
192 else 'awaiting_my_review' if awaiting_my_review \
193 else 'awaiting_my_review' if awaiting_my_review \
193 else None
194 else None
194
195
195 opened_by = None
196 opened_by = None
196 if my:
197 if my:
197 opened_by = [self._rhodecode_user.user_id]
198 opened_by = [self._rhodecode_user.user_id]
198
199
199 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
200 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
200 if closed:
201 if closed:
201 statuses = [PullRequest.STATUS_CLOSED]
202 statuses = [PullRequest.STATUS_CLOSED]
202
203
203 data = self._get_pull_requests_list(
204 data = self._get_pull_requests_list(
204 repo_name=self.db_repo_name, source=source,
205 repo_name=self.db_repo_name, source=source,
205 filter_type=filter_type, opened_by=opened_by, statuses=statuses)
206 filter_type=filter_type, opened_by=opened_by, statuses=statuses)
206
207
207 return data
208 return data
208
209
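Editor's note: pull_request_list_data() above maps the query flags to a filter type, an opened_by list and a status set. A simplified restatement of that mapping; the literal status strings below are placeholders for the PullRequest.STATUS_* constants and are assumptions made for the sketch.

def resolve_pr_filter(awaiting_review, awaiting_my_review, my, closed,
                      current_user_id):
    # return (filter_type, opened_by, statuses) for the PR grid query
    if awaiting_review:
        filter_type = 'awaiting_review'
    elif awaiting_my_review:
        filter_type = 'awaiting_my_review'
    else:
        filter_type = None
    opened_by = [current_user_id] if my else None
    statuses = ['closed'] if closed else ['new', 'open']
    return filter_type, opened_by, statuses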
209 def _is_diff_cache_enabled(self, target_repo):
210 def _is_diff_cache_enabled(self, target_repo):
210 caching_enabled = self._get_general_setting(
211 caching_enabled = self._get_general_setting(
211 target_repo, 'rhodecode_diff_cache')
212 target_repo, 'rhodecode_diff_cache')
212 log.debug('Diff caching enabled: %s', caching_enabled)
213 log.debug('Diff caching enabled: %s', caching_enabled)
213 return caching_enabled
214 return caching_enabled
214
215
215 def _get_diffset(self, source_repo_name, source_repo,
216 def _get_diffset(self, source_repo_name, source_repo,
216 ancestor_commit,
217 ancestor_commit,
217 source_ref_id, target_ref_id,
218 source_ref_id, target_ref_id,
218 target_commit, source_commit, diff_limit, file_limit,
219 target_commit, source_commit, diff_limit, file_limit,
219 fulldiff, hide_whitespace_changes, diff_context, use_ancestor=True):
220 fulldiff, hide_whitespace_changes, diff_context, use_ancestor=True):
220
221
221 if use_ancestor:
222 if use_ancestor:
222 # we might want to not use it for versions
223 # we might want to not use it for versions
223 target_ref_id = ancestor_commit.raw_id
224 target_ref_id = ancestor_commit.raw_id
224
225
225 vcs_diff = PullRequestModel().get_diff(
226 vcs_diff = PullRequestModel().get_diff(
226 source_repo, source_ref_id, target_ref_id,
227 source_repo, source_ref_id, target_ref_id,
227 hide_whitespace_changes, diff_context)
228 hide_whitespace_changes, diff_context)
228
229
229 diff_processor = diffs.DiffProcessor(
230 diff_processor = diffs.DiffProcessor(
230 vcs_diff, format='newdiff', diff_limit=diff_limit,
231 vcs_diff, format='newdiff', diff_limit=diff_limit,
231 file_limit=file_limit, show_full_diff=fulldiff)
232 file_limit=file_limit, show_full_diff=fulldiff)
232
233
233 _parsed = diff_processor.prepare()
234 _parsed = diff_processor.prepare()
234
235
235 diffset = codeblocks.DiffSet(
236 diffset = codeblocks.DiffSet(
236 repo_name=self.db_repo_name,
237 repo_name=self.db_repo_name,
237 source_repo_name=source_repo_name,
238 source_repo_name=source_repo_name,
238 source_node_getter=codeblocks.diffset_node_getter(target_commit),
239 source_node_getter=codeblocks.diffset_node_getter(target_commit),
239 target_node_getter=codeblocks.diffset_node_getter(source_commit),
240 target_node_getter=codeblocks.diffset_node_getter(source_commit),
240 )
241 )
241 diffset = self.path_filter.render_patchset_filtered(
242 diffset = self.path_filter.render_patchset_filtered(
242 diffset, _parsed, target_commit.raw_id, source_commit.raw_id)
243 diffset, _parsed, target_commit.raw_id, source_commit.raw_id)
243
244
244 return diffset
245 return diffset
245
246
246 def _get_range_diffset(self, source_scm, source_repo,
247 def _get_range_diffset(self, source_scm, source_repo,
247 commit1, commit2, diff_limit, file_limit,
248 commit1, commit2, diff_limit, file_limit,
248 fulldiff, hide_whitespace_changes, diff_context):
249 fulldiff, hide_whitespace_changes, diff_context):
249 vcs_diff = source_scm.get_diff(
250 vcs_diff = source_scm.get_diff(
250 commit1, commit2,
251 commit1, commit2,
251 ignore_whitespace=hide_whitespace_changes,
252 ignore_whitespace=hide_whitespace_changes,
252 context=diff_context)
253 context=diff_context)
253
254
254 diff_processor = diffs.DiffProcessor(
255 diff_processor = diffs.DiffProcessor(
255 vcs_diff, format='newdiff', diff_limit=diff_limit,
256 vcs_diff, format='newdiff', diff_limit=diff_limit,
256 file_limit=file_limit, show_full_diff=fulldiff)
257 file_limit=file_limit, show_full_diff=fulldiff)
257
258
258 _parsed = diff_processor.prepare()
259 _parsed = diff_processor.prepare()
259
260
260 diffset = codeblocks.DiffSet(
261 diffset = codeblocks.DiffSet(
261 repo_name=source_repo.repo_name,
262 repo_name=source_repo.repo_name,
262 source_node_getter=codeblocks.diffset_node_getter(commit1),
263 source_node_getter=codeblocks.diffset_node_getter(commit1),
263 target_node_getter=codeblocks.diffset_node_getter(commit2))
264 target_node_getter=codeblocks.diffset_node_getter(commit2))
264
265
265 diffset = self.path_filter.render_patchset_filtered(
266 diffset = self.path_filter.render_patchset_filtered(
266 diffset, _parsed, commit1.raw_id, commit2.raw_id)
267 diffset, _parsed, commit1.raw_id, commit2.raw_id)
267
268
268 return diffset
269 return diffset
269
270
270 def register_comments_vars(self, c, pull_request, versions):
271 def register_comments_vars(self, c, pull_request, versions):
271 comments_model = CommentsModel()
272 comments_model = CommentsModel()
272
273
273 # GENERAL COMMENTS with versions #
274 # GENERAL COMMENTS with versions #
274 q = comments_model._all_general_comments_of_pull_request(pull_request)
275 q = comments_model._all_general_comments_of_pull_request(pull_request)
275 q = q.order_by(ChangesetComment.comment_id.asc())
276 q = q.order_by(ChangesetComment.comment_id.asc())
276 general_comments = q
277 general_comments = q
277
278
278 # pick comments we want to render at current version
279 # pick comments we want to render at current version
279 c.comment_versions = comments_model.aggregate_comments(
280 c.comment_versions = comments_model.aggregate_comments(
280 general_comments, versions, c.at_version_num)
281 general_comments, versions, c.at_version_num)
281
282
282 # INLINE COMMENTS with versions #
283 # INLINE COMMENTS with versions #
283 q = comments_model._all_inline_comments_of_pull_request(pull_request)
284 q = comments_model._all_inline_comments_of_pull_request(pull_request)
284 q = q.order_by(ChangesetComment.comment_id.asc())
285 q = q.order_by(ChangesetComment.comment_id.asc())
285 inline_comments = q
286 inline_comments = q
286
287
287 c.inline_versions = comments_model.aggregate_comments(
288 c.inline_versions = comments_model.aggregate_comments(
288 inline_comments, versions, c.at_version_num, inline=True)
289 inline_comments, versions, c.at_version_num, inline=True)
289
290
290 # Comments inline+general
291 # Comments inline+general
291 if c.at_version:
292 if c.at_version:
292 c.inline_comments_flat = c.inline_versions[c.at_version_num]['display']
293 c.inline_comments_flat = c.inline_versions[c.at_version_num]['display']
293 c.comments = c.comment_versions[c.at_version_num]['display']
294 c.comments = c.comment_versions[c.at_version_num]['display']
294 else:
295 else:
295 c.inline_comments_flat = c.inline_versions[c.at_version_num]['until']
296 c.inline_comments_flat = c.inline_versions[c.at_version_num]['until']
296 c.comments = c.comment_versions[c.at_version_num]['until']
297 c.comments = c.comment_versions[c.at_version_num]['until']
297
298
298 return general_comments, inline_comments
299 return general_comments, inline_comments
299
300
300 @LoginRequired()
301 @LoginRequired()
301 @HasRepoPermissionAnyDecorator(
302 @HasRepoPermissionAnyDecorator(
302 'repository.read', 'repository.write', 'repository.admin')
303 'repository.read', 'repository.write', 'repository.admin')
303 @view_config(
304 @view_config(
304 route_name='pullrequest_show', request_method='GET',
305 route_name='pullrequest_show', request_method='GET',
305 renderer='rhodecode:templates/pullrequests/pullrequest_show.mako')
306 renderer='rhodecode:templates/pullrequests/pullrequest_show.mako')
306 def pull_request_show(self):
307 def pull_request_show(self):
307 _ = self.request.translate
308 _ = self.request.translate
308 c = self.load_default_context()
309 c = self.load_default_context()
309
310
310 pull_request = PullRequest.get_or_404(
311 pull_request = PullRequest.get_or_404(
311 self.request.matchdict['pull_request_id'])
312 self.request.matchdict['pull_request_id'])
312 pull_request_id = pull_request.pull_request_id
313 pull_request_id = pull_request.pull_request_id
313
314
314 c.state_progressing = pull_request.is_state_changing()
315 c.state_progressing = pull_request.is_state_changing()
315 c.pr_broadcast_channel = channelstream.pr_channel(pull_request)
316 c.pr_broadcast_channel = channelstream.pr_channel(pull_request)
316
317
317 _new_state = {
318 _new_state = {
318 'created': PullRequest.STATE_CREATED,
319 'created': PullRequest.STATE_CREATED,
319 }.get(self.request.GET.get('force_state'))
320 }.get(self.request.GET.get('force_state'))
320
321
321 if c.is_super_admin and _new_state:
322 if c.is_super_admin and _new_state:
322 with pull_request.set_state(PullRequest.STATE_UPDATING, final_state=_new_state):
323 with pull_request.set_state(PullRequest.STATE_UPDATING, final_state=_new_state):
323 h.flash(
324 h.flash(
324 _('Pull Request state was force changed to `{}`').format(_new_state),
325 _('Pull Request state was force changed to `{}`').format(_new_state),
325 category='success')
326 category='success')
326 Session().commit()
327 Session().commit()
327
328
328 raise HTTPFound(h.route_path(
329 raise HTTPFound(h.route_path(
329 'pullrequest_show', repo_name=self.db_repo_name,
330 'pullrequest_show', repo_name=self.db_repo_name,
330 pull_request_id=pull_request_id))
331 pull_request_id=pull_request_id))
331
332
332 version = self.request.GET.get('version')
333 version = self.request.GET.get('version')
333 from_version = self.request.GET.get('from_version') or version
334 from_version = self.request.GET.get('from_version') or version
334 merge_checks = self.request.GET.get('merge_checks')
335 merge_checks = self.request.GET.get('merge_checks')
335 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
336 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
336 force_refresh = str2bool(self.request.GET.get('force_refresh'))
337 force_refresh = str2bool(self.request.GET.get('force_refresh'))
337 c.range_diff_on = self.request.GET.get('range-diff') == "1"
338 c.range_diff_on = self.request.GET.get('range-diff') == "1"
338
339
339 # fetch the global flags for ignoring whitespace and for diff context lines
340 # fetch the global flags for ignoring whitespace and for diff context lines
340 diff_context = diffs.get_diff_context(self.request)
341 diff_context = diffs.get_diff_context(self.request)
341 hide_whitespace_changes = diffs.get_diff_whitespace_flag(self.request)
342 hide_whitespace_changes = diffs.get_diff_whitespace_flag(self.request)
342
343
343 (pull_request_latest,
344 (pull_request_latest,
344 pull_request_at_ver,
345 pull_request_at_ver,
345 pull_request_display_obj,
346 pull_request_display_obj,
346 at_version) = PullRequestModel().get_pr_version(
347 at_version) = PullRequestModel().get_pr_version(
347 pull_request_id, version=version)
348 pull_request_id, version=version)
348
349
349 pr_closed = pull_request_latest.is_closed()
350 pr_closed = pull_request_latest.is_closed()
350
351
351 if pr_closed and (version or from_version):
352 if pr_closed and (version or from_version):
352 # do not allow browsing versions of a closed PR
353 # do not allow browsing versions of a closed PR
353 raise HTTPFound(h.route_path(
354 raise HTTPFound(h.route_path(
354 'pullrequest_show', repo_name=self.db_repo_name,
355 'pullrequest_show', repo_name=self.db_repo_name,
355 pull_request_id=pull_request_id))
356 pull_request_id=pull_request_id))
356
357
357 versions = pull_request_display_obj.versions()
358 versions = pull_request_display_obj.versions()
358 # used to store per-commit range diffs
359 # used to store per-commit range diffs
359 c.changes = collections.OrderedDict()
360 c.changes = collections.OrderedDict()
360
361
361 c.at_version = at_version
362 c.at_version = at_version
362 c.at_version_num = (at_version
363 c.at_version_num = (at_version
363 if at_version and at_version != PullRequest.LATEST_VER
364 if at_version and at_version != PullRequest.LATEST_VER
364 else None)
365 else None)
365
366
366 c.at_version_index = ChangesetComment.get_index_from_version(
367 c.at_version_index = ChangesetComment.get_index_from_version(
367 c.at_version_num, versions)
368 c.at_version_num, versions)
368
369
369 (prev_pull_request_latest,
370 (prev_pull_request_latest,
370 prev_pull_request_at_ver,
371 prev_pull_request_at_ver,
371 prev_pull_request_display_obj,
372 prev_pull_request_display_obj,
372 prev_at_version) = PullRequestModel().get_pr_version(
373 prev_at_version) = PullRequestModel().get_pr_version(
373 pull_request_id, version=from_version)
374 pull_request_id, version=from_version)
374
375
375 c.from_version = prev_at_version
376 c.from_version = prev_at_version
376 c.from_version_num = (prev_at_version
377 c.from_version_num = (prev_at_version
377 if prev_at_version and prev_at_version != PullRequest.LATEST_VER
378 if prev_at_version and prev_at_version != PullRequest.LATEST_VER
378 else None)
379 else None)
379 c.from_version_index = ChangesetComment.get_index_from_version(
380 c.from_version_index = ChangesetComment.get_index_from_version(
380 c.from_version_num, versions)
381 c.from_version_num, versions)
381
382
382 # define if we're in COMPARE mode or VIEW at version mode
383 # define if we're in COMPARE mode or VIEW at version mode
383 compare = at_version != prev_at_version
384 compare = at_version != prev_at_version
384
385
385 # the repo_name this pull request was opened against,
386 # the repo_name this pull request was opened against,
386 # i.e. the target_repo must match
387 # i.e. the target_repo must match
387 if self.db_repo_name != pull_request_at_ver.target_repo.repo_name:
388 if self.db_repo_name != pull_request_at_ver.target_repo.repo_name:
388 log.warning('Mismatch between the current repo: %s, and target %s',
389 log.warning('Mismatch between the current repo: %s, and target %s',
389 self.db_repo_name, pull_request_at_ver.target_repo.repo_name)
390 self.db_repo_name, pull_request_at_ver.target_repo.repo_name)
390 raise HTTPNotFound()
391 raise HTTPNotFound()
391
392
392 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(pull_request_at_ver)
393 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(pull_request_at_ver)
393
394
394 c.pull_request = pull_request_display_obj
395 c.pull_request = pull_request_display_obj
395 c.renderer = pull_request_at_ver.description_renderer or c.renderer
396 c.renderer = pull_request_at_ver.description_renderer or c.renderer
396 c.pull_request_latest = pull_request_latest
397 c.pull_request_latest = pull_request_latest
397
398
398 # inject latest version
399 # inject latest version
399 latest_ver = PullRequest.get_pr_display_object(pull_request_latest, pull_request_latest)
400 latest_ver = PullRequest.get_pr_display_object(pull_request_latest, pull_request_latest)
400 c.versions = versions + [latest_ver]
401 c.versions = versions + [latest_ver]
401
402
402 if compare or (at_version and not at_version == PullRequest.LATEST_VER):
403 if compare or (at_version and not at_version == PullRequest.LATEST_VER):
403 c.allowed_to_change_status = False
404 c.allowed_to_change_status = False
404 c.allowed_to_update = False
405 c.allowed_to_update = False
405 c.allowed_to_merge = False
406 c.allowed_to_merge = False
406 c.allowed_to_delete = False
407 c.allowed_to_delete = False
407 c.allowed_to_comment = False
408 c.allowed_to_comment = False
408 c.allowed_to_close = False
409 c.allowed_to_close = False
409 else:
410 else:
410 can_change_status = PullRequestModel().check_user_change_status(
411 can_change_status = PullRequestModel().check_user_change_status(
411 pull_request_at_ver, self._rhodecode_user)
412 pull_request_at_ver, self._rhodecode_user)
412 c.allowed_to_change_status = can_change_status and not pr_closed
413 c.allowed_to_change_status = can_change_status and not pr_closed
413
414
414 c.allowed_to_update = PullRequestModel().check_user_update(
415 c.allowed_to_update = PullRequestModel().check_user_update(
415 pull_request_latest, self._rhodecode_user) and not pr_closed
416 pull_request_latest, self._rhodecode_user) and not pr_closed
416 c.allowed_to_merge = PullRequestModel().check_user_merge(
417 c.allowed_to_merge = PullRequestModel().check_user_merge(
417 pull_request_latest, self._rhodecode_user) and not pr_closed
418 pull_request_latest, self._rhodecode_user) and not pr_closed
418 c.allowed_to_delete = PullRequestModel().check_user_delete(
419 c.allowed_to_delete = PullRequestModel().check_user_delete(
419 pull_request_latest, self._rhodecode_user) and not pr_closed
420 pull_request_latest, self._rhodecode_user) and not pr_closed
420 c.allowed_to_comment = not pr_closed
421 c.allowed_to_comment = not pr_closed
421 c.allowed_to_close = c.allowed_to_merge and not pr_closed
422 c.allowed_to_close = c.allowed_to_merge and not pr_closed
422
423
423 c.forbid_adding_reviewers = False
424 c.forbid_adding_reviewers = False
424 c.forbid_author_to_review = False
425 c.forbid_author_to_review = False
425 c.forbid_commit_author_to_review = False
426 c.forbid_commit_author_to_review = False
426
427
427 if pull_request_latest.reviewer_data and \
428 if pull_request_latest.reviewer_data and \
428 'rules' in pull_request_latest.reviewer_data:
429 'rules' in pull_request_latest.reviewer_data:
429 rules = pull_request_latest.reviewer_data['rules'] or {}
430 rules = pull_request_latest.reviewer_data['rules'] or {}
430 try:
431 try:
431 c.forbid_adding_reviewers = rules.get('forbid_adding_reviewers')
432 c.forbid_adding_reviewers = rules.get('forbid_adding_reviewers')
432 c.forbid_author_to_review = rules.get('forbid_author_to_review')
433 c.forbid_author_to_review = rules.get('forbid_author_to_review')
433 c.forbid_commit_author_to_review = rules.get('forbid_commit_author_to_review')
434 c.forbid_commit_author_to_review = rules.get('forbid_commit_author_to_review')
434 except Exception:
435 except Exception:
435 pass
436 pass
436
437
437 # check merge capabilities
438 # check merge capabilities
438 _merge_check = MergeCheck.validate(
439 _merge_check = MergeCheck.validate(
439 pull_request_latest, auth_user=self._rhodecode_user,
440 pull_request_latest, auth_user=self._rhodecode_user,
440 translator=self.request.translate,
441 translator=self.request.translate,
441 force_shadow_repo_refresh=force_refresh)
442 force_shadow_repo_refresh=force_refresh)
442
443
443 c.pr_merge_errors = _merge_check.error_details
444 c.pr_merge_errors = _merge_check.error_details
444 c.pr_merge_possible = not _merge_check.failed
445 c.pr_merge_possible = not _merge_check.failed
445 c.pr_merge_message = _merge_check.merge_msg
446 c.pr_merge_message = _merge_check.merge_msg
446 c.pr_merge_source_commit = _merge_check.source_commit
447 c.pr_merge_source_commit = _merge_check.source_commit
447 c.pr_merge_target_commit = _merge_check.target_commit
448 c.pr_merge_target_commit = _merge_check.target_commit
448
449
449 c.pr_merge_info = MergeCheck.get_merge_conditions(
450 c.pr_merge_info = MergeCheck.get_merge_conditions(
450 pull_request_latest, translator=self.request.translate)
451 pull_request_latest, translator=self.request.translate)
451
452
452 c.pull_request_review_status = _merge_check.review_status
453 c.pull_request_review_status = _merge_check.review_status
453 if merge_checks:
454 if merge_checks:
454 self.request.override_renderer = \
455 self.request.override_renderer = \
455 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako'
456 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako'
456 return self._get_template_context(c)
457 return self._get_template_context(c)
457
458
458 c.reviewers_count = pull_request.reviewers_count
459 c.reviewers_count = pull_request.reviewers_count
459 c.observers_count = pull_request.observers_count
460 c.observers_count = pull_request.observers_count
460
461
461 # reviewers and statuses
462 # reviewers and statuses
462 c.pull_request_default_reviewers_data_json = json.dumps(pull_request.reviewer_data)
463 c.pull_request_default_reviewers_data_json = json.dumps(pull_request.reviewer_data)
463 c.pull_request_set_reviewers_data_json = collections.OrderedDict({'reviewers': []})
464 c.pull_request_set_reviewers_data_json = collections.OrderedDict({'reviewers': []})
464 c.pull_request_set_observers_data_json = collections.OrderedDict({'observers': []})
465 c.pull_request_set_observers_data_json = collections.OrderedDict({'observers': []})
465
466
466 for review_obj, member, reasons, mandatory, status in pull_request_at_ver.reviewers_statuses():
467 for review_obj, member, reasons, mandatory, status in pull_request_at_ver.reviewers_statuses():
467 member_reviewer = h.reviewer_as_json(
468 member_reviewer = h.reviewer_as_json(
468 member, reasons=reasons, mandatory=mandatory,
469 member, reasons=reasons, mandatory=mandatory,
469 role=review_obj.role,
470 role=review_obj.role,
470 user_group=review_obj.rule_user_group_data()
471 user_group=review_obj.rule_user_group_data()
471 )
472 )
472
473
473 current_review_status = status[0][1].status if status else ChangesetStatus.STATUS_NOT_REVIEWED
474 current_review_status = status[0][1].status if status else ChangesetStatus.STATUS_NOT_REVIEWED
474 member_reviewer['review_status'] = current_review_status
475 member_reviewer['review_status'] = current_review_status
475 member_reviewer['review_status_label'] = h.commit_status_lbl(current_review_status)
476 member_reviewer['review_status_label'] = h.commit_status_lbl(current_review_status)
476 member_reviewer['allowed_to_update'] = c.allowed_to_update
477 member_reviewer['allowed_to_update'] = c.allowed_to_update
477 c.pull_request_set_reviewers_data_json['reviewers'].append(member_reviewer)
478 c.pull_request_set_reviewers_data_json['reviewers'].append(member_reviewer)
478
479
479 c.pull_request_set_reviewers_data_json = json.dumps(c.pull_request_set_reviewers_data_json)
480 c.pull_request_set_reviewers_data_json = json.dumps(c.pull_request_set_reviewers_data_json)
480
481
481 for observer_obj, member in pull_request_at_ver.observers():
482 for observer_obj, member in pull_request_at_ver.observers():
482 member_observer = h.reviewer_as_json(
483 member_observer = h.reviewer_as_json(
483 member, reasons=[], mandatory=False,
484 member, reasons=[], mandatory=False,
484 role=observer_obj.role,
485 role=observer_obj.role,
485 user_group=observer_obj.rule_user_group_data()
486 user_group=observer_obj.rule_user_group_data()
486 )
487 )
487 member_observer['allowed_to_update'] = c.allowed_to_update
488 member_observer['allowed_to_update'] = c.allowed_to_update
488 c.pull_request_set_observers_data_json['observers'].append(member_observer)
489 c.pull_request_set_observers_data_json['observers'].append(member_observer)
489
490
490 c.pull_request_set_observers_data_json = json.dumps(c.pull_request_set_observers_data_json)
491 c.pull_request_set_observers_data_json = json.dumps(c.pull_request_set_observers_data_json)
491
492
492 general_comments, inline_comments = \
493 general_comments, inline_comments = \
493 self.register_comments_vars(c, pull_request_latest, versions)
494 self.register_comments_vars(c, pull_request_latest, versions)
494
495
495 # TODOs
496 # TODOs
496 c.unresolved_comments = CommentsModel() \
497 c.unresolved_comments = CommentsModel() \
497 .get_pull_request_unresolved_todos(pull_request_latest)
498 .get_pull_request_unresolved_todos(pull_request_latest)
498 c.resolved_comments = CommentsModel() \
499 c.resolved_comments = CommentsModel() \
499 .get_pull_request_resolved_todos(pull_request_latest)
500 .get_pull_request_resolved_todos(pull_request_latest)
500
501
501 # if we use version, then do not show later comments
502 # if we use version, then do not show later comments
502 # than current version
503 # than current version
503 display_inline_comments = collections.defaultdict(
504 display_inline_comments = collections.defaultdict(
504 lambda: collections.defaultdict(list))
505 lambda: collections.defaultdict(list))
505 for co in inline_comments:
506 for co in inline_comments:
506 if c.at_version_num:
507 if c.at_version_num:
507 # pick comments made up to and including the given version, so we
508 # pick comments made up to and including the given version, so we
508 # don't render comments that belong to a higher version
509 # don't render comments that belong to a higher version
509 should_render = co.pull_request_version_id and \
510 should_render = co.pull_request_version_id and \
510 co.pull_request_version_id <= c.at_version_num
511 co.pull_request_version_id <= c.at_version_num
511 else:
512 else:
512 # showing all, for 'latest'
513 # showing all, for 'latest'
513 should_render = True
514 should_render = True
514
515
515 if should_render:
516 if should_render:
516 display_inline_comments[co.f_path][co.line_no].append(co)
517 display_inline_comments[co.f_path][co.line_no].append(co)
517
518
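Editor's note: a one-function restatement of the should_render rule used in the loop above; when a specific PR version is viewed, only comments attached to that version or an earlier one are rendered, while the latest view shows everything:

def should_render_comment(comment_version_id, viewed_version_num):
    if viewed_version_num is None:
        # viewing 'latest' - show every inline comment
        return True
    return bool(comment_version_id) and comment_version_id <= viewed_version_num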
518 # load diff data into template context, if we use compare mode then
519 # load diff data into template context, if we use compare mode then
519 # diff is calculated based on changes between versions of PR
520 # diff is calculated based on changes between versions of PR
520
521
521 source_repo = pull_request_at_ver.source_repo
522 source_repo = pull_request_at_ver.source_repo
522 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
523 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
523
524
524 target_repo = pull_request_at_ver.target_repo
525 target_repo = pull_request_at_ver.target_repo
525 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
526 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
526
527
527 if compare:
528 if compare:
528 # in compare switch the diff base to latest commit from prev version
529 # in compare switch the diff base to latest commit from prev version
529 target_ref_id = prev_pull_request_display_obj.revisions[0]
530 target_ref_id = prev_pull_request_display_obj.revisions[0]
530
531
531 # despite opening commits for bookmarks/branches/tags, we always
532 # despite opening commits for bookmarks/branches/tags, we always
532 # convert this to rev to prevent changes after bookmark or branch change
533 # convert this to rev to prevent changes after bookmark or branch change
533 c.source_ref_type = 'rev'
534 c.source_ref_type = 'rev'
534 c.source_ref = source_ref_id
535 c.source_ref = source_ref_id
535
536
536 c.target_ref_type = 'rev'
537 c.target_ref_type = 'rev'
537 c.target_ref = target_ref_id
538 c.target_ref = target_ref_id
538
539
539 c.source_repo = source_repo
540 c.source_repo = source_repo
540 c.target_repo = target_repo
541 c.target_repo = target_repo
541
542
542 c.commit_ranges = []
543 c.commit_ranges = []
543 source_commit = EmptyCommit()
544 source_commit = EmptyCommit()
544 target_commit = EmptyCommit()
545 target_commit = EmptyCommit()
545 c.missing_requirements = False
546 c.missing_requirements = False
546
547
547 source_scm = source_repo.scm_instance()
548 source_scm = source_repo.scm_instance()
548 target_scm = target_repo.scm_instance()
549 target_scm = target_repo.scm_instance()
549
550
550 shadow_scm = None
551 shadow_scm = None
551 try:
552 try:
552 shadow_scm = pull_request_latest.get_shadow_repo()
553 shadow_scm = pull_request_latest.get_shadow_repo()
553 except Exception:
554 except Exception:
554 log.debug('Failed to get shadow repo', exc_info=True)
555 log.debug('Failed to get shadow repo', exc_info=True)
555 # default to the existing source_repo, but prefer the shadow
556 # default to the existing source_repo, but prefer the shadow
556 # repo if we managed to obtain one
557 # repo if we managed to obtain one
557 commits_source_repo = source_scm
558 commits_source_repo = source_scm
558 if shadow_scm:
559 if shadow_scm:
559 commits_source_repo = shadow_scm
560 commits_source_repo = shadow_scm
560
561
561 c.commits_source_repo = commits_source_repo
562 c.commits_source_repo = commits_source_repo
562 c.ancestor = None # set it to None, to hide it from PR view
563 c.ancestor = None # set it to None, to hide it from PR view
563
564
564 # empty version means latest, so we keep this to prevent
565 # empty version means latest, so we keep this to prevent
565 # double caching
566 # double caching
566 version_normalized = version or PullRequest.LATEST_VER
567 version_normalized = version or PullRequest.LATEST_VER
567 from_version_normalized = from_version or PullRequest.LATEST_VER
568 from_version_normalized = from_version or PullRequest.LATEST_VER
568
569
569 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(target_repo)
570 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(target_repo)
570 cache_file_path = diff_cache_exist(
571 cache_file_path = diff_cache_exist(
571 cache_path, 'pull_request', pull_request_id, version_normalized,
572 cache_path, 'pull_request', pull_request_id, version_normalized,
572 from_version_normalized, source_ref_id, target_ref_id,
573 from_version_normalized, source_ref_id, target_ref_id,
573 hide_whitespace_changes, diff_context, c.fulldiff)
574 hide_whitespace_changes, diff_context, c.fulldiff)
574
575
575 caching_enabled = self._is_diff_cache_enabled(c.target_repo)
576 caching_enabled = self._is_diff_cache_enabled(c.target_repo)
576 force_recache = self.get_recache_flag()
577 force_recache = self.get_recache_flag()
577
578
578 cached_diff = None
579 cached_diff = None
579 if caching_enabled:
580 if caching_enabled:
580 cached_diff = load_cached_diff(cache_file_path)
581 cached_diff = load_cached_diff(cache_file_path)
581
582
582 has_proper_commit_cache = (
583 has_proper_commit_cache = (
583 cached_diff and cached_diff.get('commits')
584 cached_diff and cached_diff.get('commits')
584 and len(cached_diff.get('commits', [])) == 5
585 and len(cached_diff.get('commits', [])) == 5
585 and cached_diff.get('commits')[0]
586 and cached_diff.get('commits')[0]
586 and cached_diff.get('commits')[3])
587 and cached_diff.get('commits')[3])
587
588
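The cached diff is validated above purely by shape: a dict whose 'commits' entry is a 5-tuple of (ancestor commit, commit cache, missing-requirements flag, source commit, target commit), with the ancestor and source entries required to be truthy. A hedged sketch of that check in isolation (the payload layout is inferred from this code, not from a documented cache format):

def has_usable_commit_cache(cached_diff):
    # expected shape: {'commits': (ancestor, commit_cache, missing_requirements, source, target)}
    commits = (cached_diff or {}).get('commits') or ()
    return len(commits) == 5 and bool(commits[0]) and bool(commits[3])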
588 if not force_recache and not c.range_diff_on and has_proper_commit_cache:
589 if not force_recache and not c.range_diff_on and has_proper_commit_cache:
589 diff_commit_cache = \
590 diff_commit_cache = \
590 (ancestor_commit, commit_cache, missing_requirements,
591 (ancestor_commit, commit_cache, missing_requirements,
591 source_commit, target_commit) = cached_diff['commits']
592 source_commit, target_commit) = cached_diff['commits']
592 else:
593 else:
593 # NOTE(marcink): we may need to fetch potentially unreachable commits when a PR has
594 # NOTE(marcink): we may need to fetch potentially unreachable commits when a PR has
594 # merge errors, which can leave commits hidden in the shadow repo.
595 # merge errors, which can leave commits hidden in the shadow repo.
595 maybe_unreachable = _merge_check.MERGE_CHECK in _merge_check.error_details \
596 maybe_unreachable = _merge_check.MERGE_CHECK in _merge_check.error_details \
596 and _merge_check.merge_response
597 and _merge_check.merge_response
597 maybe_unreachable = maybe_unreachable \
598 maybe_unreachable = maybe_unreachable \
598 and _merge_check.merge_response.metadata.get('unresolved_files')
599 and _merge_check.merge_response.metadata.get('unresolved_files')
599 log.debug("Using unreachable commits due to MERGE_CHECK in merge simulation")
600 log.debug("Using unreachable commits due to MERGE_CHECK in merge simulation")
600 diff_commit_cache = \
601 diff_commit_cache = \
601 (ancestor_commit, commit_cache, missing_requirements,
602 (ancestor_commit, commit_cache, missing_requirements,
602 source_commit, target_commit) = self.get_commits(
603 source_commit, target_commit) = self.get_commits(
603 commits_source_repo,
604 commits_source_repo,
604 pull_request_at_ver,
605 pull_request_at_ver,
605 source_commit,
606 source_commit,
606 source_ref_id,
607 source_ref_id,
607 source_scm,
608 source_scm,
608 target_commit,
609 target_commit,
609 target_ref_id,
610 target_ref_id,
610 target_scm,
611 target_scm,
611 maybe_unreachable=maybe_unreachable)
612 maybe_unreachable=maybe_unreachable)
612
613
613 # register our commit range
614 # register our commit range
614 for comm in commit_cache.values():
615 for comm in commit_cache.values():
615 c.commit_ranges.append(comm)
616 c.commit_ranges.append(comm)
616
617
617 c.missing_requirements = missing_requirements
618 c.missing_requirements = missing_requirements
618 c.ancestor_commit = ancestor_commit
619 c.ancestor_commit = ancestor_commit
619 c.statuses = source_repo.statuses(
620 c.statuses = source_repo.statuses(
620 [x.raw_id for x in c.commit_ranges])
621 [x.raw_id for x in c.commit_ranges])
621
622
622 # auto collapse if we have more commits than the limit
623 # auto collapse if we have more commits than the limit
623 collapse_limit = diffs.DiffProcessor._collapse_commits_over
624 collapse_limit = diffs.DiffProcessor._collapse_commits_over
624 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
625 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
625 c.compare_mode = compare
626 c.compare_mode = compare
626
627
627 # diff_limit keeps the old behavior: it cuts off the whole diff
628 # diff_limit keeps the old behavior: it cuts off the whole diff
628 # once the limit is hit, while file_limit only hides the
629 # once the limit is hit, while file_limit only hides the
629 # big files from the front-end
630 # big files from the front-end
630 diff_limit = c.visual.cut_off_limit_diff
631 diff_limit = c.visual.cut_off_limit_diff
631 file_limit = c.visual.cut_off_limit_file
632 file_limit = c.visual.cut_off_limit_file
632
633
633 c.missing_commits = False
634 c.missing_commits = False
634 if (c.missing_requirements
635 if (c.missing_requirements
635 or isinstance(source_commit, EmptyCommit)
636 or isinstance(source_commit, EmptyCommit)
636 or source_commit == target_commit):
637 or source_commit == target_commit):
637
638
638 c.missing_commits = True
639 c.missing_commits = True
639 else:
640 else:
640 c.inline_comments = display_inline_comments
641 c.inline_comments = display_inline_comments
641
642
642 use_ancestor = True
643 use_ancestor = True
643 if from_version_normalized != version_normalized:
644 if from_version_normalized != version_normalized:
644 use_ancestor = False
645 use_ancestor = False
645
646
646 has_proper_diff_cache = cached_diff and cached_diff.get('commits')
647 has_proper_diff_cache = cached_diff and cached_diff.get('commits')
647 if not force_recache and has_proper_diff_cache:
648 if not force_recache and has_proper_diff_cache:
648 c.diffset = cached_diff['diff']
649 c.diffset = cached_diff['diff']
649 else:
650 else:
650 try:
651 try:
651 c.diffset = self._get_diffset(
652 c.diffset = self._get_diffset(
652 c.source_repo.repo_name, commits_source_repo,
653 c.source_repo.repo_name, commits_source_repo,
653 c.ancestor_commit,
654 c.ancestor_commit,
654 source_ref_id, target_ref_id,
655 source_ref_id, target_ref_id,
655 target_commit, source_commit,
656 target_commit, source_commit,
656 diff_limit, file_limit, c.fulldiff,
657 diff_limit, file_limit, c.fulldiff,
657 hide_whitespace_changes, diff_context,
658 hide_whitespace_changes, diff_context,
658 use_ancestor=use_ancestor
659 use_ancestor=use_ancestor
659 )
660 )
660
661
661 # save cached diff
662 # save cached diff
662 if caching_enabled:
663 if caching_enabled:
663 cache_diff(cache_file_path, c.diffset, diff_commit_cache)
664 cache_diff(cache_file_path, c.diffset, diff_commit_cache)
664 except CommitDoesNotExistError:
665 except CommitDoesNotExistError:
665 log.exception('Failed to generate diffset')
666 log.exception('Failed to generate diffset')
666 c.missing_commits = True
667 c.missing_commits = True
667
668
668 if not c.missing_commits:
669 if not c.missing_commits:
669
670
670 c.limited_diff = c.diffset.limited_diff
671 c.limited_diff = c.diffset.limited_diff
671
672
672 # calculate files removed from the diff that still have comments bound to them
673 # calculate files removed from the diff that still have comments bound to them
673 comment_deleted_files = [
674 comment_deleted_files = [
674 fname for fname in display_inline_comments
675 fname for fname in display_inline_comments
675 if fname not in c.diffset.file_stats]
676 if fname not in c.diffset.file_stats]
676
677
677 c.deleted_files_comments = collections.defaultdict(dict)
678 c.deleted_files_comments = collections.defaultdict(dict)
678 for fname, per_line_comments in display_inline_comments.items():
679 for fname, per_line_comments in display_inline_comments.items():
679 if fname in comment_deleted_files:
680 if fname in comment_deleted_files:
680 c.deleted_files_comments[fname]['stats'] = 0
681 c.deleted_files_comments[fname]['stats'] = 0
681 c.deleted_files_comments[fname]['comments'] = list()
682 c.deleted_files_comments[fname]['comments'] = list()
682 for lno, comments in per_line_comments.items():
683 for lno, comments in per_line_comments.items():
683 c.deleted_files_comments[fname]['comments'].extend(comments)
684 c.deleted_files_comments[fname]['comments'].extend(comments)
684
685
685 # maybe calculate the range diff
686 # maybe calculate the range diff
686 if c.range_diff_on:
687 if c.range_diff_on:
687 # TODO(marcink): set whitespace/context
688 # TODO(marcink): set whitespace/context
688 context_lcl = 3
689 context_lcl = 3
689 ign_whitespace_lcl = False
690 ign_whitespace_lcl = False
690
691
691 for commit in c.commit_ranges:
692 for commit in c.commit_ranges:
692 commit2 = commit
693 commit2 = commit
693 commit1 = commit.first_parent
694 commit1 = commit.first_parent
694
695
695 range_diff_cache_file_path = diff_cache_exist(
696 range_diff_cache_file_path = diff_cache_exist(
696 cache_path, 'diff', commit.raw_id,
697 cache_path, 'diff', commit.raw_id,
697 ign_whitespace_lcl, context_lcl, c.fulldiff)
698 ign_whitespace_lcl, context_lcl, c.fulldiff)
698
699
699 cached_diff = None
700 cached_diff = None
700 if caching_enabled:
701 if caching_enabled:
701 cached_diff = load_cached_diff(range_diff_cache_file_path)
702 cached_diff = load_cached_diff(range_diff_cache_file_path)
702
703
703 has_proper_diff_cache = cached_diff and cached_diff.get('diff')
704 has_proper_diff_cache = cached_diff and cached_diff.get('diff')
704 if not force_recache and has_proper_diff_cache:
705 if not force_recache and has_proper_diff_cache:
705 diffset = cached_diff['diff']
706 diffset = cached_diff['diff']
706 else:
707 else:
707 diffset = self._get_range_diffset(
708 diffset = self._get_range_diffset(
708 commits_source_repo, source_repo,
709 commits_source_repo, source_repo,
709 commit1, commit2, diff_limit, file_limit,
710 commit1, commit2, diff_limit, file_limit,
710 c.fulldiff, ign_whitespace_lcl, context_lcl
711 c.fulldiff, ign_whitespace_lcl, context_lcl
711 )
712 )
712
713
713 # save cached diff
714 # save cached diff
714 if caching_enabled:
715 if caching_enabled:
715 cache_diff(range_diff_cache_file_path, diffset, None)
716 cache_diff(range_diff_cache_file_path, diffset, None)
716
717
717 c.changes[commit.raw_id] = diffset
718 c.changes[commit.raw_id] = diffset
718
719
719 # this is a hack to display links properly: when creating a PR, the
720 # this is a hack to display links properly: when creating a PR, the
720 # compare view and others use a different notation, and
721 # compare view and others use a different notation, and
721 # compare_commits.mako renders links based on the target_repo.
722 # compare_commits.mako renders links based on the target_repo.
722 # We need to swap that here to generate the links properly on the HTML side
723 # We need to swap that here to generate the links properly on the HTML side
723 c.target_repo = c.source_repo
724 c.target_repo = c.source_repo
724
725
725 c.commit_statuses = ChangesetStatus.STATUSES
726 c.commit_statuses = ChangesetStatus.STATUSES
726
727
727 c.show_version_changes = not pr_closed
728 c.show_version_changes = not pr_closed
728 if c.show_version_changes:
729 if c.show_version_changes:
729 cur_obj = pull_request_at_ver
730 cur_obj = pull_request_at_ver
730 prev_obj = prev_pull_request_at_ver
731 prev_obj = prev_pull_request_at_ver
731
732
732 old_commit_ids = prev_obj.revisions
733 old_commit_ids = prev_obj.revisions
733 new_commit_ids = cur_obj.revisions
734 new_commit_ids = cur_obj.revisions
734 commit_changes = PullRequestModel()._calculate_commit_id_changes(
735 commit_changes = PullRequestModel()._calculate_commit_id_changes(
735 old_commit_ids, new_commit_ids)
736 old_commit_ids, new_commit_ids)
736 c.commit_changes_summary = commit_changes
737 c.commit_changes_summary = commit_changes
737
738
738 # calculate the diff for commits between versions
739 # calculate the diff for commits between versions
739 c.commit_changes = []
740 c.commit_changes = []
740
741
741 def mark(cs, fw):
742 def mark(cs, fw):
742 return list(h.itertools.izip_longest([], cs, fillvalue=fw))
743 return list(h.itertools.izip_longest([], cs, fillvalue=fw))
743
744
744 for c_type, raw_id in mark(commit_changes.added, 'a') \
745 for c_type, raw_id in mark(commit_changes.added, 'a') \
745 + mark(commit_changes.removed, 'r') \
746 + mark(commit_changes.removed, 'r') \
746 + mark(commit_changes.common, 'c'):
747 + mark(commit_changes.common, 'c'):
747
748
748 if raw_id in commit_cache:
749 if raw_id in commit_cache:
749 commit = commit_cache[raw_id]
750 commit = commit_cache[raw_id]
750 else:
751 else:
751 try:
752 try:
752 commit = commits_source_repo.get_commit(raw_id)
753 commit = commits_source_repo.get_commit(raw_id)
753 except CommitDoesNotExistError:
754 except CommitDoesNotExistError:
754 # in case extraction fails, still use a "dummy" commit
755 # in case extraction fails, still use a "dummy" commit
755 # for display in the commit diff
756 # for display in the commit diff
756 commit = h.AttributeDict(
757 commit = h.AttributeDict(
757 {'raw_id': raw_id,
758 {'raw_id': raw_id,
758 'message': 'EMPTY or MISSING COMMIT'})
759 'message': 'EMPTY or MISSING COMMIT'})
759 c.commit_changes.append([c_type, commit])
760 c.commit_changes.append([c_type, commit])
760
761
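The `mark` helper above tags every commit id with a one-letter change type by zipping an empty list against the ids, so the fill value becomes the tag on each pair. The same trick in isolation (Python 2's `izip_longest`, matching the `h.itertools` usage above; on Python 3 it is `zip_longest`):

import itertools

def mark(commit_ids, flag):
    # ['abc', 'def'] with flag 'a' -> [('a', 'abc'), ('a', 'def')]
    return list(itertools.izip_longest([], commit_ids, fillvalue=flag))

ordered = mark(['abc'], 'a') + mark(['def'], 'r') + mark(['ghi', 'jkl'], 'c')
# -> [('a', 'abc'), ('r', 'def'), ('c', 'ghi'), ('c', 'jkl')]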
761 # current user review statuses for each version
762 # current user review statuses for each version
762 c.review_versions = {}
763 c.review_versions = {}
763 is_reviewer = PullRequestModel().is_user_reviewer(
764 is_reviewer = PullRequestModel().is_user_reviewer(
764 pull_request, self._rhodecode_user)
765 pull_request, self._rhodecode_user)
765 if is_reviewer:
766 if is_reviewer:
766 for co in general_comments:
767 for co in general_comments:
767 if co.author.user_id == self._rhodecode_user.user_id:
768 if co.author.user_id == self._rhodecode_user.user_id:
768 status = co.status_change
769 status = co.status_change
769 if status:
770 if status:
770 _ver_pr = status[0].comment.pull_request_version_id
771 _ver_pr = status[0].comment.pull_request_version_id
771 c.review_versions[_ver_pr] = status[0]
772 c.review_versions[_ver_pr] = status[0]
772
773
773 return self._get_template_context(c)
774 return self._get_template_context(c)
774
775
775 def get_commits(
776 def get_commits(
776 self, commits_source_repo, pull_request_at_ver, source_commit,
777 self, commits_source_repo, pull_request_at_ver, source_commit,
777 source_ref_id, source_scm, target_commit, target_ref_id, target_scm,
778 source_ref_id, source_scm, target_commit, target_ref_id, target_scm,
778 maybe_unreachable=False):
779 maybe_unreachable=False):
779
780
780 commit_cache = collections.OrderedDict()
781 commit_cache = collections.OrderedDict()
781 missing_requirements = False
782 missing_requirements = False
782
783
783 try:
784 try:
784 pre_load = ["author", "date", "message", "branch", "parents"]
785 pre_load = ["author", "date", "message", "branch", "parents"]
785
786
786 pull_request_commits = pull_request_at_ver.revisions
787 pull_request_commits = pull_request_at_ver.revisions
787 log.debug('Loading %s commits from %s',
788 log.debug('Loading %s commits from %s',
788 len(pull_request_commits), commits_source_repo)
789 len(pull_request_commits), commits_source_repo)
789
790
790 for rev in pull_request_commits:
791 for rev in pull_request_commits:
791 comm = commits_source_repo.get_commit(commit_id=rev, pre_load=pre_load,
792 comm = commits_source_repo.get_commit(commit_id=rev, pre_load=pre_load,
792 maybe_unreachable=maybe_unreachable)
793 maybe_unreachable=maybe_unreachable)
793 commit_cache[comm.raw_id] = comm
794 commit_cache[comm.raw_id] = comm
794
795
795 # Order matters here: we first need to get the target, and then
796 # Order matters here: we first need to get the target, and then
796 # the source
797 # the source
797 target_commit = commits_source_repo.get_commit(
798 target_commit = commits_source_repo.get_commit(
798 commit_id=safe_str(target_ref_id))
799 commit_id=safe_str(target_ref_id))
799
800
800 source_commit = commits_source_repo.get_commit(
801 source_commit = commits_source_repo.get_commit(
801 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
802 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
802 except CommitDoesNotExistError:
803 except CommitDoesNotExistError:
803 log.warning('Failed to get commit from `{}` repo'.format(
804 log.warning('Failed to get commit from `{}` repo'.format(
804 commits_source_repo), exc_info=True)
805 commits_source_repo), exc_info=True)
805 except RepositoryRequirementError:
806 except RepositoryRequirementError:
806 log.warning('Failed to get all required data from repo', exc_info=True)
807 log.warning('Failed to get all required data from repo', exc_info=True)
807 missing_requirements = True
808 missing_requirements = True
808
809
809 pr_ancestor_id = pull_request_at_ver.common_ancestor_id
810 pr_ancestor_id = pull_request_at_ver.common_ancestor_id
810
811
811 try:
812 try:
812 ancestor_commit = source_scm.get_commit(pr_ancestor_id)
813 ancestor_commit = source_scm.get_commit(pr_ancestor_id)
813 except Exception:
814 except Exception:
814 ancestor_commit = None
815 ancestor_commit = None
815
816
816 return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit
817 return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit
817
818
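Note that `get_commits` builds its commit cache as an `OrderedDict` keyed by raw id, so later iteration (for example when filling `c.commit_ranges`) keeps the order of `pull_request_at_ver.revisions`. A tiny illustration of why the ordered mapping matters on Python 2:

import collections

commit_cache = collections.OrderedDict()
for raw_id in ['c3', 'c1', 'c2']:       # pretend this is the PR revision order
    commit_cache[raw_id] = 'commit-%s' % raw_id

assert list(commit_cache) == ['c3', 'c1', 'c2']   # insertion order preserved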
818 def assure_not_empty_repo(self):
819 def assure_not_empty_repo(self):
819 _ = self.request.translate
820 _ = self.request.translate
820
821
821 try:
822 try:
822 self.db_repo.scm_instance().get_commit()
823 self.db_repo.scm_instance().get_commit()
823 except EmptyRepositoryError:
824 except EmptyRepositoryError:
824 h.flash(h.literal(_('There are no commits yet')),
825 h.flash(h.literal(_('There are no commits yet')),
825 category='warning')
826 category='warning')
826 raise HTTPFound(
827 raise HTTPFound(
827 h.route_path('repo_summary', repo_name=self.db_repo.repo_name))
828 h.route_path('repo_summary', repo_name=self.db_repo.repo_name))
828
829
829 @LoginRequired()
830 @LoginRequired()
830 @NotAnonymous()
831 @NotAnonymous()
831 @HasRepoPermissionAnyDecorator(
832 @HasRepoPermissionAnyDecorator(
832 'repository.read', 'repository.write', 'repository.admin')
833 'repository.read', 'repository.write', 'repository.admin')
833 @view_config(
834 @view_config(
834 route_name='pullrequest_new', request_method='GET',
835 route_name='pullrequest_new', request_method='GET',
835 renderer='rhodecode:templates/pullrequests/pullrequest.mako')
836 renderer='rhodecode:templates/pullrequests/pullrequest.mako')
836 def pull_request_new(self):
837 def pull_request_new(self):
837 _ = self.request.translate
838 _ = self.request.translate
838 c = self.load_default_context()
839 c = self.load_default_context()
839
840
840 self.assure_not_empty_repo()
841 self.assure_not_empty_repo()
841 source_repo = self.db_repo
842 source_repo = self.db_repo
842
843
843 commit_id = self.request.GET.get('commit')
844 commit_id = self.request.GET.get('commit')
844 branch_ref = self.request.GET.get('branch')
845 branch_ref = self.request.GET.get('branch')
845 bookmark_ref = self.request.GET.get('bookmark')
846 bookmark_ref = self.request.GET.get('bookmark')
846
847
847 try:
848 try:
848 source_repo_data = PullRequestModel().generate_repo_data(
849 source_repo_data = PullRequestModel().generate_repo_data(
849 source_repo, commit_id=commit_id,
850 source_repo, commit_id=commit_id,
850 branch=branch_ref, bookmark=bookmark_ref,
851 branch=branch_ref, bookmark=bookmark_ref,
851 translator=self.request.translate)
852 translator=self.request.translate)
852 except CommitDoesNotExistError as e:
853 except CommitDoesNotExistError as e:
853 log.exception(e)
854 log.exception(e)
854 h.flash(_('Commit does not exist'), 'error')
855 h.flash(_('Commit does not exist'), 'error')
855 raise HTTPFound(
856 raise HTTPFound(
856 h.route_path('pullrequest_new', repo_name=source_repo.repo_name))
857 h.route_path('pullrequest_new', repo_name=source_repo.repo_name))
857
858
858 default_target_repo = source_repo
859 default_target_repo = source_repo
859
860
860 if source_repo.parent and c.has_origin_repo_read_perm:
861 if source_repo.parent and c.has_origin_repo_read_perm:
861 parent_vcs_obj = source_repo.parent.scm_instance()
862 parent_vcs_obj = source_repo.parent.scm_instance()
862 if parent_vcs_obj and not parent_vcs_obj.is_empty():
863 if parent_vcs_obj and not parent_vcs_obj.is_empty():
863 # change default if we have a parent repo
864 # change default if we have a parent repo
864 default_target_repo = source_repo.parent
865 default_target_repo = source_repo.parent
865
866
866 target_repo_data = PullRequestModel().generate_repo_data(
867 target_repo_data = PullRequestModel().generate_repo_data(
867 default_target_repo, translator=self.request.translate)
868 default_target_repo, translator=self.request.translate)
868
869
869 selected_source_ref = source_repo_data['refs']['selected_ref']
870 selected_source_ref = source_repo_data['refs']['selected_ref']
870 title_source_ref = ''
871 title_source_ref = ''
871 if selected_source_ref:
872 if selected_source_ref:
872 title_source_ref = selected_source_ref.split(':', 2)[1]
873 title_source_ref = selected_source_ref.split(':', 2)[1]
873 c.default_title = PullRequestModel().generate_pullrequest_title(
874 c.default_title = PullRequestModel().generate_pullrequest_title(
874 source=source_repo.repo_name,
875 source=source_repo.repo_name,
875 source_ref=title_source_ref,
876 source_ref=title_source_ref,
876 target=default_target_repo.repo_name
877 target=default_target_repo.repo_name
877 )
878 )
878
879
879 c.default_repo_data = {
880 c.default_repo_data = {
880 'source_repo_name': source_repo.repo_name,
881 'source_repo_name': source_repo.repo_name,
881 'source_refs_json': json.dumps(source_repo_data),
882 'source_refs_json': json.dumps(source_repo_data),
882 'target_repo_name': default_target_repo.repo_name,
883 'target_repo_name': default_target_repo.repo_name,
883 'target_refs_json': json.dumps(target_repo_data),
884 'target_refs_json': json.dumps(target_repo_data),
884 }
885 }
885 c.default_source_ref = selected_source_ref
886 c.default_source_ref = selected_source_ref
886
887
887 return self._get_template_context(c)
888 return self._get_template_context(c)
888
889
889 @LoginRequired()
890 @LoginRequired()
890 @NotAnonymous()
891 @NotAnonymous()
891 @HasRepoPermissionAnyDecorator(
892 @HasRepoPermissionAnyDecorator(
892 'repository.read', 'repository.write', 'repository.admin')
893 'repository.read', 'repository.write', 'repository.admin')
893 @view_config(
894 @view_config(
894 route_name='pullrequest_repo_refs', request_method='GET',
895 route_name='pullrequest_repo_refs', request_method='GET',
895 renderer='json_ext', xhr=True)
896 renderer='json_ext', xhr=True)
896 def pull_request_repo_refs(self):
897 def pull_request_repo_refs(self):
897 self.load_default_context()
898 self.load_default_context()
898 target_repo_name = self.request.matchdict['target_repo_name']
899 target_repo_name = self.request.matchdict['target_repo_name']
899 repo = Repository.get_by_repo_name(target_repo_name)
900 repo = Repository.get_by_repo_name(target_repo_name)
900 if not repo:
901 if not repo:
901 raise HTTPNotFound()
902 raise HTTPNotFound()
902
903
903 target_perm = HasRepoPermissionAny(
904 target_perm = HasRepoPermissionAny(
904 'repository.read', 'repository.write', 'repository.admin')(
905 'repository.read', 'repository.write', 'repository.admin')(
905 target_repo_name)
906 target_repo_name)
906 if not target_perm:
907 if not target_perm:
907 raise HTTPNotFound()
908 raise HTTPNotFound()
908
909
909 return PullRequestModel().generate_repo_data(
910 return PullRequestModel().generate_repo_data(
910 repo, translator=self.request.translate)
911 repo, translator=self.request.translate)
911
912
912 @LoginRequired()
913 @LoginRequired()
913 @NotAnonymous()
914 @NotAnonymous()
914 @HasRepoPermissionAnyDecorator(
915 @HasRepoPermissionAnyDecorator(
915 'repository.read', 'repository.write', 'repository.admin')
916 'repository.read', 'repository.write', 'repository.admin')
916 @view_config(
917 @view_config(
917 route_name='pullrequest_repo_targets', request_method='GET',
918 route_name='pullrequest_repo_targets', request_method='GET',
918 renderer='json_ext', xhr=True)
919 renderer='json_ext', xhr=True)
919 def pullrequest_repo_targets(self):
920 def pullrequest_repo_targets(self):
920 _ = self.request.translate
921 _ = self.request.translate
921 filter_query = self.request.GET.get('query')
922 filter_query = self.request.GET.get('query')
922
923
923 # get the parents
924 # get the parents
924 parent_target_repos = []
925 parent_target_repos = []
925 if self.db_repo.parent:
926 if self.db_repo.parent:
926 parents_query = Repository.query() \
927 parents_query = Repository.query() \
927 .order_by(func.length(Repository.repo_name)) \
928 .order_by(func.length(Repository.repo_name)) \
928 .filter(Repository.fork_id == self.db_repo.parent.repo_id)
929 .filter(Repository.fork_id == self.db_repo.parent.repo_id)
929
930
930 if filter_query:
931 if filter_query:
931 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
932 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
932 parents_query = parents_query.filter(
933 parents_query = parents_query.filter(
933 Repository.repo_name.ilike(ilike_expression))
934 Repository.repo_name.ilike(ilike_expression))
934 parents = parents_query.limit(20).all()
935 parents = parents_query.limit(20).all()
935
936
936 for parent in parents:
937 for parent in parents:
937 parent_vcs_obj = parent.scm_instance()
938 parent_vcs_obj = parent.scm_instance()
938 if parent_vcs_obj and not parent_vcs_obj.is_empty():
939 if parent_vcs_obj and not parent_vcs_obj.is_empty():
939 parent_target_repos.append(parent)
940 parent_target_repos.append(parent)
940
941
941 # get other forks, and repo itself
942 # get other forks, and repo itself
942 query = Repository.query() \
943 query = Repository.query() \
943 .order_by(func.length(Repository.repo_name)) \
944 .order_by(func.length(Repository.repo_name)) \
944 .filter(
945 .filter(
945 or_(Repository.repo_id == self.db_repo.repo_id, # repo itself
946 or_(Repository.repo_id == self.db_repo.repo_id, # repo itself
946 Repository.fork_id == self.db_repo.repo_id) # forks of this repo
947 Repository.fork_id == self.db_repo.repo_id) # forks of this repo
947 ) \
948 ) \
948 .filter(~Repository.repo_id.in_([x.repo_id for x in parent_target_repos]))
949 .filter(~Repository.repo_id.in_([x.repo_id for x in parent_target_repos]))
949
950
950 if filter_query:
951 if filter_query:
951 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
952 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
952 query = query.filter(Repository.repo_name.ilike(ilike_expression))
953 query = query.filter(Repository.repo_name.ilike(ilike_expression))
953
954
954 limit = max(20 - len(parent_target_repos), 5) # not less than 5
955 limit = max(20 - len(parent_target_repos), 5) # not less than 5
955 target_repos = query.limit(limit).all()
956 target_repos = query.limit(limit).all()
956
957
957 all_target_repos = target_repos + parent_target_repos
958 all_target_repos = target_repos + parent_target_repos
958
959
959 repos = []
960 repos = []
960 # This checks permissions to the repositories
961 # This checks permissions to the repositories
961 for obj in ScmModel().get_repos(all_target_repos):
962 for obj in ScmModel().get_repos(all_target_repos):
962 repos.append({
963 repos.append({
963 'id': obj['name'],
964 'id': obj['name'],
964 'text': obj['name'],
965 'text': obj['name'],
965 'type': 'repo',
966 'type': 'repo',
966 'repo_id': obj['dbrepo']['repo_id'],
967 'repo_id': obj['dbrepo']['repo_id'],
967 'repo_type': obj['dbrepo']['repo_type'],
968 'repo_type': obj['dbrepo']['repo_type'],
968 'private': obj['dbrepo']['private'],
969 'private': obj['dbrepo']['private'],
969
970
970 })
971 })
971
972
972 data = {
973 data = {
973 'more': False,
974 'more': False,
974 'results': [{
975 'results': [{
975 'text': _('Repositories'),
976 'text': _('Repositories'),
976 'children': repos
977 'children': repos
977 }] if repos else []
978 }] if repos else []
978 }
979 }
979 return data
980 return data
980
981
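The dict returned above is the select2-style payload the target-repository dropdown consumes: a single 'Repositories' group whose children carry the fields added in the loop. One possible response, with made-up repository names and ids:

{
    'more': False,
    'results': [{
        'text': 'Repositories',
        'children': [
            {'id': 'acme/docs', 'text': 'acme/docs', 'type': 'repo',
             'repo_id': 42, 'repo_type': 'git', 'private': False},
            {'id': 'acme/docs-fork', 'text': 'acme/docs-fork', 'type': 'repo',
             'repo_id': 77, 'repo_type': 'git', 'private': True},
        ],
    }],
}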
981 def _get_existing_ids(self, post_data):
982 def _get_existing_ids(self, post_data):
982 return filter(lambda e: e, map(safe_int, aslist(post_data.get('comments'), ',')))
983 return filter(lambda e: e, map(safe_int, aslist(post_data.get('comments'), ',')))
983
984
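`_get_existing_ids` turns the comma-separated `comments` POST field into integer ids, silently dropping blanks and anything that does not parse. A plain-Python sketch of the same behaviour (here `safe_int` and `aslist` are assumed to mean int-or-None conversion and comma splitting):

def existing_ids(raw_value):
    # '12, 15, , x' -> [12, 15]
    def safe_int(value):
        try:
            return int(value.strip())
        except (TypeError, ValueError, AttributeError):
            return None
    parts = (raw_value or '').split(',')
    return [num for num in map(safe_int, parts) if num]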
984 @LoginRequired()
985 @LoginRequired()
985 @NotAnonymous()
986 @NotAnonymous()
986 @HasRepoPermissionAnyDecorator(
987 @HasRepoPermissionAnyDecorator(
987 'repository.read', 'repository.write', 'repository.admin')
988 'repository.read', 'repository.write', 'repository.admin')
988 @view_config(
989 @view_config(
989 route_name='pullrequest_comments', request_method='POST',
990 route_name='pullrequest_comments', request_method='POST',
990 renderer='string_html', xhr=True)
991 renderer='string_html', xhr=True)
991 def pullrequest_comments(self):
992 def pullrequest_comments(self):
992 self.load_default_context()
993 self.load_default_context()
993
994
994 pull_request = PullRequest.get_or_404(
995 pull_request = PullRequest.get_or_404(
995 self.request.matchdict['pull_request_id'])
996 self.request.matchdict['pull_request_id'])
996 pull_request_id = pull_request.pull_request_id
997 pull_request_id = pull_request.pull_request_id
997 version = self.request.GET.get('version')
998 version = self.request.GET.get('version')
998
999
999 _render = self.request.get_partial_renderer(
1000 _render = self.request.get_partial_renderer(
1000 'rhodecode:templates/base/sidebar.mako')
1001 'rhodecode:templates/base/sidebar.mako')
1001 c = _render.get_call_context()
1002 c = _render.get_call_context()
1002
1003
1003 (pull_request_latest,
1004 (pull_request_latest,
1004 pull_request_at_ver,
1005 pull_request_at_ver,
1005 pull_request_display_obj,
1006 pull_request_display_obj,
1006 at_version) = PullRequestModel().get_pr_version(
1007 at_version) = PullRequestModel().get_pr_version(
1007 pull_request_id, version=version)
1008 pull_request_id, version=version)
1008 versions = pull_request_display_obj.versions()
1009 versions = pull_request_display_obj.versions()
1009 latest_ver = PullRequest.get_pr_display_object(pull_request_latest, pull_request_latest)
1010 latest_ver = PullRequest.get_pr_display_object(pull_request_latest, pull_request_latest)
1010 c.versions = versions + [latest_ver]
1011 c.versions = versions + [latest_ver]
1011
1012
1012 c.at_version = at_version
1013 c.at_version = at_version
1013 c.at_version_num = (at_version
1014 c.at_version_num = (at_version
1014 if at_version and at_version != PullRequest.LATEST_VER
1015 if at_version and at_version != PullRequest.LATEST_VER
1015 else None)
1016 else None)
1016
1017
1017 self.register_comments_vars(c, pull_request_latest, versions)
1018 self.register_comments_vars(c, pull_request_latest, versions)
1018 all_comments = c.inline_comments_flat + c.comments
1019 all_comments = c.inline_comments_flat + c.comments
1019
1020
1020 existing_ids = self._get_existing_ids(self.request.POST)
1021 existing_ids = self._get_existing_ids(self.request.POST)
1021 return _render('comments_table', all_comments, len(all_comments),
1022 return _render('comments_table', all_comments, len(all_comments),
1022 existing_ids=existing_ids)
1023 existing_ids=existing_ids)
1023
1024
1024 @LoginRequired()
1025 @LoginRequired()
1025 @NotAnonymous()
1026 @NotAnonymous()
1026 @HasRepoPermissionAnyDecorator(
1027 @HasRepoPermissionAnyDecorator(
1027 'repository.read', 'repository.write', 'repository.admin')
1028 'repository.read', 'repository.write', 'repository.admin')
1028 @view_config(
1029 @view_config(
1029 route_name='pullrequest_todos', request_method='POST',
1030 route_name='pullrequest_todos', request_method='POST',
1030 renderer='string_html', xhr=True)
1031 renderer='string_html', xhr=True)
1031 def pullrequest_todos(self):
1032 def pullrequest_todos(self):
1032 self.load_default_context()
1033 self.load_default_context()
1033
1034
1034 pull_request = PullRequest.get_or_404(
1035 pull_request = PullRequest.get_or_404(
1035 self.request.matchdict['pull_request_id'])
1036 self.request.matchdict['pull_request_id'])
1036 pull_request_id = pull_request.pull_request_id
1037 pull_request_id = pull_request.pull_request_id
1037 version = self.request.GET.get('version')
1038 version = self.request.GET.get('version')
1038
1039
1039 _render = self.request.get_partial_renderer(
1040 _render = self.request.get_partial_renderer(
1040 'rhodecode:templates/base/sidebar.mako')
1041 'rhodecode:templates/base/sidebar.mako')
1041 c = _render.get_call_context()
1042 c = _render.get_call_context()
1042 (pull_request_latest,
1043 (pull_request_latest,
1043 pull_request_at_ver,
1044 pull_request_at_ver,
1044 pull_request_display_obj,
1045 pull_request_display_obj,
1045 at_version) = PullRequestModel().get_pr_version(
1046 at_version) = PullRequestModel().get_pr_version(
1046 pull_request_id, version=version)
1047 pull_request_id, version=version)
1047 versions = pull_request_display_obj.versions()
1048 versions = pull_request_display_obj.versions()
1048 latest_ver = PullRequest.get_pr_display_object(pull_request_latest, pull_request_latest)
1049 latest_ver = PullRequest.get_pr_display_object(pull_request_latest, pull_request_latest)
1049 c.versions = versions + [latest_ver]
1050 c.versions = versions + [latest_ver]
1050
1051
1051 c.at_version = at_version
1052 c.at_version = at_version
1052 c.at_version_num = (at_version
1053 c.at_version_num = (at_version
1053 if at_version and at_version != PullRequest.LATEST_VER
1054 if at_version and at_version != PullRequest.LATEST_VER
1054 else None)
1055 else None)
1055
1056
1056 c.unresolved_comments = CommentsModel() \
1057 c.unresolved_comments = CommentsModel() \
1057 .get_pull_request_unresolved_todos(pull_request)
1058 .get_pull_request_unresolved_todos(pull_request)
1058 c.resolved_comments = CommentsModel() \
1059 c.resolved_comments = CommentsModel() \
1059 .get_pull_request_resolved_todos(pull_request)
1060 .get_pull_request_resolved_todos(pull_request)
1060
1061
1061 all_comments = c.unresolved_comments + c.resolved_comments
1062 all_comments = c.unresolved_comments + c.resolved_comments
1062 existing_ids = self._get_existing_ids(self.request.POST)
1063 existing_ids = self._get_existing_ids(self.request.POST)
1063 return _render('comments_table', all_comments, len(c.unresolved_comments),
1064 return _render('comments_table', all_comments, len(c.unresolved_comments),
1064 todo_comments=True, existing_ids=existing_ids)
1065 todo_comments=True, existing_ids=existing_ids)
1065
1066
1066 @LoginRequired()
1067 @LoginRequired()
1067 @NotAnonymous()
1068 @NotAnonymous()
1068 @HasRepoPermissionAnyDecorator(
1069 @HasRepoPermissionAnyDecorator(
1069 'repository.read', 'repository.write', 'repository.admin')
1070 'repository.read', 'repository.write', 'repository.admin')
1070 @CSRFRequired()
1071 @CSRFRequired()
1071 @view_config(
1072 @view_config(
1072 route_name='pullrequest_create', request_method='POST',
1073 route_name='pullrequest_create', request_method='POST',
1073 renderer=None)
1074 renderer=None)
1074 def pull_request_create(self):
1075 def pull_request_create(self):
1075 _ = self.request.translate
1076 _ = self.request.translate
1076 self.assure_not_empty_repo()
1077 self.assure_not_empty_repo()
1077 self.load_default_context()
1078 self.load_default_context()
1078
1079
1079 controls = peppercorn.parse(self.request.POST.items())
1080 controls = peppercorn.parse(self.request.POST.items())
1080
1081
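`peppercorn.parse` rebuilds nested structures from the flat list of POST fields, which is how multi-valued widgets such as the reviewer lists arrive as real lists. A minimal sketch of that behaviour (field names below are illustrative, not the actual form fields):

import peppercorn

fields = [
    ('pullrequest_title', 'Fix login redirect'),
    ('__start__', 'review_members:sequence'),
    ('member', 'user:2'),
    ('member', 'user:7'),
    ('__end__', 'review_members:sequence'),
]
controls = peppercorn.parse(fields)
# -> {'pullrequest_title': 'Fix login redirect', 'review_members': ['user:2', 'user:7']}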
1081 try:
1082 try:
1082 form = PullRequestForm(
1083 form = PullRequestForm(
1083 self.request.translate, self.db_repo.repo_id)()
1084 self.request.translate, self.db_repo.repo_id)()
1084 _form = form.to_python(controls)
1085 _form = form.to_python(controls)
1085 except formencode.Invalid as errors:
1086 except formencode.Invalid as errors:
1086 if errors.error_dict.get('revisions'):
1087 if errors.error_dict.get('revisions'):
1087 msg = 'Revisions: %s' % errors.error_dict['revisions']
1088 msg = 'Revisions: %s' % errors.error_dict['revisions']
1088 elif errors.error_dict.get('pullrequest_title'):
1089 elif errors.error_dict.get('pullrequest_title'):
1089 msg = errors.error_dict.get('pullrequest_title')
1090 msg = errors.error_dict.get('pullrequest_title')
1090 else:
1091 else:
1091 msg = _('Error creating pull request: {}').format(errors)
1092 msg = _('Error creating pull request: {}').format(errors)
1092 log.exception(msg)
1093 log.exception(msg)
1093 h.flash(msg, 'error')
1094 h.flash(msg, 'error')
1094
1095
1095 # would rather just go back to form ...
1096 # would rather just go back to form ...
1096 raise HTTPFound(
1097 raise HTTPFound(
1097 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
1098 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
1098
1099
1099 source_repo = _form['source_repo']
1100 source_repo = _form['source_repo']
1100 source_ref = _form['source_ref']
1101 source_ref = _form['source_ref']
1101 target_repo = _form['target_repo']
1102 target_repo = _form['target_repo']
1102 target_ref = _form['target_ref']
1103 target_ref = _form['target_ref']
1103 commit_ids = _form['revisions'][::-1]
1104 commit_ids = _form['revisions'][::-1]
1104 common_ancestor_id = _form['common_ancestor']
1105 common_ancestor_id = _form['common_ancestor']
1105
1106
1106 # find the ancestor for this pr
1107 # find the ancestor for this pr
1107 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
1108 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
1108 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
1109 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
1109
1110
1110 if not (source_db_repo and target_db_repo):
1111 if not (source_db_repo and target_db_repo):
1111 h.flash(_('Source repo or target repo not found'), category='error')
1112 h.flash(_('Source repo or target repo not found'), category='error')
1112 raise HTTPFound(
1113 raise HTTPFound(
1113 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
1114 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
1114
1115
1115 # re-check permissions again here
1116 # re-check permissions again here
1116 # source_repo we must have read permissions
1117 # source_repo we must have read permissions
1117
1118
1118 source_perm = HasRepoPermissionAny(
1119 source_perm = HasRepoPermissionAny(
1119 'repository.read', 'repository.write', 'repository.admin')(
1120 'repository.read', 'repository.write', 'repository.admin')(
1120 source_db_repo.repo_name)
1121 source_db_repo.repo_name)
1121 if not source_perm:
1122 if not source_perm:
1122 msg = _('Not enough permissions to source repo `{}`.').format(
1123 msg = _('Not enough permissions to source repo `{}`.').format(
1123 source_db_repo.repo_name)
1124 source_db_repo.repo_name)
1124 h.flash(msg, category='error')
1125 h.flash(msg, category='error')
1125 # copy the args back to redirect
1126 # copy the args back to redirect
1126 org_query = self.request.GET.mixed()
1127 org_query = self.request.GET.mixed()
1127 raise HTTPFound(
1128 raise HTTPFound(
1128 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1129 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1129 _query=org_query))
1130 _query=org_query))
1130
1131
1131 # target repo we must have read permissions, and also later on
1132 # target repo we must have read permissions, and also later on
1132 # we want to check branch permissions here
1133 # we want to check branch permissions here
1133 target_perm = HasRepoPermissionAny(
1134 target_perm = HasRepoPermissionAny(
1134 'repository.read', 'repository.write', 'repository.admin')(
1135 'repository.read', 'repository.write', 'repository.admin')(
1135 target_db_repo.repo_name)
1136 target_db_repo.repo_name)
1136 if not target_perm:
1137 if not target_perm:
1137 msg = _('Not enough permissions to target repo `{}`.').format(
1138 msg = _('Not enough permissions to target repo `{}`.').format(
1138 target_db_repo.repo_name)
1139 target_db_repo.repo_name)
1139 h.flash(msg, category='error')
1140 h.flash(msg, category='error')
1140 # copy the args back to redirect
1141 # copy the args back to redirect
1141 org_query = self.request.GET.mixed()
1142 org_query = self.request.GET.mixed()
1142 raise HTTPFound(
1143 raise HTTPFound(
1143 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1144 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1144 _query=org_query))
1145 _query=org_query))
1145
1146
1146 source_scm = source_db_repo.scm_instance()
1147 source_scm = source_db_repo.scm_instance()
1147 target_scm = target_db_repo.scm_instance()
1148 target_scm = target_db_repo.scm_instance()
1148
1149
1149 source_commit = source_scm.get_commit(source_ref.split(':')[-1])
1150 source_ref_obj = unicode_to_reference(source_ref)
1150 target_commit = target_scm.get_commit(target_ref.split(':')[-1])
1151 target_ref_obj = unicode_to_reference(target_ref)
1152
1153 source_commit = source_scm.get_commit(source_ref_obj.commit_id)
1154 target_commit = target_scm.get_commit(target_ref_obj.commit_id)
1151
1155
1152 ancestor = source_scm.get_common_ancestor(
1156 ancestor = source_scm.get_common_ancestor(
1153 source_commit.raw_id, target_commit.raw_id, target_scm)
1157 source_commit.raw_id, target_commit.raw_id, target_scm)
1154
1158
1155 source_ref_type, source_ref_name, source_commit_id = _form['target_ref'].split(':')
1156 target_ref_type, target_ref_name, target_commit_id = _form['source_ref'].split(':')
1157 # recalculate target ref based on ancestor
1159 # recalculate target ref based on ancestor
1158 target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
1160 target_ref = ':'.join((target_ref_obj.type, target_ref_obj.name, ancestor))
1159
1161
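Both the removed `.split(':')` code and the `':'.join(...)` above rely on refs being encoded as `type:name:commit_id` strings, and `unicode_to_reference` presumably parses such a string into an object exposing `type`, `name` and `commit_id`. A small sketch of that round trip under exactly that assumption:

import collections

Ref = collections.namedtuple('Ref', ['type', 'name', 'commit_id'])   # stand-in for Reference

def parse_ref(ref_string):
    # 'branch:default:1a2b3c' -> Ref(type='branch', name='default', commit_id='1a2b3c')
    ref_type, ref_name, commit_id = ref_string.split(':', 2)
    return Ref(ref_type, ref_name, commit_id)

ref = parse_ref('branch:default:1a2b3c')
rebased = ':'.join((ref.type, ref.name, 'ancestor-sha'))   # same re-anchoring as target_ref above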
1160 get_default_reviewers_data, validate_default_reviewers, validate_observers = \
1162 get_default_reviewers_data, validate_default_reviewers, validate_observers = \
1161 PullRequestModel().get_reviewer_functions()
1163 PullRequestModel().get_reviewer_functions()
1162
1164
1163 # recalculate reviewers logic, to make sure we can validate this
1165 # recalculate reviewers logic, to make sure we can validate this
1164 reviewer_rules = get_default_reviewers_data(
1166 reviewer_rules = get_default_reviewers_data(
1165 self._rhodecode_db_user,
1167 self._rhodecode_db_user,
1166 source_db_repo,
1168 source_db_repo,
1167 Reference(source_ref_type, source_ref_name, source_commit_id),
1169 source_ref_obj,
1168 target_db_repo,
1170 target_db_repo,
1169 Reference(target_ref_type, target_ref_name, target_commit_id),
1171 target_ref_obj,
1170 include_diff_info=False)
1172 include_diff_info=False)
1171
1173
1172 reviewers = validate_default_reviewers(_form['review_members'], reviewer_rules)
1174 reviewers = validate_default_reviewers(_form['review_members'], reviewer_rules)
1173 observers = validate_observers(_form['observer_members'], reviewer_rules)
1175 observers = validate_observers(_form['observer_members'], reviewer_rules)
1174
1176
1175 pullrequest_title = _form['pullrequest_title']
1177 pullrequest_title = _form['pullrequest_title']
1176 title_source_ref = source_ref.split(':', 2)[1]
1178 title_source_ref = source_ref_obj.name
1177 if not pullrequest_title:
1179 if not pullrequest_title:
1178 pullrequest_title = PullRequestModel().generate_pullrequest_title(
1180 pullrequest_title = PullRequestModel().generate_pullrequest_title(
1179 source=source_repo,
1181 source=source_repo,
1180 source_ref=title_source_ref,
1182 source_ref=title_source_ref,
1181 target=target_repo
1183 target=target_repo
1182 )
1184 )
1183
1185
1184 description = _form['pullrequest_desc']
1186 description = _form['pullrequest_desc']
1185 description_renderer = _form['description_renderer']
1187 description_renderer = _form['description_renderer']
1186
1188
1187 try:
1189 try:
1188 pull_request = PullRequestModel().create(
1190 pull_request = PullRequestModel().create(
1189 created_by=self._rhodecode_user.user_id,
1191 created_by=self._rhodecode_user.user_id,
1190 source_repo=source_repo,
1192 source_repo=source_repo,
1191 source_ref=source_ref,
1193 source_ref=source_ref,
1192 target_repo=target_repo,
1194 target_repo=target_repo,
1193 target_ref=target_ref,
1195 target_ref=target_ref,
1194 revisions=commit_ids,
1196 revisions=commit_ids,
1195 common_ancestor_id=common_ancestor_id,
1197 common_ancestor_id=common_ancestor_id,
1196 reviewers=reviewers,
1198 reviewers=reviewers,
1197 observers=observers,
1199 observers=observers,
1198 title=pullrequest_title,
1200 title=pullrequest_title,
1199 description=description,
1201 description=description,
1200 description_renderer=description_renderer,
1202 description_renderer=description_renderer,
1201 reviewer_data=reviewer_rules,
1203 reviewer_data=reviewer_rules,
1202 auth_user=self._rhodecode_user
1204 auth_user=self._rhodecode_user
1203 )
1205 )
1204 Session().commit()
1206 Session().commit()
1205
1207
1206 h.flash(_('Successfully opened new pull request'),
1208 h.flash(_('Successfully opened new pull request'),
1207 category='success')
1209 category='success')
1208 except Exception:
1210 except Exception:
1209 msg = _('Error occurred during creation of this pull request.')
1211 msg = _('Error occurred during creation of this pull request.')
1210 log.exception(msg)
1212 log.exception(msg)
1211 h.flash(msg, category='error')
1213 h.flash(msg, category='error')
1212
1214
1213 # copy the args back to redirect
1215 # copy the args back to redirect
1214 org_query = self.request.GET.mixed()
1216 org_query = self.request.GET.mixed()
1215 raise HTTPFound(
1217 raise HTTPFound(
1216 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1218 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1217 _query=org_query))
1219 _query=org_query))
1218
1220
1219 raise HTTPFound(
1221 raise HTTPFound(
1220 h.route_path('pullrequest_show', repo_name=target_repo,
1222 h.route_path('pullrequest_show', repo_name=target_repo,
1221 pull_request_id=pull_request.pull_request_id))
1223 pull_request_id=pull_request.pull_request_id))
1222
1224
1223 @LoginRequired()
1225 @LoginRequired()
1224 @NotAnonymous()
1226 @NotAnonymous()
1225 @HasRepoPermissionAnyDecorator(
1227 @HasRepoPermissionAnyDecorator(
1226 'repository.read', 'repository.write', 'repository.admin')
1228 'repository.read', 'repository.write', 'repository.admin')
1227 @CSRFRequired()
1229 @CSRFRequired()
1228 @view_config(
1230 @view_config(
1229 route_name='pullrequest_update', request_method='POST',
1231 route_name='pullrequest_update', request_method='POST',
1230 renderer='json_ext')
1232 renderer='json_ext')
1231 def pull_request_update(self):
1233 def pull_request_update(self):
1232 pull_request = PullRequest.get_or_404(
1234 pull_request = PullRequest.get_or_404(
1233 self.request.matchdict['pull_request_id'])
1235 self.request.matchdict['pull_request_id'])
1234 _ = self.request.translate
1236 _ = self.request.translate
1235
1237
1236 c = self.load_default_context()
1238 c = self.load_default_context()
1237 redirect_url = None
1239 redirect_url = None
1238
1240
1239 if pull_request.is_closed():
1241 if pull_request.is_closed():
1240 log.debug('update: forbidden because pull request is closed')
1242 log.debug('update: forbidden because pull request is closed')
1241 msg = _(u'Cannot update closed pull requests.')
1243 msg = _(u'Cannot update closed pull requests.')
1242 h.flash(msg, category='error')
1244 h.flash(msg, category='error')
1243 return {'response': True,
1245 return {'response': True,
1244 'redirect_url': redirect_url}
1246 'redirect_url': redirect_url}
1245
1247
1246 is_state_changing = pull_request.is_state_changing()
1248 is_state_changing = pull_request.is_state_changing()
1247 c.pr_broadcast_channel = channelstream.pr_channel(pull_request)
1249 c.pr_broadcast_channel = channelstream.pr_channel(pull_request)
1248
1250
1249 # only owner or admin can update it
1251 # only owner or admin can update it
1250 allowed_to_update = PullRequestModel().check_user_update(
1252 allowed_to_update = PullRequestModel().check_user_update(
1251 pull_request, self._rhodecode_user)
1253 pull_request, self._rhodecode_user)
1252
1254
1253 if allowed_to_update:
1255 if allowed_to_update:
1254 controls = peppercorn.parse(self.request.POST.items())
1256 controls = peppercorn.parse(self.request.POST.items())
1255 force_refresh = str2bool(self.request.POST.get('force_refresh'))
1257 force_refresh = str2bool(self.request.POST.get('force_refresh'))
1256
1258
1257 if 'review_members' in controls:
1259 if 'review_members' in controls:
1258 self._update_reviewers(
1260 self._update_reviewers(
1259 c,
1261 c,
1260 pull_request, controls['review_members'],
1262 pull_request, controls['review_members'],
1261 pull_request.reviewer_data,
1263 pull_request.reviewer_data,
1262 PullRequestReviewers.ROLE_REVIEWER)
1264 PullRequestReviewers.ROLE_REVIEWER)
1263 elif 'observer_members' in controls:
1265 elif 'observer_members' in controls:
1264 self._update_reviewers(
1266 self._update_reviewers(
1265 c,
1267 c,
1266 pull_request, controls['observer_members'],
1268 pull_request, controls['observer_members'],
1267 pull_request.reviewer_data,
1269 pull_request.reviewer_data,
1268 PullRequestReviewers.ROLE_OBSERVER)
1270 PullRequestReviewers.ROLE_OBSERVER)
1269 elif str2bool(self.request.POST.get('update_commits', 'false')):
1271 elif str2bool(self.request.POST.get('update_commits', 'false')):
1270 if is_state_changing:
1272 if is_state_changing:
1271 log.debug('commits update: forbidden because pull request is in state %s',
1273 log.debug('commits update: forbidden because pull request is in state %s',
1272 pull_request.pull_request_state)
1274 pull_request.pull_request_state)
1273 msg = _(u'Cannot update pull request commits in a state other than `{}`. '
1275 msg = _(u'Cannot update pull request commits in a state other than `{}`. '
1274 u'Current state is: `{}`').format(
1276 u'Current state is: `{}`').format(
1275 PullRequest.STATE_CREATED, pull_request.pull_request_state)
1277 PullRequest.STATE_CREATED, pull_request.pull_request_state)
1276 h.flash(msg, category='error')
1278 h.flash(msg, category='error')
1277 return {'response': True,
1279 return {'response': True,
1278 'redirect_url': redirect_url}
1280 'redirect_url': redirect_url}
1279
1281
1280 self._update_commits(c, pull_request)
1282 self._update_commits(c, pull_request)
1281 if force_refresh:
1283 if force_refresh:
1282 redirect_url = h.route_path(
1284 redirect_url = h.route_path(
1283 'pullrequest_show', repo_name=self.db_repo_name,
1285 'pullrequest_show', repo_name=self.db_repo_name,
1284 pull_request_id=pull_request.pull_request_id,
1286 pull_request_id=pull_request.pull_request_id,
1285 _query={"force_refresh": 1})
1287 _query={"force_refresh": 1})
1286 elif str2bool(self.request.POST.get('edit_pull_request', 'false')):
1288 elif str2bool(self.request.POST.get('edit_pull_request', 'false')):
1287 self._edit_pull_request(pull_request)
1289 self._edit_pull_request(pull_request)
1288 else:
1290 else:
1289 log.error('Unhandled update data.')
1291 log.error('Unhandled update data.')
1290 raise HTTPBadRequest()
1292 raise HTTPBadRequest()
1291
1293
1292 return {'response': True,
1294 return {'response': True,
1293 'redirect_url': redirect_url}
1295 'redirect_url': redirect_url}
1294 raise HTTPForbidden()
1296 raise HTTPForbidden()
1295
1297
1296 def _edit_pull_request(self, pull_request):
1298 def _edit_pull_request(self, pull_request):
1297 """
1299 """
1298 Edit title and description
1300 Edit title and description
1299 """
1301 """
1300 _ = self.request.translate
1302 _ = self.request.translate
1301
1303
1302 try:
1304 try:
1303 PullRequestModel().edit(
1305 PullRequestModel().edit(
1304 pull_request,
1306 pull_request,
1305 self.request.POST.get('title'),
1307 self.request.POST.get('title'),
1306 self.request.POST.get('description'),
1308 self.request.POST.get('description'),
1307 self.request.POST.get('description_renderer'),
1309 self.request.POST.get('description_renderer'),
1308 self._rhodecode_user)
1310 self._rhodecode_user)
1309 except ValueError:
1311 except ValueError:
1310 msg = _(u'Cannot update closed pull requests.')
1312 msg = _(u'Cannot update closed pull requests.')
1311 h.flash(msg, category='error')
1313 h.flash(msg, category='error')
1312 return
1314 return
1313 else:
1315 else:
1314 Session().commit()
1316 Session().commit()
1315
1317
1316 msg = _(u'Pull request title & description updated.')
1318 msg = _(u'Pull request title & description updated.')
1317 h.flash(msg, category='success')
1319 h.flash(msg, category='success')
1318 return
1320 return
1319
1321
1320 def _update_commits(self, c, pull_request):
1322 def _update_commits(self, c, pull_request):
1321 _ = self.request.translate
1323 _ = self.request.translate
1322
1324
1323 with pull_request.set_state(PullRequest.STATE_UPDATING):
1325 with pull_request.set_state(PullRequest.STATE_UPDATING):
1324 resp = PullRequestModel().update_commits(
1326 resp = PullRequestModel().update_commits(
1325 pull_request, self._rhodecode_db_user)
1327 pull_request, self._rhodecode_db_user)
1326
1328
1327 if resp.executed:
1329 if resp.executed:
1328
1330
1329 if resp.target_changed and resp.source_changed:
1331 if resp.target_changed and resp.source_changed:
1330 changed = 'target and source repositories'
1332 changed = 'target and source repositories'
1331 elif resp.target_changed and not resp.source_changed:
1333 elif resp.target_changed and not resp.source_changed:
1332 changed = 'target repository'
1334 changed = 'target repository'
1333 elif not resp.target_changed and resp.source_changed:
1335 elif not resp.target_changed and resp.source_changed:
1334 changed = 'source repository'
1336 changed = 'source repository'
1335 else:
1337 else:
1336 changed = 'nothing'
1338 changed = 'nothing'
1337
1339
1338 msg = _(u'Pull request updated to "{source_commit_id}" with '
1340 msg = _(u'Pull request updated to "{source_commit_id}" with '
1339 u'{count_added} added, {count_removed} removed commits. '
1341 u'{count_added} added, {count_removed} removed commits. '
1340 u'Source of changes: {change_source}.')
1342 u'Source of changes: {change_source}.')
1341 msg = msg.format(
1343 msg = msg.format(
1342 source_commit_id=pull_request.source_ref_parts.commit_id,
1344 source_commit_id=pull_request.source_ref_parts.commit_id,
1343 count_added=len(resp.changes.added),
1345 count_added=len(resp.changes.added),
1344 count_removed=len(resp.changes.removed),
1346 count_removed=len(resp.changes.removed),
1345 change_source=changed)
1347 change_source=changed)
1346 h.flash(msg, category='success')
1348 h.flash(msg, category='success')
1347 channelstream.pr_update_channelstream_push(
1349 channelstream.pr_update_channelstream_push(
1348 self.request, c.pr_broadcast_channel, self._rhodecode_user, msg)
1350 self.request, c.pr_broadcast_channel, self._rhodecode_user, msg)
1349 else:
1351 else:
1350 msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
1352 msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
1351 warning_reasons = [
1353 warning_reasons = [
1352 UpdateFailureReason.NO_CHANGE,
1354 UpdateFailureReason.NO_CHANGE,
1353 UpdateFailureReason.WRONG_REF_TYPE,
1355 UpdateFailureReason.WRONG_REF_TYPE,
1354 ]
1356 ]
1355 category = 'warning' if resp.reason in warning_reasons else 'error'
1357 category = 'warning' if resp.reason in warning_reasons else 'error'
1356 h.flash(msg, category=category)
1358 h.flash(msg, category=category)
1357
1359
1358 def _update_reviewers(self, c, pull_request, review_members, reviewer_rules, role):
1360 def _update_reviewers(self, c, pull_request, review_members, reviewer_rules, role):
1359 _ = self.request.translate
1361 _ = self.request.translate
1360
1362
1361 get_default_reviewers_data, validate_default_reviewers, validate_observers = \
1363 get_default_reviewers_data, validate_default_reviewers, validate_observers = \
1362 PullRequestModel().get_reviewer_functions()
1364 PullRequestModel().get_reviewer_functions()
1363
1365
1364 if role == PullRequestReviewers.ROLE_REVIEWER:
1366 if role == PullRequestReviewers.ROLE_REVIEWER:
1365 try:
1367 try:
1366 reviewers = validate_default_reviewers(review_members, reviewer_rules)
1368 reviewers = validate_default_reviewers(review_members, reviewer_rules)
1367 except ValueError as e:
1369 except ValueError as e:
1368 log.error('Reviewers Validation: {}'.format(e))
1370 log.error('Reviewers Validation: {}'.format(e))
1369 h.flash(e, category='error')
1371 h.flash(e, category='error')
1370 return
1372 return
1371
1373
1372 old_calculated_status = pull_request.calculated_review_status()
1374 old_calculated_status = pull_request.calculated_review_status()
1373 PullRequestModel().update_reviewers(
1375 PullRequestModel().update_reviewers(
1374 pull_request, reviewers, self._rhodecode_user)
1376 pull_request, reviewers, self._rhodecode_db_user)
1375
1377
1376 Session().commit()
1378 Session().commit()
1377
1379
1378 msg = _('Pull request reviewers updated.')
1380 msg = _('Pull request reviewers updated.')
1379 h.flash(msg, category='success')
1381 h.flash(msg, category='success')
1380 channelstream.pr_update_channelstream_push(
1382 channelstream.pr_update_channelstream_push(
1381 self.request, c.pr_broadcast_channel, self._rhodecode_user, msg)
1383 self.request, c.pr_broadcast_channel, self._rhodecode_user, msg)
1382
1384
1383 # trigger status changed if change in reviewers changes the status
1385 # trigger status changed if change in reviewers changes the status
1384 calculated_status = pull_request.calculated_review_status()
1386 calculated_status = pull_request.calculated_review_status()
1385 if old_calculated_status != calculated_status:
1387 if old_calculated_status != calculated_status:
1386 PullRequestModel().trigger_pull_request_hook(
1388 PullRequestModel().trigger_pull_request_hook(
1387 pull_request, self._rhodecode_user, 'review_status_change',
1389 pull_request, self._rhodecode_user, 'review_status_change',
1388 data={'status': calculated_status})
1390 data={'status': calculated_status})
1389
1391
1390 elif role == PullRequestReviewers.ROLE_OBSERVER:
1392 elif role == PullRequestReviewers.ROLE_OBSERVER:
1391 try:
1393 try:
1392 observers = validate_observers(review_members, reviewer_rules)
1394 observers = validate_observers(review_members, reviewer_rules)
1393 except ValueError as e:
1395 except ValueError as e:
1394 log.error('Observers Validation: {}'.format(e))
1396 log.error('Observers Validation: {}'.format(e))
1395 h.flash(e, category='error')
1397 h.flash(e, category='error')
1396 return
1398 return
1397
1399
1398 PullRequestModel().update_observers(
1400 PullRequestModel().update_observers(
1399 pull_request, observers, self._rhodecode_user)
1401 pull_request, observers, self._rhodecode_db_user)
1400
1402
1401 Session().commit()
1403 Session().commit()
1402 msg = _('Pull request observers updated.')
1404 msg = _('Pull request observers updated.')
1403 h.flash(msg, category='success')
1405 h.flash(msg, category='success')
1404 channelstream.pr_update_channelstream_push(
1406 channelstream.pr_update_channelstream_push(
1405 self.request, c.pr_broadcast_channel, self._rhodecode_user, msg)
1407 self.request, c.pr_broadcast_channel, self._rhodecode_user, msg)
1406
1408
1407 @LoginRequired()
1409 @LoginRequired()
1408 @NotAnonymous()
1410 @NotAnonymous()
1409 @HasRepoPermissionAnyDecorator(
1411 @HasRepoPermissionAnyDecorator(
1410 'repository.read', 'repository.write', 'repository.admin')
1412 'repository.read', 'repository.write', 'repository.admin')
1411 @CSRFRequired()
1413 @CSRFRequired()
1412 @view_config(
1414 @view_config(
1413 route_name='pullrequest_merge', request_method='POST',
1415 route_name='pullrequest_merge', request_method='POST',
1414 renderer='json_ext')
1416 renderer='json_ext')
1415 def pull_request_merge(self):
1417 def pull_request_merge(self):
1416 """
1418 """
1417 Merge will perform a server-side merge of the specified
1419 Merge will perform a server-side merge of the specified
1418 pull request, if the pull request is approved and mergeable.
1420 pull request, if the pull request is approved and mergeable.
1419 After successful merging, the pull request is automatically
1421 After successful merging, the pull request is automatically
1420 closed, with a relevant comment.
1422 closed, with a relevant comment.
1421 """
1423 """
1422 pull_request = PullRequest.get_or_404(
1424 pull_request = PullRequest.get_or_404(
1423 self.request.matchdict['pull_request_id'])
1425 self.request.matchdict['pull_request_id'])
1424 _ = self.request.translate
1426 _ = self.request.translate
1425
1427
1426 if pull_request.is_state_changing():
1428 if pull_request.is_state_changing():
1427 log.debug('show: forbidden because pull request is in state %s',
1429 log.debug('show: forbidden because pull request is in state %s',
1428 pull_request.pull_request_state)
1430 pull_request.pull_request_state)
1429 msg = _(u'Cannot merge pull requests in state other than `{}`. '
1431 msg = _(u'Cannot merge pull requests in state other than `{}`. '
1430 u'Current state is: `{}`').format(PullRequest.STATE_CREATED,
1432 u'Current state is: `{}`').format(PullRequest.STATE_CREATED,
1431 pull_request.pull_request_state)
1433 pull_request.pull_request_state)
1432 h.flash(msg, category='error')
1434 h.flash(msg, category='error')
1433 raise HTTPFound(
1435 raise HTTPFound(
1434 h.route_path('pullrequest_show',
1436 h.route_path('pullrequest_show',
1435 repo_name=pull_request.target_repo.repo_name,
1437 repo_name=pull_request.target_repo.repo_name,
1436 pull_request_id=pull_request.pull_request_id))
1438 pull_request_id=pull_request.pull_request_id))
1437
1439
1438 self.load_default_context()
1440 self.load_default_context()
1439
1441
1440 with pull_request.set_state(PullRequest.STATE_UPDATING):
1442 with pull_request.set_state(PullRequest.STATE_UPDATING):
1441 check = MergeCheck.validate(
1443 check = MergeCheck.validate(
1442 pull_request, auth_user=self._rhodecode_user,
1444 pull_request, auth_user=self._rhodecode_user,
1443 translator=self.request.translate)
1445 translator=self.request.translate)
1444 merge_possible = not check.failed
1446 merge_possible = not check.failed
1445
1447
1446 for err_type, error_msg in check.errors:
1448 for err_type, error_msg in check.errors:
1447 h.flash(error_msg, category=err_type)
1449 h.flash(error_msg, category=err_type)
1448
1450
1449 if merge_possible:
1451 if merge_possible:
1450 log.debug("Pre-conditions checked, trying to merge.")
1452 log.debug("Pre-conditions checked, trying to merge.")
1451 extras = vcs_operation_context(
1453 extras = vcs_operation_context(
1452 self.request.environ, repo_name=pull_request.target_repo.repo_name,
1454 self.request.environ, repo_name=pull_request.target_repo.repo_name,
1453 username=self._rhodecode_db_user.username, action='push',
1455 username=self._rhodecode_db_user.username, action='push',
1454 scm=pull_request.target_repo.repo_type)
1456 scm=pull_request.target_repo.repo_type)
1455 with pull_request.set_state(PullRequest.STATE_UPDATING):
1457 with pull_request.set_state(PullRequest.STATE_UPDATING):
1456 self._merge_pull_request(
1458 self._merge_pull_request(
1457 pull_request, self._rhodecode_db_user, extras)
1459 pull_request, self._rhodecode_db_user, extras)
1458 else:
1460 else:
1459 log.debug("Pre-conditions failed, NOT merging.")
1461 log.debug("Pre-conditions failed, NOT merging.")
1460
1462
1461 raise HTTPFound(
1463 raise HTTPFound(
1462 h.route_path('pullrequest_show',
1464 h.route_path('pullrequest_show',
1463 repo_name=pull_request.target_repo.repo_name,
1465 repo_name=pull_request.target_repo.repo_name,
1464 pull_request_id=pull_request.pull_request_id))
1466 pull_request_id=pull_request.pull_request_id))
1465
1467
1466 def _merge_pull_request(self, pull_request, user, extras):
1468 def _merge_pull_request(self, pull_request, user, extras):
1467 _ = self.request.translate
1469 _ = self.request.translate
1468 merge_resp = PullRequestModel().merge_repo(pull_request, user, extras=extras)
1470 merge_resp = PullRequestModel().merge_repo(pull_request, user, extras=extras)
1469
1471
1470 if merge_resp.executed:
1472 if merge_resp.executed:
1471 log.debug("The merge was successful, closing the pull request.")
1473 log.debug("The merge was successful, closing the pull request.")
1472 PullRequestModel().close_pull_request(
1474 PullRequestModel().close_pull_request(
1473 pull_request.pull_request_id, user)
1475 pull_request.pull_request_id, user)
1474 Session().commit()
1476 Session().commit()
1475 msg = _('Pull request was successfully merged and closed.')
1477 msg = _('Pull request was successfully merged and closed.')
1476 h.flash(msg, category='success')
1478 h.flash(msg, category='success')
1477 else:
1479 else:
1478 log.debug(
1480 log.debug(
1479 "The merge was not successful. Merge response: %s", merge_resp)
1481 "The merge was not successful. Merge response: %s", merge_resp)
1480 msg = merge_resp.merge_status_message
1482 msg = merge_resp.merge_status_message
1481 h.flash(msg, category='error')
1483 h.flash(msg, category='error')
1482
1484
1483 @LoginRequired()
1485 @LoginRequired()
1484 @NotAnonymous()
1486 @NotAnonymous()
1485 @HasRepoPermissionAnyDecorator(
1487 @HasRepoPermissionAnyDecorator(
1486 'repository.read', 'repository.write', 'repository.admin')
1488 'repository.read', 'repository.write', 'repository.admin')
1487 @CSRFRequired()
1489 @CSRFRequired()
1488 @view_config(
1490 @view_config(
1489 route_name='pullrequest_delete', request_method='POST',
1491 route_name='pullrequest_delete', request_method='POST',
1490 renderer='json_ext')
1492 renderer='json_ext')
1491 def pull_request_delete(self):
1493 def pull_request_delete(self):
1492 _ = self.request.translate
1494 _ = self.request.translate
1493
1495
1494 pull_request = PullRequest.get_or_404(
1496 pull_request = PullRequest.get_or_404(
1495 self.request.matchdict['pull_request_id'])
1497 self.request.matchdict['pull_request_id'])
1496 self.load_default_context()
1498 self.load_default_context()
1497
1499
1498 pr_closed = pull_request.is_closed()
1500 pr_closed = pull_request.is_closed()
1499 allowed_to_delete = PullRequestModel().check_user_delete(
1501 allowed_to_delete = PullRequestModel().check_user_delete(
1500 pull_request, self._rhodecode_user) and not pr_closed
1502 pull_request, self._rhodecode_user) and not pr_closed
1501
1503
1502 # only the owner can delete it!
1504 # only the owner can delete it!
1503 if allowed_to_delete:
1505 if allowed_to_delete:
1504 PullRequestModel().delete(pull_request, self._rhodecode_user)
1506 PullRequestModel().delete(pull_request, self._rhodecode_user)
1505 Session().commit()
1507 Session().commit()
1506 h.flash(_('Successfully deleted pull request'),
1508 h.flash(_('Successfully deleted pull request'),
1507 category='success')
1509 category='success')
1508 raise HTTPFound(h.route_path('pullrequest_show_all',
1510 raise HTTPFound(h.route_path('pullrequest_show_all',
1509 repo_name=self.db_repo_name))
1511 repo_name=self.db_repo_name))
1510
1512
1511 log.warning('user %s tried to delete pull request without access',
1513 log.warning('user %s tried to delete pull request without access',
1512 self._rhodecode_user)
1514 self._rhodecode_user)
1513 raise HTTPNotFound()
1515 raise HTTPNotFound()
1514
1516
1515 @LoginRequired()
1517 @LoginRequired()
1516 @NotAnonymous()
1518 @NotAnonymous()
1517 @HasRepoPermissionAnyDecorator(
1519 @HasRepoPermissionAnyDecorator(
1518 'repository.read', 'repository.write', 'repository.admin')
1520 'repository.read', 'repository.write', 'repository.admin')
1519 @CSRFRequired()
1521 @CSRFRequired()
1520 @view_config(
1522 @view_config(
1521 route_name='pullrequest_comment_create', request_method='POST',
1523 route_name='pullrequest_comment_create', request_method='POST',
1522 renderer='json_ext')
1524 renderer='json_ext')
1523 def pull_request_comment_create(self):
1525 def pull_request_comment_create(self):
1524 _ = self.request.translate
1526 _ = self.request.translate
1525
1527
1526 pull_request = PullRequest.get_or_404(
1528 pull_request = PullRequest.get_or_404(
1527 self.request.matchdict['pull_request_id'])
1529 self.request.matchdict['pull_request_id'])
1528 pull_request_id = pull_request.pull_request_id
1530 pull_request_id = pull_request.pull_request_id
1529
1531
1530 if pull_request.is_closed():
1532 if pull_request.is_closed():
1531 log.debug('comment: forbidden because pull request is closed')
1533 log.debug('comment: forbidden because pull request is closed')
1532 raise HTTPForbidden()
1534 raise HTTPForbidden()
1533
1535
1534 allowed_to_comment = PullRequestModel().check_user_comment(
1536 allowed_to_comment = PullRequestModel().check_user_comment(
1535 pull_request, self._rhodecode_user)
1537 pull_request, self._rhodecode_user)
1536 if not allowed_to_comment:
1538 if not allowed_to_comment:
1537 log.debug('comment: forbidden because pull request is from forbidden repo')
1539 log.debug('comment: forbidden because pull request is from forbidden repo')
1538 raise HTTPForbidden()
1540 raise HTTPForbidden()
1539
1541
1540 c = self.load_default_context()
1542 c = self.load_default_context()
1541
1543
1542 status = self.request.POST.get('changeset_status', None)
1544 status = self.request.POST.get('changeset_status', None)
1543 text = self.request.POST.get('text')
1545 text = self.request.POST.get('text')
1544 comment_type = self.request.POST.get('comment_type')
1546 comment_type = self.request.POST.get('comment_type')
1545 resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
1547 resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
1546 close_pull_request = self.request.POST.get('close_pull_request')
1548 close_pull_request = self.request.POST.get('close_pull_request')
1547
1549
1548 # the logic here works as follows: if a close-pull-request
1550 # the logic here works as follows: if a close-pull-request
1549 # comment is submitted, use the `close_pull_request_with_comment` function;
1551 # comment is submitted, use the `close_pull_request_with_comment` function;
1550 # otherwise handle the regular comment logic
1552 # otherwise handle the regular comment logic
1551
1553
1552 if close_pull_request:
1554 if close_pull_request:
1553 # only owner or admin or person with write permissions
1555 # only owner or admin or person with write permissions
1554 allowed_to_close = PullRequestModel().check_user_update(
1556 allowed_to_close = PullRequestModel().check_user_update(
1555 pull_request, self._rhodecode_user)
1557 pull_request, self._rhodecode_user)
1556 if not allowed_to_close:
1558 if not allowed_to_close:
1557 log.debug('comment: forbidden because not allowed to close '
1559 log.debug('comment: forbidden because not allowed to close '
1558 'pull request %s', pull_request_id)
1560 'pull request %s', pull_request_id)
1559 raise HTTPForbidden()
1561 raise HTTPForbidden()
1560
1562
1561 # This also triggers `review_status_change`
1563 # This also triggers `review_status_change`
1562 comment, status = PullRequestModel().close_pull_request_with_comment(
1564 comment, status = PullRequestModel().close_pull_request_with_comment(
1563 pull_request, self._rhodecode_user, self.db_repo, message=text,
1565 pull_request, self._rhodecode_user, self.db_repo, message=text,
1564 auth_user=self._rhodecode_user)
1566 auth_user=self._rhodecode_user)
1565 Session().flush()
1567 Session().flush()
1568 is_inline = comment.is_inline
1566
1569
1567 PullRequestModel().trigger_pull_request_hook(
1570 PullRequestModel().trigger_pull_request_hook(
1568 pull_request, self._rhodecode_user, 'comment',
1571 pull_request, self._rhodecode_user, 'comment',
1569 data={'comment': comment})
1572 data={'comment': comment})
1570
1573
1571 else:
1574 else:
1572 # regular comment case, could be inline, or one with status.
1575 # regular comment case, could be inline, or one with status.
1573 # for that one we check also permissions
1576 # for that one we check also permissions
1574
1577
1575 allowed_to_change_status = PullRequestModel().check_user_change_status(
1578 allowed_to_change_status = PullRequestModel().check_user_change_status(
1576 pull_request, self._rhodecode_user)
1579 pull_request, self._rhodecode_user)
1577
1580
1578 if status and allowed_to_change_status:
1581 if status and allowed_to_change_status:
1579 message = (_('Status change %(transition_icon)s %(status)s')
1582 message = (_('Status change %(transition_icon)s %(status)s')
1580 % {'transition_icon': '>',
1583 % {'transition_icon': '>',
1581 'status': ChangesetStatus.get_status_lbl(status)})
1584 'status': ChangesetStatus.get_status_lbl(status)})
1582 text = text or message
1585 text = text or message
1583
1586
1584 comment = CommentsModel().create(
1587 comment = CommentsModel().create(
1585 text=text,
1588 text=text,
1586 repo=self.db_repo.repo_id,
1589 repo=self.db_repo.repo_id,
1587 user=self._rhodecode_user.user_id,
1590 user=self._rhodecode_user.user_id,
1588 pull_request=pull_request,
1591 pull_request=pull_request,
1589 f_path=self.request.POST.get('f_path'),
1592 f_path=self.request.POST.get('f_path'),
1590 line_no=self.request.POST.get('line'),
1593 line_no=self.request.POST.get('line'),
1591 status_change=(ChangesetStatus.get_status_lbl(status)
1594 status_change=(ChangesetStatus.get_status_lbl(status)
1592 if status and allowed_to_change_status else None),
1595 if status and allowed_to_change_status else None),
1593 status_change_type=(status
1596 status_change_type=(status
1594 if status and allowed_to_change_status else None),
1597 if status and allowed_to_change_status else None),
1595 comment_type=comment_type,
1598 comment_type=comment_type,
1596 resolves_comment_id=resolves_comment_id,
1599 resolves_comment_id=resolves_comment_id,
1597 auth_user=self._rhodecode_user
1600 auth_user=self._rhodecode_user
1598 )
1601 )
1599 is_inline = bool(comment.f_path and comment.line_no)
1602 is_inline = comment.is_inline
1600
1603
1601 if allowed_to_change_status:
1604 if allowed_to_change_status:
1602 # calculate old status before we change it
1605 # calculate old status before we change it
1603 old_calculated_status = pull_request.calculated_review_status()
1606 old_calculated_status = pull_request.calculated_review_status()
1604
1607
1605 # get status if set !
1608 # get status if set !
1606 if status:
1609 if status:
1607 ChangesetStatusModel().set_status(
1610 ChangesetStatusModel().set_status(
1608 self.db_repo.repo_id,
1611 self.db_repo.repo_id,
1609 status,
1612 status,
1610 self._rhodecode_user.user_id,
1613 self._rhodecode_user.user_id,
1611 comment,
1614 comment,
1612 pull_request=pull_request
1615 pull_request=pull_request
1613 )
1616 )
1614
1617
1615 Session().flush()
1618 Session().flush()
1616 # this is somehow required to get access to some relationship
1619 # this is somehow required to get access to some relationship
1617 # loaded on comment
1620 # loaded on comment
1618 Session().refresh(comment)
1621 Session().refresh(comment)
1619
1622
1620 PullRequestModel().trigger_pull_request_hook(
1623 PullRequestModel().trigger_pull_request_hook(
1621 pull_request, self._rhodecode_user, 'comment',
1624 pull_request, self._rhodecode_user, 'comment',
1622 data={'comment': comment})
1625 data={'comment': comment})
1623
1626
1624 # we now calculate the status of pull request, and based on that
1627 # we now calculate the status of pull request, and based on that
1625 # calculation we set the commits status
1628 # calculation we set the commits status
1626 calculated_status = pull_request.calculated_review_status()
1629 calculated_status = pull_request.calculated_review_status()
1627 if old_calculated_status != calculated_status:
1630 if old_calculated_status != calculated_status:
1628 PullRequestModel().trigger_pull_request_hook(
1631 PullRequestModel().trigger_pull_request_hook(
1629 pull_request, self._rhodecode_user, 'review_status_change',
1632 pull_request, self._rhodecode_user, 'review_status_change',
1630 data={'status': calculated_status})
1633 data={'status': calculated_status})
1631
1634
1632 Session().commit()
1635 Session().commit()
1633
1636
1634 data = {
1637 data = {
1635 'target_id': h.safeid(h.safe_unicode(
1638 'target_id': h.safeid(h.safe_unicode(
1636 self.request.POST.get('f_path'))),
1639 self.request.POST.get('f_path'))),
1637 }
1640 }
1638 if comment:
1641 if comment:
1639 c.co = comment
1642 c.co = comment
1640 c.at_version_num = None
1643 c.at_version_num = None
1641 rendered_comment = render(
1644 rendered_comment = render(
1642 'rhodecode:templates/changeset/changeset_comment_block.mako',
1645 'rhodecode:templates/changeset/changeset_comment_block.mako',
1643 self._get_template_context(c), self.request)
1646 self._get_template_context(c), self.request)
1644
1647
1645 data.update(comment.get_dict())
1648 data.update(comment.get_dict())
1646 data.update({'rendered_text': rendered_comment})
1649 data.update({'rendered_text': rendered_comment})
1647
1650
1648 comment_broadcast_channel = channelstream.comment_channel(
1651 comment_broadcast_channel = channelstream.comment_channel(
1649 self.db_repo_name, pull_request_obj=pull_request)
1652 self.db_repo_name, pull_request_obj=pull_request)
1650
1653
1651 comment_data = data
1654 comment_data = data
1652 comment_type = 'inline' if is_inline else 'general'
1655 comment_type = 'inline' if is_inline else 'general'
1653 channelstream.comment_channelstream_push(
1656 channelstream.comment_channelstream_push(
1654 self.request, comment_broadcast_channel, self._rhodecode_user,
1657 self.request, comment_broadcast_channel, self._rhodecode_user,
1655 _('posted a new {} comment').format(comment_type),
1658 _('posted a new {} comment').format(comment_type),
1656 comment_data=comment_data)
1659 comment_data=comment_data)
1657
1660
1658 return data
1661 return data
1659
1662
1660 @LoginRequired()
1663 @LoginRequired()
1661 @NotAnonymous()
1664 @NotAnonymous()
1662 @HasRepoPermissionAnyDecorator(
1665 @HasRepoPermissionAnyDecorator(
1663 'repository.read', 'repository.write', 'repository.admin')
1666 'repository.read', 'repository.write', 'repository.admin')
1664 @CSRFRequired()
1667 @CSRFRequired()
1665 @view_config(
1668 @view_config(
1666 route_name='pullrequest_comment_delete', request_method='POST',
1669 route_name='pullrequest_comment_delete', request_method='POST',
1667 renderer='json_ext')
1670 renderer='json_ext')
1668 def pull_request_comment_delete(self):
1671 def pull_request_comment_delete(self):
1669 pull_request = PullRequest.get_or_404(
1672 pull_request = PullRequest.get_or_404(
1670 self.request.matchdict['pull_request_id'])
1673 self.request.matchdict['pull_request_id'])
1671
1674
1672 comment = ChangesetComment.get_or_404(
1675 comment = ChangesetComment.get_or_404(
1673 self.request.matchdict['comment_id'])
1676 self.request.matchdict['comment_id'])
1674 comment_id = comment.comment_id
1677 comment_id = comment.comment_id
1675
1678
1676 if comment.immutable:
1679 if comment.immutable:
1677 # don't allow deleting comments that are immutable
1680 # don't allow deleting comments that are immutable
1678 raise HTTPForbidden()
1681 raise HTTPForbidden()
1679
1682
1680 if pull_request.is_closed():
1683 if pull_request.is_closed():
1681 log.debug('comment: forbidden because pull request is closed')
1684 log.debug('comment: forbidden because pull request is closed')
1682 raise HTTPForbidden()
1685 raise HTTPForbidden()
1683
1686
1684 if not comment:
1687 if not comment:
1685 log.debug('Comment with id:%s not found, skipping', comment_id)
1688 log.debug('Comment with id:%s not found, skipping', comment_id)
1686 # comment already deleted in another call probably
1689 # comment already deleted in another call probably
1687 return True
1690 return True
1688
1691
1689 if comment.pull_request.is_closed():
1692 if comment.pull_request.is_closed():
1690 # don't allow deleting comments on closed pull request
1693 # don't allow deleting comments on closed pull request
1691 raise HTTPForbidden()
1694 raise HTTPForbidden()
1692
1695
1693 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1696 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1694 super_admin = h.HasPermissionAny('hg.admin')()
1697 super_admin = h.HasPermissionAny('hg.admin')()
1695 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1698 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1696 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1699 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1697 comment_repo_admin = is_repo_admin and is_repo_comment
1700 comment_repo_admin = is_repo_admin and is_repo_comment
1698
1701
1699 if super_admin or comment_owner or comment_repo_admin:
1702 if super_admin or comment_owner or comment_repo_admin:
1700 old_calculated_status = comment.pull_request.calculated_review_status()
1703 old_calculated_status = comment.pull_request.calculated_review_status()
1701 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
1704 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
1702 Session().commit()
1705 Session().commit()
1703 calculated_status = comment.pull_request.calculated_review_status()
1706 calculated_status = comment.pull_request.calculated_review_status()
1704 if old_calculated_status != calculated_status:
1707 if old_calculated_status != calculated_status:
1705 PullRequestModel().trigger_pull_request_hook(
1708 PullRequestModel().trigger_pull_request_hook(
1706 comment.pull_request, self._rhodecode_user, 'review_status_change',
1709 comment.pull_request, self._rhodecode_user, 'review_status_change',
1707 data={'status': calculated_status})
1710 data={'status': calculated_status})
1708 return True
1711 return True
1709 else:
1712 else:
1710 log.warning('No permissions for user %s to delete comment_id: %s',
1713 log.warning('No permissions for user %s to delete comment_id: %s',
1711 self._rhodecode_db_user, comment_id)
1714 self._rhodecode_db_user, comment_id)
1712 raise HTTPNotFound()
1715 raise HTTPNotFound()
1713
1716
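
The delete view above and the edit view below gate modification on the same predicate: super admin, comment owner, or repository admin on the repository the comment belongs to. A minimal sketch of that check as a standalone helper follows; the function name and its extraction are hypothetical and not part of this changeset, while `h` is the helpers module these views already use.

def can_modify_comment(comment, rhodecode_user, db_repo_name):
    # repository admin only counts when the comment lives in this repository
    is_repo_admin = h.HasRepoPermissionAny('repository.admin')(db_repo_name)
    super_admin = h.HasPermissionAny('hg.admin')()
    comment_owner = comment.author.user_id == rhodecode_user.user_id
    is_repo_comment = comment.repo.repo_name == db_repo_name
    comment_repo_admin = is_repo_admin and is_repo_comment
    return super_admin or comment_owner or comment_repo_admin
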
1714 @LoginRequired()
1717 @LoginRequired()
1715 @NotAnonymous()
1718 @NotAnonymous()
1716 @HasRepoPermissionAnyDecorator(
1719 @HasRepoPermissionAnyDecorator(
1717 'repository.read', 'repository.write', 'repository.admin')
1720 'repository.read', 'repository.write', 'repository.admin')
1718 @CSRFRequired()
1721 @CSRFRequired()
1719 @view_config(
1722 @view_config(
1720 route_name='pullrequest_comment_edit', request_method='POST',
1723 route_name='pullrequest_comment_edit', request_method='POST',
1721 renderer='json_ext')
1724 renderer='json_ext')
1722 def pull_request_comment_edit(self):
1725 def pull_request_comment_edit(self):
1723 self.load_default_context()
1726 self.load_default_context()
1724
1727
1725 pull_request = PullRequest.get_or_404(
1728 pull_request = PullRequest.get_or_404(
1726 self.request.matchdict['pull_request_id']
1729 self.request.matchdict['pull_request_id']
1727 )
1730 )
1728 comment = ChangesetComment.get_or_404(
1731 comment = ChangesetComment.get_or_404(
1729 self.request.matchdict['comment_id']
1732 self.request.matchdict['comment_id']
1730 )
1733 )
1731 comment_id = comment.comment_id
1734 comment_id = comment.comment_id
1732
1735
1733 if comment.immutable:
1736 if comment.immutable:
1734 # don't allow editing comments that are immutable
1737 # don't allow editing comments that are immutable
1735 raise HTTPForbidden()
1738 raise HTTPForbidden()
1736
1739
1737 if pull_request.is_closed():
1740 if pull_request.is_closed():
1738 log.debug('comment: forbidden because pull request is closed')
1741 log.debug('comment: forbidden because pull request is closed')
1739 raise HTTPForbidden()
1742 raise HTTPForbidden()
1740
1743
1741 if not comment:
1744 if not comment:
1742 log.debug('Comment with id:%s not found, skipping', comment_id)
1745 log.debug('Comment with id:%s not found, skipping', comment_id)
1743 # comment already deleted in another call probably
1746 # comment already deleted in another call probably
1744 return True
1747 return True
1745
1748
1746 if comment.pull_request.is_closed():
1749 if comment.pull_request.is_closed():
1747 # don't allow editing comments on a closed pull request
1750 # don't allow editing comments on a closed pull request
1748 raise HTTPForbidden()
1751 raise HTTPForbidden()
1749
1752
1750 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1753 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1751 super_admin = h.HasPermissionAny('hg.admin')()
1754 super_admin = h.HasPermissionAny('hg.admin')()
1752 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1755 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1753 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1756 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1754 comment_repo_admin = is_repo_admin and is_repo_comment
1757 comment_repo_admin = is_repo_admin and is_repo_comment
1755
1758
1756 if super_admin or comment_owner or comment_repo_admin:
1759 if super_admin or comment_owner or comment_repo_admin:
1757 text = self.request.POST.get('text')
1760 text = self.request.POST.get('text')
1758 version = self.request.POST.get('version')
1761 version = self.request.POST.get('version')
1759 if text == comment.text:
1762 if text == comment.text:
1760 log.warning(
1763 log.warning(
1761 'Comment(PR): '
1764 'Comment(PR): '
1762 'Trying to create a new version of comment {} '
1765 'Trying to create a new version of comment {} '
1763 'with the same comment body'.format(
1766 'with the same comment body'.format(
1764 comment_id,
1767 comment_id,
1765 )
1768 )
1766 )
1769 )
1767 raise HTTPNotFound()
1770 raise HTTPNotFound()
1768
1771
1769 if version.isdigit():
1772 if version.isdigit():
1770 version = int(version)
1773 version = int(version)
1771 else:
1774 else:
1772 log.warning(
1775 log.warning(
1773 'Comment(PR): Wrong version type {} {} '
1776 'Comment(PR): Wrong version type {} {} '
1774 'for comment {}'.format(
1777 'for comment {}'.format(
1775 version,
1778 version,
1776 type(version),
1779 type(version),
1777 comment_id,
1780 comment_id,
1778 )
1781 )
1779 )
1782 )
1780 raise HTTPNotFound()
1783 raise HTTPNotFound()
1781
1784
1782 try:
1785 try:
1783 comment_history = CommentsModel().edit(
1786 comment_history = CommentsModel().edit(
1784 comment_id=comment_id,
1787 comment_id=comment_id,
1785 text=text,
1788 text=text,
1786 auth_user=self._rhodecode_user,
1789 auth_user=self._rhodecode_user,
1787 version=version,
1790 version=version,
1788 )
1791 )
1789 except CommentVersionMismatch:
1792 except CommentVersionMismatch:
1790 raise HTTPConflict()
1793 raise HTTPConflict()
1791
1794
1792 if not comment_history:
1795 if not comment_history:
1793 raise HTTPNotFound()
1796 raise HTTPNotFound()
1794
1797
1795 Session().commit()
1798 Session().commit()
1796
1799
1797 PullRequestModel().trigger_pull_request_hook(
1800 PullRequestModel().trigger_pull_request_hook(
1798 pull_request, self._rhodecode_user, 'comment_edit',
1801 pull_request, self._rhodecode_user, 'comment_edit',
1799 data={'comment': comment})
1802 data={'comment': comment})
1800
1803
1801 return {
1804 return {
1802 'comment_history_id': comment_history.comment_history_id,
1805 'comment_history_id': comment_history.comment_history_id,
1803 'comment_id': comment.comment_id,
1806 'comment_id': comment.comment_id,
1804 'comment_version': comment_history.version,
1807 'comment_version': comment_history.version,
1805 'comment_author_username': comment_history.author.username,
1808 'comment_author_username': comment_history.author.username,
1806 'comment_author_gravatar': h.gravatar_url(comment_history.author.email, 16),
1809 'comment_author_gravatar': h.gravatar_url(comment_history.author.email, 16),
1807 'comment_created_on': h.age_component(comment_history.created_on,
1810 'comment_created_on': h.age_component(comment_history.created_on,
1808 time_is_local=True),
1811 time_is_local=True),
1809 }
1812 }
1810 else:
1813 else:
1811 log.warning('No permissions for user %s to edit comment_id: %s',
1814 log.warning('No permissions for user %s to edit comment_id: %s',
1812 self._rhodecode_db_user, comment_id)
1815 self._rhodecode_db_user, comment_id)
1813 raise HTTPNotFound()
1816 raise HTTPNotFound()
@@ -1,1925 +1,1948 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2020 RhodeCode GmbH
3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Base module for all VCS systems
22 Base module for all VCS systems
23 """
23 """
24 import os
24 import os
25 import re
25 import re
26 import time
26 import time
27 import shutil
27 import shutil
28 import datetime
28 import datetime
29 import fnmatch
29 import fnmatch
30 import itertools
30 import itertools
31 import logging
31 import logging
32 import collections
32 import collections
33 import warnings
33 import warnings
34
34
35 from zope.cachedescriptors.property import Lazy as LazyProperty
35 from zope.cachedescriptors.property import Lazy as LazyProperty
36
36
37 from pyramid import compat
37 from pyramid import compat
38
38
39 import rhodecode
39 import rhodecode
40 from rhodecode.translation import lazy_ugettext
40 from rhodecode.translation import lazy_ugettext
41 from rhodecode.lib.utils2 import safe_str, safe_unicode, CachedProperty
41 from rhodecode.lib.utils2 import safe_str, safe_unicode, CachedProperty
42 from rhodecode.lib.vcs import connection
42 from rhodecode.lib.vcs import connection
43 from rhodecode.lib.vcs.utils import author_name, author_email
43 from rhodecode.lib.vcs.utils import author_name, author_email
44 from rhodecode.lib.vcs.conf import settings
44 from rhodecode.lib.vcs.conf import settings
45 from rhodecode.lib.vcs.exceptions import (
45 from rhodecode.lib.vcs.exceptions import (
46 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
46 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
47 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
47 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
48 NodeDoesNotExistError, NodeNotChangedError, VCSError,
48 NodeDoesNotExistError, NodeNotChangedError, VCSError,
49 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
49 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
50 RepositoryError)
50 RepositoryError)
51
51
52
52
53 log = logging.getLogger(__name__)
53 log = logging.getLogger(__name__)
54
54
55
55
56 FILEMODE_DEFAULT = 0o100644
56 FILEMODE_DEFAULT = 0o100644
57 FILEMODE_EXECUTABLE = 0o100755
57 FILEMODE_EXECUTABLE = 0o100755
58 EMPTY_COMMIT_ID = '0' * 40
58 EMPTY_COMMIT_ID = '0' * 40
59
59
60 _Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
60 _Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
61
61
62
62
63 class Reference(_Reference):
63 class Reference(_Reference):
64
64
65 @property
65 @property
66 def branch(self):
66 def branch(self):
67 if self.type == 'branch':
67 if self.type == 'branch':
68 return self.name
68 return self.name
69
69
70 @property
70 @property
71 def bookmark(self):
71 def bookmark(self):
72 if self.type == 'book':
72 if self.type == 'book':
73 return self.name
73 return self.name
74
74
75
75
76 def unicode_to_reference(raw):
77 """
78 Convert a unicode (or string) to a reference object.
79 If unicode evaluates to False it returns None.
80 """
81 if raw:
82 refs = raw.split(':')
83 return Reference(*refs)
84 else:
85 return None
86
87
88 def reference_to_unicode(ref):
89 """
90 Convert a reference object to unicode.
91 If reference is None it returns None.
92 """
93 if ref:
94 return u':'.join(ref)
95 else:
96 return None
97
98
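
A short round-trip sketch of the two helpers added above; the reference string and commit id below are invented for illustration.

raw = u'branch:stable:aaaabbbbcccc'
ref = unicode_to_reference(raw)          # Reference(type=u'branch', name=u'stable', commit_id=u'aaaabbbbcccc')
assert ref.branch == u'stable'           # the `branch` property returns the name only for branch refs
assert ref.bookmark is None              # not a bookmark, so that property returns None
assert reference_to_unicode(ref) == raw  # u':'.join(ref) restores the original string
assert unicode_to_reference(None) is None    # falsy input yields None
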
76 class MergeFailureReason(object):
99 class MergeFailureReason(object):
77 """
100 """
78 Enumeration with all the reasons why the server side merge could fail.
101 Enumeration with all the reasons why the server side merge could fail.
79
102
80 DO NOT change the number of the reasons, as they may be stored in the
103 DO NOT change the number of the reasons, as they may be stored in the
81 database.
104 database.
82
105
83 Changing the name of a reason is acceptable and encouraged to deprecate old
106 Changing the name of a reason is acceptable and encouraged to deprecate old
84 reasons.
107 reasons.
85 """
108 """
86
109
87 # Everything went well.
110 # Everything went well.
88 NONE = 0
111 NONE = 0
89
112
90 # An unexpected exception was raised. Check the logs for more details.
113 # An unexpected exception was raised. Check the logs for more details.
91 UNKNOWN = 1
114 UNKNOWN = 1
92
115
93 # The merge was not successful, there are conflicts.
116 # The merge was not successful, there are conflicts.
94 MERGE_FAILED = 2
117 MERGE_FAILED = 2
95
118
96 # The merge succeeded but we could not push it to the target repository.
119 # The merge succeeded but we could not push it to the target repository.
97 PUSH_FAILED = 3
120 PUSH_FAILED = 3
98
121
99 # The specified target is not a head in the target repository.
122 # The specified target is not a head in the target repository.
100 TARGET_IS_NOT_HEAD = 4
123 TARGET_IS_NOT_HEAD = 4
101
124
102 # The source repository contains more branches than the target. Pushing
125 # The source repository contains more branches than the target. Pushing
103 # the merge will create additional branches in the target.
126 # the merge will create additional branches in the target.
104 HG_SOURCE_HAS_MORE_BRANCHES = 5
127 HG_SOURCE_HAS_MORE_BRANCHES = 5
105
128
106 # The target reference has multiple heads, which prevents correctly
129 # The target reference has multiple heads, which prevents correctly
107 # identifying the target location. This can only happen for mercurial
130 # identifying the target location. This can only happen for mercurial
108 # branches.
131 # branches.
109 HG_TARGET_HAS_MULTIPLE_HEADS = 6
132 HG_TARGET_HAS_MULTIPLE_HEADS = 6
110
133
111 # The target repository is locked
134 # The target repository is locked
112 TARGET_IS_LOCKED = 7
135 TARGET_IS_LOCKED = 7
113
136
114 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
137 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
115 # An involved commit could not be found.
138 # An involved commit could not be found.
116 _DEPRECATED_MISSING_COMMIT = 8
139 _DEPRECATED_MISSING_COMMIT = 8
117
140
118 # The target repo reference is missing.
141 # The target repo reference is missing.
119 MISSING_TARGET_REF = 9
142 MISSING_TARGET_REF = 9
120
143
121 # The source repo reference is missing.
144 # The source repo reference is missing.
122 MISSING_SOURCE_REF = 10
145 MISSING_SOURCE_REF = 10
123
146
124 # The merge was not successful, there are conflicts related to sub
147 # The merge was not successful, there are conflicts related to sub
125 # repositories.
148 # repositories.
126 SUBREPO_MERGE_FAILED = 11
149 SUBREPO_MERGE_FAILED = 11
127
150
128
151
129 class UpdateFailureReason(object):
152 class UpdateFailureReason(object):
130 """
153 """
131 Enumeration with all the reasons why the pull request update could fail.
154 Enumeration with all the reasons why the pull request update could fail.
132
155
133 DO NOT change the number of the reasons, as they may be stored in the
156 DO NOT change the number of the reasons, as they may be stored in the
134 database.
157 database.
135
158
136 Changing the name of a reason is acceptable and encouraged to deprecate old
159 Changing the name of a reason is acceptable and encouraged to deprecate old
137 reasons.
160 reasons.
138 """
161 """
139
162
140 # Everything went well.
163 # Everything went well.
141 NONE = 0
164 NONE = 0
142
165
143 # An unexpected exception was raised. Check the logs for more details.
166 # An unexpected exception was raised. Check the logs for more details.
144 UNKNOWN = 1
167 UNKNOWN = 1
145
168
146 # The pull request is up to date.
169 # The pull request is up to date.
147 NO_CHANGE = 2
170 NO_CHANGE = 2
148
171
149 # The pull request has a reference type that is not supported for update.
172 # The pull request has a reference type that is not supported for update.
150 WRONG_REF_TYPE = 3
173 WRONG_REF_TYPE = 3
151
174
152 # Update failed because the target reference is missing.
175 # Update failed because the target reference is missing.
153 MISSING_TARGET_REF = 4
176 MISSING_TARGET_REF = 4
154
177
155 # Update failed because the source reference is missing.
178 # Update failed because the source reference is missing.
156 MISSING_SOURCE_REF = 5
179 MISSING_SOURCE_REF = 5
157
180
158
181
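
The pull request view earlier in this changeset treats NO_CHANGE and WRONG_REF_TYPE as benign and everything else as an error; a small sketch of that mapping is shown below (the helper function itself is illustrative, not part of this changeset).

warning_reasons = [
    UpdateFailureReason.NO_CHANGE,
    UpdateFailureReason.WRONG_REF_TYPE,
]

def update_flash_category(reason):
    # benign update outcomes become warnings, real failures become errors
    return 'warning' if reason in warning_reasons else 'error'

assert update_flash_category(UpdateFailureReason.NO_CHANGE) == 'warning'
assert update_flash_category(UpdateFailureReason.MISSING_SOURCE_REF) == 'error'
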
159 class MergeResponse(object):
182 class MergeResponse(object):
160
183
161 # uses .format(**metadata) for variables
184 # uses .format(**metadata) for variables
162 MERGE_STATUS_MESSAGES = {
185 MERGE_STATUS_MESSAGES = {
163 MergeFailureReason.NONE: lazy_ugettext(
186 MergeFailureReason.NONE: lazy_ugettext(
164 u'This pull request can be automatically merged.'),
187 u'This pull request can be automatically merged.'),
165 MergeFailureReason.UNKNOWN: lazy_ugettext(
188 MergeFailureReason.UNKNOWN: lazy_ugettext(
166 u'This pull request cannot be merged because of an unhandled exception. '
189 u'This pull request cannot be merged because of an unhandled exception. '
167 u'{exception}'),
190 u'{exception}'),
168 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
191 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
169 u'This pull request cannot be merged because of merge conflicts. {unresolved_files}'),
192 u'This pull request cannot be merged because of merge conflicts. {unresolved_files}'),
170 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
193 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
171 u'This pull request could not be merged because push to '
194 u'This pull request could not be merged because push to '
172 u'target:`{target}@{merge_commit}` failed.'),
195 u'target:`{target}@{merge_commit}` failed.'),
173 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
196 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
174 u'This pull request cannot be merged because the target '
197 u'This pull request cannot be merged because the target '
175 u'`{target_ref.name}` is not a head.'),
198 u'`{target_ref.name}` is not a head.'),
176 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
199 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
177 u'This pull request cannot be merged because the source contains '
200 u'This pull request cannot be merged because the source contains '
178 u'more branches than the target.'),
201 u'more branches than the target.'),
179 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
202 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
180 u'This pull request cannot be merged because the target `{target_ref.name}` '
203 u'This pull request cannot be merged because the target `{target_ref.name}` '
181 u'has multiple heads: `{heads}`.'),
204 u'has multiple heads: `{heads}`.'),
182 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
205 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
183 u'This pull request cannot be merged because the target repository is '
206 u'This pull request cannot be merged because the target repository is '
184 u'locked by {locked_by}.'),
207 u'locked by {locked_by}.'),
185
208
186 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
209 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
187 u'This pull request cannot be merged because the target '
210 u'This pull request cannot be merged because the target '
188 u'reference `{target_ref.name}` is missing.'),
211 u'reference `{target_ref.name}` is missing.'),
189 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
212 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
190 u'This pull request cannot be merged because the source '
213 u'This pull request cannot be merged because the source '
191 u'reference `{source_ref.name}` is missing.'),
214 u'reference `{source_ref.name}` is missing.'),
192 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
215 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
193 u'This pull request cannot be merged because of conflicts related '
216 u'This pull request cannot be merged because of conflicts related '
194 u'to sub repositories.'),
217 u'to sub repositories.'),
195
218
196 # Deprecations
219 # Deprecations
197 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
220 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
198 u'This pull request cannot be merged because the target or the '
221 u'This pull request cannot be merged because the target or the '
199 u'source reference is missing.'),
222 u'source reference is missing.'),
200
223
201 }
224 }
202
225
203 def __init__(self, possible, executed, merge_ref, failure_reason, metadata=None):
226 def __init__(self, possible, executed, merge_ref, failure_reason, metadata=None):
204 self.possible = possible
227 self.possible = possible
205 self.executed = executed
228 self.executed = executed
206 self.merge_ref = merge_ref
229 self.merge_ref = merge_ref
207 self.failure_reason = failure_reason
230 self.failure_reason = failure_reason
208 self.metadata = metadata or {}
231 self.metadata = metadata or {}
209
232
210 def __repr__(self):
233 def __repr__(self):
211 return '<MergeResponse:{} {}>'.format(self.label, self.failure_reason)
234 return '<MergeResponse:{} {}>'.format(self.label, self.failure_reason)
212
235
213 def __eq__(self, other):
236 def __eq__(self, other):
214 same_instance = isinstance(other, self.__class__)
237 same_instance = isinstance(other, self.__class__)
215 return same_instance \
238 return same_instance \
216 and self.possible == other.possible \
239 and self.possible == other.possible \
217 and self.executed == other.executed \
240 and self.executed == other.executed \
218 and self.failure_reason == other.failure_reason
241 and self.failure_reason == other.failure_reason
219
242
220 @property
243 @property
221 def label(self):
244 def label(self):
222 label_dict = dict((v, k) for k, v in MergeFailureReason.__dict__.items() if
245 label_dict = dict((v, k) for k, v in MergeFailureReason.__dict__.items() if
223 not k.startswith('_'))
246 not k.startswith('_'))
224 return label_dict.get(self.failure_reason)
247 return label_dict.get(self.failure_reason)
225
248
226 @property
249 @property
227 def merge_status_message(self):
250 def merge_status_message(self):
228 """
251 """
229 Return a human friendly error message for the given merge status code.
252 Return a human friendly error message for the given merge status code.
230 """
253 """
231 msg = safe_unicode(self.MERGE_STATUS_MESSAGES[self.failure_reason])
254 msg = safe_unicode(self.MERGE_STATUS_MESSAGES[self.failure_reason])
232
255
233 try:
256 try:
234 return msg.format(**self.metadata)
257 return msg.format(**self.metadata)
235 except Exception:
258 except Exception:
236 log.exception('Failed to format %s message', self)
259 log.exception('Failed to format %s message', self)
237 return msg
260 return msg
238
261
239 def asdict(self):
262 def asdict(self):
240 data = {}
263 data = {}
241 for k in ['possible', 'executed', 'merge_ref', 'failure_reason',
264 for k in ['possible', 'executed', 'merge_ref', 'failure_reason',
242 'merge_status_message']:
265 'merge_status_message']:
243 data[k] = getattr(self, k)
266 data[k] = getattr(self, k)
244 return data
267 return data
245
268
246
269
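
A brief sketch of how `merge_status_message` interpolates the metadata dict into the template selected by `failure_reason`; the repository name and commit id below are invented.

resp = MergeResponse(
    possible=False, executed=False, merge_ref=None,
    failure_reason=MergeFailureReason.PUSH_FAILED,
    metadata={'target': 'acme/backend', 'merge_commit': 'deadbeefcafe'})

print(resp.label)                 # 'PUSH_FAILED'
print(resp.merge_status_message)  # u'This pull request could not be merged because push to
                                  #   target:`acme/backend@deadbeefcafe` failed.'
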
247 class TargetRefMissing(ValueError):
270 class TargetRefMissing(ValueError):
248 pass
271 pass
249
272
250
273
251 class SourceRefMissing(ValueError):
274 class SourceRefMissing(ValueError):
252 pass
275 pass
253
276
254
277
255 class BaseRepository(object):
278 class BaseRepository(object):
256 """
279 """
257 Base Repository for final backends
280 Base Repository for final backends
258
281
259 .. attribute:: DEFAULT_BRANCH_NAME
282 .. attribute:: DEFAULT_BRANCH_NAME
260
283
261 name of the default branch (e.g. "trunk" for svn, "master" for git, etc.)
284 name of the default branch (e.g. "trunk" for svn, "master" for git, etc.)
262
285
263 .. attribute:: commit_ids
286 .. attribute:: commit_ids
264
287
265 list of all available commit ids, in ascending order
288 list of all available commit ids, in ascending order
266
289
267 .. attribute:: path
290 .. attribute:: path
268
291
269 absolute path to the repository
292 absolute path to the repository
270
293
271 .. attribute:: bookmarks
294 .. attribute:: bookmarks
272
295
273 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
296 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
274 there are no bookmarks or the backend implementation does not support
297 there are no bookmarks or the backend implementation does not support
275 bookmarks.
298 bookmarks.
276
299
277 .. attribute:: tags
300 .. attribute:: tags
278
301
279 Mapping from name to :term:`Commit ID` of the tag.
302 Mapping from name to :term:`Commit ID` of the tag.
280
303
281 """
304 """
282
305
283 DEFAULT_BRANCH_NAME = None
306 DEFAULT_BRANCH_NAME = None
284 DEFAULT_CONTACT = u"Unknown"
307 DEFAULT_CONTACT = u"Unknown"
285 DEFAULT_DESCRIPTION = u"unknown"
308 DEFAULT_DESCRIPTION = u"unknown"
286 EMPTY_COMMIT_ID = '0' * 40
309 EMPTY_COMMIT_ID = '0' * 40
287
310
288 path = None
311 path = None
289
312
290 _is_empty = None
313 _is_empty = None
291 _commit_ids = {}
314 _commit_ids = {}
292
315
293 def __init__(self, repo_path, config=None, create=False, **kwargs):
316 def __init__(self, repo_path, config=None, create=False, **kwargs):
294 """
317 """
295 Initializes repository. Raises RepositoryError if repository could
318 Initializes repository. Raises RepositoryError if repository could
296 not be found at the given ``repo_path`` or directory at ``repo_path``
319 not be found at the given ``repo_path`` or directory at ``repo_path``
297 exists and ``create`` is set to True.
320 exists and ``create`` is set to True.
298
321
299 :param repo_path: local path of the repository
322 :param repo_path: local path of the repository
300 :param config: repository configuration
323 :param config: repository configuration
301 :param create=False: if set to True, would try to create repository.
324 :param create=False: if set to True, would try to create repository.
302 :param src_url=None: if set, should be proper url from which repository
325 :param src_url=None: if set, should be proper url from which repository
303 would be cloned; requires ``create`` parameter to be set to True -
326 would be cloned; requires ``create`` parameter to be set to True -
304 raises RepositoryError if src_url is set and create evaluates to
327 raises RepositoryError if src_url is set and create evaluates to
305 False
328 False
306 """
329 """
307 raise NotImplementedError
330 raise NotImplementedError
308
331
309 def __repr__(self):
332 def __repr__(self):
310 return '<%s at %s>' % (self.__class__.__name__, self.path)
333 return '<%s at %s>' % (self.__class__.__name__, self.path)
311
334
312 def __len__(self):
335 def __len__(self):
313 return self.count()
336 return self.count()
314
337
315 def __eq__(self, other):
338 def __eq__(self, other):
316 same_instance = isinstance(other, self.__class__)
339 same_instance = isinstance(other, self.__class__)
317 return same_instance and other.path == self.path
340 return same_instance and other.path == self.path
318
341
319 def __ne__(self, other):
342 def __ne__(self, other):
320 return not self.__eq__(other)
343 return not self.__eq__(other)
321
344
322 def get_create_shadow_cache_pr_path(self, db_repo):
345 def get_create_shadow_cache_pr_path(self, db_repo):
323 path = db_repo.cached_diffs_dir
346 path = db_repo.cached_diffs_dir
324 if not os.path.exists(path):
347 if not os.path.exists(path):
325 os.makedirs(path, 0o755)
348 os.makedirs(path, 0o755)
326 return path
349 return path
327
350
328 @classmethod
351 @classmethod
329 def get_default_config(cls, default=None):
352 def get_default_config(cls, default=None):
330 config = Config()
353 config = Config()
331 if default and isinstance(default, list):
354 if default and isinstance(default, list):
332 for section, key, val in default:
355 for section, key, val in default:
333 config.set(section, key, val)
356 config.set(section, key, val)
334 return config
357 return config
335
358
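
A minimal usage sketch for `get_default_config`: each (section, key, value) tuple is applied via config.set() on a fresh Config object. The section and key names below are illustrative only, not RhodeCode defaults.

config = BaseRepository.get_default_config(default=[
    ('ui', 'username', 'merge-bot'),    # hypothetical section/key/value
    ('phases', 'publish', 'false'),     # hypothetical section/key/value
])
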
336 @LazyProperty
359 @LazyProperty
337 def _remote(self):
360 def _remote(self):
338 raise NotImplementedError
361 raise NotImplementedError
339
362
340 def _heads(self, branch=None):
363 def _heads(self, branch=None):
341 return []
364 return []
342
365
343 @LazyProperty
366 @LazyProperty
344 def EMPTY_COMMIT(self):
367 def EMPTY_COMMIT(self):
345 return EmptyCommit(self.EMPTY_COMMIT_ID)
368 return EmptyCommit(self.EMPTY_COMMIT_ID)
346
369
347 @LazyProperty
370 @LazyProperty
348 def alias(self):
371 def alias(self):
349 for k, v in settings.BACKENDS.items():
372 for k, v in settings.BACKENDS.items():
350 if v.split('.')[-1] == str(self.__class__.__name__):
373 if v.split('.')[-1] == str(self.__class__.__name__):
351 return k
374 return k
352
375
353 @LazyProperty
376 @LazyProperty
354 def name(self):
377 def name(self):
355 return safe_unicode(os.path.basename(self.path))
378 return safe_unicode(os.path.basename(self.path))
356
379
357 @LazyProperty
380 @LazyProperty
358 def description(self):
381 def description(self):
359 raise NotImplementedError
382 raise NotImplementedError
360
383
361 def refs(self):
384 def refs(self):
362 """
385 """
363 returns a `dict` with branches, bookmarks, tags, and closed_branches
386 returns a `dict` with branches, bookmarks, tags, and closed_branches
364 for this repository
387 for this repository
365 """
388 """
366 return dict(
389 return dict(
367 branches=self.branches,
390 branches=self.branches,
368 branches_closed=self.branches_closed,
391 branches_closed=self.branches_closed,
369 tags=self.tags,
392 tags=self.tags,
370 bookmarks=self.bookmarks
393 bookmarks=self.bookmarks
371 )
394 )
372
395
373 @LazyProperty
396 @LazyProperty
374 def branches(self):
397 def branches(self):
375 """
398 """
376 A `dict` which maps branch names to commit ids.
399 A `dict` which maps branch names to commit ids.
377 """
400 """
378 raise NotImplementedError
401 raise NotImplementedError
379
402
380 @LazyProperty
403 @LazyProperty
381 def branches_closed(self):
404 def branches_closed(self):
382 """
405 """
383 A `dict` which maps closed branch names to commit ids.
406 A `dict` which maps closed branch names to commit ids.
384 """
407 """
385 raise NotImplementedError
408 raise NotImplementedError
386
409
387 @LazyProperty
410 @LazyProperty
388 def bookmarks(self):
411 def bookmarks(self):
389 """
412 """
390 A `dict` which maps bookmark names to commit ids.
413 A `dict` which maps bookmark names to commit ids.
391 """
414 """
392 raise NotImplementedError
415 raise NotImplementedError
393
416
394 @LazyProperty
417 @LazyProperty
395 def tags(self):
418 def tags(self):
396 """
419 """
397 A `dict` which maps tag names to commit ids.
420 A `dict` which maps tag names to commit ids.
398 """
421 """
399 raise NotImplementedError
422 raise NotImplementedError
400
423
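# --- Illustrative sketch (added annotation, not part of the original file) ---
# The ref properties above are plain dicts mapping names to commit ids, and
# refs() bundles them together. Assuming `repo` is a concrete backend
# instance (hypothetical variable):
#
#     repo.branches.get('default')   # commit id of a branch, or None
#     'v1.0.0' in repo.tags          # membership test on tag names
#     all_refs = repo.refs()         # {'branches': ..., 'branches_closed': ...,
#                                    #  'tags': ..., 'bookmarks': ...}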
401 @LazyProperty
424 @LazyProperty
402 def size(self):
425 def size(self):
403 """
426 """
404 Returns combined size in bytes for all repository files
427 Returns combined size in bytes for all repository files
405 """
428 """
406 tip = self.get_commit()
429 tip = self.get_commit()
407 return tip.size
430 return tip.size
408
431
409 def size_at_commit(self, commit_id):
432 def size_at_commit(self, commit_id):
410 commit = self.get_commit(commit_id)
433 commit = self.get_commit(commit_id)
411 return commit.size
434 return commit.size
412
435
413 def _check_for_empty(self):
436 def _check_for_empty(self):
414 no_commits = len(self._commit_ids) == 0
437 no_commits = len(self._commit_ids) == 0
415 if no_commits:
438 if no_commits:
416 # check on remote to be sure
439 # check on remote to be sure
417 return self._remote.is_empty()
440 return self._remote.is_empty()
418 else:
441 else:
419 return False
442 return False
420
443
421 def is_empty(self):
444 def is_empty(self):
422 if rhodecode.is_test:
445 if rhodecode.is_test:
423 return self._check_for_empty()
446 return self._check_for_empty()
424
447
425 if self._is_empty is None:
448 if self._is_empty is None:
426 # cache empty for production, but not tests
449 # cache empty for production, but not tests
427 self._is_empty = self._check_for_empty()
450 self._is_empty = self._check_for_empty()
428
451
429 return self._is_empty
452 return self._is_empty
430
453
431 @staticmethod
454 @staticmethod
432 def check_url(url, config):
455 def check_url(url, config):
433 """
456 """
434 Check the given url and try to verify that it is a valid
457 Check the given url and try to verify that it is a valid
435 link.
458 link.
436 """
459 """
437 raise NotImplementedError
460 raise NotImplementedError
438
461
439 @staticmethod
462 @staticmethod
440 def is_valid_repository(path):
463 def is_valid_repository(path):
441 """
464 """
442 Check if given `path` contains a valid repository of this backend
465 Check if given `path` contains a valid repository of this backend
443 """
466 """
444 raise NotImplementedError
467 raise NotImplementedError
445
468
446 # ==========================================================================
469 # ==========================================================================
447 # COMMITS
470 # COMMITS
448 # ==========================================================================
471 # ==========================================================================
449
472
450 @CachedProperty
473 @CachedProperty
451 def commit_ids(self):
474 def commit_ids(self):
452 raise NotImplementedError
475 raise NotImplementedError
453
476
454 def append_commit_id(self, commit_id):
477 def append_commit_id(self, commit_id):
455 if commit_id not in self.commit_ids:
478 if commit_id not in self.commit_ids:
456 self._rebuild_cache(self.commit_ids + [commit_id])
479 self._rebuild_cache(self.commit_ids + [commit_id])
457
480
458 # clear cache
481 # clear cache
459 self._invalidate_prop_cache('commit_ids')
482 self._invalidate_prop_cache('commit_ids')
460 self._is_empty = False
483 self._is_empty = False
461
484
462 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
485 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
463 translate_tag=None, maybe_unreachable=False):
486 translate_tag=None, maybe_unreachable=False):
464 """
487 """
465 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
488 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
466 are both None, most recent commit is returned.
489 are both None, most recent commit is returned.
467
490
468 :param pre_load: Optional. List of commit attributes to load.
491 :param pre_load: Optional. List of commit attributes to load.
469
492
470 :raises ``EmptyRepositoryError``: if there are no commits
493 :raises ``EmptyRepositoryError``: if there are no commits
471 """
494 """
472 raise NotImplementedError
495 raise NotImplementedError
473
496
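# --- Illustrative sketch (added annotation, not part of the original file) ---
# `get_commit` resolves either a commit id or a numeric index; with both left
# as None it returns the most recent commit. `repo` is a hypothetical
# concrete backend instance:
#
#     tip = repo.get_commit()                    # latest commit
#     first = repo.get_commit(commit_idx=0)      # first commit in the repo
#     loaded = repo.get_commit(commit_id=tip.raw_id,
#                              pre_load=['author', 'date', 'message'])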
474 def __iter__(self):
497 def __iter__(self):
475 for commit_id in self.commit_ids:
498 for commit_id in self.commit_ids:
476 yield self.get_commit(commit_id=commit_id)
499 yield self.get_commit(commit_id=commit_id)
477
500
478 def get_commits(
501 def get_commits(
479 self, start_id=None, end_id=None, start_date=None, end_date=None,
502 self, start_id=None, end_id=None, start_date=None, end_date=None,
480 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
503 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
481 """
504 """
482 Returns iterator of `BaseCommit` objects from start to end
505 Returns iterator of `BaseCommit` objects from start to end
483 not inclusive. This should behave just like a list, i.e. end is not
506 not inclusive. This should behave just like a list, i.e. end is not
484 inclusive.
507 inclusive.
485
508
486 :param start_id: None or str, must be a valid commit id
509 :param start_id: None or str, must be a valid commit id
487 :param end_id: None or str, must be a valid commit id
510 :param end_id: None or str, must be a valid commit id
488 :param start_date:
511 :param start_date:
489 :param end_date:
512 :param end_date:
490 :param branch_name:
513 :param branch_name:
491 :param show_hidden:
514 :param show_hidden:
492 :param pre_load:
515 :param pre_load:
493 :param translate_tags:
516 :param translate_tags:
494 """
517 """
495 raise NotImplementedError
518 raise NotImplementedError
496
519
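# --- Illustrative sketch (added annotation, not part of the original file) ---
# `get_commits` yields BaseCommit objects filtered by ids, dates or branch;
# the end bound is exclusive, like a list slice. The branch name and date
# below are hypothetical, and `repo` is a concrete backend instance:
#
#     import datetime
#     since = datetime.datetime(2020, 1, 1)
#     for commit in repo.get_commits(branch_name='default', start_date=since):
#         print(commit.short_id, commit.message)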
497 def __getitem__(self, key):
520 def __getitem__(self, key):
498 """
521 """
499 Allows index based access to the commit objects of this repository.
522 Allows index based access to the commit objects of this repository.
500 """
523 """
501 pre_load = ["author", "branch", "date", "message", "parents"]
524 pre_load = ["author", "branch", "date", "message", "parents"]
502 if isinstance(key, slice):
525 if isinstance(key, slice):
503 return self._get_range(key, pre_load)
526 return self._get_range(key, pre_load)
504 return self.get_commit(commit_idx=key, pre_load=pre_load)
527 return self.get_commit(commit_idx=key, pre_load=pre_load)
505
528
506 def _get_range(self, slice_obj, pre_load):
529 def _get_range(self, slice_obj, pre_load):
507 for commit_id in self.commit_ids.__getitem__(slice_obj):
530 for commit_id in self.commit_ids.__getitem__(slice_obj):
508 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
531 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
509
532
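# --- Illustrative sketch (added annotation, not part of the original file) ---
# Index and slice access delegate to `get_commit` / `_get_range`, so a
# repository behaves like a read-only sequence of commits (`repo` is a
# hypothetical concrete backend instance):
#
#     first = repo[0]              # single commit by index
#     for commit in repo[-5:]:     # generator over the last five commits
#         print(commit.short_id)
#     total = len(repo)            # same as repo.count()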
510 def count(self):
533 def count(self):
511 return len(self.commit_ids)
534 return len(self.commit_ids)
512
535
513 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
536 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
514 """
537 """
515 Creates and returns a tag for the given ``commit_id``.
538 Creates and returns a tag for the given ``commit_id``.
516
539
517 :param name: name for new tag
540 :param name: name for new tag
518 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
541 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
519 :param commit_id: commit id for which new tag would be created
542 :param commit_id: commit id for which new tag would be created
520 :param message: message of the tag's commit
543 :param message: message of the tag's commit
521 :param date: date of tag's commit
544 :param date: date of tag's commit
522
545
523 :raises TagAlreadyExistError: if tag with same name already exists
546 :raises TagAlreadyExistError: if tag with same name already exists
524 """
547 """
525 raise NotImplementedError
548 raise NotImplementedError
526
549
527 def remove_tag(self, name, user, message=None, date=None):
550 def remove_tag(self, name, user, message=None, date=None):
528 """
551 """
529 Removes tag with the given ``name``.
552 Removes tag with the given ``name``.
530
553
531 :param name: name of the tag to be removed
554 :param name: name of the tag to be removed
532 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
555 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
533 :param message: message of the tag's removal commit
556 :param message: message of the tag's removal commit
534 :param date: date of tag's removal commit
557 :param date: date of tag's removal commit
535
558
536 :raises TagDoesNotExistError: if tag with given name does not exist
559 :raises TagDoesNotExistError: if tag with given name does not exist
537 """
560 """
538 raise NotImplementedError
561 raise NotImplementedError
539
562
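# --- Illustrative sketch (added annotation, not part of the original file) ---
# Tagging takes a full "Name <email>" user string as documented above; the
# tag name, user and message here are hypothetical (`repo` is a concrete
# backend instance):
#
#     tagged = repo.tag('v1.0.0', 'Joe Doe <joe.doe@example.com>',
#                       commit_id=repo.get_commit().raw_id,
#                       message='release 1.0.0')
#     repo.remove_tag('v1.0.0', 'Joe Doe <joe.doe@example.com>',
#                     message='drop release tag')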
540 def get_diff(
563 def get_diff(
541 self, commit1, commit2, path=None, ignore_whitespace=False,
564 self, commit1, commit2, path=None, ignore_whitespace=False,
542 context=3, path1=None):
565 context=3, path1=None):
543 """
566 """
544 Returns (git like) *diff*, as plain text. Shows changes introduced by
567 Returns (git like) *diff*, as plain text. Shows changes introduced by
545 `commit2` since `commit1`.
568 `commit2` since `commit1`.
546
569
547 :param commit1: Entry point from which diff is shown. Can be
570 :param commit1: Entry point from which diff is shown. Can be
548 ``self.EMPTY_COMMIT`` - in this case the patch shows all
571 ``self.EMPTY_COMMIT`` - in this case the patch shows all
549 the changes since the empty state of the repository up to `commit2`
572 the changes since the empty state of the repository up to `commit2`
550 :param commit2: Until which commit changes should be shown.
573 :param commit2: Until which commit changes should be shown.
551 :param path: Can be set to a path of a file to create a diff of that
574 :param path: Can be set to a path of a file to create a diff of that
552 file. If `path1` is also set, this value is only associated to
575 file. If `path1` is also set, this value is only associated to
553 `commit2`.
576 `commit2`.
554 :param ignore_whitespace: If set to ``True``, would not show whitespace
577 :param ignore_whitespace: If set to ``True``, would not show whitespace
555 changes. Defaults to ``False``.
578 changes. Defaults to ``False``.
556 :param context: How many lines before/after changed lines should be
579 :param context: How many lines before/after changed lines should be
557 shown. Defaults to ``3``.
580 shown. Defaults to ``3``.
558 :param path1: Can be set to a path to associate with `commit1`. This
581 :param path1: Can be set to a path to associate with `commit1`. This
559 parameter works only for backends which support diff generation for
582 parameter works only for backends which support diff generation for
560 different paths. Other backends will raise a `ValueError` if `path1`
583 different paths. Other backends will raise a `ValueError` if `path1`
561 is set and has a different value than `path`.
584 is set and has a different value than `path`.
562 :param file_path: filter this diff by given path pattern
585 :param file_path: filter this diff by given path pattern
563 """
586 """
564 raise NotImplementedError
587 raise NotImplementedError
565
588
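# --- Illustrative sketch (added annotation, not part of the original file) ---
# `get_diff` returns a plain-text, git-like diff of `commit2` against
# `commit1`; passing EMPTY_COMMIT as the first argument diffs against the
# empty repository state (`repo` is a hypothetical concrete instance):
#
#     old, new = repo.get_commit(commit_idx=0), repo.get_commit()
#     text_diff = repo.get_diff(old, new, ignore_whitespace=True, context=5)
#     full_diff = repo.get_diff(repo.EMPTY_COMMIT, new)   # everything so far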
566 def strip(self, commit_id, branch=None):
589 def strip(self, commit_id, branch=None):
567 """
590 """
568 Strip given commit_id from the repository
591 Strip given commit_id from the repository
569 """
592 """
570 raise NotImplementedError
593 raise NotImplementedError
571
594
572 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
595 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
573 """
596 """
574 Return a latest common ancestor commit if one exists for this repo
597 Return a latest common ancestor commit if one exists for this repo
575 `commit_id1` vs `commit_id2` from `repo2`.
598 `commit_id1` vs `commit_id2` from `repo2`.
576
599
577 :param commit_id1: Commit id from this repository to use as a
600 :param commit_id1: Commit id from this repository to use as a
578 target for the comparison.
601 target for the comparison.
579 :param commit_id2: Source commit id to use for comparison.
602 :param commit_id2: Source commit id to use for comparison.
580 :param repo2: Source repository to use for comparison.
603 :param repo2: Source repository to use for comparison.
581 """
604 """
582 raise NotImplementedError
605 raise NotImplementedError
583
606
584 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
607 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
585 """
608 """
586 Compare this repository's revision `commit_id1` with `commit_id2`.
609 Compare this repository's revision `commit_id1` with `commit_id2`.
587
610
588 Returns a tuple(commits, ancestor) that would be merged from
611 Returns a tuple(commits, ancestor) that would be merged from
589 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
612 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
590 will be returned as ancestor.
613 will be returned as ancestor.
591
614
592 :param commit_id1: Commit id from this repository to use as a
615 :param commit_id1: Commit id from this repository to use as a
593 target for the comparison.
616 target for the comparison.
594 :param commit_id2: Source commit id to use for comparison.
617 :param commit_id2: Source commit id to use for comparison.
595 :param repo2: Source repository to use for comparison.
618 :param repo2: Source repository to use for comparison.
596 :param merge: If set to ``True`` will do a merge compare which also
619 :param merge: If set to ``True`` will do a merge compare which also
597 returns the common ancestor.
620 returns the common ancestor.
598 :param pre_load: Optional. List of commit attributes to load.
621 :param pre_load: Optional. List of commit attributes to load.
599 """
622 """
600 raise NotImplementedError
623 raise NotImplementedError
601
624
602 def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
625 def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
603 user_name='', user_email='', message='', dry_run=False,
626 user_name='', user_email='', message='', dry_run=False,
604 use_rebase=False, close_branch=False):
627 use_rebase=False, close_branch=False):
605 """
628 """
606 Merge the revisions specified in `source_ref` from `source_repo`
629 Merge the revisions specified in `source_ref` from `source_repo`
607 onto the `target_ref` of this repository.
630 onto the `target_ref` of this repository.
608
631
609 `source_ref` and `target_ref` are named tuples with the following
632 `source_ref` and `target_ref` are named tuples with the following
610 fields `type`, `name` and `commit_id`.
633 fields `type`, `name` and `commit_id`.
611
634
612 Returns a MergeResponse named tuple with the following fields
635 Returns a MergeResponse named tuple with the following fields
613 'possible', 'executed', 'source_commit', 'target_commit',
636 'possible', 'executed', 'source_commit', 'target_commit',
614 'merge_commit'.
637 'merge_commit'.
615
638
616 :param repo_id: `repo_id` target repo id.
639 :param repo_id: `repo_id` target repo id.
617 :param workspace_id: `workspace_id` unique identifier.
640 :param workspace_id: `workspace_id` unique identifier.
618 :param target_ref: `target_ref` points to the commit on top of which
641 :param target_ref: `target_ref` points to the commit on top of which
619 the `source_ref` should be merged.
642 the `source_ref` should be merged.
620 :param source_repo: The repository that contains the commits to be
643 :param source_repo: The repository that contains the commits to be
621 merged.
644 merged.
622 :param source_ref: `source_ref` points to the topmost commit from
645 :param source_ref: `source_ref` points to the topmost commit from
623 the `source_repo` which should be merged.
646 the `source_repo` which should be merged.
624 :param user_name: Merge commit `user_name`.
647 :param user_name: Merge commit `user_name`.
625 :param user_email: Merge commit `user_email`.
648 :param user_email: Merge commit `user_email`.
626 :param message: Merge commit `message`.
649 :param message: Merge commit `message`.
627 :param dry_run: If `True` the merge will not take place.
650 :param dry_run: If `True` the merge will not take place.
628 :param use_rebase: If `True` commits from the source will be rebased
651 :param use_rebase: If `True` commits from the source will be rebased
629 on top of the target instead of being merged.
652 on top of the target instead of being merged.
630 :param close_branch: If `True` the branch will be closed before merging it
653 :param close_branch: If `True` the branch will be closed before merging it
631 """
654 """
632 if dry_run:
655 if dry_run:
633 message = message or settings.MERGE_DRY_RUN_MESSAGE
656 message = message or settings.MERGE_DRY_RUN_MESSAGE
634 user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
657 user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
635 user_name = user_name or settings.MERGE_DRY_RUN_USER
658 user_name = user_name or settings.MERGE_DRY_RUN_USER
636 else:
659 else:
637 if not user_name:
660 if not user_name:
638 raise ValueError('user_name cannot be empty')
661 raise ValueError('user_name cannot be empty')
639 if not user_email:
662 if not user_email:
640 raise ValueError('user_email cannot be empty')
663 raise ValueError('user_email cannot be empty')
641 if not message:
664 if not message:
642 raise ValueError('message cannot be empty')
665 raise ValueError('message cannot be empty')
643
666
644 try:
667 try:
645 return self._merge_repo(
668 return self._merge_repo(
646 repo_id, workspace_id, target_ref, source_repo,
669 repo_id, workspace_id, target_ref, source_repo,
647 source_ref, message, user_name, user_email, dry_run=dry_run,
670 source_ref, message, user_name, user_email, dry_run=dry_run,
648 use_rebase=use_rebase, close_branch=close_branch)
671 use_rebase=use_rebase, close_branch=close_branch)
649 except RepositoryError as exc:
672 except RepositoryError as exc:
650 log.exception('Unexpected failure when running merge, dry-run=%s', dry_run)
673 log.exception('Unexpected failure when running merge, dry-run=%s', dry_run)
651 return MergeResponse(
674 return MergeResponse(
652 False, False, None, MergeFailureReason.UNKNOWN,
675 False, False, None, MergeFailureReason.UNKNOWN,
653 metadata={'exception': str(exc)})
676 metadata={'exception': str(exc)})
654
677
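# --- Illustrative sketch (added annotation, not part of the original file) ---
# A dry-run merge can be used to probe whether a pull request is mergeable
# without touching the target. `target_repo`, `source_repo`, `target_ref`
# and `source_ref` are assumed to exist, the refs being the named tuples
# (type, name, commit_id) described in the docstring; all ids are hypothetical:
#
#     resp = target_repo.merge(
#         repo_id=42, workspace_id='pr-17',
#         target_ref=target_ref, source_repo=source_repo,
#         source_ref=source_ref, dry_run=True)
#     if resp.possible:
#         pass  # safe to attempt the real merge with user_name/email/message set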
655 def _merge_repo(self, repo_id, workspace_id, target_ref,
678 def _merge_repo(self, repo_id, workspace_id, target_ref,
656 source_repo, source_ref, merge_message,
679 source_repo, source_ref, merge_message,
657 merger_name, merger_email, dry_run=False,
680 merger_name, merger_email, dry_run=False,
658 use_rebase=False, close_branch=False):
681 use_rebase=False, close_branch=False):
659 """Internal implementation of merge."""
682 """Internal implementation of merge."""
660 raise NotImplementedError
683 raise NotImplementedError
661
684
662 def _maybe_prepare_merge_workspace(
685 def _maybe_prepare_merge_workspace(
663 self, repo_id, workspace_id, target_ref, source_ref):
686 self, repo_id, workspace_id, target_ref, source_ref):
664 """
687 """
665 Create the merge workspace.
688 Create the merge workspace.
666
689
667 :param workspace_id: `workspace_id` unique identifier.
690 :param workspace_id: `workspace_id` unique identifier.
668 """
691 """
669 raise NotImplementedError
692 raise NotImplementedError
670
693
671 @classmethod
694 @classmethod
672 def _get_legacy_shadow_repository_path(cls, repo_path, workspace_id):
695 def _get_legacy_shadow_repository_path(cls, repo_path, workspace_id):
673 """
696 """
674 Legacy shadow repository path layout used by older versions. We still
697 Legacy shadow repository path layout used by older versions. We still
675 need it for backward compatibility.
698 need it for backward compatibility.
676 """
699 """
677 return os.path.join(
700 return os.path.join(
678 os.path.dirname(repo_path),
701 os.path.dirname(repo_path),
679 '.__shadow_%s_%s' % (os.path.basename(repo_path), workspace_id))
702 '.__shadow_%s_%s' % (os.path.basename(repo_path), workspace_id))
680
703
681 @classmethod
704 @classmethod
682 def _get_shadow_repository_path(cls, repo_path, repo_id, workspace_id):
705 def _get_shadow_repository_path(cls, repo_path, repo_id, workspace_id):
683 # The name of the shadow repository must start with '.', so it is
706 # The name of the shadow repository must start with '.', so it is
684 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
707 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
685 legacy_repository_path = cls._get_legacy_shadow_repository_path(repo_path, workspace_id)
708 legacy_repository_path = cls._get_legacy_shadow_repository_path(repo_path, workspace_id)
686 if os.path.exists(legacy_repository_path):
709 if os.path.exists(legacy_repository_path):
687 return legacy_repository_path
710 return legacy_repository_path
688 else:
711 else:
689 return os.path.join(
712 return os.path.join(
690 os.path.dirname(repo_path),
713 os.path.dirname(repo_path),
691 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
714 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
692
715
693 def cleanup_merge_workspace(self, repo_id, workspace_id):
716 def cleanup_merge_workspace(self, repo_id, workspace_id):
694 """
717 """
695 Remove merge workspace.
718 Remove merge workspace.
696
719
697 This function MUST not fail in case there is no workspace associated with
720 This function MUST not fail in case there is no workspace associated with
698 the given `workspace_id`.
721 the given `workspace_id`.
699
722
700 :param workspace_id: `workspace_id` unique identifier.
723 :param workspace_id: `workspace_id` unique identifier.
701 """
724 """
702 shadow_repository_path = self._get_shadow_repository_path(
725 shadow_repository_path = self._get_shadow_repository_path(
703 self.path, repo_id, workspace_id)
726 self.path, repo_id, workspace_id)
704 shadow_repository_path_del = '{}.{}.delete'.format(
727 shadow_repository_path_del = '{}.{}.delete'.format(
705 shadow_repository_path, time.time())
728 shadow_repository_path, time.time())
706
729
707 # move the shadow repo, so it never conflicts with the one used.
730 # move the shadow repo, so it never conflicts with the one used.
708 # we use this method because shutil.rmtree had some edge case problems
731 # we use this method because shutil.rmtree had some edge case problems
709 # removing symlinked repositories
732 # removing symlinked repositories
710 if not os.path.isdir(shadow_repository_path):
733 if not os.path.isdir(shadow_repository_path):
711 return
734 return
712
735
713 shutil.move(shadow_repository_path, shadow_repository_path_del)
736 shutil.move(shadow_repository_path, shadow_repository_path_del)
714 try:
737 try:
715 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
738 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
716 except Exception:
739 except Exception:
717 log.exception('Failed to gracefully remove shadow repo under %s',
740 log.exception('Failed to gracefully remove shadow repo under %s',
718 shadow_repository_path_del)
741 shadow_repository_path_del)
719 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
742 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
720
743
721 # ========== #
744 # ========== #
722 # COMMIT API #
745 # COMMIT API #
723 # ========== #
746 # ========== #
724
747
725 @LazyProperty
748 @LazyProperty
726 def in_memory_commit(self):
749 def in_memory_commit(self):
727 """
750 """
728 Returns :class:`InMemoryCommit` object for this repository.
751 Returns :class:`InMemoryCommit` object for this repository.
729 """
752 """
730 raise NotImplementedError
753 raise NotImplementedError
731
754
732 # ======================== #
755 # ======================== #
733 # UTILITIES FOR SUBCLASSES #
756 # UTILITIES FOR SUBCLASSES #
734 # ======================== #
757 # ======================== #
735
758
736 def _validate_diff_commits(self, commit1, commit2):
759 def _validate_diff_commits(self, commit1, commit2):
737 """
760 """
738 Validates that the given commits are related to this repository.
761 Validates that the given commits are related to this repository.
739
762
740 Intended as a utility for sub classes to have a consistent validation
763 Intended as a utility for sub classes to have a consistent validation
741 of input parameters in methods like :meth:`get_diff`.
764 of input parameters in methods like :meth:`get_diff`.
742 """
765 """
743 self._validate_commit(commit1)
766 self._validate_commit(commit1)
744 self._validate_commit(commit2)
767 self._validate_commit(commit2)
745 if (isinstance(commit1, EmptyCommit) and
768 if (isinstance(commit1, EmptyCommit) and
746 isinstance(commit2, EmptyCommit)):
769 isinstance(commit2, EmptyCommit)):
747 raise ValueError("Cannot compare two empty commits")
770 raise ValueError("Cannot compare two empty commits")
748
771
749 def _validate_commit(self, commit):
772 def _validate_commit(self, commit):
750 if not isinstance(commit, BaseCommit):
773 if not isinstance(commit, BaseCommit):
751 raise TypeError(
774 raise TypeError(
752 "%s is not of type BaseCommit" % repr(commit))
775 "%s is not of type BaseCommit" % repr(commit))
753 if commit.repository != self and not isinstance(commit, EmptyCommit):
776 if commit.repository != self and not isinstance(commit, EmptyCommit):
754 raise ValueError(
777 raise ValueError(
755 "Commit %s must be a valid commit from this repository %s, "
778 "Commit %s must be a valid commit from this repository %s, "
756 "related to this repository instead %s." %
779 "related to this repository instead %s." %
757 (commit, self, commit.repository))
780 (commit, self, commit.repository))
758
781
759 def _validate_commit_id(self, commit_id):
782 def _validate_commit_id(self, commit_id):
760 if not isinstance(commit_id, compat.string_types):
783 if not isinstance(commit_id, compat.string_types):
761 raise TypeError("commit_id must be a string value got {} instead".format(type(commit_id)))
784 raise TypeError("commit_id must be a string value got {} instead".format(type(commit_id)))
762
785
763 def _validate_commit_idx(self, commit_idx):
786 def _validate_commit_idx(self, commit_idx):
764 if not isinstance(commit_idx, (int, long)):
787 if not isinstance(commit_idx, (int, long)):
765 raise TypeError("commit_idx must be a numeric value")
788 raise TypeError("commit_idx must be a numeric value")
766
789
767 def _validate_branch_name(self, branch_name):
790 def _validate_branch_name(self, branch_name):
768 if branch_name and branch_name not in self.branches_all:
791 if branch_name and branch_name not in self.branches_all:
769 msg = ("Branch %s not found in %s" % (branch_name, self))
792 msg = ("Branch %s not found in %s" % (branch_name, self))
770 raise BranchDoesNotExistError(msg)
793 raise BranchDoesNotExistError(msg)
771
794
772 #
795 #
773 # Supporting deprecated API parts
796 # Supporting deprecated API parts
774 # TODO: johbo: consider to move this into a mixin
797 # TODO: johbo: consider to move this into a mixin
775 #
798 #
776
799
777 @property
800 @property
778 def EMPTY_CHANGESET(self):
801 def EMPTY_CHANGESET(self):
779 warnings.warn(
802 warnings.warn(
780 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
803 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
781 return self.EMPTY_COMMIT_ID
804 return self.EMPTY_COMMIT_ID
782
805
783 @property
806 @property
784 def revisions(self):
807 def revisions(self):
785 warnings.warn("Use commits attribute instead", DeprecationWarning)
808 warnings.warn("Use commits attribute instead", DeprecationWarning)
786 return self.commit_ids
809 return self.commit_ids
787
810
788 @revisions.setter
811 @revisions.setter
789 def revisions(self, value):
812 def revisions(self, value):
790 warnings.warn("Use commits attribute instead", DeprecationWarning)
813 warnings.warn("Use commits attribute instead", DeprecationWarning)
791 self.commit_ids = value
814 self.commit_ids = value
792
815
793 def get_changeset(self, revision=None, pre_load=None):
816 def get_changeset(self, revision=None, pre_load=None):
794 warnings.warn("Use get_commit instead", DeprecationWarning)
817 warnings.warn("Use get_commit instead", DeprecationWarning)
795 commit_id = None
818 commit_id = None
796 commit_idx = None
819 commit_idx = None
797 if isinstance(revision, compat.string_types):
820 if isinstance(revision, compat.string_types):
798 commit_id = revision
821 commit_id = revision
799 else:
822 else:
800 commit_idx = revision
823 commit_idx = revision
801 return self.get_commit(
824 return self.get_commit(
802 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
825 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
803
826
804 def get_changesets(
827 def get_changesets(
805 self, start=None, end=None, start_date=None, end_date=None,
828 self, start=None, end=None, start_date=None, end_date=None,
806 branch_name=None, pre_load=None):
829 branch_name=None, pre_load=None):
807 warnings.warn("Use get_commits instead", DeprecationWarning)
830 warnings.warn("Use get_commits instead", DeprecationWarning)
808 start_id = self._revision_to_commit(start)
831 start_id = self._revision_to_commit(start)
809 end_id = self._revision_to_commit(end)
832 end_id = self._revision_to_commit(end)
810 return self.get_commits(
833 return self.get_commits(
811 start_id=start_id, end_id=end_id, start_date=start_date,
834 start_id=start_id, end_id=end_id, start_date=start_date,
812 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
835 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
813
836
814 def _revision_to_commit(self, revision):
837 def _revision_to_commit(self, revision):
815 """
838 """
816 Translates a revision to a commit_id
839 Translates a revision to a commit_id
817
840
818 Helps to support the old changeset based API which allows to use
841 Helps to support the old changeset based API which allows to use
819 commit ids and commit indices interchangeably.
842 commit ids and commit indices interchangeably.
820 """
843 """
821 if revision is None:
844 if revision is None:
822 return revision
845 return revision
823
846
824 if isinstance(revision, compat.string_types):
847 if isinstance(revision, compat.string_types):
825 commit_id = revision
848 commit_id = revision
826 else:
849 else:
827 commit_id = self.commit_ids[revision]
850 commit_id = self.commit_ids[revision]
828 return commit_id
851 return commit_id
829
852
830 @property
853 @property
831 def in_memory_changeset(self):
854 def in_memory_changeset(self):
832 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
855 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
833 return self.in_memory_commit
856 return self.in_memory_commit
834
857
835 def get_path_permissions(self, username):
858 def get_path_permissions(self, username):
836 """
859 """
837 Returns a path permission checker or None if not supported
860 Returns a path permission checker or None if not supported
838
861
839 :param username: session user name
862 :param username: session user name
840 :return: an instance of BasePathPermissionChecker or None
863 :return: an instance of BasePathPermissionChecker or None
841 """
864 """
842 return None
865 return None
843
866
844 def install_hooks(self, force=False):
867 def install_hooks(self, force=False):
845 return self._remote.install_hooks(force)
868 return self._remote.install_hooks(force)
846
869
847 def get_hooks_info(self):
870 def get_hooks_info(self):
848 return self._remote.get_hooks_info()
871 return self._remote.get_hooks_info()
849
872
850
873
851 class BaseCommit(object):
874 class BaseCommit(object):
852 """
875 """
853 Each backend should implement its own commit representation.
876 Each backend should implement its own commit representation.
854
877
855 **Attributes**
878 **Attributes**
856
879
857 ``repository``
880 ``repository``
858 repository object within which commit exists
881 repository object within which commit exists
859
882
860 ``id``
883 ``id``
861 The commit id, may be ``raw_id`` or, e.g. for Mercurial's tip,
884 The commit id, may be ``raw_id`` or, e.g. for Mercurial's tip,
862 just ``tip``.
885 just ``tip``.
863
886
864 ``raw_id``
887 ``raw_id``
865 raw commit representation (e.g. the full 40-character sha for the git
888 raw commit representation (e.g. the full 40-character sha for the git
866 backend)
889 backend)
867
890
868 ``short_id``
891 ``short_id``
869 shortened (if applicable) version of ``raw_id``; a simple
892 shortened (if applicable) version of ``raw_id``; a simple
870 shortcut for ``raw_id[:12]`` for git/mercurial backends or the same
893 shortcut for ``raw_id[:12]`` for git/mercurial backends or the same
871 as ``raw_id`` for subversion
894 as ``raw_id`` for subversion
872
895
873 ``idx``
896 ``idx``
874 commit index
897 commit index
875
898
876 ``files``
899 ``files``
877 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
900 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
878
901
879 ``dirs``
902 ``dirs``
880 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
903 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
881
904
882 ``nodes``
905 ``nodes``
883 combined list of ``Node`` objects
906 combined list of ``Node`` objects
884
907
885 ``author``
908 ``author``
886 author of the commit, as unicode
909 author of the commit, as unicode
887
910
888 ``message``
911 ``message``
889 message of the commit, as unicode
912 message of the commit, as unicode
890
913
891 ``parents``
914 ``parents``
892 list of parent commits
915 list of parent commits
893
916
894 """
917 """
895
918
896 branch = None
919 branch = None
897 """
920 """
898 Depending on the backend this should be set to the branch name of the
921 Depending on the backend this should be set to the branch name of the
899 commit. Backends not supporting branches on commits should leave this
922 commit. Backends not supporting branches on commits should leave this
900 value as ``None``.
923 value as ``None``.
901 """
924 """
902
925
903 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
926 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
904 """
927 """
905 This template is used to generate a default prefix for repository archives
928 This template is used to generate a default prefix for repository archives
906 if no prefix has been specified.
929 if no prefix has been specified.
907 """
930 """
908
931
909 def __str__(self):
932 def __str__(self):
910 return '<%s at %s:%s>' % (
933 return '<%s at %s:%s>' % (
911 self.__class__.__name__, self.idx, self.short_id)
934 self.__class__.__name__, self.idx, self.short_id)
912
935
913 def __repr__(self):
936 def __repr__(self):
914 return self.__str__()
937 return self.__str__()
915
938
916 def __unicode__(self):
939 def __unicode__(self):
917 return u'%s:%s' % (self.idx, self.short_id)
940 return u'%s:%s' % (self.idx, self.short_id)
918
941
919 def __eq__(self, other):
942 def __eq__(self, other):
920 same_instance = isinstance(other, self.__class__)
943 same_instance = isinstance(other, self.__class__)
921 return same_instance and self.raw_id == other.raw_id
944 return same_instance and self.raw_id == other.raw_id
922
945
923 def __json__(self):
946 def __json__(self):
924 parents = []
947 parents = []
925 try:
948 try:
926 for parent in self.parents:
949 for parent in self.parents:
927 parents.append({'raw_id': parent.raw_id})
950 parents.append({'raw_id': parent.raw_id})
928 except NotImplementedError:
951 except NotImplementedError:
929 # empty commit doesn't have parents implemented
952 # empty commit doesn't have parents implemented
930 pass
953 pass
931
954
932 return {
955 return {
933 'short_id': self.short_id,
956 'short_id': self.short_id,
934 'raw_id': self.raw_id,
957 'raw_id': self.raw_id,
935 'revision': self.idx,
958 'revision': self.idx,
936 'message': self.message,
959 'message': self.message,
937 'date': self.date,
960 'date': self.date,
938 'author': self.author,
961 'author': self.author,
939 'parents': parents,
962 'parents': parents,
940 'branch': self.branch
963 'branch': self.branch
941 }
964 }
942
965
943 def __getstate__(self):
966 def __getstate__(self):
944 d = self.__dict__.copy()
967 d = self.__dict__.copy()
945 d.pop('_remote', None)
968 d.pop('_remote', None)
946 d.pop('repository', None)
969 d.pop('repository', None)
947 return d
970 return d
948
971
949 def serialize(self):
972 def serialize(self):
950 return self.__json__()
973 return self.__json__()
951
974
952 def _get_refs(self):
975 def _get_refs(self):
953 return {
976 return {
954 'branches': [self.branch] if self.branch else [],
977 'branches': [self.branch] if self.branch else [],
955 'bookmarks': getattr(self, 'bookmarks', []),
978 'bookmarks': getattr(self, 'bookmarks', []),
956 'tags': self.tags
979 'tags': self.tags
957 }
980 }
958
981
959 @LazyProperty
982 @LazyProperty
960 def last(self):
983 def last(self):
961 """
984 """
962 ``True`` if this is the last commit in the repository, ``False``
985 ``True`` if this is the last commit in the repository, ``False``
963 otherwise; trying to access this attribute while there are no
986 otherwise; trying to access this attribute while there are no
964 commits raises `EmptyRepositoryError`
987 commits raises `EmptyRepositoryError`
965 """
988 """
966 if self.repository is None:
989 if self.repository is None:
967 raise CommitError("Cannot check if it's most recent commit")
990 raise CommitError("Cannot check if it's most recent commit")
968 return self.raw_id == self.repository.commit_ids[-1]
991 return self.raw_id == self.repository.commit_ids[-1]
969
992
970 @LazyProperty
993 @LazyProperty
971 def parents(self):
994 def parents(self):
972 """
995 """
973 Returns list of parent commits.
996 Returns list of parent commits.
974 """
997 """
975 raise NotImplementedError
998 raise NotImplementedError
976
999
977 @LazyProperty
1000 @LazyProperty
978 def first_parent(self):
1001 def first_parent(self):
979 """
1002 """
980 Returns the first parent commit, or ``EmptyCommit`` if there is none.
1003 Returns the first parent commit, or ``EmptyCommit`` if there is none.
981 """
1004 """
982 return self.parents[0] if self.parents else EmptyCommit()
1005 return self.parents[0] if self.parents else EmptyCommit()
983
1006
984 @property
1007 @property
985 def merge(self):
1008 def merge(self):
986 """
1009 """
987 Returns boolean if commit is a merge.
1010 Returns boolean if commit is a merge.
988 """
1011 """
989 return len(self.parents) > 1
1012 return len(self.parents) > 1
990
1013
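# --- Illustrative sketch (added annotation, not part of the original file) ---
# Parent access never raises for root commits: `first_parent` falls back to
# an EmptyCommit and `merge` simply counts parents (`commit` is a
# hypothetical BaseCommit subclass instance):
#
#     if commit.merge:
#         left, right = commit.parents[:2]   # a merge has at least two parents
#     base = commit.first_parent             # EmptyCommit() for a root commit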
991 @LazyProperty
1014 @LazyProperty
992 def children(self):
1015 def children(self):
993 """
1016 """
994 Returns list of child commits.
1017 Returns list of child commits.
995 """
1018 """
996 raise NotImplementedError
1019 raise NotImplementedError
997
1020
998 @LazyProperty
1021 @LazyProperty
999 def id(self):
1022 def id(self):
1000 """
1023 """
1001 Returns string identifying this commit.
1024 Returns string identifying this commit.
1002 """
1025 """
1003 raise NotImplementedError
1026 raise NotImplementedError
1004
1027
1005 @LazyProperty
1028 @LazyProperty
1006 def raw_id(self):
1029 def raw_id(self):
1007 """
1030 """
1008 Returns raw string identifying this commit.
1031 Returns raw string identifying this commit.
1009 """
1032 """
1010 raise NotImplementedError
1033 raise NotImplementedError
1011
1034
1012 @LazyProperty
1035 @LazyProperty
1013 def short_id(self):
1036 def short_id(self):
1014 """
1037 """
1015 Returns shortened version of ``raw_id`` attribute, as string,
1038 Returns shortened version of ``raw_id`` attribute, as string,
1016 identifying this commit, useful for presentation to users.
1039 identifying this commit, useful for presentation to users.
1017 """
1040 """
1018 raise NotImplementedError
1041 raise NotImplementedError
1019
1042
1020 @LazyProperty
1043 @LazyProperty
1021 def idx(self):
1044 def idx(self):
1022 """
1045 """
1023 Returns integer identifying this commit.
1046 Returns integer identifying this commit.
1024 """
1047 """
1025 raise NotImplementedError
1048 raise NotImplementedError
1026
1049
1027 @LazyProperty
1050 @LazyProperty
1028 def committer(self):
1051 def committer(self):
1029 """
1052 """
1030 Returns committer for this commit
1053 Returns committer for this commit
1031 """
1054 """
1032 raise NotImplementedError
1055 raise NotImplementedError
1033
1056
1034 @LazyProperty
1057 @LazyProperty
1035 def committer_name(self):
1058 def committer_name(self):
1036 """
1059 """
1037 Returns committer name for this commit
1060 Returns committer name for this commit
1038 """
1061 """
1039
1062
1040 return author_name(self.committer)
1063 return author_name(self.committer)
1041
1064
1042 @LazyProperty
1065 @LazyProperty
1043 def committer_email(self):
1066 def committer_email(self):
1044 """
1067 """
1045 Returns committer email address for this commit
1068 Returns committer email address for this commit
1046 """
1069 """
1047
1070
1048 return author_email(self.committer)
1071 return author_email(self.committer)
1049
1072
1050 @LazyProperty
1073 @LazyProperty
1051 def author(self):
1074 def author(self):
1052 """
1075 """
1053 Returns author for this commit
1076 Returns author for this commit
1054 """
1077 """
1055
1078
1056 raise NotImplementedError
1079 raise NotImplementedError
1057
1080
1058 @LazyProperty
1081 @LazyProperty
1059 def author_name(self):
1082 def author_name(self):
1060 """
1083 """
1061 Returns author name for this commit
1084 Returns author name for this commit
1062 """
1085 """
1063
1086
1064 return author_name(self.author)
1087 return author_name(self.author)
1065
1088
1066 @LazyProperty
1089 @LazyProperty
1067 def author_email(self):
1090 def author_email(self):
1068 """
1091 """
1069 Returns author email address for this commit
1092 Returns author email address for this commit
1070 """
1093 """
1071
1094
1072 return author_email(self.author)
1095 return author_email(self.author)
1073
1096
1074 def get_file_mode(self, path):
1097 def get_file_mode(self, path):
1075 """
1098 """
1076 Returns stat mode of the file at `path`.
1099 Returns stat mode of the file at `path`.
1077 """
1100 """
1078 raise NotImplementedError
1101 raise NotImplementedError
1079
1102
1080 def is_link(self, path):
1103 def is_link(self, path):
1081 """
1104 """
1082 Returns ``True`` if given `path` is a symlink
1105 Returns ``True`` if given `path` is a symlink
1083 """
1106 """
1084 raise NotImplementedError
1107 raise NotImplementedError
1085
1108
1086 def is_node_binary(self, path):
1109 def is_node_binary(self, path):
1087 """
1110 """
1088 Returns ``True`` if the given path is a binary file
1111 Returns ``True`` if the given path is a binary file
1089 """
1112 """
1090 raise NotImplementedError
1113 raise NotImplementedError
1091
1114
1092 def get_file_content(self, path):
1115 def get_file_content(self, path):
1093 """
1116 """
1094 Returns content of the file at the given `path`.
1117 Returns content of the file at the given `path`.
1095 """
1118 """
1096 raise NotImplementedError
1119 raise NotImplementedError
1097
1120
1098 def get_file_content_streamed(self, path):
1121 def get_file_content_streamed(self, path):
1099 """
1122 """
1100 returns a streaming response from vcsserver with file content
1123 returns a streaming response from vcsserver with file content
1101 """
1124 """
1102 raise NotImplementedError
1125 raise NotImplementedError
1103
1126
1104 def get_file_size(self, path):
1127 def get_file_size(self, path):
1105 """
1128 """
1106 Returns size of the file at the given `path`.
1129 Returns size of the file at the given `path`.
1107 """
1130 """
1108 raise NotImplementedError
1131 raise NotImplementedError
1109
1132
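# --- Illustrative sketch (added annotation, not part of the original file) ---
# The file accessors above all take a repository-relative path; the path
# used here is hypothetical (`commit` is a concrete BaseCommit instance):
#
#     path = 'docs/README.rst'
#     if not commit.is_node_binary(path):
#         data = commit.get_file_content(path)
#     size = commit.get_file_size(path)
#     mode = commit.get_file_mode(path)   # stat mode, e.g. 0o100644 for git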
1110 def get_path_commit(self, path, pre_load=None):
1133 def get_path_commit(self, path, pre_load=None):
1111 """
1134 """
1112 Returns last commit of the file at the given `path`.
1135 Returns last commit of the file at the given `path`.
1113
1136
1114 :param pre_load: Optional. List of commit attributes to load.
1137 :param pre_load: Optional. List of commit attributes to load.
1115 """
1138 """
1116 commits = self.get_path_history(path, limit=1, pre_load=pre_load)
1139 commits = self.get_path_history(path, limit=1, pre_load=pre_load)
1117 if not commits:
1140 if not commits:
1118 raise RepositoryError(
1141 raise RepositoryError(
1119 'Failed to fetch history for path {}. '
1142 'Failed to fetch history for path {}. '
1120 'Please check if such path exists in your repository'.format(
1143 'Please check if such path exists in your repository'.format(
1121 path))
1144 path))
1122 return commits[0]
1145 return commits[0]
1123
1146
1124 def get_path_history(self, path, limit=None, pre_load=None):
1147 def get_path_history(self, path, limit=None, pre_load=None):
1125 """
1148 """
1126 Returns history of file as reversed list of :class:`BaseCommit`
1149 Returns history of file as reversed list of :class:`BaseCommit`
1127 objects for which file at given `path` has been modified.
1150 objects for which file at given `path` has been modified.
1128
1151
1129 :param limit: Optional. Allows to limit the size of the returned
1152 :param limit: Optional. Allows to limit the size of the returned
1130 history. This is intended as a hint to the underlying backend, so
1153 history. This is intended as a hint to the underlying backend, so
1131 that it can apply optimizations depending on the limit.
1154 that it can apply optimizations depending on the limit.
1132 :param pre_load: Optional. List of commit attributes to load.
1155 :param pre_load: Optional. List of commit attributes to load.
1133 """
1156 """
1134 raise NotImplementedError
1157 raise NotImplementedError
1135
1158
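# --- Illustrative sketch (added annotation, not part of the original file) ---
# `get_path_commit` is a thin wrapper returning the newest entry of
# `get_path_history`. The path below is hypothetical (`commit` is a concrete
# BaseCommit instance):
#
#     last_change = commit.get_path_commit('setup.py')
#     recent = commit.get_path_history('setup.py', limit=10,
#                                      pre_load=['author', 'date'])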
1136 def get_file_annotate(self, path, pre_load=None):
1159 def get_file_annotate(self, path, pre_load=None):
1137 """
1160 """
1138 Returns a generator of four element tuples with
1161 Returns a generator of four element tuples with
1139 lineno, sha, commit lazy loader and line
1162 lineno, sha, commit lazy loader and line
1140
1163
1141 :param pre_load: Optional. List of commit attributes to load.
1164 :param pre_load: Optional. List of commit attributes to load.
1142 """
1165 """
1143 raise NotImplementedError
1166 raise NotImplementedError
1144
1167
1145 def get_nodes(self, path):
1168 def get_nodes(self, path):
1146 """
1169 """
1147 Returns combined ``DirNode`` and ``FileNode`` objects list representing
1170 Returns combined ``DirNode`` and ``FileNode`` objects list representing
1148 state of commit at the given ``path``.
1171 state of commit at the given ``path``.
1149
1172
1150 :raises ``CommitError``: if node at the given ``path`` is not
1173 :raises ``CommitError``: if node at the given ``path`` is not
1151 instance of ``DirNode``
1174 instance of ``DirNode``
1152 """
1175 """
1153 raise NotImplementedError
1176 raise NotImplementedError
1154
1177
1155 def get_node(self, path):
1178 def get_node(self, path):
1156 """
1179 """
1157 Returns ``Node`` object from the given ``path``.
1180 Returns ``Node`` object from the given ``path``.
1158
1181
1159 :raises ``NodeDoesNotExistError``: if there is no node at the given
1182 :raises ``NodeDoesNotExistError``: if there is no node at the given
1160 ``path``
1183 ``path``
1161 """
1184 """
1162 raise NotImplementedError
1185 raise NotImplementedError
1163
1186
1164 def get_largefile_node(self, path):
1187 def get_largefile_node(self, path):
1165 """
1188 """
1166 Returns the path to the largefile from Mercurial/Git-LFS storage,
1189 Returns the path to the largefile from Mercurial/Git-LFS storage,
1167 or None if it's not a largefile node
1190 or None if it's not a largefile node
1168 """
1191 """
1169 return None
1192 return None
1170
1193
1171 def archive_repo(self, archive_dest_path, kind='tgz', subrepos=None,
1194 def archive_repo(self, archive_dest_path, kind='tgz', subrepos=None,
1172 prefix=None, write_metadata=False, mtime=None, archive_at_path='/'):
1195 prefix=None, write_metadata=False, mtime=None, archive_at_path='/'):
1173 """
1196 """
1174 Creates an archive containing the contents of the repository.
1197 Creates an archive containing the contents of the repository.
1175
1198
1176 :param archive_dest_path: path to the file in which to create the archive.
1199 :param archive_dest_path: path to the file in which to create the archive.
1177 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1200 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1178 :param prefix: name of root directory in archive.
1201 :param prefix: name of root directory in archive.
1179 Default is repository name and commit's short_id joined with dash:
1202 Default is repository name and commit's short_id joined with dash:
1180 ``"{repo_name}-{short_id}"``.
1203 ``"{repo_name}-{short_id}"``.
1181 :param write_metadata: write a metadata file into archive.
1204 :param write_metadata: write a metadata file into archive.
1182 :param mtime: custom modification time for archive creation, defaults
1205 :param mtime: custom modification time for archive creation, defaults
1183 to time.time() if not given.
1206 to time.time() if not given.
1184 :param archive_at_path: pack files at this path (default '/')
1207 :param archive_at_path: pack files at this path (default '/')
1185
1208
1186 :raise VCSError: If prefix has a problem.
1209 :raise VCSError: If prefix has a problem.
1187 """
1210 """
1188 allowed_kinds = [x[0] for x in settings.ARCHIVE_SPECS]
1211 allowed_kinds = [x[0] for x in settings.ARCHIVE_SPECS]
1189 if kind not in allowed_kinds:
1212 if kind not in allowed_kinds:
1190 raise ImproperArchiveTypeError(
1213 raise ImproperArchiveTypeError(
1191 'Archive kind (%s) not supported, use one of %s' %
1214 'Archive kind (%s) not supported, use one of %s' %
1192 (kind, allowed_kinds))
1215 (kind, allowed_kinds))
1193
1216
1194 prefix = self._validate_archive_prefix(prefix)
1217 prefix = self._validate_archive_prefix(prefix)
1195
1218
1196 mtime = mtime if mtime is not None else time.mktime(self.date.timetuple())
1219 mtime = mtime if mtime is not None else time.mktime(self.date.timetuple())
1197
1220
1198 file_info = []
1221 file_info = []
1199 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
1222 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
1200 for _r, _d, files in cur_rev.walk(archive_at_path):
1223 for _r, _d, files in cur_rev.walk(archive_at_path):
1201 for f in files:
1224 for f in files:
1202 f_path = os.path.join(prefix, f.path)
1225 f_path = os.path.join(prefix, f.path)
1203 file_info.append(
1226 file_info.append(
1204 (f_path, f.mode, f.is_link(), f.raw_bytes))
1227 (f_path, f.mode, f.is_link(), f.raw_bytes))
1205
1228
1206 if write_metadata:
1229 if write_metadata:
1207 metadata = [
1230 metadata = [
1208 ('repo_name', self.repository.name),
1231 ('repo_name', self.repository.name),
1209 ('commit_id', self.raw_id),
1232 ('commit_id', self.raw_id),
1210 ('mtime', mtime),
1233 ('mtime', mtime),
1211 ('branch', self.branch),
1234 ('branch', self.branch),
1212 ('tags', ','.join(self.tags)),
1235 ('tags', ','.join(self.tags)),
1213 ]
1236 ]
1214 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
1237 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
1215 file_info.append(('.archival.txt', 0o644, False, '\n'.join(meta)))
1238 file_info.append(('.archival.txt', 0o644, False, '\n'.join(meta)))
1216
1239
1217 connection.Hg.archive_repo(archive_dest_path, mtime, file_info, kind)
1240 connection.Hg.archive_repo(archive_dest_path, mtime, file_info, kind)
1218
1241
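# --- Illustrative sketch (added annotation, not part of the original file) ---
# `archive_repo` writes the commit's tree into an archive file; `kind` must
# be one of the configured specs ("tbz2", "tgz", "zip"). The destination
# path and prefix are hypothetical (`commit` is a concrete BaseCommit
# instance):
#
#     commit.archive_repo('/tmp/myrepo.tar.gz', kind='tgz',
#                         prefix='myrepo-%s' % commit.short_id,
#                         write_metadata=True)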
1219 def _validate_archive_prefix(self, prefix):
1242 def _validate_archive_prefix(self, prefix):
1220 if prefix is None:
1243 if prefix is None:
1221 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
1244 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
1222 repo_name=safe_str(self.repository.name),
1245 repo_name=safe_str(self.repository.name),
1223 short_id=self.short_id)
1246 short_id=self.short_id)
1224 elif not isinstance(prefix, str):
1247 elif not isinstance(prefix, str):
1225 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
1248 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
1226 elif prefix.startswith('/'):
1249 elif prefix.startswith('/'):
1227 raise VCSError("Prefix cannot start with leading slash")
1250 raise VCSError("Prefix cannot start with leading slash")
1228 elif prefix.strip() == '':
1251 elif prefix.strip() == '':
1229 raise VCSError("Prefix cannot be empty")
1252 raise VCSError("Prefix cannot be empty")
1230 return prefix
1253 return prefix
1231
1254
1232 @LazyProperty
1255 @LazyProperty
1233 def root(self):
1256 def root(self):
1234 """
1257 """
1235 Returns ``RootNode`` object for this commit.
1258 Returns ``RootNode`` object for this commit.
1236 """
1259 """
1237 return self.get_node('')
1260 return self.get_node('')
1238
1261
1239 def next(self, branch=None):
1262 def next(self, branch=None):
1240 """
1263 """
1241 Returns the next commit after the current one; if a branch is given it
1264 Returns the next commit after the current one; if a branch is given it
1242 will return the next commit belonging to that branch
1265 will return the next commit belonging to that branch
1243
1266
1244 :param branch: show commits within the given named branch
1267 :param branch: show commits within the given named branch
1245 """
1268 """
1246 indexes = xrange(self.idx + 1, self.repository.count())
1269 indexes = xrange(self.idx + 1, self.repository.count())
1247 return self._find_next(indexes, branch)
1270 return self._find_next(indexes, branch)
1248
1271
1249 def prev(self, branch=None):
1272 def prev(self, branch=None):
1250 """
1273 """
1251 Returns the previous commit before the current one; if a branch is given
1274 Returns the previous commit before the current one; if a branch is given
1252 it will return the previous commit belonging to that branch
1275 it will return the previous commit belonging to that branch
1253
1276
1254 :param branch: show commit within the given named branch
1277 :param branch: show commit within the given named branch
1255 """
1278 """
1256 indexes = xrange(self.idx - 1, -1, -1)
1279 indexes = xrange(self.idx - 1, -1, -1)
1257 return self._find_next(indexes, branch)
1280 return self._find_next(indexes, branch)
1258
1281
1259 def _find_next(self, indexes, branch=None):
1282 def _find_next(self, indexes, branch=None):
1260 if branch and self.branch != branch:
1283 if branch and self.branch != branch:
1261 raise VCSError('Branch option used on commit not belonging '
1284 raise VCSError('Branch option used on commit not belonging '
1262 'to that branch')
1285 'to that branch')
1263
1286
1264 for next_idx in indexes:
1287 for next_idx in indexes:
1265 commit = self.repository.get_commit(commit_idx=next_idx)
1288 commit = self.repository.get_commit(commit_idx=next_idx)
1266 if branch and branch != commit.branch:
1289 if branch and branch != commit.branch:
1267 continue
1290 continue
1268 return commit
1291 return commit
1269 raise CommitDoesNotExistError
1292 raise CommitDoesNotExistError
1270
1293
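# --- Illustrative sketch (added annotation, not part of the original file) ---
# `next`/`prev` walk the repository's commit index forward or backward and
# raise CommitDoesNotExistError when they run off either end (`commit` is a
# hypothetical BaseCommit instance assumed to live on branch 'default'):
#
#     try:
#         newer = commit.next(branch='default')
#         older = commit.prev(branch='default')
#     except CommitDoesNotExistError:
#         pass  # reached the first/last commit of that branch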
1271 def diff(self, ignore_whitespace=True, context=3):
1294 def diff(self, ignore_whitespace=True, context=3):
1272 """
1295 """
1273 Returns a `Diff` object representing the change made by this commit.
1296 Returns a `Diff` object representing the change made by this commit.
1274 """
1297 """
1275 parent = self.first_parent
1298 parent = self.first_parent
1276 diff = self.repository.get_diff(
1299 diff = self.repository.get_diff(
1277 parent, self,
1300 parent, self,
1278 ignore_whitespace=ignore_whitespace,
1301 ignore_whitespace=ignore_whitespace,
1279 context=context)
1302 context=context)
1280 return diff
1303 return diff
1281
1304
1282 @LazyProperty
1305 @LazyProperty
1283 def added(self):
1306 def added(self):
1284 """
1307 """
1285 Returns list of added ``FileNode`` objects.
1308 Returns list of added ``FileNode`` objects.
1286 """
1309 """
1287 raise NotImplementedError
1310 raise NotImplementedError
1288
1311
1289 @LazyProperty
1312 @LazyProperty
1290 def changed(self):
1313 def changed(self):
1291 """
1314 """
1292 Returns list of modified ``FileNode`` objects.
1315 Returns list of modified ``FileNode`` objects.
1293 """
1316 """
1294 raise NotImplementedError
1317 raise NotImplementedError
1295
1318
1296 @LazyProperty
1319 @LazyProperty
1297 def removed(self):
1320 def removed(self):
1298 """
1321 """
1299 Returns list of removed ``FileNode`` objects.
1322 Returns list of removed ``FileNode`` objects.
1300 """
1323 """
1301 raise NotImplementedError
1324 raise NotImplementedError
1302
1325
1303 @LazyProperty
1326 @LazyProperty
1304 def size(self):
1327 def size(self):
1305 """
1328 """
1306 Returns total number of bytes from contents of all filenodes.
1329 Returns total number of bytes from contents of all filenodes.
1307 """
1330 """
1308 return sum((node.size for node in self.get_filenodes_generator()))
1331 return sum((node.size for node in self.get_filenodes_generator()))
1309
1332
1310 def walk(self, topurl=''):
1333 def walk(self, topurl=''):
1311 """
1334 """
1312 Similar to the os.walk method. Instead of the filesystem, it walks through
1335 Similar to the os.walk method. Instead of the filesystem, it walks through
1313 the commit starting at the given ``topurl``. Returns a generator of tuples
1336 the commit starting at the given ``topurl``. Returns a generator of tuples
1314 (topnode, dirnodes, filenodes).
1337 (topnode, dirnodes, filenodes).
1315 """
1338 """
1316 topnode = self.get_node(topurl)
1339 topnode = self.get_node(topurl)
1317 if not topnode.is_dir():
1340 if not topnode.is_dir():
1318 return
1341 return
1319 yield (topnode, topnode.dirs, topnode.files)
1342 yield (topnode, topnode.dirs, topnode.files)
1320 for dirnode in topnode.dirs:
1343 for dirnode in topnode.dirs:
1321 for tup in self.walk(dirnode.path):
1344 for tup in self.walk(dirnode.path):
1322 yield tup
1345 yield tup
1323
1346
1324 def get_filenodes_generator(self):
1347 def get_filenodes_generator(self):
1325 """
1348 """
1326 Returns generator that yields *all* file nodes.
1349 Returns generator that yields *all* file nodes.
1327 """
1350 """
1328 for topnode, dirs, files in self.walk():
1351 for topnode, dirs, files in self.walk():
1329 for node in files:
1352 for node in files:
1330 yield node
1353 yield node
1331
1354
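# Usage sketch (illustrative): ``walk`` mirrors os.walk but over a commit's
# file tree. ``tip`` is assumed to be a commit object obtained from a
# repository; the 'docs' path is an example.
for topnode, dirnodes, filenodes in tip.walk('docs'):
    for f in filenodes:
        print(f.path)
# equivalently, iterate every file node in the commit:
total_bytes = sum(f.size for f in tip.get_filenodes_generator())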
1332 #
1355 #
1333 # Utilities for sub classes to support consistent behavior
1356 # Utilities for sub classes to support consistent behavior
1334 #
1357 #
1335
1358
1336 def no_node_at_path(self, path):
1359 def no_node_at_path(self, path):
1337 return NodeDoesNotExistError(
1360 return NodeDoesNotExistError(
1338 u"There is no file nor directory at the given path: "
1361 u"There is no file nor directory at the given path: "
1339 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1362 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1340
1363
1341 def _fix_path(self, path):
1364 def _fix_path(self, path):
1342 """
1365 """
1343 Paths are stored without trailing slash so we need to get rid off it if
1366 Paths are stored without trailing slash so we need to get rid off it if
1344 needed.
1367 needed.
1345 """
1368 """
1346 return path.rstrip('/')
1369 return path.rstrip('/')
1347
1370
1348 #
1371 #
1349 # Deprecated API based on changesets
1372 # Deprecated API based on changesets
1350 #
1373 #
1351
1374
1352 @property
1375 @property
1353 def revision(self):
1376 def revision(self):
1354 warnings.warn("Use idx instead", DeprecationWarning)
1377 warnings.warn("Use idx instead", DeprecationWarning)
1355 return self.idx
1378 return self.idx
1356
1379
1357 @revision.setter
1380 @revision.setter
1358 def revision(self, value):
1381 def revision(self, value):
1359 warnings.warn("Use idx instead", DeprecationWarning)
1382 warnings.warn("Use idx instead", DeprecationWarning)
1360 self.idx = value
1383 self.idx = value
1361
1384
1362 def get_file_changeset(self, path):
1385 def get_file_changeset(self, path):
1363 warnings.warn("Use get_path_commit instead", DeprecationWarning)
1386 warnings.warn("Use get_path_commit instead", DeprecationWarning)
1364 return self.get_path_commit(path)
1387 return self.get_path_commit(path)
1365
1388
1366
1389
1367 class BaseChangesetClass(type):
1390 class BaseChangesetClass(type):
1368
1391
1369 def __instancecheck__(self, instance):
1392 def __instancecheck__(self, instance):
1370 return isinstance(instance, BaseCommit)
1393 return isinstance(instance, BaseCommit)
1371
1394
1372
1395
1373 class BaseChangeset(BaseCommit):
1396 class BaseChangeset(BaseCommit):
1374
1397
1375 __metaclass__ = BaseChangesetClass
1398 __metaclass__ = BaseChangesetClass
1376
1399
1377 def __new__(cls, *args, **kwargs):
1400 def __new__(cls, *args, **kwargs):
1378 warnings.warn(
1401 warnings.warn(
1379 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1402 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1380 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1403 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1381
1404
1382
1405
1383 class BaseInMemoryCommit(object):
1406 class BaseInMemoryCommit(object):
1384 """
1407 """
1385 Represents differences between repository's state (most recent head) and
1408 Represents differences between repository's state (most recent head) and
1386 changes made *in place*.
1409 changes made *in place*.
1387
1410
1388 **Attributes**
1411 **Attributes**
1389
1412
1390 ``repository``
1413 ``repository``
1391 repository object for this in-memory-commit
1414 repository object for this in-memory-commit
1392
1415
1393 ``added``
1416 ``added``
1394 list of ``FileNode`` objects marked as *added*
1417 list of ``FileNode`` objects marked as *added*
1395
1418
1396 ``changed``
1419 ``changed``
1397 list of ``FileNode`` objects marked as *changed*
1420 list of ``FileNode`` objects marked as *changed*
1398
1421
1399 ``removed``
1422 ``removed``
1400 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1423 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1401 *removed*
1424 *removed*
1402
1425
1403 ``parents``
1426 ``parents``
1404 list of :class:`BaseCommit` instances representing parents of
1427 list of :class:`BaseCommit` instances representing parents of
1405 the in-memory commit. Should always be a 2-element sequence.
1428 the in-memory commit. Should always be a 2-element sequence.
1406
1429
1407 """
1430 """
1408
1431
1409 def __init__(self, repository):
1432 def __init__(self, repository):
1410 self.repository = repository
1433 self.repository = repository
1411 self.added = []
1434 self.added = []
1412 self.changed = []
1435 self.changed = []
1413 self.removed = []
1436 self.removed = []
1414 self.parents = []
1437 self.parents = []
1415
1438
1416 def add(self, *filenodes):
1439 def add(self, *filenodes):
1417 """
1440 """
1418 Marks given ``FileNode`` objects as *to be committed*.
1441 Marks given ``FileNode`` objects as *to be committed*.
1419
1442
1420 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1443 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1421 latest commit
1444 latest commit
1422 :raises ``NodeAlreadyAddedError``: if node with same path is already
1445 :raises ``NodeAlreadyAddedError``: if node with same path is already
1423 marked as *added*
1446 marked as *added*
1424 """
1447 """
1425 # Check if not already marked as *added* first
1448 # Check if not already marked as *added* first
1426 for node in filenodes:
1449 for node in filenodes:
1427 if node.path in (n.path for n in self.added):
1450 if node.path in (n.path for n in self.added):
1428 raise NodeAlreadyAddedError(
1451 raise NodeAlreadyAddedError(
1429 "Such FileNode %s is already marked for addition"
1452 "Such FileNode %s is already marked for addition"
1430 % node.path)
1453 % node.path)
1431 for node in filenodes:
1454 for node in filenodes:
1432 self.added.append(node)
1455 self.added.append(node)
1433
1456
1434 def change(self, *filenodes):
1457 def change(self, *filenodes):
1435 """
1458 """
1436 Marks given ``FileNode`` objects to be *changed* in next commit.
1459 Marks given ``FileNode`` objects to be *changed* in next commit.
1437
1460
1438 :raises ``EmptyRepositoryError``: if there are no commits yet
1461 :raises ``EmptyRepositoryError``: if there are no commits yet
1439 :raises ``NodeAlreadyChangedError``: if node with same path is already
1462 :raises ``NodeAlreadyChangedError``: if node with same path is already
1440 marked to be *changed*
1463 marked to be *changed*
1441 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1464 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1442 marked to be *removed*
1465 marked to be *removed*
1443 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1466 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1444 commit
1467 commit
1445 :raises ``NodeNotChangedError``: if node hasn't really been changed
1468 :raises ``NodeNotChangedError``: if node hasn't really been changed
1446 """
1469 """
1447 for node in filenodes:
1470 for node in filenodes:
1448 if node.path in (n.path for n in self.removed):
1471 if node.path in (n.path for n in self.removed):
1449 raise NodeAlreadyRemovedError(
1472 raise NodeAlreadyRemovedError(
1450 "Node at %s is already marked as removed" % node.path)
1473 "Node at %s is already marked as removed" % node.path)
1451 try:
1474 try:
1452 self.repository.get_commit()
1475 self.repository.get_commit()
1453 except EmptyRepositoryError:
1476 except EmptyRepositoryError:
1454 raise EmptyRepositoryError(
1477 raise EmptyRepositoryError(
1455 "Nothing to change - try to *add* new nodes rather than "
1478 "Nothing to change - try to *add* new nodes rather than "
1456 "changing them")
1479 "changing them")
1457 for node in filenodes:
1480 for node in filenodes:
1458 if node.path in (n.path for n in self.changed):
1481 if node.path in (n.path for n in self.changed):
1459 raise NodeAlreadyChangedError(
1482 raise NodeAlreadyChangedError(
1460 "Node at '%s' is already marked as changed" % node.path)
1483 "Node at '%s' is already marked as changed" % node.path)
1461 self.changed.append(node)
1484 self.changed.append(node)
1462
1485
1463 def remove(self, *filenodes):
1486 def remove(self, *filenodes):
1464 """
1487 """
1465 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1488 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1466 *removed* in next commit.
1489 *removed* in next commit.
1467
1490
1468 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1491 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1469 be *removed*
1492 be *removed*
1470 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1493 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1471 be *changed*
1494 be *changed*
1472 """
1495 """
1473 for node in filenodes:
1496 for node in filenodes:
1474 if node.path in (n.path for n in self.removed):
1497 if node.path in (n.path for n in self.removed):
1475 raise NodeAlreadyRemovedError(
1498 raise NodeAlreadyRemovedError(
1476 "Node is already marked to for removal at %s" % node.path)
1499 "Node is already marked to for removal at %s" % node.path)
1477 if node.path in (n.path for n in self.changed):
1500 if node.path in (n.path for n in self.changed):
1478 raise NodeAlreadyChangedError(
1501 raise NodeAlreadyChangedError(
1479 "Node is already marked to be changed at %s" % node.path)
1502 "Node is already marked to be changed at %s" % node.path)
1480 # We only mark node as *removed* - real removal is done by
1503 # We only mark node as *removed* - real removal is done by
1481 # commit method
1504 # commit method
1482 self.removed.append(node)
1505 self.removed.append(node)
1483
1506
1484 def reset(self):
1507 def reset(self):
1485 """
1508 """
1486 Resets this instance to initial state (cleans ``added``, ``changed``
1509 Resets this instance to initial state (cleans ``added``, ``changed``
1487 and ``removed`` lists).
1510 and ``removed`` lists).
1488 """
1511 """
1489 self.added = []
1512 self.added = []
1490 self.changed = []
1513 self.changed = []
1491 self.removed = []
1514 self.removed = []
1492 self.parents = []
1515 self.parents = []
1493
1516
1494 def get_ipaths(self):
1517 def get_ipaths(self):
1495 """
1518 """
1496 Returns generator of paths from nodes marked as added, changed or
1519 Returns generator of paths from nodes marked as added, changed or
1497 removed.
1520 removed.
1498 """
1521 """
1499 for node in itertools.chain(self.added, self.changed, self.removed):
1522 for node in itertools.chain(self.added, self.changed, self.removed):
1500 yield node.path
1523 yield node.path
1501
1524
1502 def get_paths(self):
1525 def get_paths(self):
1503 """
1526 """
1504 Returns list of paths from nodes marked as added, changed or removed.
1527 Returns list of paths from nodes marked as added, changed or removed.
1505 """
1528 """
1506 return list(self.get_ipaths())
1529 return list(self.get_ipaths())
1507
1530
1508 def check_integrity(self, parents=None):
1531 def check_integrity(self, parents=None):
1509 """
1532 """
1510 Checks in-memory commit's integrity. Also, sets parents if not
1533 Checks in-memory commit's integrity. Also, sets parents if not
1511 already set.
1534 already set.
1512
1535
1513 :raises CommitError: if any error occurs (e.g.
1536 :raises CommitError: if any error occurs (e.g.
1514 ``NodeDoesNotExistError``).
1537 ``NodeDoesNotExistError``).
1515 """
1538 """
1516 if not self.parents:
1539 if not self.parents:
1517 parents = parents or []
1540 parents = parents or []
1518 if len(parents) == 0:
1541 if len(parents) == 0:
1519 try:
1542 try:
1520 parents = [self.repository.get_commit(), None]
1543 parents = [self.repository.get_commit(), None]
1521 except EmptyRepositoryError:
1544 except EmptyRepositoryError:
1522 parents = [None, None]
1545 parents = [None, None]
1523 elif len(parents) == 1:
1546 elif len(parents) == 1:
1524 parents += [None]
1547 parents += [None]
1525 self.parents = parents
1548 self.parents = parents
1526
1549
1527 # Local parents, only if not None
1550 # Local parents, only if not None
1528 parents = [p for p in self.parents if p]
1551 parents = [p for p in self.parents if p]
1529
1552
1530 # Check nodes marked as added
1553 # Check nodes marked as added
1531 for p in parents:
1554 for p in parents:
1532 for node in self.added:
1555 for node in self.added:
1533 try:
1556 try:
1534 p.get_node(node.path)
1557 p.get_node(node.path)
1535 except NodeDoesNotExistError:
1558 except NodeDoesNotExistError:
1536 pass
1559 pass
1537 else:
1560 else:
1538 raise NodeAlreadyExistsError(
1561 raise NodeAlreadyExistsError(
1539 "Node `%s` already exists at %s" % (node.path, p))
1562 "Node `%s` already exists at %s" % (node.path, p))
1540
1563
1541 # Check nodes marked as changed
1564 # Check nodes marked as changed
1542 missing = set(self.changed)
1565 missing = set(self.changed)
1543 not_changed = set(self.changed)
1566 not_changed = set(self.changed)
1544 if self.changed and not parents:
1567 if self.changed and not parents:
1545 raise NodeDoesNotExistError(str(self.changed[0].path))
1568 raise NodeDoesNotExistError(str(self.changed[0].path))
1546 for p in parents:
1569 for p in parents:
1547 for node in self.changed:
1570 for node in self.changed:
1548 try:
1571 try:
1549 old = p.get_node(node.path)
1572 old = p.get_node(node.path)
1550 missing.remove(node)
1573 missing.remove(node)
1551 # if content actually changed, remove node from not_changed
1574 # if content actually changed, remove node from not_changed
1552 if old.content != node.content:
1575 if old.content != node.content:
1553 not_changed.remove(node)
1576 not_changed.remove(node)
1554 except NodeDoesNotExistError:
1577 except NodeDoesNotExistError:
1555 pass
1578 pass
1556 if self.changed and missing:
1579 if self.changed and missing:
1557 raise NodeDoesNotExistError(
1580 raise NodeDoesNotExistError(
1558 "Node `%s` marked as modified but missing in parents: %s"
1581 "Node `%s` marked as modified but missing in parents: %s"
1559 % (node.path, parents))
1582 % (node.path, parents))
1560
1583
1561 if self.changed and not_changed:
1584 if self.changed and not_changed:
1562 raise NodeNotChangedError(
1585 raise NodeNotChangedError(
1563 "Node `%s` wasn't actually changed (parents: %s)"
1586 "Node `%s` wasn't actually changed (parents: %s)"
1564 % (not_changed.pop().path, parents))
1587 % (not_changed.pop().path, parents))
1565
1588
1566 # Check nodes marked as removed
1589 # Check nodes marked as removed
1567 if self.removed and not parents:
1590 if self.removed and not parents:
1568 raise NodeDoesNotExistError(
1591 raise NodeDoesNotExistError(
1569 "Cannot remove node at %s as there "
1592 "Cannot remove node at %s as there "
1570 "were no parents specified" % self.removed[0].path)
1593 "were no parents specified" % self.removed[0].path)
1571 really_removed = set()
1594 really_removed = set()
1572 for p in parents:
1595 for p in parents:
1573 for node in self.removed:
1596 for node in self.removed:
1574 try:
1597 try:
1575 p.get_node(node.path)
1598 p.get_node(node.path)
1576 really_removed.add(node)
1599 really_removed.add(node)
1577 except CommitError:
1600 except CommitError:
1578 pass
1601 pass
1579 not_removed = set(self.removed) - really_removed
1602 not_removed = set(self.removed) - really_removed
1580 if not_removed:
1603 if not_removed:
1581 # TODO: johbo: This code branch does not seem to be covered
1604 # TODO: johbo: This code branch does not seem to be covered
1582 raise NodeDoesNotExistError(
1605 raise NodeDoesNotExistError(
1583 "Cannot remove node at %s from "
1606 "Cannot remove node at %s from "
1584 "following parents: %s" % (not_removed, parents))
1607 "following parents: %s" % (not_removed, parents))
1585
1608
1586 def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
1609 def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
1587 """
1610 """
1588 Performs an in-memory commit (doesn't check the workdir in any way) and
1611 Performs an in-memory commit (doesn't check the workdir in any way) and
1589 returns the newly created :class:`BaseCommit`. Updates the repository's
1612 returns the newly created :class:`BaseCommit`. Updates the repository's
1590 `commits` attribute.
1613 `commits` attribute.
1591
1614
1592 .. note::
1615 .. note::
1593
1616
1594 When overriding this method, each backend should call
1617 When overriding this method, each backend should call
1595 ``self.check_integrity(parents)`` first.
1618 ``self.check_integrity(parents)`` first.
1596
1619
1597 :param message: message of the commit
1620 :param message: message of the commit
1598 :param author: full username, e.g. "Joe Doe <joe.doe@example.com>"
1621 :param author: full username, e.g. "Joe Doe <joe.doe@example.com>"
1599 :param parents: single parent or sequence of parents from which commit
1622 :param parents: single parent or sequence of parents from which commit
1600 would be derived
1623 would be derived
1601 :param date: ``datetime.datetime`` instance. Defaults to
1624 :param date: ``datetime.datetime`` instance. Defaults to
1602 ``datetime.datetime.now()``.
1625 ``datetime.datetime.now()``.
1603 :param branch: branch name, as string. If none is given, the backend's
1626 :param branch: branch name, as string. If none is given, the backend's
1604 default branch will be used.
1627 default branch will be used.
1605
1628
1606 :raises ``CommitError``: if any error occurs while committing
1629 :raises ``CommitError``: if any error occurs while committing
1607 """
1630 """
1608 raise NotImplementedError
1631 raise NotImplementedError
1609
1632
1610
1633
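# Usage sketch (illustrative): the add/change/remove/commit workflow described
# by the docstrings above. ``repo`` and the paths are assumptions about an
# initialized backend repository; ``FileNode`` lives in rhodecode.lib.vcs.nodes.
imc = repo.in_memory_commit
imc.add(FileNode('docs/readme.rst', content='hello'))
imc.change(FileNode('setup.py', content='# updated\n'))
imc.remove(FileNode('old_module.py'))
new_commit = imc.commit(
    message=u'Update docs, drop old module',
    author=u'Joe Doe <joe.doe@example.com>')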
1611 class BaseInMemoryChangesetClass(type):
1634 class BaseInMemoryChangesetClass(type):
1612
1635
1613 def __instancecheck__(self, instance):
1636 def __instancecheck__(self, instance):
1614 return isinstance(instance, BaseInMemoryCommit)
1637 return isinstance(instance, BaseInMemoryCommit)
1615
1638
1616
1639
1617 class BaseInMemoryChangeset(BaseInMemoryCommit):
1640 class BaseInMemoryChangeset(BaseInMemoryCommit):
1618
1641
1619 __metaclass__ = BaseInMemoryChangesetClass
1642 __metaclass__ = BaseInMemoryChangesetClass
1620
1643
1621 def __new__(cls, *args, **kwargs):
1644 def __new__(cls, *args, **kwargs):
1622 warnings.warn(
1645 warnings.warn(
1623 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1646 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1624 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1647 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1625
1648
1626
1649
1627 class EmptyCommit(BaseCommit):
1650 class EmptyCommit(BaseCommit):
1628 """
1651 """
1629 A dummy empty commit. It's possible to pass a hash when creating
1652 A dummy empty commit. It's possible to pass a hash when creating
1630 an EmptyCommit
1653 an EmptyCommit
1631 """
1654 """
1632
1655
1633 def __init__(
1656 def __init__(
1634 self, commit_id=EMPTY_COMMIT_ID, repo=None, alias=None, idx=-1,
1657 self, commit_id=EMPTY_COMMIT_ID, repo=None, alias=None, idx=-1,
1635 message='', author='', date=None):
1658 message='', author='', date=None):
1636 self._empty_commit_id = commit_id
1659 self._empty_commit_id = commit_id
1637 # TODO: johbo: Solve idx parameter, default value does not make
1660 # TODO: johbo: Solve idx parameter, default value does not make
1638 # too much sense
1661 # too much sense
1639 self.idx = idx
1662 self.idx = idx
1640 self.message = message
1663 self.message = message
1641 self.author = author
1664 self.author = author
1642 self.date = date or datetime.datetime.fromtimestamp(0)
1665 self.date = date or datetime.datetime.fromtimestamp(0)
1643 self.repository = repo
1666 self.repository = repo
1644 self.alias = alias
1667 self.alias = alias
1645
1668
1646 @LazyProperty
1669 @LazyProperty
1647 def raw_id(self):
1670 def raw_id(self):
1648 """
1671 """
1649 Returns raw string identifying this commit, useful for web
1672 Returns raw string identifying this commit, useful for web
1650 representation.
1673 representation.
1651 """
1674 """
1652
1675
1653 return self._empty_commit_id
1676 return self._empty_commit_id
1654
1677
1655 @LazyProperty
1678 @LazyProperty
1656 def branch(self):
1679 def branch(self):
1657 if self.alias:
1680 if self.alias:
1658 from rhodecode.lib.vcs.backends import get_backend
1681 from rhodecode.lib.vcs.backends import get_backend
1659 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1682 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1660
1683
1661 @LazyProperty
1684 @LazyProperty
1662 def short_id(self):
1685 def short_id(self):
1663 return self.raw_id[:12]
1686 return self.raw_id[:12]
1664
1687
1665 @LazyProperty
1688 @LazyProperty
1666 def id(self):
1689 def id(self):
1667 return self.raw_id
1690 return self.raw_id
1668
1691
1669 def get_path_commit(self, path):
1692 def get_path_commit(self, path):
1670 return self
1693 return self
1671
1694
1672 def get_file_content(self, path):
1695 def get_file_content(self, path):
1673 return u''
1696 return u''
1674
1697
1675 def get_file_content_streamed(self, path):
1698 def get_file_content_streamed(self, path):
1676 yield self.get_file_content()
1699 yield self.get_file_content()
1677
1700
1678 def get_file_size(self, path):
1701 def get_file_size(self, path):
1679 return 0
1702 return 0
1680
1703
1681
1704
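# Usage sketch (illustrative): EmptyCommit acts as a null object, e.g. when
# diffing a repository's very first commit against "nothing". ``repo`` is an
# assumed, already-initialized repository instance.
empty = EmptyCommit(repo=repo, alias='git')
print('%s %s' % (empty.raw_id, empty.short_id))  # EMPTY_COMMIT_ID and its 12-char prefix
print(empty.branch)                              # the backend's DEFAULT_BRANCH_NAME
print(empty.get_file_content('any/path'))        # always u''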
1682 class EmptyChangesetClass(type):
1705 class EmptyChangesetClass(type):
1683
1706
1684 def __instancecheck__(self, instance):
1707 def __instancecheck__(self, instance):
1685 return isinstance(instance, EmptyCommit)
1708 return isinstance(instance, EmptyCommit)
1686
1709
1687
1710
1688 class EmptyChangeset(EmptyCommit):
1711 class EmptyChangeset(EmptyCommit):
1689
1712
1690 __metaclass__ = EmptyChangesetClass
1713 __metaclass__ = EmptyChangesetClass
1691
1714
1692 def __new__(cls, *args, **kwargs):
1715 def __new__(cls, *args, **kwargs):
1693 warnings.warn(
1716 warnings.warn(
1694 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1717 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1695 return super(EmptyChangeset, cls).__new__(cls, *args, **kwargs)
1718 return super(EmptyChangeset, cls).__new__(cls, *args, **kwargs)
1696
1719
1697 def __init__(self, cs=EMPTY_COMMIT_ID, repo=None, requested_revision=None,
1720 def __init__(self, cs=EMPTY_COMMIT_ID, repo=None, requested_revision=None,
1698 alias=None, revision=-1, message='', author='', date=None):
1721 alias=None, revision=-1, message='', author='', date=None):
1699 if requested_revision is not None:
1722 if requested_revision is not None:
1700 warnings.warn(
1723 warnings.warn(
1701 "Parameter requested_revision not supported anymore",
1724 "Parameter requested_revision not supported anymore",
1702 DeprecationWarning)
1725 DeprecationWarning)
1703 super(EmptyChangeset, self).__init__(
1726 super(EmptyChangeset, self).__init__(
1704 commit_id=cs, repo=repo, alias=alias, idx=revision,
1727 commit_id=cs, repo=repo, alias=alias, idx=revision,
1705 message=message, author=author, date=date)
1728 message=message, author=author, date=date)
1706
1729
1707 @property
1730 @property
1708 def revision(self):
1731 def revision(self):
1709 warnings.warn("Use idx instead", DeprecationWarning)
1732 warnings.warn("Use idx instead", DeprecationWarning)
1710 return self.idx
1733 return self.idx
1711
1734
1712 @revision.setter
1735 @revision.setter
1713 def revision(self, value):
1736 def revision(self, value):
1714 warnings.warn("Use idx instead", DeprecationWarning)
1737 warnings.warn("Use idx instead", DeprecationWarning)
1715 self.idx = value
1738 self.idx = value
1716
1739
1717
1740
1718 class EmptyRepository(BaseRepository):
1741 class EmptyRepository(BaseRepository):
1719 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1742 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1720 pass
1743 pass
1721
1744
1722 def get_diff(self, *args, **kwargs):
1745 def get_diff(self, *args, **kwargs):
1723 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1746 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1724 return GitDiff('')
1747 return GitDiff('')
1725
1748
1726
1749
1727 class CollectionGenerator(object):
1750 class CollectionGenerator(object):
1728
1751
1729 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None, translate_tag=None):
1752 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None, translate_tag=None):
1730 self.repo = repo
1753 self.repo = repo
1731 self.commit_ids = commit_ids
1754 self.commit_ids = commit_ids
1732 # TODO: (oliver) this isn't currently hooked up
1755 # TODO: (oliver) this isn't currently hooked up
1733 self.collection_size = None
1756 self.collection_size = None
1734 self.pre_load = pre_load
1757 self.pre_load = pre_load
1735 self.translate_tag = translate_tag
1758 self.translate_tag = translate_tag
1736
1759
1737 def __len__(self):
1760 def __len__(self):
1738 if self.collection_size is not None:
1761 if self.collection_size is not None:
1739 return self.collection_size
1762 return self.collection_size
1740 return self.commit_ids.__len__()
1763 return self.commit_ids.__len__()
1741
1764
1742 def __iter__(self):
1765 def __iter__(self):
1743 for commit_id in self.commit_ids:
1766 for commit_id in self.commit_ids:
1744 # TODO: johbo: Mercurial passes in commit indices or commit ids
1767 # TODO: johbo: Mercurial passes in commit indices or commit ids
1745 yield self._commit_factory(commit_id)
1768 yield self._commit_factory(commit_id)
1746
1769
1747 def _commit_factory(self, commit_id):
1770 def _commit_factory(self, commit_id):
1748 """
1771 """
1749 Allows backends to override the way commits are generated.
1772 Allows backends to override the way commits are generated.
1750 """
1773 """
1751 return self.repo.get_commit(
1774 return self.repo.get_commit(
1752 commit_id=commit_id, pre_load=self.pre_load,
1775 commit_id=commit_id, pre_load=self.pre_load,
1753 translate_tag=self.translate_tag)
1776 translate_tag=self.translate_tag)
1754
1777
1755 def __getslice__(self, i, j):
1778 def __getslice__(self, i, j):
1756 """
1779 """
1757 Returns a sliced view of this collection as a new CollectionGenerator
1780 Returns a sliced view of this collection as a new CollectionGenerator
1758 """
1781 """
1759 commit_ids = self.commit_ids[i:j]
1782 commit_ids = self.commit_ids[i:j]
1760 return self.__class__(
1783 return self.__class__(
1761 self.repo, commit_ids, pre_load=self.pre_load,
1784 self.repo, commit_ids, pre_load=self.pre_load,
1762 translate_tag=self.translate_tag)
1785 translate_tag=self.translate_tag)
1763
1786
1764 def __repr__(self):
1787 def __repr__(self):
1765 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1788 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1766
1789
1767
1790
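# Usage sketch (illustrative): CollectionGenerator is a lazy, sliceable view
# over commit ids. ``repo`` and its ``commit_ids`` attribute are assumptions
# about an initialized backend repository.
commits = CollectionGenerator(repo, repo.commit_ids, pre_load=['author', 'message'])
print(len(commits))                # number of commit ids in the view
for commit in commits[:20]:        # slicing returns a new CollectionGenerator
    print('%s %s' % (commit.short_id, commit.author))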
1768 class Config(object):
1791 class Config(object):
1769 """
1792 """
1770 Represents the configuration for a repository.
1793 Represents the configuration for a repository.
1771
1794
1772 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1795 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1773 standard library. It implements only the needed subset.
1796 standard library. It implements only the needed subset.
1774 """
1797 """
1775
1798
1776 def __init__(self):
1799 def __init__(self):
1777 self._values = {}
1800 self._values = {}
1778
1801
1779 def copy(self):
1802 def copy(self):
1780 clone = Config()
1803 clone = Config()
1781 for section, values in self._values.items():
1804 for section, values in self._values.items():
1782 clone._values[section] = values.copy()
1805 clone._values[section] = values.copy()
1783 return clone
1806 return clone
1784
1807
1785 def __repr__(self):
1808 def __repr__(self):
1786 return '<Config(%s sections) at %s>' % (
1809 return '<Config(%s sections) at %s>' % (
1787 len(self._values), hex(id(self)))
1810 len(self._values), hex(id(self)))
1788
1811
1789 def items(self, section):
1812 def items(self, section):
1790 return self._values.get(section, {}).iteritems()
1813 return self._values.get(section, {}).iteritems()
1791
1814
1792 def get(self, section, option):
1815 def get(self, section, option):
1793 return self._values.get(section, {}).get(option)
1816 return self._values.get(section, {}).get(option)
1794
1817
1795 def set(self, section, option, value):
1818 def set(self, section, option, value):
1796 section_values = self._values.setdefault(section, {})
1819 section_values = self._values.setdefault(section, {})
1797 section_values[option] = value
1820 section_values[option] = value
1798
1821
1799 def clear_section(self, section):
1822 def clear_section(self, section):
1800 self._values[section] = {}
1823 self._values[section] = {}
1801
1824
1802 def serialize(self):
1825 def serialize(self):
1803 """
1826 """
1804 Creates a list of 3-tuples (section, key, value) representing
1827 Creates a list of 3-tuples (section, key, value) representing
1805 this config object.
1828 this config object.
1806 """
1829 """
1807 items = []
1830 items = []
1808 for section in self._values:
1831 for section in self._values:
1809 for option, value in self._values[section].items():
1832 for option, value in self._values[section].items():
1810 items.append(
1833 items.append(
1811 (safe_str(section), safe_str(option), safe_str(value)))
1834 (safe_str(section), safe_str(option), safe_str(value)))
1812 return items
1835 return items
1813
1836
1814
1837
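# Usage sketch (illustrative): the ConfigParser-like subset that Config
# implements. Section and option names below are arbitrary examples.
config = Config()
config.set('ui', 'username', 'RhodeCode')
config.set('hooks', 'pretxnchangegroup', 'python:my.hook')
print(config.get('ui', 'username'))        # 'RhodeCode'
for option, value in config.items('hooks'):
    print('%s = %s' % (option, value))
flat = config.serialize()                  # [(section, option, value), ...] as str
clone = config.copy()                      # per-section dicts are copied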
1815 class Diff(object):
1838 class Diff(object):
1816 """
1839 """
1817 Represents a diff result from a repository backend.
1840 Represents a diff result from a repository backend.
1818
1841
1819 Subclasses have to provide a backend specific value for
1842 Subclasses have to provide a backend specific value for
1820 :attr:`_header_re` and :attr:`_meta_re`.
1843 :attr:`_header_re` and :attr:`_meta_re`.
1821 """
1844 """
1822 _meta_re = None
1845 _meta_re = None
1823 _header_re = None
1846 _header_re = None
1824
1847
1825 def __init__(self, raw_diff):
1848 def __init__(self, raw_diff):
1826 self.raw = raw_diff
1849 self.raw = raw_diff
1827
1850
1828 def chunks(self):
1851 def chunks(self):
1829 """
1852 """
1830 Splits the diff into chunks of separate ``diff --git a/file b/file`` sections.
1853 Splits the diff into chunks of separate ``diff --git a/file b/file`` sections.
1831 To keep diffs consistent we must prepend them with \n, and make sure
1854 To keep diffs consistent we must prepend them with \n, and make sure
1832 we can detect the last chunk, as it also has a special rule
1855 we can detect the last chunk, as it also has a special rule
1833 """
1856 """
1834
1857
1835 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1858 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1836 header = diff_parts[0]
1859 header = diff_parts[0]
1837
1860
1838 if self._meta_re:
1861 if self._meta_re:
1839 match = self._meta_re.match(header)
1862 match = self._meta_re.match(header)
1840
1863
1841 chunks = diff_parts[1:]
1864 chunks = diff_parts[1:]
1842 total_chunks = len(chunks)
1865 total_chunks = len(chunks)
1843
1866
1844 return (
1867 return (
1845 DiffChunk(chunk, self, cur_chunk == total_chunks)
1868 DiffChunk(chunk, self, cur_chunk == total_chunks)
1846 for cur_chunk, chunk in enumerate(chunks, start=1))
1869 for cur_chunk, chunk in enumerate(chunks, start=1))
1847
1870
1848
1871
1849 class DiffChunk(object):
1872 class DiffChunk(object):
1850
1873
1851 def __init__(self, chunk, diff, last_chunk):
1874 def __init__(self, chunk, diff, last_chunk):
1852 self._diff = diff
1875 self._diff = diff
1853
1876
1854 # since we split by \ndiff --git, that part is lost from the original diff;
1877 # since we split by \ndiff --git, that part is lost from the original diff;
1855 # we need to re-apply it at the end, EXCEPT if it's the last chunk
1878 # we need to re-apply it at the end, EXCEPT if it's the last chunk
1856 if not last_chunk:
1879 if not last_chunk:
1857 chunk += '\n'
1880 chunk += '\n'
1858
1881
1859 match = self._diff._header_re.match(chunk)
1882 match = self._diff._header_re.match(chunk)
1860 self.header = match.groupdict()
1883 self.header = match.groupdict()
1861 self.diff = chunk[match.end():]
1884 self.diff = chunk[match.end():]
1862 self.raw = chunk
1885 self.raw = chunk
1863
1886
1864
1887
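# Usage sketch (illustrative): consuming a backend-specific Diff. ``commit``
# is assumed to be a commit object; the header keys depend on the concrete
# subclass's ``_header_re`` group names.
diff = commit.diff(ignore_whitespace=False, context=5)
for chunk in diff.chunks():
    print(chunk.header)       # groupdict parsed from the chunk header
    print(chunk.diff[:200])   # raw diff body after the parsed header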
1865 class BasePathPermissionChecker(object):
1888 class BasePathPermissionChecker(object):
1866
1889
1867 @staticmethod
1890 @staticmethod
1868 def create_from_patterns(includes, excludes):
1891 def create_from_patterns(includes, excludes):
1869 if includes and '*' in includes and not excludes:
1892 if includes and '*' in includes and not excludes:
1870 return AllPathPermissionChecker()
1893 return AllPathPermissionChecker()
1871 elif excludes and '*' in excludes:
1894 elif excludes and '*' in excludes:
1872 return NonePathPermissionChecker()
1895 return NonePathPermissionChecker()
1873 else:
1896 else:
1874 return PatternPathPermissionChecker(includes, excludes)
1897 return PatternPathPermissionChecker(includes, excludes)
1875
1898
1876 @property
1899 @property
1877 def has_full_access(self):
1900 def has_full_access(self):
1878 raise NotImplementedError()
1901 raise NotImplementedError()
1879
1902
1880 def has_access(self, path):
1903 def has_access(self, path):
1881 raise NotImplementedError()
1904 raise NotImplementedError()
1882
1905
1883
1906
1884 class AllPathPermissionChecker(BasePathPermissionChecker):
1907 class AllPathPermissionChecker(BasePathPermissionChecker):
1885
1908
1886 @property
1909 @property
1887 def has_full_access(self):
1910 def has_full_access(self):
1888 return True
1911 return True
1889
1912
1890 def has_access(self, path):
1913 def has_access(self, path):
1891 return True
1914 return True
1892
1915
1893
1916
1894 class NonePathPermissionChecker(BasePathPermissionChecker):
1917 class NonePathPermissionChecker(BasePathPermissionChecker):
1895
1918
1896 @property
1919 @property
1897 def has_full_access(self):
1920 def has_full_access(self):
1898 return False
1921 return False
1899
1922
1900 def has_access(self, path):
1923 def has_access(self, path):
1901 return False
1924 return False
1902
1925
1903
1926
1904 class PatternPathPermissionChecker(BasePathPermissionChecker):
1927 class PatternPathPermissionChecker(BasePathPermissionChecker):
1905
1928
1906 def __init__(self, includes, excludes):
1929 def __init__(self, includes, excludes):
1907 self.includes = includes
1930 self.includes = includes
1908 self.excludes = excludes
1931 self.excludes = excludes
1909 self.includes_re = [] if not includes else [
1932 self.includes_re = [] if not includes else [
1910 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1933 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1911 self.excludes_re = [] if not excludes else [
1934 self.excludes_re = [] if not excludes else [
1912 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1935 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1913
1936
1914 @property
1937 @property
1915 def has_full_access(self):
1938 def has_full_access(self):
1916 return '*' in self.includes and not self.excludes
1939 return '*' in self.includes and not self.excludes
1917
1940
1918 def has_access(self, path):
1941 def has_access(self, path):
1919 for regex in self.excludes_re:
1942 for regex in self.excludes_re:
1920 if regex.match(path):
1943 if regex.match(path):
1921 return False
1944 return False
1922 for regex in self.includes_re:
1945 for regex in self.includes_re:
1923 if regex.match(path):
1946 if regex.match(path):
1924 return True
1947 return True
1925 return False
1948 return False
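# Usage sketch (illustrative): building a checker from fnmatch-style include/
# exclude patterns via ``create_from_patterns``; the patterns are examples.
checker = BasePathPermissionChecker.create_from_patterns(
    includes=['docs/*', 'src/*.py'], excludes=['src/secret_*.py'])
print(checker.has_full_access)                   # False, includes are restricted
print(checker.has_access('docs/index.rst'))      # True, matches 'docs/*'
print(checker.has_access('src/secret_keys.py'))  # False, excluded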
@@ -1,5794 +1,5783 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Database Models for RhodeCode Enterprise
22 Database Models for RhodeCode Enterprise
23 """
23 """
24
24
25 import re
25 import re
26 import os
26 import os
27 import time
27 import time
28 import string
28 import string
29 import hashlib
29 import hashlib
30 import logging
30 import logging
31 import datetime
31 import datetime
32 import uuid
32 import uuid
33 import warnings
33 import warnings
34 import ipaddress
34 import ipaddress
35 import functools
35 import functools
36 import traceback
36 import traceback
37 import collections
37 import collections
38
38
39 from sqlalchemy import (
39 from sqlalchemy import (
40 or_, and_, not_, func, cast, TypeDecorator, event,
40 or_, and_, not_, func, cast, TypeDecorator, event,
41 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
41 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
42 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
42 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
43 Text, Float, PickleType, BigInteger)
43 Text, Float, PickleType, BigInteger)
44 from sqlalchemy.sql.expression import true, false, case
44 from sqlalchemy.sql.expression import true, false, case
45 from sqlalchemy.sql.functions import coalesce, count # pragma: no cover
45 from sqlalchemy.sql.functions import coalesce, count # pragma: no cover
46 from sqlalchemy.orm import (
46 from sqlalchemy.orm import (
47 relationship, joinedload, class_mapper, validates, aliased)
47 relationship, joinedload, class_mapper, validates, aliased)
48 from sqlalchemy.ext.declarative import declared_attr
48 from sqlalchemy.ext.declarative import declared_attr
49 from sqlalchemy.ext.hybrid import hybrid_property
49 from sqlalchemy.ext.hybrid import hybrid_property
50 from sqlalchemy.exc import IntegrityError # pragma: no cover
50 from sqlalchemy.exc import IntegrityError # pragma: no cover
51 from sqlalchemy.dialects.mysql import LONGTEXT
51 from sqlalchemy.dialects.mysql import LONGTEXT
52 from zope.cachedescriptors.property import Lazy as LazyProperty
52 from zope.cachedescriptors.property import Lazy as LazyProperty
53 from pyramid import compat
53 from pyramid import compat
54 from pyramid.threadlocal import get_current_request
54 from pyramid.threadlocal import get_current_request
55 from webhelpers2.text import remove_formatting
55 from webhelpers2.text import remove_formatting
56
56
57 from rhodecode.translation import _
57 from rhodecode.translation import _
58 from rhodecode.lib.vcs import get_vcs_instance, VCSError
58 from rhodecode.lib.vcs import get_vcs_instance, VCSError
59 from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference
59 from rhodecode.lib.vcs.backends.base import (
60 EmptyCommit, Reference, unicode_to_reference, reference_to_unicode)
60 from rhodecode.lib.utils2 import (
61 from rhodecode.lib.utils2 import (
61 str2bool, safe_str, get_commit_safe, safe_unicode, sha1_safe,
62 str2bool, safe_str, get_commit_safe, safe_unicode, sha1_safe,
62 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
63 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
63 glob2re, StrictAttributeDict, cleaned_uri, datetime_to_time, OrderedDefaultDict)
64 glob2re, StrictAttributeDict, cleaned_uri, datetime_to_time, OrderedDefaultDict)
64 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \
65 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \
65 JsonRaw
66 JsonRaw
66 from rhodecode.lib.ext_json import json
67 from rhodecode.lib.ext_json import json
67 from rhodecode.lib.caching_query import FromCache
68 from rhodecode.lib.caching_query import FromCache
68 from rhodecode.lib.encrypt import AESCipher, validate_and_get_enc_data
69 from rhodecode.lib.encrypt import AESCipher, validate_and_get_enc_data
69 from rhodecode.lib.encrypt2 import Encryptor
70 from rhodecode.lib.encrypt2 import Encryptor
70 from rhodecode.lib.exceptions import (
71 from rhodecode.lib.exceptions import (
71 ArtifactMetadataDuplicate, ArtifactMetadataBadValueType)
72 ArtifactMetadataDuplicate, ArtifactMetadataBadValueType)
72 from rhodecode.model.meta import Base, Session
73 from rhodecode.model.meta import Base, Session
73
74
74 URL_SEP = '/'
75 URL_SEP = '/'
75 log = logging.getLogger(__name__)
76 log = logging.getLogger(__name__)
76
77
77 # =============================================================================
78 # =============================================================================
78 # BASE CLASSES
79 # BASE CLASSES
79 # =============================================================================
80 # =============================================================================
80
81
81 # this is propagated from .ini file rhodecode.encrypted_values.secret or
82 # this is propagated from .ini file rhodecode.encrypted_values.secret or
82 # beaker.session.secret if first is not set.
83 # beaker.session.secret if first is not set.
83 # and initialized at environment.py
84 # and initialized at environment.py
84 ENCRYPTION_KEY = None
85 ENCRYPTION_KEY = None
85
86
86 # used to sort permissions by types, '#' used here is not allowed to be in
87 # used to sort permissions by types, '#' used here is not allowed to be in
87 # usernames, and it's very early in sorted string.printable table.
88 # usernames, and it's very early in sorted string.printable table.
88 PERMISSION_TYPE_SORT = {
89 PERMISSION_TYPE_SORT = {
89 'admin': '####',
90 'admin': '####',
90 'write': '###',
91 'write': '###',
91 'read': '##',
92 'read': '##',
92 'none': '#',
93 'none': '#',
93 }
94 }
94
95
95
96
96 def display_user_sort(obj):
97 def display_user_sort(obj):
97 """
98 """
98 Sort function used to sort permissions in .permissions() function of
99 Sort function used to sort permissions in .permissions() function of
99 Repository, RepoGroup, UserGroup. It also puts the default user in front
100 Repository, RepoGroup, UserGroup. It also puts the default user in front
100 of all other resources
101 of all other resources
101 """
102 """
102
103
103 if obj.username == User.DEFAULT_USER:
104 if obj.username == User.DEFAULT_USER:
104 return '#####'
105 return '#####'
105 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
106 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
106 extra_sort_num = '1' # default
107 extra_sort_num = '1' # default
107
108
108 # NOTE(dan): inactive duplicates goes last
109 # NOTE(dan): inactive duplicates goes last
109 if getattr(obj, 'duplicate_perm', None):
110 if getattr(obj, 'duplicate_perm', None):
110 extra_sort_num = '9'
111 extra_sort_num = '9'
111 return prefix + extra_sort_num + obj.username
112 return prefix + extra_sort_num + obj.username
112
113
113
114
114 def display_user_group_sort(obj):
115 def display_user_group_sort(obj):
115 """
116 """
116 Sort function used to sort permissions in .permissions() function of
117 Sort function used to sort permissions in .permissions() function of
117 Repository, RepoGroup, UserGroup. It also puts the default user in front
118 Repository, RepoGroup, UserGroup. It also puts the default user in front
118 of all other resources
119 of all other resources
119 """
120 """
120
121
121 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
122 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
122 return prefix + obj.users_group_name
123 return prefix + obj.users_group_name
123
124
124
125
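# Usage sketch (illustrative): these helpers are meant as ``key=`` functions
# for sorted(); ``user_permissions`` and ``group_permissions`` are assumed to
# be the objects returned by a model's .permissions() call.
user_perms_sorted = sorted(user_permissions, key=display_user_sort)
group_perms_sorted = sorted(group_permissions, key=display_user_group_sort)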
125 def _hash_key(k):
126 def _hash_key(k):
126 return sha1_safe(k)
127 return sha1_safe(k)
127
128
128
129
129 def in_filter_generator(qry, items, limit=500):
130 def in_filter_generator(qry, items, limit=500):
130 """
131 """
131 Splits a large IN() clause into multiple smaller IN() clauses joined with OR
132 Splits a large IN() clause into multiple smaller IN() clauses joined with OR
132 e.g.::
133 e.g.::
133 cnt = Repository.query().filter(
134 cnt = Repository.query().filter(
134 or_(
135 or_(
135 *in_filter_generator(Repository.repo_id, range(100000))
136 *in_filter_generator(Repository.repo_id, range(100000))
136 )).count()
137 )).count()
137 """
138 """
138 if not items:
139 if not items:
139 # empty list will cause empty query which might cause security issues
140 # empty list will cause empty query which might cause security issues
140 # this can lead to hidden unpleasant results
141 # this can lead to hidden unpleasant results
141 items = [-1]
142 items = [-1]
142
143
143 parts = []
144 parts = []
144 for chunk in xrange(0, len(items), limit):
145 for chunk in xrange(0, len(items), limit):
145 parts.append(
146 parts.append(
146 qry.in_(items[chunk: chunk + limit])
147 qry.in_(items[chunk: chunk + limit])
147 )
148 )
148
149
149 return parts
150 return parts
150
151
151
152
152 base_table_args = {
153 base_table_args = {
153 'extend_existing': True,
154 'extend_existing': True,
154 'mysql_engine': 'InnoDB',
155 'mysql_engine': 'InnoDB',
155 'mysql_charset': 'utf8',
156 'mysql_charset': 'utf8',
156 'sqlite_autoincrement': True
157 'sqlite_autoincrement': True
157 }
158 }
158
159
159
160
160 class EncryptedTextValue(TypeDecorator):
161 class EncryptedTextValue(TypeDecorator):
161 """
162 """
162 Special column for encrypted long text data, use like::
163 Special column for encrypted long text data, use like::
163
164
164 value = Column("encrypted_value", EncryptedValue(), nullable=False)
165 value = Column("encrypted_value", EncryptedValue(), nullable=False)
165
166
166 This column is intelligent: if the value is in unencrypted form it returns
167 This column is intelligent: if the value is in unencrypted form it returns
167 the unencrypted form, but on save it always encrypts it
168 the unencrypted form, but on save it always encrypts it
168 """
169 """
169 impl = Text
170 impl = Text
170
171
171 def process_bind_param(self, value, dialect):
172 def process_bind_param(self, value, dialect):
172 """
173 """
173 Setter for storing value
174 Setter for storing value
174 """
175 """
175 import rhodecode
176 import rhodecode
176 if not value:
177 if not value:
177 return value
178 return value
178
179
179 # protect against double encrypting if values is already encrypted
180 # protect against double encrypting if values is already encrypted
180 if value.startswith('enc$aes$') \
181 if value.startswith('enc$aes$') \
181 or value.startswith('enc$aes_hmac$') \
182 or value.startswith('enc$aes_hmac$') \
182 or value.startswith('enc2$'):
183 or value.startswith('enc2$'):
183 raise ValueError('value needs to be in unencrypted format, '
184 raise ValueError('value needs to be in unencrypted format, '
184 'ie. not starting with enc$ or enc2$')
185 'ie. not starting with enc$ or enc2$')
185
186
186 algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
187 algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
187 if algo == 'aes':
188 if algo == 'aes':
188 return 'enc$aes_hmac$%s' % AESCipher(ENCRYPTION_KEY, hmac=True).encrypt(value)
189 return 'enc$aes_hmac$%s' % AESCipher(ENCRYPTION_KEY, hmac=True).encrypt(value)
189 elif algo == 'fernet':
190 elif algo == 'fernet':
190 return Encryptor(ENCRYPTION_KEY).encrypt(value)
191 return Encryptor(ENCRYPTION_KEY).encrypt(value)
191 else:
192 else:
192 raise ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo))
193 raise ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo))
193
194
194 def process_result_value(self, value, dialect):
195 def process_result_value(self, value, dialect):
195 """
196 """
196 Getter for retrieving value
197 Getter for retrieving value
197 """
198 """
198
199
199 import rhodecode
200 import rhodecode
200 if not value:
201 if not value:
201 return value
202 return value
202
203
203 algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
204 algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
204 enc_strict_mode = str2bool(rhodecode.CONFIG.get('rhodecode.encrypted_values.strict') or True)
205 enc_strict_mode = str2bool(rhodecode.CONFIG.get('rhodecode.encrypted_values.strict') or True)
205 if algo == 'aes':
206 if algo == 'aes':
206 decrypted_data = validate_and_get_enc_data(value, ENCRYPTION_KEY, enc_strict_mode)
207 decrypted_data = validate_and_get_enc_data(value, ENCRYPTION_KEY, enc_strict_mode)
207 elif algo == 'fernet':
208 elif algo == 'fernet':
208 return Encryptor(ENCRYPTION_KEY).decrypt(value)
209 return Encryptor(ENCRYPTION_KEY).decrypt(value)
209 else:
210 else:
210 raise ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo))
211 raise ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo))
211 return decrypted_data
212 return decrypted_data
212
213
213
214
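# Usage sketch (illustrative): declaring an encrypted column as the docstring
# above suggests; the model name, table and column are made up for the example.
class ExampleSecret(Base):
    __tablename__ = 'example_secrets'
    __table_args__ = base_table_args
    secret_id = Column("secret_id", Integer(), nullable=False, primary_key=True)
    # stored encrypted (aes or fernet, per rhodecode.encrypted_values.algorithm),
    # but read back as the decrypted plain value
    token = Column("token", EncryptedTextValue(), nullable=True)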
214 class BaseModel(object):
215 class BaseModel(object):
215 """
216 """
216 Base Model for all classes
217 Base Model for all classes
217 """
218 """
218
219
219 @classmethod
220 @classmethod
220 def _get_keys(cls):
221 def _get_keys(cls):
221 """return column names for this model """
222 """return column names for this model """
222 return class_mapper(cls).c.keys()
223 return class_mapper(cls).c.keys()
223
224
224 def get_dict(self):
225 def get_dict(self):
225 """
226 """
226 return dict with keys and values corresponding
227 return dict with keys and values corresponding
227 to this model data """
228 to this model data """
228
229
229 d = {}
230 d = {}
230 for k in self._get_keys():
231 for k in self._get_keys():
231 d[k] = getattr(self, k)
232 d[k] = getattr(self, k)
232
233
233 # also use __json__() if present to get additional fields
234 # also use __json__() if present to get additional fields
234 _json_attr = getattr(self, '__json__', None)
235 _json_attr = getattr(self, '__json__', None)
235 if _json_attr:
236 if _json_attr:
236 # update with attributes from __json__
237 # update with attributes from __json__
237 if callable(_json_attr):
238 if callable(_json_attr):
238 _json_attr = _json_attr()
239 _json_attr = _json_attr()
239 for k, val in _json_attr.iteritems():
240 for k, val in _json_attr.iteritems():
240 d[k] = val
241 d[k] = val
241 return d
242 return d
242
243
243 def get_appstruct(self):
244 def get_appstruct(self):
244 """return list with keys and values tuples corresponding
245 """return list with keys and values tuples corresponding
245 to this model data """
246 to this model data """
246
247
247 lst = []
248 lst = []
248 for k in self._get_keys():
249 for k in self._get_keys():
249 lst.append((k, getattr(self, k),))
250 lst.append((k, getattr(self, k),))
250 return lst
251 return lst
251
252
252 def populate_obj(self, populate_dict):
253 def populate_obj(self, populate_dict):
253 """populate model with data from given populate_dict"""
254 """populate model with data from given populate_dict"""
254
255
255 for k in self._get_keys():
256 for k in self._get_keys():
256 if k in populate_dict:
257 if k in populate_dict:
257 setattr(self, k, populate_dict[k])
258 setattr(self, k, populate_dict[k])
258
259
259 @classmethod
260 @classmethod
260 def query(cls):
261 def query(cls):
261 return Session().query(cls)
262 return Session().query(cls)
262
263
263 @classmethod
264 @classmethod
264 def get(cls, id_):
265 def get(cls, id_):
265 if id_:
266 if id_:
266 return cls.query().get(id_)
267 return cls.query().get(id_)
267
268
268 @classmethod
269 @classmethod
269 def get_or_404(cls, id_):
270 def get_or_404(cls, id_):
270 from pyramid.httpexceptions import HTTPNotFound
271 from pyramid.httpexceptions import HTTPNotFound
271
272
272 try:
273 try:
273 id_ = int(id_)
274 id_ = int(id_)
274 except (TypeError, ValueError):
275 except (TypeError, ValueError):
275 raise HTTPNotFound()
276 raise HTTPNotFound()
276
277
277 res = cls.query().get(id_)
278 res = cls.query().get(id_)
278 if not res:
279 if not res:
279 raise HTTPNotFound()
280 raise HTTPNotFound()
280 return res
281 return res
281
282
    @classmethod
    def getAll(cls):
        # deprecated and left for backward compatibility
        return cls.get_all()

    @classmethod
    def get_all(cls):
        return cls.query().all()

    @classmethod
    def delete(cls, id_):
        obj = cls.query().get(id_)
        Session().delete(obj)

    @classmethod
    def identity_cache(cls, session, attr_name, value):
        exist_in_session = []
        for (item_cls, pkey), instance in session.identity_map.items():
            if cls == item_cls and getattr(instance, attr_name) == value:
                exist_in_session.append(instance)
        if exist_in_session:
            if len(exist_in_session) == 1:
                return exist_in_session[0]
            log.exception(
                'multiple objects with attr %s and '
                'value %s found with same name: %r',
                attr_name, value, exist_in_session)

    def __repr__(self):
        if hasattr(self, '__unicode__'):
            # python repr needs to return str
            try:
                return safe_str(self.__unicode__())
            except UnicodeDecodeError:
                pass
        return '<DB:%s>' % (self.__class__.__name__)


class RhodeCodeSetting(Base, BaseModel):
    __tablename__ = 'rhodecode_settings'
    __table_args__ = (
        UniqueConstraint('app_settings_name'),
        base_table_args
    )

    SETTINGS_TYPES = {
        'str': safe_str,
        'int': safe_int,
        'unicode': safe_unicode,
        'bool': str2bool,
        'list': functools.partial(aslist, sep=',')
    }
    DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
    GLOBAL_CONF_KEY = 'app_settings'

    app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
    _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
    _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)

    def __init__(self, key='', val='', type='unicode'):
        self.app_settings_name = key
        self.app_settings_type = type
        self.app_settings_value = val

    @validates('_app_settings_value')
    def validate_settings_value(self, key, val):
        assert type(val) == unicode
        return val

    @hybrid_property
    def app_settings_value(self):
        v = self._app_settings_value
        _type = self.app_settings_type
        if _type:
            _type = self.app_settings_type.split('.')[0]
        # decode the encrypted value
        if 'encrypted' in self.app_settings_type:
            cipher = EncryptedTextValue()
            v = safe_unicode(cipher.process_result_value(v, None))

        converter = self.SETTINGS_TYPES.get(_type) or \
            self.SETTINGS_TYPES['unicode']
        return converter(v)

    @app_settings_value.setter
    def app_settings_value(self, val):
        """
        Setter that will always make sure we use unicode in app_settings_value

        :param val:
        """
        val = safe_unicode(val)
        # encode the encrypted value
        if 'encrypted' in self.app_settings_type:
            cipher = EncryptedTextValue()
            val = safe_unicode(cipher.process_bind_param(val, None))
        self._app_settings_value = val

    @hybrid_property
    def app_settings_type(self):
        return self._app_settings_type

    @app_settings_type.setter
    def app_settings_type(self, val):
        if val.split('.')[0] not in self.SETTINGS_TYPES:
            raise Exception('type must be one of %s got %s'
                            % (self.SETTINGS_TYPES.keys(), val))
        self._app_settings_type = val

    @classmethod
    def get_by_prefix(cls, prefix):
        return RhodeCodeSetting.query()\
            .filter(RhodeCodeSetting.app_settings_name.startswith(prefix))\
            .all()

    def __unicode__(self):
        return u"<%s('%s:%s[%s]')>" % (
            self.__class__.__name__,
            self.app_settings_name, self.app_settings_value,
            self.app_settings_type
        )


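# Illustrative sketch (not part of the original module): the SETTINGS_TYPES
# table drives how the raw string value is converted on read. Assuming a
# running app with a configured Session, a boolean setting round-trips as:
#
#     sett = RhodeCodeSetting('show_public_icon', 'True', 'bool')
#     Session().add(sett)
#     Session().commit()
#     RhodeCodeSetting.get_by_prefix('show_')[0].app_settings_value  # -> True
#
# Types suffixed with `.encrypted` (e.g. 'unicode.encrypted') are additionally
# run through EncryptedTextValue in the hybrid property getter/setter.

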
class RhodeCodeUi(Base, BaseModel):
    __tablename__ = 'rhodecode_ui'
    __table_args__ = (
        UniqueConstraint('ui_key'),
        base_table_args
    )

    HOOK_REPO_SIZE = 'changegroup.repo_size'
    # HG
    HOOK_PRE_PULL = 'preoutgoing.pre_pull'
    HOOK_PULL = 'outgoing.pull_logger'
    HOOK_PRE_PUSH = 'prechangegroup.pre_push'
    HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
    HOOK_PUSH = 'changegroup.push_logger'
    HOOK_PUSH_KEY = 'pushkey.key_push'

    HOOKS_BUILTIN = [
        HOOK_PRE_PULL,
        HOOK_PULL,
        HOOK_PRE_PUSH,
        HOOK_PRETX_PUSH,
        HOOK_PUSH,
        HOOK_PUSH_KEY,
    ]

    # TODO: johbo: Unify way how hooks are configured for git and hg,
    # git part is currently hardcoded.

    # SVN PATTERNS
    SVN_BRANCH_ID = 'vcs_svn_branch'
    SVN_TAG_ID = 'vcs_svn_tag'

    ui_id = Column(
        "ui_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    ui_section = Column(
        "ui_section", String(255), nullable=True, unique=None, default=None)
    ui_key = Column(
        "ui_key", String(255), nullable=True, unique=None, default=None)
    ui_value = Column(
        "ui_value", String(255), nullable=True, unique=None, default=None)
    ui_active = Column(
        "ui_active", Boolean(), nullable=True, unique=None, default=True)

    def __repr__(self):
        return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
                                    self.ui_key, self.ui_value)


class RepoRhodeCodeSetting(Base, BaseModel):
    __tablename__ = 'repo_rhodecode_settings'
    __table_args__ = (
        UniqueConstraint(
            'app_settings_name', 'repository_id',
            name='uq_repo_rhodecode_setting_name_repo_id'),
        base_table_args
    )

    repository_id = Column(
        "repository_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=False)
    app_settings_id = Column(
        "app_settings_id", Integer(), nullable=False, unique=True,
        default=None, primary_key=True)
    app_settings_name = Column(
        "app_settings_name", String(255), nullable=True, unique=None,
        default=None)
    _app_settings_value = Column(
        "app_settings_value", String(4096), nullable=True, unique=None,
        default=None)
    _app_settings_type = Column(
        "app_settings_type", String(255), nullable=True, unique=None,
        default=None)

    repository = relationship('Repository')

    def __init__(self, repository_id, key='', val='', type='unicode'):
        self.repository_id = repository_id
        self.app_settings_name = key
        self.app_settings_type = type
        self.app_settings_value = val

    @validates('_app_settings_value')
    def validate_settings_value(self, key, val):
        assert type(val) == unicode
        return val

    @hybrid_property
    def app_settings_value(self):
        v = self._app_settings_value
        type_ = self.app_settings_type
        SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
        converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
        return converter(v)

    @app_settings_value.setter
    def app_settings_value(self, val):
        """
        Setter that will always make sure we use unicode in app_settings_value

        :param val:
        """
        self._app_settings_value = safe_unicode(val)

    @hybrid_property
    def app_settings_type(self):
        return self._app_settings_type

    @app_settings_type.setter
    def app_settings_type(self, val):
        SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
        if val not in SETTINGS_TYPES:
            raise Exception('type must be one of %s got %s'
                            % (SETTINGS_TYPES.keys(), val))
        self._app_settings_type = val

    def __unicode__(self):
        return u"<%s('%s:%s:%s[%s]')>" % (
            self.__class__.__name__, self.repository.repo_name,
            self.app_settings_name, self.app_settings_value,
            self.app_settings_type
        )


class RepoRhodeCodeUi(Base, BaseModel):
    __tablename__ = 'repo_rhodecode_ui'
    __table_args__ = (
        UniqueConstraint(
            'repository_id', 'ui_section', 'ui_key',
            name='uq_repo_rhodecode_ui_repository_id_section_key'),
        base_table_args
    )

    repository_id = Column(
        "repository_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=False)
    ui_id = Column(
        "ui_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    ui_section = Column(
        "ui_section", String(255), nullable=True, unique=None, default=None)
    ui_key = Column(
        "ui_key", String(255), nullable=True, unique=None, default=None)
    ui_value = Column(
        "ui_value", String(255), nullable=True, unique=None, default=None)
    ui_active = Column(
        "ui_active", Boolean(), nullable=True, unique=None, default=True)

    repository = relationship('Repository')

    def __repr__(self):
        return '<%s[%s:%s]%s=>%s]>' % (
            self.__class__.__name__, self.repository.repo_name,
            self.ui_section, self.ui_key, self.ui_value)


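# Illustrative sketch (not part of the original module): RepoRhodeCodeSetting
# and RepoRhodeCodeUi mirror the global tables on a per-repository basis. A
# repo-scoped lookup with a global fallback could look like this (the helper
# name and `repo_id` are hypothetical):
#
#     def get_setting(repo_id, name):
#         repo_sett = RepoRhodeCodeSetting.query()\
#             .filter(RepoRhodeCodeSetting.repository_id == repo_id)\
#             .filter(RepoRhodeCodeSetting.app_settings_name == name).scalar()
#         if repo_sett is not None:
#             return repo_sett.app_settings_value
#         glob = RhodeCodeSetting.query()\
#             .filter(RhodeCodeSetting.app_settings_name == name).scalar()
#         return glob.app_settings_value if glob else None

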
class User(Base, BaseModel):
    __tablename__ = 'users'
    __table_args__ = (
        UniqueConstraint('username'), UniqueConstraint('email'),
        Index('u_username_idx', 'username'),
        Index('u_email_idx', 'email'),
        base_table_args
    )

    DEFAULT_USER = 'default'
    DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
    DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'

    user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    username = Column("username", String(255), nullable=True, unique=None, default=None)
    password = Column("password", String(255), nullable=True, unique=None, default=None)
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
    admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
    name = Column("firstname", String(255), nullable=True, unique=None, default=None)
    lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
    _email = Column("email", String(255), nullable=True, unique=None, default=None)
    last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
    last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)
    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))

    extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
    extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
    _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
    inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    _user_data = Column("user_data", LargeBinary(), nullable=True)  # JSON data

    user_log = relationship('UserLog')
    user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all, delete-orphan')

    repositories = relationship('Repository')
    repository_groups = relationship('RepoGroup')
    user_groups = relationship('UserGroup')

    user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
    followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')

    repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all, delete-orphan')
    repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan')
    user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan')

    group_member = relationship('UserGroupMember', cascade='all')

    notifications = relationship('UserNotification', cascade='all')
    # notifications assigned to this user
    user_created_notifications = relationship('Notification', cascade='all')
    # comments created by this user
    user_comments = relationship('ChangesetComment', cascade='all')
    # user profile extra info
    user_emails = relationship('UserEmailMap', cascade='all')
    user_ip_map = relationship('UserIpMap', cascade='all')
    user_auth_tokens = relationship('UserApiKeys', cascade='all')
    user_ssh_keys = relationship('UserSshKeys', cascade='all')

    # gists
    user_gists = relationship('Gist', cascade='all')
    # user pull requests
    user_pull_requests = relationship('PullRequest', cascade='all')

    # external identities
    external_identities = relationship(
        'ExternalIdentity',
        primaryjoin="User.user_id==ExternalIdentity.local_user_id",
        cascade='all')
    # review rules
    user_review_rules = relationship('RepoReviewRuleUser', cascade='all')

    # artifacts owned
    artifacts = relationship('FileStore', primaryjoin='FileStore.user_id==User.user_id')

    # no cascade, set NULL
    scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_user_id==User.user_id')

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
                                      self.user_id, self.username)

    @hybrid_property
    def email(self):
        return self._email

    @email.setter
    def email(self, val):
        self._email = val.lower() if val else None

    @hybrid_property
    def first_name(self):
        from rhodecode.lib import helpers as h
        if self.name:
            return h.escape(self.name)
        return self.name

    @hybrid_property
    def last_name(self):
        from rhodecode.lib import helpers as h
        if self.lastname:
            return h.escape(self.lastname)
        return self.lastname

    @hybrid_property
    def api_key(self):
        """
        Fetch if exist an auth-token with role ALL connected to this user
        """
        user_auth_token = UserApiKeys.query()\
            .filter(UserApiKeys.user_id == self.user_id)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))\
            .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
        if user_auth_token:
            user_auth_token = user_auth_token.api_key

        return user_auth_token

    @api_key.setter
    def api_key(self, val):
        # don't allow to set API key this is deprecated for now
        self._api_key = None

    @property
    def reviewer_pull_requests(self):
        return PullRequestReviewers.query() \
            .options(joinedload(PullRequestReviewers.pull_request)) \
            .filter(PullRequestReviewers.user_id == self.user_id) \
            .all()

    @property
    def firstname(self):
        # alias for future
        return self.name

    @property
    def emails(self):
        other = UserEmailMap.query()\
            .filter(UserEmailMap.user == self) \
            .order_by(UserEmailMap.email_id.asc()) \
            .all()
        return [self.email] + [x.email for x in other]

    def emails_cached(self):
        emails = UserEmailMap.query()\
            .filter(UserEmailMap.user == self) \
            .order_by(UserEmailMap.email_id.asc())

        emails = emails.options(
            FromCache("sql_cache_short", "get_user_{}_emails".format(self.user_id))
        )

        return [self.email] + [x.email for x in emails]

    @property
    def auth_tokens(self):
        auth_tokens = self.get_auth_tokens()
        return [x.api_key for x in auth_tokens]

    def get_auth_tokens(self):
        return UserApiKeys.query()\
            .filter(UserApiKeys.user == self)\
            .order_by(UserApiKeys.user_api_key_id.asc())\
            .all()

    @LazyProperty
    def feed_token(self):
        return self.get_feed_token()

    def get_feed_token(self, cache=True):
        feed_tokens = UserApiKeys.query()\
            .filter(UserApiKeys.user == self)\
            .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
        if cache:
            feed_tokens = feed_tokens.options(
                FromCache("sql_cache_short", "get_user_feed_token_%s" % self.user_id))

        feed_tokens = feed_tokens.all()
        if feed_tokens:
            return feed_tokens[0].api_key
        return 'NO_FEED_TOKEN_AVAILABLE'

    @LazyProperty
    def artifact_token(self):
        return self.get_artifact_token()

    def get_artifact_token(self, cache=True):
        artifacts_tokens = UserApiKeys.query()\
            .filter(UserApiKeys.user == self)\
            .filter(UserApiKeys.role == UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)
        if cache:
            artifacts_tokens = artifacts_tokens.options(
                FromCache("sql_cache_short", "get_user_artifact_token_%s" % self.user_id))

        artifacts_tokens = artifacts_tokens.all()
        if artifacts_tokens:
            return artifacts_tokens[0].api_key
        return 'NO_ARTIFACT_TOKEN_AVAILABLE'

    @classmethod
    def get(cls, user_id, cache=False):
        if not user_id:
            return

        user = cls.query()
        if cache:
            user = user.options(
                FromCache("sql_cache_short", "get_users_%s" % user_id))
        return user.get(user_id)

    @classmethod
    def extra_valid_auth_tokens(cls, user, role=None):
        tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))
        if role:
            tokens = tokens.filter(or_(UserApiKeys.role == role,
                                       UserApiKeys.role == UserApiKeys.ROLE_ALL))
        return tokens.all()

    def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
        from rhodecode.lib import auth

        log.debug('Trying to authenticate user: %s via auth-token, '
                  'and roles: %s', self, roles)

        if not auth_token:
            return False

        roles = (roles or []) + [UserApiKeys.ROLE_ALL]
        tokens_q = UserApiKeys.query()\
            .filter(UserApiKeys.user_id == self.user_id)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))

        tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))

        crypto_backend = auth.crypto_backend()
        enc_token_map = {}
        plain_token_map = {}
        for token in tokens_q:
            if token.api_key.startswith(crypto_backend.ENC_PREF):
                enc_token_map[token.api_key] = token
            else:
                plain_token_map[token.api_key] = token
        log.debug(
            'Found %s plain and %s encrypted tokens to check for authentication for this user',
            len(plain_token_map), len(enc_token_map))

        # plain token match comes first
        match = plain_token_map.get(auth_token)

        # check encrypted tokens now
        if not match:
            for token_hash, token in enc_token_map.items():
                # NOTE(marcink): this is expensive to calculate, but most secure
                if crypto_backend.hash_check(auth_token, token_hash):
                    match = token
                    break

        if match:
            log.debug('Found matching token %s', match)
            if match.repo_id:
                log.debug('Found scope, checking for scope match of token %s', match)
                if match.repo_id == scope_repo_id:
                    return True
                else:
                    log.debug(
                        'AUTH_TOKEN: scope mismatch, token has a set repo scope: %s, '
                        'and calling scope is:%s, skipping further checks',
                        match.repo, scope_repo_id)
                    return False
            else:
                return True

        return False

    @property
    def ip_addresses(self):
        ret = UserIpMap.query().filter(UserIpMap.user == self).all()
        return [x.ip_addr for x in ret]

    @property
    def username_and_name(self):
        return '%s (%s %s)' % (self.username, self.first_name, self.last_name)

    @property
    def username_or_name_or_email(self):
        full_name = self.full_name if self.full_name != ' ' else None
        return self.username or full_name or self.email

    @property
    def full_name(self):
        return '%s %s' % (self.first_name, self.last_name)

    @property
    def full_name_or_username(self):
        return ('%s %s' % (self.first_name, self.last_name)
                if (self.first_name and self.last_name) else self.username)

    @property
    def full_contact(self):
        return '%s %s <%s>' % (self.first_name, self.last_name, self.email)

    @property
    def short_contact(self):
        return '%s %s' % (self.first_name, self.last_name)

    @property
    def is_admin(self):
        return self.admin

    @property
    def language(self):
        return self.user_data.get('language')

    def AuthUser(self, **kwargs):
        """
        Returns instance of AuthUser for this user
        """
        from rhodecode.lib.auth import AuthUser
        return AuthUser(user_id=self.user_id, username=self.username, **kwargs)

    @hybrid_property
    def user_data(self):
        if not self._user_data:
            return {}

        try:
            return json.loads(self._user_data)
        except TypeError:
            return {}

    @user_data.setter
    def user_data(self, val):
        if not isinstance(val, dict):
            raise Exception('user_data must be dict, got %s' % type(val))
        try:
            self._user_data = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())

    @classmethod
    def get_by_username(cls, username, case_insensitive=False,
                        cache=False, identity_cache=False):
        session = Session()

        if case_insensitive:
            q = cls.query().filter(
                func.lower(cls.username) == func.lower(username))
        else:
            q = cls.query().filter(cls.username == username)

        if cache:
            if identity_cache:
                val = cls.identity_cache(session, 'username', username)
                if val:
                    return val
            else:
                cache_key = "get_user_by_name_%s" % _hash_key(username)
                q = q.options(
                    FromCache("sql_cache_short", cache_key))

        return q.scalar()

    @classmethod
    def get_by_auth_token(cls, auth_token, cache=False):
        q = UserApiKeys.query()\
            .filter(UserApiKeys.api_key == auth_token)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))
        if cache:
            q = q.options(
                FromCache("sql_cache_short", "get_auth_token_%s" % auth_token))

        match = q.first()
        if match:
            return match.user

    @classmethod
    def get_by_email(cls, email, case_insensitive=False, cache=False):

        if case_insensitive:
            q = cls.query().filter(func.lower(cls.email) == func.lower(email))

        else:
            q = cls.query().filter(cls.email == email)

        email_key = _hash_key(email)
        if cache:
            q = q.options(
                FromCache("sql_cache_short", "get_email_key_%s" % email_key))

        ret = q.scalar()
        if ret is None:
            q = UserEmailMap.query()
            # try fetching in alternate email map
            if case_insensitive:
                q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
            else:
                q = q.filter(UserEmailMap.email == email)
            q = q.options(joinedload(UserEmailMap.user))
            if cache:
                q = q.options(
                    FromCache("sql_cache_short", "get_email_map_key_%s" % email_key))
            ret = getattr(q.scalar(), 'user', None)

        return ret

    @classmethod
    def get_from_cs_author(cls, author):
        """
        Tries to get User objects out of commit author string

        :param author:
        """
        from rhodecode.lib.helpers import email, author_name
        # Valid email in the attribute passed, see if they're in the system
        _email = email(author)
        if _email:
            user = cls.get_by_email(_email, case_insensitive=True)
            if user:
                return user
        # Maybe we can match by username?
        _author = author_name(author)
        user = cls.get_by_username(_author, case_insensitive=True)
        if user:
            return user

    def update_userdata(self, **kwargs):
        usr = self
        old = usr.user_data
        old.update(**kwargs)
        usr.user_data = old
        Session().add(usr)
        log.debug('updated userdata with %s', kwargs)

    def update_lastlogin(self):
        """Update user lastlogin"""
        self.last_login = datetime.datetime.now()
        Session().add(self)
        log.debug('updated user %s lastlogin', self.username)

    def update_password(self, new_password):
        from rhodecode.lib.auth import get_crypt_password

        self.password = get_crypt_password(new_password)
        Session().add(self)

    @classmethod
    def get_first_super_admin(cls):
        user = User.query()\
            .filter(User.admin == true()) \
            .order_by(User.user_id.asc()) \
            .first()

        if user is None:
            raise Exception('FATAL: Missing administrative account!')
        return user

    @classmethod
    def get_all_super_admins(cls, only_active=False):
        """
        Returns all admin accounts sorted by username
        """
        qry = User.query().filter(User.admin == true()).order_by(User.username.asc())
        if only_active:
            qry = qry.filter(User.active == true())
        return qry.all()

    @classmethod
    def get_all_user_ids(cls, only_active=True):
        """
        Returns all users IDs
        """
        qry = Session().query(User.user_id)

        if only_active:
            qry = qry.filter(User.active == true())
        return [x.user_id for x in qry]

    @classmethod
    def get_default_user(cls, cache=False, refresh=False):
        user = User.get_by_username(User.DEFAULT_USER, cache=cache)
        if user is None:
            raise Exception('FATAL: Missing default account!')
        if refresh:
            # The default user might be based on outdated state which
            # has been loaded from the cache.
            # A call to refresh() ensures that the
            # latest state from the database is used.
            Session().refresh(user)
        return user

    @classmethod
    def get_default_user_id(cls):
        import rhodecode
        return rhodecode.CONFIG['default_user_id']

    def _get_default_perms(self, user, suffix=''):
        from rhodecode.model.permission import PermissionModel
        return PermissionModel().get_default_perms(user.user_perms, suffix)

    def get_default_perms(self, suffix=''):
        return self._get_default_perms(self, suffix)

    def get_api_data(self, include_secrets=False, details='full'):
        """
        Common function for generating user related data for API

        :param include_secrets: By default secrets in the API data will be replaced
            by a placeholder value to prevent exposing this data by accident. In case
            this data shall be exposed, set this flag to ``True``.

        :param details: details can be 'basic|full' basic gives only a subset of
            the available user information that includes user_id, name and emails.
        """
        user = self
        user_data = self.user_data
        data = {
            'user_id': user.user_id,
            'username': user.username,
            'firstname': user.name,
            'lastname': user.lastname,
            'description': user.description,
            'email': user.email,
            'emails': user.emails,
        }
        if details == 'basic':
            return data

        auth_token_length = 40
        auth_token_replacement = '*' * auth_token_length

        extras = {
            'auth_tokens': [auth_token_replacement],
            'active': user.active,
            'admin': user.admin,
            'extern_type': user.extern_type,
            'extern_name': user.extern_name,
            'last_login': user.last_login,
            'last_activity': user.last_activity,
            'ip_addresses': user.ip_addresses,
            'language': user_data.get('language')
        }
        data.update(extras)

        if include_secrets:
            data['auth_tokens'] = user.auth_tokens
        return data

    def __json__(self):
        data = {
            'full_name': self.full_name,
            'full_name_or_username': self.full_name_or_username,
            'short_contact': self.short_contact,
            'full_contact': self.full_contact,
        }
        data.update(self.get_api_data())
        return data


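# Illustrative sketch (not part of the original module): by default the API
# payload masks auth tokens with a 40-character placeholder; callers must opt
# in to receive real secrets or a reduced field set:
#
#     user = User.get_first_super_admin()
#     user.get_api_data()['auth_tokens']                      # ['****...']
#     user.get_api_data(include_secrets=True)['auth_tokens']  # real tokens
#     user.get_api_data(details='basic')                      # id/name/emails only

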
class UserApiKeys(Base, BaseModel):
    __tablename__ = 'user_api_keys'
    __table_args__ = (
        Index('uak_api_key_idx', 'api_key'),
        Index('uak_api_key_expires_idx', 'api_key', 'expires'),
        base_table_args
    )
    __mapper_args__ = {}

    # ApiKey role
    ROLE_ALL = 'token_role_all'
    ROLE_VCS = 'token_role_vcs'
    ROLE_API = 'token_role_api'
    ROLE_HTTP = 'token_role_http'
    ROLE_FEED = 'token_role_feed'
    ROLE_ARTIFACT_DOWNLOAD = 'role_artifact_download'
    # The last one is ignored in the list as we only
    # use it for one action, and cannot be created by users
    ROLE_PASSWORD_RESET = 'token_password_reset'

    ROLES = [ROLE_ALL, ROLE_VCS, ROLE_API, ROLE_HTTP, ROLE_FEED, ROLE_ARTIFACT_DOWNLOAD]

    user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    api_key = Column("api_key", String(255), nullable=False, unique=True)
    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
    expires = Column('expires', Float(53), nullable=False)
    role = Column('role', String(255), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    # scope columns
    repo_id = Column(
        'repo_id', Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined')

    repo_group_id = Column(
        'repo_group_id', Integer(), ForeignKey('groups.group_id'),
        nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined')

    user = relationship('User', lazy='joined')

    def __unicode__(self):
        return u"<%s('%s')>" % (self.__class__.__name__, self.role)

    def __json__(self):
        data = {
            'auth_token': self.api_key,
            'role': self.role,
            'scope': self.scope_humanized,
            'expired': self.expired
        }
        return data

    def get_api_data(self, include_secrets=False):
        data = self.__json__()
        if include_secrets:
            return data
        else:
            data['auth_token'] = self.token_obfuscated
            return data

    @hybrid_property
    def description_safe(self):
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @property
    def expired(self):
        if self.expires == -1:
            return False
        return time.time() > self.expires

1199 @classmethod
1200 @classmethod
1200 def _get_role_name(cls, role):
1201 def _get_role_name(cls, role):
1201 return {
1202 return {
1202 cls.ROLE_ALL: _('all'),
1203 cls.ROLE_ALL: _('all'),
1203 cls.ROLE_HTTP: _('http/web interface'),
1204 cls.ROLE_HTTP: _('http/web interface'),
1204 cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
1205 cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
1205 cls.ROLE_API: _('api calls'),
1206 cls.ROLE_API: _('api calls'),
1206 cls.ROLE_FEED: _('feed access'),
1207 cls.ROLE_FEED: _('feed access'),
1207 cls.ROLE_ARTIFACT_DOWNLOAD: _('artifacts downloads'),
1208 cls.ROLE_ARTIFACT_DOWNLOAD: _('artifacts downloads'),
1208 }.get(role, role)
1209 }.get(role, role)
1209
1210
1210 @classmethod
1211 @classmethod
1211 def _get_role_description(cls, role):
1212 def _get_role_description(cls, role):
1212 return {
1213 return {
1213 cls.ROLE_ALL: _('Token for all actions.'),
1214 cls.ROLE_ALL: _('Token for all actions.'),
1214 cls.ROLE_HTTP: _('Token to access RhodeCode pages via web interface without '
1215 cls.ROLE_HTTP: _('Token to access RhodeCode pages via web interface without '
1215 'login using `api_access_controllers_whitelist` functionality.'),
1216 'login using `api_access_controllers_whitelist` functionality.'),
1216 cls.ROLE_VCS: _('Token to interact over git/hg/svn protocols. '
1217 cls.ROLE_VCS: _('Token to interact over git/hg/svn protocols. '
1217 'Requires auth_token authentication plugin to be active. <br/>'
1218 'Requires auth_token authentication plugin to be active. <br/>'
1218 'Such Token should be used then instead of a password to '
1219 'Such Token should be used then instead of a password to '
1219 'interact with a repository, and additionally can be '
1220 'interact with a repository, and additionally can be '
1220 'limited to single repository using repo scope.'),
1221 'limited to single repository using repo scope.'),
1221 cls.ROLE_API: _('Token limited to api calls.'),
1222 cls.ROLE_API: _('Token limited to api calls.'),
1222 cls.ROLE_FEED: _('Token to read RSS/ATOM feed.'),
1223 cls.ROLE_FEED: _('Token to read RSS/ATOM feed.'),
1223 cls.ROLE_ARTIFACT_DOWNLOAD: _('Token for artifacts downloads.'),
1224 cls.ROLE_ARTIFACT_DOWNLOAD: _('Token for artifacts downloads.'),
1224 }.get(role, role)
1225 }.get(role, role)
1225
1226
1226 @property
1227 @property
1227 def role_humanized(self):
1228 def role_humanized(self):
1228 return self._get_role_name(self.role)
1229 return self._get_role_name(self.role)
1229
1230
1230 def _get_scope(self):
1231 def _get_scope(self):
1231 if self.repo:
1232 if self.repo:
1232 return 'Repository: {}'.format(self.repo.repo_name)
1233 return 'Repository: {}'.format(self.repo.repo_name)
1233 if self.repo_group:
1234 if self.repo_group:
1234 return 'RepositoryGroup: {} (recursive)'.format(self.repo_group.group_name)
1235 return 'RepositoryGroup: {} (recursive)'.format(self.repo_group.group_name)
1235 return 'Global'
1236 return 'Global'
1236
1237
1237 @property
1238 @property
1238 def scope_humanized(self):
1239 def scope_humanized(self):
1239 return self._get_scope()
1240 return self._get_scope()
1240
1241
1241 @property
1242 @property
1242 def token_obfuscated(self):
1243 def token_obfuscated(self):
1243 if self.api_key:
1244 if self.api_key:
1244 return self.api_key[:4] + "****"
1245 return self.api_key[:4] + "****"
1245
1246
1246
1247
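# Editor's illustrative sketch, not part of the original db.py: the `expires`
# column above stores -1 for tokens that never expire, otherwise a unix
# timestamp that the `expired` property compares against time.time().
# The helper below is hypothetical and only demonstrates that convention.
def _example_token_expiry_check():
    import time

    def is_expired(expires):
        # mirrors UserApiKeys.expired: -1 means "never expires"
        if expires == -1:
            return False
        return time.time() > expires

    assert is_expired(-1) is False                   # never expires
    assert is_expired(time.time() + 3600) is False   # still valid for an hour
    assert is_expired(time.time() - 1) is True       # already in the past
    return True
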
class UserEmailMap(Base, BaseModel):
    __tablename__ = 'user_email_map'
    __table_args__ = (
        Index('uem_email_idx', 'email'),
        UniqueConstraint('email'),
        base_table_args
    )
    __mapper_args__ = {}

    email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    _email = Column("email", String(255), nullable=True, unique=False, default=None)
    user = relationship('User', lazy='joined')

    @validates('_email')
    def validate_email(self, key, email):
        # check that this email is not a user's main email
        main_email = Session().query(User).filter(User.email == email).scalar()
        if main_email is not None:
            raise AttributeError('email %s is already present in the user table' % email)
        return email

    @hybrid_property
    def email(self):
        return self._email

    @email.setter
    def email(self, val):
        self._email = val.lower() if val else None

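# Editor's illustrative sketch, not part of the original db.py: the email
# setter above lowercases addresses before they are stored, so extra emails
# are matched case-insensitively. This stand-alone helper is hypothetical.
def _example_normalize_extra_email(val):
    # mirrors the UserEmailMap.email setter
    return val.lower() if val else None


# _example_normalize_extra_email('John.Doe@Example.COM') -> 'john.doe@example.com'
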
class UserIpMap(Base, BaseModel):
    __tablename__ = 'user_ip_map'
    __table_args__ = (
        UniqueConstraint('user_id', 'ip_addr'),
        base_table_args
    )
    __mapper_args__ = {}

    ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
    description = Column("description", String(10000), nullable=True, unique=None, default=None)
    user = relationship('User', lazy='joined')

    @hybrid_property
    def description_safe(self):
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @classmethod
    def _get_ip_range(cls, ip_addr):
        net = ipaddress.ip_network(safe_unicode(ip_addr), strict=False)
        return [str(net.network_address), str(net.broadcast_address)]

    def __json__(self):
        return {
            'ip_addr': self.ip_addr,
            'ip_range': self._get_ip_range(self.ip_addr),
        }

    def __unicode__(self):
        return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
                                            self.user_id, self.ip_addr)

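# Editor's illustrative sketch, not part of the original db.py: what
# UserIpMap._get_ip_range computes for a stored CIDR entry, using only the
# stdlib `ipaddress` module. The function name is hypothetical.
def _example_ip_range(ip_addr=u'192.168.1.0/24'):
    import ipaddress

    net = ipaddress.ip_network(ip_addr, strict=False)
    # for the default argument this returns ['192.168.1.0', '192.168.1.255']
    return [str(net.network_address), str(net.broadcast_address)]
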
class UserSshKeys(Base, BaseModel):
    __tablename__ = 'user_ssh_keys'
    __table_args__ = (
        Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),

        UniqueConstraint('ssh_key_fingerprint'),

        base_table_args
    )
    __mapper_args__ = {}

    ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
    ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
    ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)

    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)

    user = relationship('User', lazy='joined')

    def __json__(self):
        data = {
            'ssh_fingerprint': self.ssh_key_fingerprint,
            'description': self.description,
            'created_on': self.created_on
        }
        return data

    def get_api_data(self):
        data = self.__json__()
        return data

class UserLog(Base, BaseModel):
    __tablename__ = 'user_logs'
    __table_args__ = (
        base_table_args,
    )

    VERSION_1 = 'v1'
    VERSION_2 = 'v2'
    VERSIONS = [VERSION_1, VERSION_2]

    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
    username = Column("username", String(255), nullable=True, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
    repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
    user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
    action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)

    version = Column("version", String(255), nullable=True, default=VERSION_1)
    user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
    action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (
            self.__class__.__name__, self.repository_name, self.action)

    def __json__(self):
        return {
            'user_id': self.user_id,
            'username': self.username,
            'repository_id': self.repository_id,
            'repository_name': self.repository_name,
            'user_ip': self.user_ip,
            'action_date': self.action_date,
            'action': self.action,
        }

    @hybrid_property
    def entry_id(self):
        return self.user_log_id

    @property
    def action_as_day(self):
        return datetime.date(*self.action_date.timetuple()[:3])

    user = relationship('User')
    repository = relationship('Repository', cascade='')

class UserGroup(Base, BaseModel):
    __tablename__ = 'users_groups'
    __table_args__ = (
        base_table_args,
    )

    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
    user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
    inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    _group_data = Column("group_data", LargeBinary(), nullable=True)  # JSON data

    members = relationship('UserGroupMember', cascade="all, delete-orphan", lazy="joined")
    users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
    users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
    users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
    user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
    user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')

    user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all')
    user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id")

    @classmethod
    def _load_group_data(cls, column):
        if not column:
            return {}

        try:
            return json.loads(column) or {}
        except TypeError:
            return {}

    @hybrid_property
    def description_safe(self):
        from rhodecode.lib import helpers as h
        return h.escape(self.user_group_description)

    @hybrid_property
    def group_data(self):
        return self._load_group_data(self._group_data)

    @group_data.expression
    def group_data(self, **kwargs):
        return self._group_data

    @group_data.setter
    def group_data(self, val):
        try:
            self._group_data = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())

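    # Editor's illustrative sketch, not part of the original db.py: group_data
    # is persisted as a JSON blob, so the getter/setter pair above is a plain
    # json round-trip. This hypothetical helper only demonstrates the format.
    @staticmethod
    def _example_group_data_roundtrip():
        import json
        stored = json.dumps({'extern_type': 'ldap'})   # what the setter writes
        loaded = json.loads(stored) or {}              # what the getter returns
        return loaded.get('extern_type')               # -> 'ldap', see _load_sync
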
    @classmethod
    def _load_sync(cls, group_data):
        if group_data:
            return group_data.get('extern_type')

    @property
    def sync(self):
        return self._load_sync(self.group_data)

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
                                      self.users_group_id,
                                      self.users_group_name)

    @classmethod
    def get_by_group_name(cls, group_name, cache=False,
                          case_insensitive=False):
        if case_insensitive:
            q = cls.query().filter(func.lower(cls.users_group_name) ==
                                   func.lower(group_name))

        else:
            q = cls.query().filter(cls.users_group_name == group_name)
        if cache:
            q = q.options(
                FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name)))
        return q.scalar()

    @classmethod
    def get(cls, user_group_id, cache=False):
        if not user_group_id:
            return

        user_group = cls.query()
        if cache:
            user_group = user_group.options(
                FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
        return user_group.get(user_group_id)

    def permissions(self, with_admins=True, with_owner=True,
                    expand_from_user_groups=False):
        """
        Permissions for user groups
        """
        _admin_perm = 'usergroup.admin'

        owner_row = []
        if with_owner:
            usr = AttributeDict(self.user.get_dict())
            usr.owner_row = True
            usr.permission = _admin_perm
            owner_row.append(usr)

        super_admin_ids = []
        super_admin_rows = []
        if with_admins:
            for usr in User.get_all_super_admins():
                super_admin_ids.append(usr.user_id)
                # if this admin is also the owner, don't double the record
                if usr.user_id == owner_row[0].user_id:
                    owner_row[0].admin_row = True
                else:
                    usr = AttributeDict(usr.get_dict())
                    usr.admin_row = True
                    usr.permission = _admin_perm
                    super_admin_rows.append(usr)

        q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
        q = q.options(joinedload(UserUserGroupToPerm.user_group),
                      joinedload(UserUserGroupToPerm.user),
                      joinedload(UserUserGroupToPerm.permission),)

        # get owners, admins and permissions. We rewrite the sqlalchemy objects
        # into named-tuple-like AttributeDicts because the sqlalchemy session
        # holds a global reference, so changing one object propagates to all
        # others. This means that if an admin is also an owner, setting
        # admin_row would otherwise change both records.
        perm_rows = []
        for _usr in q.all():
            usr = AttributeDict(_usr.user.get_dict())
            # if this user is also owner/admin, mark it as a duplicate record
            if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
                usr.duplicate_perm = True
            usr.permission = _usr.permission.permission_name
            perm_rows.append(usr)

        # filter the perm rows by 'default' first, then sort them by
        # admin, write, read, none permission, sorted again alphabetically
        # within each group
        perm_rows = sorted(perm_rows, key=display_user_sort)

        user_groups_rows = []
        if expand_from_user_groups:
            for ug in self.permission_user_groups(with_members=True):
                for user_data in ug.members:
                    user_groups_rows.append(user_data)

        return super_admin_rows + owner_row + perm_rows + user_groups_rows

    def permission_user_groups(self, with_members=False):
        q = UserGroupUserGroupToPerm.query()\
            .filter(UserGroupUserGroupToPerm.target_user_group == self)
        q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
                      joinedload(UserGroupUserGroupToPerm.target_user_group),
                      joinedload(UserGroupUserGroupToPerm.permission),)

        perm_rows = []
        for _user_group in q.all():
            entry = AttributeDict(_user_group.user_group.get_dict())
            entry.permission = _user_group.permission.permission_name
            if with_members:
                entry.members = [x.user.get_dict()
                                 for x in _user_group.user_group.members]
            perm_rows.append(entry)

        perm_rows = sorted(perm_rows, key=display_user_group_sort)
        return perm_rows

    def _get_default_perms(self, user_group, suffix=''):
        from rhodecode.model.permission import PermissionModel
        return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)

    def get_default_perms(self, suffix=''):
        return self._get_default_perms(self, suffix)

    def get_api_data(self, with_group_members=True, include_secrets=False):
        """
        :param include_secrets: See :meth:`User.get_api_data`; this parameter
            is basically forwarded.

        """
        user_group = self
        data = {
            'users_group_id': user_group.users_group_id,
            'group_name': user_group.users_group_name,
            'group_description': user_group.user_group_description,
            'active': user_group.users_group_active,
            'owner': user_group.user.username,
            'sync': user_group.sync,
            'owner_email': user_group.user.email,
        }

        if with_group_members:
            users = []
            for user in user_group.members:
                user = user.user
                users.append(user.get_api_data(include_secrets=include_secrets))
            data['users'] = users

        return data

class UserGroupMember(Base, BaseModel):
    __tablename__ = 'users_groups_members'
    __table_args__ = (
        base_table_args,
    )

    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)

    user = relationship('User', lazy='joined')
    users_group = relationship('UserGroup')

    def __init__(self, gr_id='', u_id=''):
        self.users_group_id = gr_id
        self.user_id = u_id

class RepositoryField(Base, BaseModel):
    __tablename__ = 'repositories_fields'
    __table_args__ = (
        UniqueConstraint('repository_id', 'field_key'),  # no-multi field
        base_table_args,
    )

    PREFIX = 'ex_'  # prefix used in form to not conflict with already existing fields

    repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    field_key = Column("field_key", String(250))
    field_label = Column("field_label", String(1024), nullable=False)
    field_value = Column("field_value", String(10000), nullable=False)
    field_desc = Column("field_desc", String(1024), nullable=False)
    field_type = Column("field_type", String(255), nullable=False, unique=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    repository = relationship('Repository')

    @property
    def field_key_prefixed(self):
        return 'ex_%s' % self.field_key

    @classmethod
    def un_prefix_key(cls, key):
        if key.startswith(cls.PREFIX):
            return key[len(cls.PREFIX):]
        return key

    @classmethod
    def get_by_key_name(cls, key, repo):
        row = cls.query()\
            .filter(cls.repository == repo)\
            .filter(cls.field_key == key).scalar()
        return row

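# Editor's illustrative sketch, not part of the original db.py: extra repository
# fields are exposed in forms with the 'ex_' prefix (field_key_prefixed) and
# stripped again with un_prefix_key. The helper below is hypothetical.
def _example_field_key_prefixing(key='hg_mirror'):
    prefix = 'ex_'                          # RepositoryField.PREFIX
    prefixed = prefix + key                 # like field_key_prefixed -> 'ex_hg_mirror'
    unprefixed = prefixed[len(prefix):]     # like un_prefix_key -> 'hg_mirror'
    return prefixed, unprefixed
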
class Repository(Base, BaseModel):
    __tablename__ = 'repositories'
    __table_args__ = (
        Index('r_repo_name_idx', 'repo_name', mysql_length=255),
        base_table_args,
    )
    DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
    DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
    DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'

    STATE_CREATED = 'repo_state_created'
    STATE_PENDING = 'repo_state_pending'
    STATE_ERROR = 'repo_state_error'

    LOCK_AUTOMATIC = 'lock_auto'
    LOCK_API = 'lock_api'
    LOCK_WEB = 'lock_web'
    LOCK_PULL = 'lock_pull'

    NAME_SEP = URL_SEP

    repo_id = Column(
        "repo_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    _repo_name = Column(
        "repo_name", Text(), nullable=False, default=None)
    repo_name_hash = Column(
        "repo_name_hash", String(255), nullable=False, unique=True)
    repo_state = Column("repo_state", String(255), nullable=True)

    clone_uri = Column(
        "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
        default=None)
    push_uri = Column(
        "push_uri", EncryptedTextValue(), nullable=True, unique=False,
        default=None)
    repo_type = Column(
        "repo_type", String(255), nullable=False, unique=False, default=None)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
        unique=False, default=None)
    private = Column(
        "private", Boolean(), nullable=True, unique=None, default=None)
    archived = Column(
        "archived", Boolean(), nullable=True, unique=None, default=None)
    enable_statistics = Column(
        "statistics", Boolean(), nullable=True, unique=None, default=True)
    enable_downloads = Column(
        "downloads", Boolean(), nullable=True, unique=None, default=True)
    description = Column(
        "description", String(10000), nullable=True, unique=None, default=None)
    created_on = Column(
        'created_on', DateTime(timezone=False), nullable=True, unique=None,
        default=datetime.datetime.now)
    updated_on = Column(
        'updated_on', DateTime(timezone=False), nullable=True, unique=None,
        default=datetime.datetime.now)
    _landing_revision = Column(
        "landing_revision", String(255), nullable=False, unique=False,
        default=None)
    enable_locking = Column(
        "enable_locking", Boolean(), nullable=False, unique=None,
        default=False)
    _locked = Column(
        "locked", String(255), nullable=True, unique=False, default=None)
    _changeset_cache = Column(
        "changeset_cache", LargeBinary(), nullable=True)  # JSON data

    fork_id = Column(
        "fork_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=False, default=None)
    group_id = Column(
        "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
        unique=False, default=None)

    user = relationship('User', lazy='joined')
    fork = relationship('Repository', remote_side=repo_id, lazy='joined')
    group = relationship('RepoGroup', lazy='joined')
    repo_to_perm = relationship(
        'UserRepoToPerm', cascade='all',
        order_by='UserRepoToPerm.repo_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
    stats = relationship('Statistics', cascade='all', uselist=False)

    followers = relationship(
        'UserFollowing',
        primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
        cascade='all')
    extra_fields = relationship(
        'RepositoryField', cascade="all, delete-orphan")
    logs = relationship('UserLog')
    comments = relationship(
        'ChangesetComment', cascade="all, delete-orphan")
    pull_requests_source = relationship(
        'PullRequest',
        primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
        cascade="all, delete-orphan")
    pull_requests_target = relationship(
        'PullRequest',
        primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
        cascade="all, delete-orphan")
    ui = relationship('RepoRhodeCodeUi', cascade="all")
    settings = relationship('RepoRhodeCodeSetting', cascade="all")
    integrations = relationship('Integration', cascade="all, delete-orphan")

    scoped_tokens = relationship('UserApiKeys', cascade="all")

    # no cascade, set NULL
    artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_id==Repository.repo_id')

    def __unicode__(self):
        return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
                                   safe_unicode(self.repo_name))

    @hybrid_property
    def description_safe(self):
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @hybrid_property
    def landing_rev(self):
        # should always return [rev_type, rev], e.g. ['branch', 'master']
        if self._landing_revision:
            _rev_info = self._landing_revision.split(':')
            if len(_rev_info) < 2:
                _rev_info.insert(0, 'rev')
            return [_rev_info[0], _rev_info[1]]
        return [None, None]

    @property
    def landing_ref_type(self):
        return self.landing_rev[0]

    @property
    def landing_ref_name(self):
        return self.landing_rev[1]

    @landing_rev.setter
    def landing_rev(self, val):
        if ':' not in val:
            raise ValueError('value must be delimited with `:` and consist '
                             'of <rev_type>:<rev>, got %s instead' % val)
        self._landing_revision = val

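    # Editor's illustrative sketch, not part of the original db.py: the landing
    # revision is persisted as a single '<rev_type>:<rev>' string and split back
    # into a two-element list by the hybrid property above. Hypothetical helper.
    @staticmethod
    def _example_landing_rev_format(raw='branch:master'):
        rev_type, rev = raw.split(':', 1)
        return [rev_type, rev]              # -> ['branch', 'master']
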
    @hybrid_property
    def locked(self):
        if self._locked:
            user_id, timelocked, reason = self._locked.split(':')
            lock_values = int(user_id), timelocked, reason
        else:
            lock_values = [None, None, None]
        return lock_values

    @locked.setter
    def locked(self, val):
        if val and isinstance(val, (list, tuple)):
            self._locked = ':'.join(map(str, val))
        else:
            self._locked = None

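    # Editor's illustrative sketch, not part of the original db.py: the lock
    # marker is stored as 'user_id:unix_timestamp:reason' and unpacked back into
    # (int(user_id), timestamp, reason) by the `locked` getter. Hypothetical helper.
    @staticmethod
    def _example_lock_marker(user_id=2, reason='lock_api'):
        import time
        raw = ':'.join(map(str, [user_id, time.time(), reason]))  # what the setter stores
        uid, timelocked, lock_reason = raw.split(':')             # what the getter reads
        return int(uid), timelocked, lock_reason
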
    @classmethod
    def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
        from rhodecode.lib.vcs.backends.base import EmptyCommit
        dummy = EmptyCommit().__json__()
        if not changeset_cache_raw:
            dummy['source_repo_id'] = repo_id
            return json.loads(json.dumps(dummy))

        try:
            return json.loads(changeset_cache_raw)
        except TypeError:
            return dummy
        except Exception:
            log.error(traceback.format_exc())
            return dummy

    @hybrid_property
    def changeset_cache(self):
        return self._load_changeset_cache(self.repo_id, self._changeset_cache)

    @changeset_cache.setter
    def changeset_cache(self, val):
        try:
            self._changeset_cache = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())

    @hybrid_property
    def repo_name(self):
        return self._repo_name

    @repo_name.setter
    def repo_name(self, value):
        self._repo_name = value
        self.repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()

    @classmethod
    def normalize_repo_name(cls, repo_name):
        """
        Normalizes an OS-specific repo_name to the format stored internally
        in the database, using URL_SEP

        :param cls:
        :param repo_name:
        """
        return cls.NAME_SEP.join(repo_name.split(os.sep))

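    # Editor's illustrative sketch, not part of the original db.py: repo names
    # are stored with URL_SEP ('/') regardless of the OS path separator, and the
    # repo_name setter keeps a sha1 hash of the stored name. Hypothetical helper.
    @staticmethod
    def _example_repo_name_storage(repo_name='group\\repo'):
        import hashlib
        normalized = '/'.join(repo_name.split('\\'))  # as if os.sep were '\\'
        name_hash = hashlib.sha1(normalized.encode('utf8')).hexdigest()
        return normalized, name_hash                  # -> ('group/repo', '<40-char sha1>')
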
1870 @classmethod
1871 @classmethod
1871 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1872 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1872 session = Session()
1873 session = Session()
1873 q = session.query(cls).filter(cls.repo_name == repo_name)
1874 q = session.query(cls).filter(cls.repo_name == repo_name)
1874
1875
1875 if cache:
1876 if cache:
1876 if identity_cache:
1877 if identity_cache:
1877 val = cls.identity_cache(session, 'repo_name', repo_name)
1878 val = cls.identity_cache(session, 'repo_name', repo_name)
1878 if val:
1879 if val:
1879 return val
1880 return val
1880 else:
1881 else:
1881 cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
1882 cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
1882 q = q.options(
1883 q = q.options(
1883 FromCache("sql_cache_short", cache_key))
1884 FromCache("sql_cache_short", cache_key))
1884
1885
1885 return q.scalar()
1886 return q.scalar()
1886
1887
1887 @classmethod
1888 @classmethod
1888 def get_by_id_or_repo_name(cls, repoid):
1889 def get_by_id_or_repo_name(cls, repoid):
1889 if isinstance(repoid, (int, long)):
1890 if isinstance(repoid, (int, long)):
1890 try:
1891 try:
1891 repo = cls.get(repoid)
1892 repo = cls.get(repoid)
1892 except ValueError:
1893 except ValueError:
1893 repo = None
1894 repo = None
1894 else:
1895 else:
1895 repo = cls.get_by_repo_name(repoid)
1896 repo = cls.get_by_repo_name(repoid)
1896 return repo
1897 return repo
1897
1898
1898 @classmethod
1899 @classmethod
1899 def get_by_full_path(cls, repo_full_path):
1900 def get_by_full_path(cls, repo_full_path):
1900 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1901 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1901 repo_name = cls.normalize_repo_name(repo_name)
1902 repo_name = cls.normalize_repo_name(repo_name)
1902 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1903 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1903
1904
1904 @classmethod
1905 @classmethod
1905 def get_repo_forks(cls, repo_id):
1906 def get_repo_forks(cls, repo_id):
1906 return cls.query().filter(Repository.fork_id == repo_id)
1907 return cls.query().filter(Repository.fork_id == repo_id)
1907
1908
1908 @classmethod
1909 @classmethod
1909 def base_path(cls):
1910 def base_path(cls):
1910 """
1911 """
1911 Returns base path when all repos are stored
1912 Returns base path when all repos are stored
1912
1913
1913 :param cls:
1914 :param cls:
1914 """
1915 """
1915 q = Session().query(RhodeCodeUi)\
1916 q = Session().query(RhodeCodeUi)\
1916 .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
1917 .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
1917 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1918 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1918 return q.one().ui_value
1919 return q.one().ui_value
1919
1920
1920 @classmethod
1921 @classmethod
1921 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1922 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1922 case_insensitive=True, archived=False):
1923 case_insensitive=True, archived=False):
1923 q = Repository.query()
1924 q = Repository.query()
1924
1925
1925 if not archived:
1926 if not archived:
1926 q = q.filter(Repository.archived.isnot(true()))
1927 q = q.filter(Repository.archived.isnot(true()))
1927
1928
1928 if not isinstance(user_id, Optional):
1929 if not isinstance(user_id, Optional):
1929 q = q.filter(Repository.user_id == user_id)
1930 q = q.filter(Repository.user_id == user_id)
1930
1931
1931 if not isinstance(group_id, Optional):
1932 if not isinstance(group_id, Optional):
1932 q = q.filter(Repository.group_id == group_id)
1933 q = q.filter(Repository.group_id == group_id)
1933
1934
1934 if case_insensitive:
1935 if case_insensitive:
1935 q = q.order_by(func.lower(Repository.repo_name))
1936 q = q.order_by(func.lower(Repository.repo_name))
1936 else:
1937 else:
1937 q = q.order_by(Repository.repo_name)
1938 q = q.order_by(Repository.repo_name)
1938
1939
1939 return q.all()
1940 return q.all()

    @property
    def repo_uid(self):
        return '_{}'.format(self.repo_id)

    @property
    def forks(self):
        """
        Return forks of this repo
        """
        return Repository.get_repo_forks(self.repo_id)

    @property
    def parent(self):
        """
        Returns fork parent
        """
        return self.fork

    @property
    def just_name(self):
        return self.repo_name.split(self.NAME_SEP)[-1]

    @property
    def groups_with_parents(self):
        groups = []
        if self.group is None:
            return groups

        cur_gr = self.group
        groups.insert(0, cur_gr)
        while 1:
            gr = getattr(cur_gr, 'parent_group', None)
            cur_gr = cur_gr.parent_group
            if gr is None:
                break
            groups.insert(0, gr)

        return groups

    @property
    def groups_and_repo(self):
        return self.groups_with_parents, self

    @LazyProperty
    def repo_path(self):
        """
        Returns the full base path for this repository, i.e. where it
        actually exists on the filesystem
        """
        q = Session().query(RhodeCodeUi).filter(
            RhodeCodeUi.ui_key == self.NAME_SEP)
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return q.one().ui_value

    @property
    def repo_full_path(self):
        p = [self.repo_path]
        # we need to split the name by / since this is how we store the
        # names in the database, but that eventually needs to be converted
        # into a valid system path
        p += self.repo_name.split(self.NAME_SEP)
        return os.path.join(*map(safe_unicode, p))

    @property
    def cache_keys(self):
        """
        Returns associated cache keys for that repo
        """
        invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
            repo_id=self.repo_id)
        return CacheKey.query()\
            .filter(CacheKey.cache_args == invalidation_namespace)\
            .order_by(CacheKey.cache_key)\
            .all()

    @property
    def cached_diffs_relative_dir(self):
        """
        Return the path of cached diffs relative to the repository store,
        used for safe display to users, who shouldn't know the absolute
        store path
        """
        return os.path.join(
            os.path.dirname(self.repo_name),
            self.cached_diffs_dir.split(os.path.sep)[-1])

    @property
    def cached_diffs_dir(self):
        path = self.repo_full_path
        return os.path.join(
            os.path.dirname(path),
            '.__shadow_diff_cache_repo_{}'.format(self.repo_id))

    def cached_diffs(self):
        diff_cache_dir = self.cached_diffs_dir
        if os.path.isdir(diff_cache_dir):
            return os.listdir(diff_cache_dir)
        return []

    def shadow_repos(self):
        shadow_repos_pattern = '.__shadow_repo_{}'.format(self.repo_id)
        return [
            x for x in os.listdir(os.path.dirname(self.repo_full_path))
            if x.startswith(shadow_repos_pattern)]

    def get_new_name(self, repo_name):
        """
        returns new full repository name based on assigned group and new
        repo_name

        :param repo_name:
        """
        path_prefix = self.group.full_path_splitted if self.group else []
        return self.NAME_SEP.join(path_prefix + [repo_name])

    @property
    def _config(self):
        """
        Returns db based config object.
        """
        from rhodecode.lib.utils import make_db_config
        return make_db_config(clear_session=False, repo=self)

    def permissions(self, with_admins=True, with_owner=True,
                    expand_from_user_groups=False):
        """
        Permissions for repositories
        """
        _admin_perm = 'repository.admin'

        owner_row = []
        if with_owner:
            usr = AttributeDict(self.user.get_dict())
            usr.owner_row = True
            usr.permission = _admin_perm
            usr.permission_id = None
            owner_row.append(usr)

        super_admin_ids = []
        super_admin_rows = []
        if with_admins:
            for usr in User.get_all_super_admins():
                super_admin_ids.append(usr.user_id)
                # if this admin is also owner, don't double the record
                if usr.user_id == owner_row[0].user_id:
                    owner_row[0].admin_row = True
                else:
                    usr = AttributeDict(usr.get_dict())
                    usr.admin_row = True
                    usr.permission = _admin_perm
                    usr.permission_id = None
                    super_admin_rows.append(usr)

        q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
        q = q.options(joinedload(UserRepoToPerm.repository),
                      joinedload(UserRepoToPerm.user),
                      joinedload(UserRepoToPerm.permission),)

        # get owners and admins and permissions. We do a trick of re-writing
        # objects from sqlalchemy to named-tuples because the sqlalchemy
        # session has a global reference and changing one object propagates
        # to all others. This means if an admin is also an owner, an
        # admin_row change would propagate to both objects.
        perm_rows = []
        for _usr in q.all():
            usr = AttributeDict(_usr.user.get_dict())
            # if this user is also owner/admin, mark as duplicate record
            if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
                usr.duplicate_perm = True
            # also check if this permission is maybe used by branch_permissions
            if _usr.branch_perm_entry:
                usr.branch_rules = [x.branch_rule_id for x in _usr.branch_perm_entry]

            usr.permission = _usr.permission.permission_name
            usr.permission_id = _usr.repo_to_perm_id
            perm_rows.append(usr)

        # filter the perm rows by 'default' first and then sort them by
        # admin,write,read,none permissions sorted again alphabetically in
        # each group
        perm_rows = sorted(perm_rows, key=display_user_sort)

        user_groups_rows = []
        if expand_from_user_groups:
            for ug in self.permission_user_groups(with_members=True):
                for user_data in ug.members:
                    user_groups_rows.append(user_data)

        return super_admin_rows + owner_row + perm_rows + user_groups_rows
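    # NOTE: illustrative sketch only (not part of the original code) of how
    # the combined rows above are typically consumed; `my_repo` is a
    # hypothetical Repository instance:
    #
    #   for row in my_repo.permissions(with_admins=True, with_owner=True):
    #       print(row.username, row.permission)  # e.g. 'repository.admin'
    #
    # each row is an AttributeDict built from User.get_dict(), optionally
    # flagged with owner_row / admin_row / duplicate_perm as set above.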

    def permission_user_groups(self, with_members=True):
        q = UserGroupRepoToPerm.query()\
            .filter(UserGroupRepoToPerm.repository == self)
        q = q.options(joinedload(UserGroupRepoToPerm.repository),
                      joinedload(UserGroupRepoToPerm.users_group),
                      joinedload(UserGroupRepoToPerm.permission),)

        perm_rows = []
        for _user_group in q.all():
            entry = AttributeDict(_user_group.users_group.get_dict())
            entry.permission = _user_group.permission.permission_name
            if with_members:
                entry.members = [x.user.get_dict()
                                 for x in _user_group.users_group.members]
            perm_rows.append(entry)

        perm_rows = sorted(perm_rows, key=display_user_group_sort)
        return perm_rows

    def get_api_data(self, include_secrets=False):
        """
        Common function for generating repo api data

        :param include_secrets: See :meth:`User.get_api_data`.

        """
        # TODO: mikhail: Here there is an anti-pattern, we probably need to
        # move these methods to the models level.
        from rhodecode.model.settings import SettingsModel
        from rhodecode.model.repo import RepoModel

        repo = self
        _user_id, _time, _reason = self.locked

        data = {
            'repo_id': repo.repo_id,
            'repo_name': repo.repo_name,
            'repo_type': repo.repo_type,
            'clone_uri': repo.clone_uri or '',
            'push_uri': repo.push_uri or '',
            'url': RepoModel().get_url(self),
            'private': repo.private,
            'created_on': repo.created_on,
            'description': repo.description_safe,
            'landing_rev': repo.landing_rev,
            'owner': repo.user.username,
            'fork_of': repo.fork.repo_name if repo.fork else None,
            'fork_of_id': repo.fork.repo_id if repo.fork else None,
            'enable_statistics': repo.enable_statistics,
            'enable_locking': repo.enable_locking,
            'enable_downloads': repo.enable_downloads,
            'last_changeset': repo.changeset_cache,
            'locked_by': User.get(_user_id).get_api_data(
                include_secrets=include_secrets) if _user_id else None,
            'locked_date': time_to_datetime(_time) if _time else None,
            'lock_reason': _reason if _reason else None,
        }

        # TODO: mikhail: should be per-repo settings here
        rc_config = SettingsModel().get_all_settings()
        repository_fields = str2bool(
            rc_config.get('rhodecode_repository_fields'))
        if repository_fields:
            for f in self.extra_fields:
                data[f.field_key_prefixed] = f.field_value

        return data

    @classmethod
    def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
        if not lock_time:
            lock_time = time.time()
        if not lock_reason:
            lock_reason = cls.LOCK_AUTOMATIC
        repo.locked = [user_id, lock_time, lock_reason]
        Session().add(repo)
        Session().commit()

    @classmethod
    def unlock(cls, repo):
        repo.locked = None
        Session().add(repo)
        Session().commit()

    @classmethod
    def getlock(cls, repo):
        return repo.locked

    def is_user_lock(self, user_id):
        if self.lock[0]:
            lock_user_id = safe_int(self.lock[0])
            user_id = safe_int(user_id)
            # both are ints, and they are equal
            return all([lock_user_id, user_id]) and lock_user_id == user_id

        return False

    def get_locking_state(self, action, user_id, only_when_enabled=True):
        """
        Checks locking on this repository. If locking is enabled and a lock is
        present, returns a tuple of make_lock, locked, locked_by.
        make_lock can have 3 states: None (do nothing), True (make lock) and
        False (release lock). This value is later propagated to hooks, which
        do the locking. Think of it as signals passed to hooks about what to do.

        """
        # TODO: johbo: This is part of the business logic and should be moved
        # into the RepositoryModel.

        if action not in ('push', 'pull'):
            raise ValueError("Invalid action value: %s" % repr(action))

        # defines if locked error should be thrown to user
        currently_locked = False
        # defines if new lock should be made, tri-state
        make_lock = None
        repo = self
        user = User.get(user_id)

        lock_info = repo.locked

        if repo and (repo.enable_locking or not only_when_enabled):
            if action == 'push':
                # check if it's already locked!; if it is, compare users
                locked_by_user_id = lock_info[0]
                if user.user_id == locked_by_user_id:
                    log.debug(
                        'Got `push` action from user %s, now unlocking', user)
                    # unlock if we have push from user who locked
                    make_lock = False
                else:
                    # we're not the same user who locked, ban with
                    # code defined in settings (default is 423 HTTP Locked) !
                    log.debug('Repo %s is currently locked by %s', repo, user)
                    currently_locked = True
            elif action == 'pull':
                # [0] user [1] date
                if lock_info[0] and lock_info[1]:
                    log.debug('Repo %s is currently locked by %s', repo, user)
                    currently_locked = True
                else:
                    log.debug('Setting lock on repo %s by %s', repo, user)
                    make_lock = True

        else:
            log.debug('Repository %s does not have locking enabled', repo)

        log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
                  make_lock, currently_locked, lock_info)

        from rhodecode.lib.auth import HasRepoPermissionAny
        perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
        if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
            # if we don't have at least write permission we cannot make a lock
            log.debug('lock state reset back to FALSE due to lack '
                      'of at least write permission')
            make_lock = False

        return make_lock, currently_locked, lock_info
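    # Illustrative usage sketch (not part of the original code); assumes a
    # hypothetical `repo` instance and the user_id of the pushing user:
    #
    #   make_lock, locked, locked_by = repo.get_locking_state('push', user_id)
    #   # make_lock is None  -> leave the lock state alone
    #   # make_lock is True  -> hooks should set a new lock
    #   # make_lock is False -> hooks should release the existing lock
    #   # locked is True     -> the operation should be rejected (HTTP 423)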

    @property
    def last_commit_cache_update_diff(self):
        return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)

    @classmethod
    def _load_commit_change(cls, last_commit_cache):
        from rhodecode.lib.vcs.utils.helpers import parse_datetime
        empty_date = datetime.datetime.fromtimestamp(0)
        date_latest = last_commit_cache.get('date', empty_date)
        try:
            return parse_datetime(date_latest)
        except Exception:
            return empty_date

    @property
    def last_commit_change(self):
        return self._load_commit_change(self.changeset_cache)

    @property
    def last_db_change(self):
        return self.updated_on

    @property
    def clone_uri_hidden(self):
        clone_uri = self.clone_uri
        if clone_uri:
            import urlobject
            url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
            if url_obj.password:
                clone_uri = url_obj.with_password('*****')
        return clone_uri

    @property
    def push_uri_hidden(self):
        push_uri = self.push_uri
        if push_uri:
            import urlobject
            url_obj = urlobject.URLObject(cleaned_uri(push_uri))
            if url_obj.password:
                push_uri = url_obj.with_password('*****')
        return push_uri
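    # Example (illustrative assumption): a clone/push URI stored as
    # 'https://user:secret@host/repo' is rendered by the two properties above
    # roughly as 'https://user:*****@host/repo', so the password is masked
    # wherever the *_hidden variants are used for display.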

    def clone_url(self, **override):
        from rhodecode.model.settings import SettingsModel

        uri_tmpl = None
        if 'with_id' in override:
            uri_tmpl = self.DEFAULT_CLONE_URI_ID
            del override['with_id']

        if 'uri_tmpl' in override:
            uri_tmpl = override['uri_tmpl']
            del override['uri_tmpl']

        ssh = False
        if 'ssh' in override:
            ssh = True
            del override['ssh']

        # we didn't override our tmpl from **overrides
        request = get_current_request()
        if not uri_tmpl:
            if hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'):
                rc_config = request.call_context.rc_config
            else:
                rc_config = SettingsModel().get_all_settings(cache=True)

            if ssh:
                uri_tmpl = rc_config.get(
                    'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH

            else:
                uri_tmpl = rc_config.get(
                    'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI

        return get_clone_url(request=request,
                             uri_tmpl=uri_tmpl,
                             repo_name=self.repo_name,
                             repo_id=self.repo_id,
                             repo_type=self.repo_type,
                             **override)
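    # Illustrative usage sketch (assumptions, not part of the original code):
    #
    #   repo.clone_url()              # resolved from the configured HTTP template
    #   repo.clone_url(ssh=True)      # resolved from the SSH clone URI template
    #   repo.clone_url(with_id=True)  # template that addresses the repo by id
    #
    # any other keyword arguments are passed through to get_clone_url().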

    def set_state(self, state):
        self.repo_state = state
        Session().add(self)
    #==========================================================================
    # SCM PROPERTIES
    #==========================================================================

    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, maybe_unreachable=False):
        return get_commit_safe(
            self.scm_instance(), commit_id, commit_idx, pre_load=pre_load,
            maybe_unreachable=maybe_unreachable)

    def get_changeset(self, rev=None, pre_load=None):
        warnings.warn("Use get_commit", DeprecationWarning)
        commit_id = None
        commit_idx = None
        if isinstance(rev, compat.string_types):
            commit_id = rev
        else:
            commit_idx = rev
        return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
                               pre_load=pre_load)

    def get_landing_commit(self):
        """
        Returns landing commit, or if that doesn't exist returns the tip
        """
        _rev_type, _rev = self.landing_rev
        commit = self.get_commit(_rev)
        if isinstance(commit, EmptyCommit):
            return self.get_commit()
        return commit

    def flush_commit_cache(self):
        self.update_commit_cache(cs_cache={'raw_id': '0'})
        self.update_commit_cache()

    def update_commit_cache(self, cs_cache=None, config=None):
        """
        Update cache of last commit for repository
        cache_keys should be::

            source_repo_id
            short_id
            raw_id
            revision
            parents
            message
            date
            author
            updated_on

        """
        from rhodecode.lib.vcs.backends.base import BaseChangeset
        from rhodecode.lib.vcs.utils.helpers import parse_datetime
        empty_date = datetime.datetime.fromtimestamp(0)

        if cs_cache is None:
            # use no-cache version here
            try:
                scm_repo = self.scm_instance(cache=False, config=config)
            except VCSError:
                scm_repo = None
            empty = scm_repo is None or scm_repo.is_empty()

            if not empty:
                cs_cache = scm_repo.get_commit(
                    pre_load=["author", "date", "message", "parents", "branch"])
            else:
                cs_cache = EmptyCommit()

        if isinstance(cs_cache, BaseChangeset):
            cs_cache = cs_cache.__json__()

        def is_outdated(new_cs_cache):
            if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
                    new_cs_cache['revision'] != self.changeset_cache['revision']):
                return True
            return False

        # check if we have maybe already latest cached revision
        if is_outdated(cs_cache) or not self.changeset_cache:
            _current_datetime = datetime.datetime.utcnow()
            last_change = cs_cache.get('date') or _current_datetime
            # we check if last update is newer than the new value;
            # if yes, we use the current timestamp instead. Imagine you get an
            # old commit pushed 1y ago; we'd set the last update 1y ago.
            last_change_timestamp = datetime_to_time(last_change)
            current_timestamp = datetime_to_time(last_change)
            if last_change_timestamp > current_timestamp and not empty:
                cs_cache['date'] = _current_datetime

            _date_latest = parse_datetime(cs_cache.get('date') or empty_date)
            cs_cache['updated_on'] = time.time()
            self.changeset_cache = cs_cache
            self.updated_on = last_change
            Session().add(self)
            Session().commit()

        else:
            if empty:
                cs_cache = EmptyCommit().__json__()
            else:
                cs_cache = self.changeset_cache

            _date_latest = parse_datetime(cs_cache.get('date') or empty_date)

            cs_cache['updated_on'] = time.time()
            self.changeset_cache = cs_cache
            self.updated_on = _date_latest
            Session().add(self)
            Session().commit()

        log.debug('updated repo `%s` with new commit cache %s, and last update_date: %s',
                  self.repo_name, cs_cache, _date_latest)
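    # NOTE (added summary, not original code): update_commit_cache() keeps the
    # denormalized `changeset_cache` JSON and `updated_on` column in sync with
    # the repository's latest commit. flush_commit_cache() above calls it
    # twice: first with a dummy cs_cache to invalidate the stored value, then
    # without arguments to recompute it from the vcs backend. Callers outside
    # this file (presumably push hooks) may also pass a pre-computed cs_cache
    # dict -- that part is an assumption, not something this file shows.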

    @property
    def tip(self):
        return self.get_commit('tip')

    @property
    def author(self):
        return self.tip.author

    @property
    def last_change(self):
        return self.scm_instance().last_change

    def get_comments(self, revisions=None):
        """
        Returns comments for this repository grouped by revisions

        :param revisions: filter query by revisions only
        """
        cmts = ChangesetComment.query()\
            .filter(ChangesetComment.repo == self)
        if revisions:
            cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
        grouped = collections.defaultdict(list)
        for cmt in cmts.all():
            grouped[cmt.revision].append(cmt)
        return grouped

    def statuses(self, revisions=None):
        """
        Returns statuses for this repository

        :param revisions: list of revisions to get statuses for
        """
        statuses = ChangesetStatus.query()\
            .filter(ChangesetStatus.repo == self)\
            .filter(ChangesetStatus.version == 0)

        if revisions:
            # Try doing the filtering in chunks to avoid hitting limits
            size = 500
            status_results = []
            for chunk in xrange(0, len(revisions), size):
                status_results += statuses.filter(
                    ChangesetStatus.revision.in_(
                        revisions[chunk: chunk+size])
                ).all()
        else:
            status_results = statuses.all()

        grouped = {}

        # maybe we have open new pullrequest without a status?
        stat = ChangesetStatus.STATUS_UNDER_REVIEW
        status_lbl = ChangesetStatus.get_status_lbl(stat)
        for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
            for rev in pr.revisions:
                pr_id = pr.pull_request_id
                pr_repo = pr.target_repo.repo_name
                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]

        for stat in status_results:
            pr_id = pr_repo = None
            if stat.pull_request:
                pr_id = stat.pull_request.pull_request_id
                pr_repo = stat.pull_request.target_repo.repo_name
            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
                                      pr_id, pr_repo]
        return grouped

    # ==========================================================================
    # SCM CACHE INSTANCE
    # ==========================================================================

    def scm_instance(self, **kwargs):
        import rhodecode

        # Passing a config will not hit the cache; currently this is only
        # used for repo2dbmapper
        config = kwargs.pop('config', None)
        cache = kwargs.pop('cache', None)
        vcs_full_cache = kwargs.pop('vcs_full_cache', None)
        if vcs_full_cache is not None:
            # allows overriding the global config
            full_cache = vcs_full_cache
        else:
            full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
        # if cache is NOT defined use the global default, else we have full
        # control over cache behaviour
        if cache is None and full_cache and not config:
            log.debug('Initializing pure cached instance for %s', self.repo_path)
            return self._get_instance_cached()

        # cache here is sent to the "vcs server"
        return self._get_instance(cache=bool(cache), config=config)

    def _get_instance_cached(self):
        from rhodecode.lib import rc_cache

        cache_namespace_uid = 'cache_repo_instance.{}'.format(self.repo_id)
        invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
            repo_id=self.repo_id)
        region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)

        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
        def get_instance_cached(repo_id, context_id, _cache_state_uid):
            return self._get_instance(repo_state_uid=_cache_state_uid)

        # we must use a thread scoped cache here,
        # because each gevent thread needs its own non-shared connection and cache.
        # we also alter `args` so the cache key is individual for every green thread.
        inv_context_manager = rc_cache.InvalidationContext(
            uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace,
            thread_scoped=True)
        with inv_context_manager as invalidation_context:
            cache_state_uid = invalidation_context.cache_data['cache_state_uid']
            args = (self.repo_id, inv_context_manager.cache_key, cache_state_uid)

            # re-compute and store cache if we get invalidate signal
            if invalidation_context.should_invalidate():
                instance = get_instance_cached.refresh(*args)
            else:
                instance = get_instance_cached(*args)

            log.debug('Repo instance fetched in %.4fs', inv_context_manager.compute_time)
            return instance
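    # NOTE (added summary, not original code): _get_instance_cached() wraps
    # _get_instance() in the 'cache_repo_longterm' cache region keyed per
    # repository, and pairs it with an InvalidationContext so that a bump of
    # the repo's invalidation namespace (presumably done elsewhere when the
    # repo changes) forces a refresh of the cached vcs instance instead of
    # serving a stale one.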

    def _get_instance(self, cache=True, config=None, repo_state_uid=None):
        log.debug('Initializing %s instance `%s` with cache flag set to: %s',
                  self.repo_type, self.repo_path, cache)
        config = config or self._config
        custom_wire = {
            'cache': cache,  # controls the vcs.remote cache
            'repo_state_uid': repo_state_uid
        }
        repo = get_vcs_instance(
            repo_path=safe_str(self.repo_full_path),
            config=config,
            with_wire=custom_wire,
            create=False,
            _vcs_alias=self.repo_type)
        if repo is not None:
            repo.count()  # cache rebuild
        return repo

    def get_shadow_repository_path(self, workspace_id):
        from rhodecode.lib.vcs.backends.base import BaseRepository
        shadow_repo_path = BaseRepository._get_shadow_repository_path(
            self.repo_full_path, self.repo_id, workspace_id)
        return shadow_repo_path

    def __json__(self):
        return {'landing_rev': self.landing_rev}

    def get_dict(self):

        # Since we transformed `repo_name` to a hybrid property, we need to
        # keep compatibility with the code which uses `repo_name` field.

        result = super(Repository, self).get_dict()
        result['repo_name'] = result.pop('_repo_name', None)
        return result


class RepoGroup(Base, BaseModel):
    __tablename__ = 'groups'
    __table_args__ = (
        UniqueConstraint('group_name', 'group_parent_id'),
        base_table_args,
    )
    __mapper_args__ = {'order_by': 'group_name'}

    CHOICES_SEPARATOR = '/'  # used to generate select2 choices for nested groups

    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    _group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
    group_name_hash = Column("repo_group_name_hash", String(1024), nullable=False, unique=False)
    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
    group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
    personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
    _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True)  # JSON data

    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
    parent_group = relationship('RepoGroup', remote_side=group_id)
    user = relationship('User')
    integrations = relationship('Integration', cascade="all, delete-orphan")

    # no cascade, set NULL
    scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_group_id==RepoGroup.group_id')

    def __init__(self, group_name='', parent_group=None):
        self.group_name = group_name
        self.parent_group = parent_group

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (
            self.__class__.__name__, self.group_id, self.group_name)

    @hybrid_property
    def group_name(self):
        return self._group_name

    @group_name.setter
    def group_name(self, value):
        self._group_name = value
        self.group_name_hash = self.hash_repo_group_name(value)

    @classmethod
    def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
        from rhodecode.lib.vcs.backends.base import EmptyCommit
        dummy = EmptyCommit().__json__()
        if not changeset_cache_raw:
            dummy['source_repo_id'] = repo_id
            return json.loads(json.dumps(dummy))

        try:
            return json.loads(changeset_cache_raw)
        except TypeError:
            return dummy
        except Exception:
            log.error(traceback.format_exc())
            return dummy

    @hybrid_property
    def changeset_cache(self):
        return self._load_changeset_cache('', self._changeset_cache)

    @changeset_cache.setter
    def changeset_cache(self, val):
        try:
            self._changeset_cache = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())

    @validates('group_parent_id')
    def validate_group_parent_id(self, key, val):
        """
        Check cycle references for a parent group to self
        """
        if self.group_id and val:
            assert val != self.group_id

        return val

    @hybrid_property
    def description_safe(self):
        from rhodecode.lib import helpers as h
        return h.escape(self.group_description)

    @classmethod
    def hash_repo_group_name(cls, repo_group_name):
        val = remove_formatting(repo_group_name)
        val = safe_str(val).lower()
        chars = []
        for c in val:
            if c not in string.ascii_letters:
                c = str(ord(c))
            chars.append(c)

        return ''.join(chars)
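    # Example (illustrative): every character outside a-zA-Z is replaced by its
    # ordinal after lowercasing, so a plain-ASCII name like 'Foo/Bar-1' hashes
    # to roughly 'foo47bar4549' ('/' -> 47, '-' -> 45, '1' -> 49); the exact
    # result also depends on remove_formatting().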
2752
2753
2753 @classmethod
2754 @classmethod
2754 def _generate_choice(cls, repo_group):
2755 def _generate_choice(cls, repo_group):
2755 from webhelpers2.html import literal as _literal
2756 from webhelpers2.html import literal as _literal
2756 _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
2757 _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
2757 return repo_group.group_id, _name(repo_group.full_path_splitted)
2758 return repo_group.group_id, _name(repo_group.full_path_splitted)
2758
2759
2759 @classmethod
2760 @classmethod
2760 def groups_choices(cls, groups=None, show_empty_group=True):
2761 def groups_choices(cls, groups=None, show_empty_group=True):
2761 if not groups:
2762 if not groups:
2762 groups = cls.query().all()
2763 groups = cls.query().all()
2763
2764
2764 repo_groups = []
2765 repo_groups = []
2765 if show_empty_group:
2766 if show_empty_group:
2766 repo_groups = [(-1, u'-- %s --' % _('No parent'))]
2767 repo_groups = [(-1, u'-- %s --' % _('No parent'))]
2767
2768
2768 repo_groups.extend([cls._generate_choice(x) for x in groups])
2769 repo_groups.extend([cls._generate_choice(x) for x in groups])
2769
2770
2770 repo_groups = sorted(
2771 repo_groups = sorted(
2771 repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
2772 repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
2772 return repo_groups
2773 return repo_groups
2773
2774
2774 @classmethod
2775 @classmethod
2775 def url_sep(cls):
2776 def url_sep(cls):
2776 return URL_SEP
2777 return URL_SEP
2777
2778
2778 @classmethod
2779 @classmethod
2779 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
2780 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
2780 if case_insensitive:
2781 if case_insensitive:
2781 gr = cls.query().filter(func.lower(cls.group_name)
2782 gr = cls.query().filter(func.lower(cls.group_name)
2782 == func.lower(group_name))
2783 == func.lower(group_name))
2783 else:
2784 else:
2784 gr = cls.query().filter(cls.group_name == group_name)
2785 gr = cls.query().filter(cls.group_name == group_name)
2785 if cache:
2786 if cache:
2786 name_key = _hash_key(group_name)
2787 name_key = _hash_key(group_name)
2787 gr = gr.options(
2788 gr = gr.options(
2788 FromCache("sql_cache_short", "get_group_%s" % name_key))
2789 FromCache("sql_cache_short", "get_group_%s" % name_key))
2789 return gr.scalar()
2790 return gr.scalar()
2790
2791
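Illustrative lookup via the classmethod above (the group name is hypothetical):

# case-insensitive lookup; with cache=True the result would be cached in
# the "sql_cache_short" region under a hash of the group name
gr = RepoGroup.get_by_group_name('projects/backend', case_insensitive=True)
if gr is not None:
    print(gr.group_id, gr.group_name)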
2791 @classmethod
2792 @classmethod
2792 def get_user_personal_repo_group(cls, user_id):
2793 def get_user_personal_repo_group(cls, user_id):
2793 user = User.get(user_id)
2794 user = User.get(user_id)
2794 if user.username == User.DEFAULT_USER:
2795 if user.username == User.DEFAULT_USER:
2795 return None
2796 return None
2796
2797
2797 return cls.query()\
2798 return cls.query()\
2798 .filter(cls.personal == true()) \
2799 .filter(cls.personal == true()) \
2799 .filter(cls.user == user) \
2800 .filter(cls.user == user) \
2800 .order_by(cls.group_id.asc()) \
2801 .order_by(cls.group_id.asc()) \
2801 .first()
2802 .first()
2802
2803
2803 @classmethod
2804 @classmethod
2804 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2805 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2805 case_insensitive=True):
2806 case_insensitive=True):
2806 q = RepoGroup.query()
2807 q = RepoGroup.query()
2807
2808
2808 if not isinstance(user_id, Optional):
2809 if not isinstance(user_id, Optional):
2809 q = q.filter(RepoGroup.user_id == user_id)
2810 q = q.filter(RepoGroup.user_id == user_id)
2810
2811
2811 if not isinstance(group_id, Optional):
2812 if not isinstance(group_id, Optional):
2812 q = q.filter(RepoGroup.group_parent_id == group_id)
2813 q = q.filter(RepoGroup.group_parent_id == group_id)
2813
2814
2814 if case_insensitive:
2815 if case_insensitive:
2815 q = q.order_by(func.lower(RepoGroup.group_name))
2816 q = q.order_by(func.lower(RepoGroup.group_name))
2816 else:
2817 else:
2817 q = q.order_by(RepoGroup.group_name)
2818 q = q.order_by(RepoGroup.group_name)
2818 return q.all()
2819 return q.all()
2819
2820
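The Optional(...) defaults above act as sentinels: a filter is applied only when the caller passes a concrete value. A sketch of the calling convention (the IDs are made up):

RepoGroup.get_all_repo_groups()              # every group, sorted case-insensitively
RepoGroup.get_all_repo_groups(user_id=42)    # only groups owned by user 42
RepoGroup.get_all_repo_groups(group_id=7)    # only direct children of group 7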
2820 @property
2821 @property
2821 def parents(self, parents_recursion_limit=10):
2822 def parents(self, parents_recursion_limit=10):
2822 groups = []
2823 groups = []
2823 if self.parent_group is None:
2824 if self.parent_group is None:
2824 return groups
2825 return groups
2825 cur_gr = self.parent_group
2826 cur_gr = self.parent_group
2826 groups.insert(0, cur_gr)
2827 groups.insert(0, cur_gr)
2827 cnt = 0
2828 cnt = 0
2828 while 1:
2829 while 1:
2829 cnt += 1
2830 cnt += 1
2830 gr = getattr(cur_gr, 'parent_group', None)
2831 gr = getattr(cur_gr, 'parent_group', None)
2831 cur_gr = cur_gr.parent_group
2832 cur_gr = cur_gr.parent_group
2832 if gr is None:
2833 if gr is None:
2833 break
2834 break
2834 if cnt == parents_recursion_limit:
2835 if cnt == parents_recursion_limit:
2835 # this will prevent accidental infinite loops
2836 # this will prevent accidental infinite loops
2836 log.error('more than %s parents found for group %s, stopping '
2837 log.error('more than %s parents found for group %s, stopping '
2837 'recursive parent fetching', parents_recursion_limit, self)
2838 'recursive parent fetching', parents_recursion_limit, self)
2838 break
2839 break
2839
2840
2840 groups.insert(0, gr)
2841 groups.insert(0, gr)
2841 return groups
2842 return groups
2842
2843
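Since `parents` is a @property it is always read as plain attribute access, so parents_recursion_limit effectively stays at its default of 10. An illustrative traversal (repo_group is a hypothetical instance):

# the list is ordered from the outermost ancestor down to the direct parent
for ancestor in repo_group.parents:
    print(ancestor.group_name)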
2843 @property
2844 @property
2844 def last_commit_cache_update_diff(self):
2845 def last_commit_cache_update_diff(self):
2845 return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)
2846 return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)
2846
2847
2847 @classmethod
2848 @classmethod
2848 def _load_commit_change(cls, last_commit_cache):
2849 def _load_commit_change(cls, last_commit_cache):
2849 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2850 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2850 empty_date = datetime.datetime.fromtimestamp(0)
2851 empty_date = datetime.datetime.fromtimestamp(0)
2851 date_latest = last_commit_cache.get('date', empty_date)
2852 date_latest = last_commit_cache.get('date', empty_date)
2852 try:
2853 try:
2853 return parse_datetime(date_latest)
2854 return parse_datetime(date_latest)
2854 except Exception:
2855 except Exception:
2855 return empty_date
2856 return empty_date
2856
2857
2857 @property
2858 @property
2858 def last_commit_change(self):
2859 def last_commit_change(self):
2859 return self._load_commit_change(self.changeset_cache)
2860 return self._load_commit_change(self.changeset_cache)
2860
2861
2861 @property
2862 @property
2862 def last_db_change(self):
2863 def last_db_change(self):
2863 return self.updated_on
2864 return self.updated_on
2864
2865
2865 @property
2866 @property
2866 def children(self):
2867 def children(self):
2867 return RepoGroup.query().filter(RepoGroup.parent_group == self)
2868 return RepoGroup.query().filter(RepoGroup.parent_group == self)
2868
2869
2869 @property
2870 @property
2870 def name(self):
2871 def name(self):
2871 return self.group_name.split(RepoGroup.url_sep())[-1]
2872 return self.group_name.split(RepoGroup.url_sep())[-1]
2872
2873
2873 @property
2874 @property
2874 def full_path(self):
2875 def full_path(self):
2875 return self.group_name
2876 return self.group_name
2876
2877
2877 @property
2878 @property
2878 def full_path_splitted(self):
2879 def full_path_splitted(self):
2879 return self.group_name.split(RepoGroup.url_sep())
2880 return self.group_name.split(RepoGroup.url_sep())
2880
2881
2881 @property
2882 @property
2882 def repositories(self):
2883 def repositories(self):
2883 return Repository.query()\
2884 return Repository.query()\
2884 .filter(Repository.group == self)\
2885 .filter(Repository.group == self)\
2885 .order_by(Repository.repo_name)
2886 .order_by(Repository.repo_name)
2886
2887
2887 @property
2888 @property
2888 def repositories_recursive_count(self):
2889 def repositories_recursive_count(self):
2889 cnt = self.repositories.count()
2890 cnt = self.repositories.count()
2890
2891
2891 def children_count(group):
2892 def children_count(group):
2892 cnt = 0
2893 cnt = 0
2893 for child in group.children:
2894 for child in group.children:
2894 cnt += child.repositories.count()
2895 cnt += child.repositories.count()
2895 cnt += children_count(child)
2896 cnt += children_count(child)
2896 return cnt
2897 return cnt
2897
2898
2898 return cnt + children_count(self)
2899 return cnt + children_count(self)
2899
2900
2900 def _recursive_objects(self, include_repos=True, include_groups=True):
2901 def _recursive_objects(self, include_repos=True, include_groups=True):
2901 all_ = []
2902 all_ = []
2902
2903
2903 def _get_members(root_gr):
2904 def _get_members(root_gr):
2904 if include_repos:
2905 if include_repos:
2905 for r in root_gr.repositories:
2906 for r in root_gr.repositories:
2906 all_.append(r)
2907 all_.append(r)
2907 childs = root_gr.children.all()
2908 childs = root_gr.children.all()
2908 if childs:
2909 if childs:
2909 for gr in childs:
2910 for gr in childs:
2910 if include_groups:
2911 if include_groups:
2911 all_.append(gr)
2912 all_.append(gr)
2912 _get_members(gr)
2913 _get_members(gr)
2913
2914
2914 root_group = []
2915 root_group = []
2915 if include_groups:
2916 if include_groups:
2916 root_group = [self]
2917 root_group = [self]
2917
2918
2918 _get_members(self)
2919 _get_members(self)
2919 return root_group + all_
2920 return root_group + all_
2920
2921
2921 def recursive_groups_and_repos(self):
2922 def recursive_groups_and_repos(self):
2922 """
2923 """
2923 Recursively return all groups, together with the repositories in those groups
2924 Recursively return all groups, together with the repositories in those groups
2924 """
2925 """
2925 return self._recursive_objects()
2926 return self._recursive_objects()
2926
2927
2927 def recursive_groups(self):
2928 def recursive_groups(self):
2928 """
2929 """
2929 Returns all child groups of this group, including children of children
2930 Returns all child groups of this group, including children of children
2930 """
2931 """
2931 return self._recursive_objects(include_repos=False)
2932 return self._recursive_objects(include_repos=False)
2932
2933
2933 def recursive_repos(self):
2934 def recursive_repos(self):
2934 """
2935 """
2935 Returns all repositories of this group and of all its child groups
2936 Returns all repositories of this group and of all its child groups
2936 """
2937 """
2937 return self._recursive_objects(include_groups=False)
2938 return self._recursive_objects(include_groups=False)
2938
2939
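The three recursive_* helpers above all delegate to _recursive_objects with different include flags; a short sketch of how they relate (repo_group is a hypothetical instance):

everything = repo_group.recursive_groups_and_repos()  # self, descendant groups and their repos
groups_only = repo_group.recursive_groups()           # self and all descendant groups
repos_only = repo_group.recursive_repos()             # repos of self and of all descendants
# this should agree with the recursive count defined earlier in the class
assert repo_group.repositories_recursive_count == len(repos_only)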
2939 def get_new_name(self, group_name):
2940 def get_new_name(self, group_name):
2940 """
2941 """
2941 Returns the new full group name based on the parent group and the given name
2942 Returns the new full group name based on the parent group and the given name
2942
2943
2943 :param group_name:
2944 :param group_name:
2944 """
2945 """
2945 path_prefix = (self.parent_group.full_path_splitted if
2946 path_prefix = (self.parent_group.full_path_splitted if
2946 self.parent_group else [])
2947 self.parent_group else [])
2947 return RepoGroup.url_sep().join(path_prefix + [group_name])
2948 return RepoGroup.url_sep().join(path_prefix + [group_name])
2948
2949
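A small sketch of the renaming helper above (names are hypothetical; URL_SEP is assumed to be the usual '/' separator):

repo_group.get_new_name('tools')       # -> 'projects/tools' if the parent group is 'projects'
top_level_group.get_new_name('tools')  # -> 'tools' when there is no parent group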
2949 def update_commit_cache(self, config=None):
2950 def update_commit_cache(self, config=None):
2950 """
2951 """
2951 Update cache of last commit for newest repository inside this repository group.
2952 Update cache of last commit for newest repository inside this repository group.
2952 cache_keys should be::
2953 cache_keys should be::
2953
2954
2954 source_repo_id
2955 source_repo_id
2955 short_id
2956 short_id
2956 raw_id
2957 raw_id
2957 revision
2958 revision
2958 parents
2959 parents
2959 message
2960 message
2960 date
2961 date
2961 author
2962 author
2962
2963
2963 """
2964 """
2964 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2965 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2965 empty_date = datetime.datetime.fromtimestamp(0)
2966 empty_date = datetime.datetime.fromtimestamp(0)
2966
2967
2967 def repo_groups_and_repos(root_gr):
2968 def repo_groups_and_repos(root_gr):
2968 for _repo in root_gr.repositories:
2969 for _repo in root_gr.repositories:
2969 yield _repo
2970 yield _repo
2970 for child_group in root_gr.children.all():
2971 for child_group in root_gr.children.all():
2971 yield child_group
2972 yield child_group
2972
2973
2973 latest_repo_cs_cache = {}
2974 latest_repo_cs_cache = {}
2974 for obj in repo_groups_and_repos(self):
2975 for obj in repo_groups_and_repos(self):
2975 repo_cs_cache = obj.changeset_cache
2976 repo_cs_cache = obj.changeset_cache
2976 date_latest = latest_repo_cs_cache.get('date', empty_date)
2977 date_latest = latest_repo_cs_cache.get('date', empty_date)
2977 date_current = repo_cs_cache.get('date', empty_date)
2978 date_current = repo_cs_cache.get('date', empty_date)
2978 current_timestamp = datetime_to_time(parse_datetime(date_latest))
2979 current_timestamp = datetime_to_time(parse_datetime(date_latest))
2979 if current_timestamp < datetime_to_time(parse_datetime(date_current)):
2980 if current_timestamp < datetime_to_time(parse_datetime(date_current)):
2980 latest_repo_cs_cache = repo_cs_cache
2981 latest_repo_cs_cache = repo_cs_cache
2981 if hasattr(obj, 'repo_id'):
2982 if hasattr(obj, 'repo_id'):
2982 latest_repo_cs_cache['source_repo_id'] = obj.repo_id
2983 latest_repo_cs_cache['source_repo_id'] = obj.repo_id
2983 else:
2984 else:
2984 latest_repo_cs_cache['source_repo_id'] = repo_cs_cache.get('source_repo_id')
2985 latest_repo_cs_cache['source_repo_id'] = repo_cs_cache.get('source_repo_id')
2985
2986
2986 _date_latest = parse_datetime(latest_repo_cs_cache.get('date') or empty_date)
2987 _date_latest = parse_datetime(latest_repo_cs_cache.get('date') or empty_date)
2987
2988
2988 latest_repo_cs_cache['updated_on'] = time.time()
2989 latest_repo_cs_cache['updated_on'] = time.time()
2989 self.changeset_cache = latest_repo_cs_cache
2990 self.changeset_cache = latest_repo_cs_cache
2990 self.updated_on = _date_latest
2991 self.updated_on = _date_latest
2991 Session().add(self)
2992 Session().add(self)
2992 Session().commit()
2993 Session().commit()
2993
2994
2994 log.debug('updated repo group `%s` with new commit cache %s, and last update_date: %s',
2995 log.debug('updated repo group `%s` with new commit cache %s, and last update_date: %s',
2995 self.group_name, latest_repo_cs_cache, _date_latest)
2996 self.group_name, latest_repo_cs_cache, _date_latest)
2996
2997
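update_commit_cache() walks this group's repositories and its direct child groups (each child contributes its own aggregated changeset_cache) and keeps whichever entry carries the newest commit date. A plain-datetime sketch of that selection rule, with made-up values (the real code goes through parse_datetime and datetime_to_time):

import datetime

latest = {'date': datetime.datetime(2020, 6, 1, 12, 0), 'source_repo_id': 1}
candidate = {'date': datetime.datetime(2020, 7, 15, 9, 30), 'source_repo_id': 2}

# a candidate replaces the current winner only if its commit date is newer
if latest['date'] < candidate['date']:
    latest = candidate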
2997 def permissions(self, with_admins=True, with_owner=True,
2998 def permissions(self, with_admins=True, with_owner=True,
2998 expand_from_user_groups=False):
2999 expand_from_user_groups=False):
2999 """
3000 """
3000 Permissions for repository groups
3001 Permissions for repository groups
3001 """
3002 """
3002 _admin_perm = 'group.admin'
3003 _admin_perm = 'group.admin'
3003
3004
3004 owner_row = []
3005 owner_row = []
3005 if with_owner:
3006 if with_owner:
3006 usr = AttributeDict(self.user.get_dict())
3007 usr = AttributeDict(self.user.get_dict())
3007 usr.owner_row = True
3008 usr.owner_row = True
3008 usr.permission = _admin_perm
3009 usr.permission = _admin_perm
3009 owner_row.append(usr)
3010 owner_row.append(usr)
3010
3011
3011 super_admin_ids = []
3012 super_admin_ids = []
3012 super_admin_rows = []
3013 super_admin_rows = []
3013 if with_admins:
3014 if with_admins:
3014 for usr in User.get_all_super_admins():
3015 for usr in User.get_all_super_admins():
3015 super_admin_ids.append(usr.user_id)
3016 super_admin_ids.append(usr.user_id)
3016 # if this admin is also owner, don't double the record
3017 # if this admin is also owner, don't double the record
3017 if usr.user_id == owner_row[0].user_id:
3018 if usr.user_id == owner_row[0].user_id:
3018 owner_row[0].admin_row = True
3019 owner_row[0].admin_row = True
3019 else:
3020 else:
3020 usr = AttributeDict(usr.get_dict())
3021 usr = AttributeDict(usr.get_dict())
3021 usr.admin_row = True
3022 usr.admin_row = True
3022 usr.permission = _admin_perm
3023 usr.permission = _admin_perm
3023 super_admin_rows.append(usr)
3024 super_admin_rows.append(usr)
3024
3025
3025 q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
3026 q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
3026 q = q.options(joinedload(UserRepoGroupToPerm.group),
3027 q = q.options(joinedload(UserRepoGroupToPerm.group),
3027 joinedload(UserRepoGroupToPerm.user),
3028 joinedload(UserRepoGroupToPerm.user),
3028 joinedload(UserRepoGroupToPerm.permission),)
3029 joinedload(UserRepoGroupToPerm.permission),)
3029
3030
3030 # get owners, admins and their permissions. We rewrite the objects
3031 # get owners, admins and their permissions. We rewrite the objects
3031 # returned by sqlalchemy into AttributeDicts because the sqlalchemy
3032 # returned by sqlalchemy into AttributeDicts because the sqlalchemy
3032 # session keeps a global reference and changing one object would
3033 # session keeps a global reference and changing one object would
3033 # propagate to all others. Without this, marking an owner who is also
3034 # propagate to all others. Without this, marking an owner who is also
3034 # an admin with admin_row would change both records at once.
3035 # an admin with admin_row would change both records at once.
3035 perm_rows = []
3036 perm_rows = []
3036 for _usr in q.all():
3037 for _usr in q.all():
3037 usr = AttributeDict(_usr.user.get_dict())
3038 usr = AttributeDict(_usr.user.get_dict())
3038 # if this user is also owner/admin, mark as duplicate record
3039 # if this user is also owner/admin, mark as duplicate record
3039 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
3040 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
3040 usr.duplicate_perm = True
3041 usr.duplicate_perm = True
3041 usr.permission = _usr.permission.permission_name
3042 usr.permission = _usr.permission.permission_name
3042 perm_rows.append(usr)
3043 perm_rows.append(usr)
3043
3044
3044 # sort the perm rows: the 'default' user first, then by permission
3045 # sort the perm rows: the 'default' user first, then by permission
3045 # level (admin, write, read, none), and alphabetically within each
3046 # level (admin, write, read, none), and alphabetically within each
3046 # of those groups
3047 # of those groups
3047 perm_rows = sorted(perm_rows, key=display_user_sort)
3048 perm_rows = sorted(perm_rows, key=display_user_sort)
3048
3049
3049 user_groups_rows = []
3050 user_groups_rows = []
3050 if expand_from_user_groups:
3051 if expand_from_user_groups:
3051 for ug in self.permission_user_groups(with_members=True):
3052 for ug in self.permission_user_groups(with_members=True):
3052 for user_data in ug.members:
3053 for user_data in ug.members:
3053 user_groups_rows.append(user_data)
3054 user_groups_rows.append(user_data)
3054
3055
3055 return super_admin_rows + owner_row + perm_rows + user_groups_rows
3056 return super_admin_rows + owner_row + perm_rows + user_groups_rows
3056
3057
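Illustrative use of the permissions() report above (repo_group is a hypothetical, session-attached instance):

rows = repo_group.permissions(with_admins=True, with_owner=True)
for row in rows:
    # each row is an AttributeDict of user data plus a `permission` name;
    # owner and super-admin rows carry owner_row/admin_row flags, and a
    # direct grant duplicating them is marked with duplicate_perm
    print(row.username, row.permission)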
3057 def permission_user_groups(self, with_members=False):
3058 def permission_user_groups(self, with_members=False):
3058 q = UserGroupRepoGroupToPerm.query()\
3059 q = UserGroupRepoGroupToPerm.query()\
3059 .filter(UserGroupRepoGroupToPerm.group == self)
3060 .filter(UserGroupRepoGroupToPerm.group == self)
3060 q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
3061 q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
3061 joinedload(UserGroupRepoGroupToPerm.users_group),
3062 joinedload(UserGroupRepoGroupToPerm.users_group),
3062 joinedload(UserGroupRepoGroupToPerm.permission),)
3063 joinedload(UserGroupRepoGroupToPerm.permission),)
3063
3064
3064 perm_rows = []
3065 perm_rows = []
3065 for _user_group in q.all():
3066 for _user_group in q.all():
3066 entry = AttributeDict(_user_group.users_group.get_dict())
3067 entry = AttributeDict(_user_group.users_group.get_dict())
3067 entry.permission = _user_group.permission.permission_name
3068 entry.permission = _user_group.permission.permission_name
3068 if with_members:
3069 if with_members:
3069 entry.members = [x.user.get_dict()
3070 entry.members = [x.user.get_dict()
3070 for x in _user_group.users_group.members]
3071 for x in _user_group.users_group.members]
3071 perm_rows.append(entry)
3072 perm_rows.append(entry)
3072
3073
3073 perm_rows = sorted(perm_rows, key=display_user_group_sort)
3074 perm_rows = sorted(perm_rows, key=display_user_group_sort)
3074 return perm_rows
3075 return perm_rows
3075
3076
3076 def get_api_data(self):
3077 def get_api_data(self):
3077 """
3078 """
3078 Common function for generating api data
3079 Common function for generating api data
3079
3080
3080 """
3081 """
3081 group = self
3082 group = self
3082 data = {
3083 data = {
3083 'group_id': group.group_id,
3084 'group_id': group.group_id,
3084 'group_name': group.group_name,
3085 'group_name': group.group_name,
3085 'group_description': group.description_safe,
3086 'group_description': group.description_safe,
3086 'parent_group': group.parent_group.group_name if group.parent_group else None,
3087 'parent_group': group.parent_group.group_name if group.parent_group else None,
3087 'repositories': [x.repo_name for x in group.repositories],
3088 'repositories': [x.repo_name for x in group.repositories],
3088 'owner': group.user.username,
3089 'owner': group.user.username,
3089 }
3090 }
3090 return data
3091 return data
3091
3092
3092 def get_dict(self):
3093 def get_dict(self):
3093 # Since we transformed `group_name` to a hybrid property, we need to
3094 # Since we transformed `group_name` to a hybrid property, we need to
3094 # keep compatibility with the code which uses `group_name` field.
3095 # keep compatibility with the code which uses `group_name` field.
3095 result = super(RepoGroup, self).get_dict()
3096 result = super(RepoGroup, self).get_dict()
3096 result['group_name'] = result.pop('_group_name', None)
3097 result['group_name'] = result.pop('_group_name', None)
3097 return result
3098 return result
3098
3099
3099
3100
3100 class Permission(Base, BaseModel):
3101 class Permission(Base, BaseModel):
3101 __tablename__ = 'permissions'
3102 __tablename__ = 'permissions'
3102 __table_args__ = (
3103 __table_args__ = (
3103 Index('p_perm_name_idx', 'permission_name'),
3104 Index('p_perm_name_idx', 'permission_name'),
3104 base_table_args,
3105 base_table_args,
3105 )
3106 )
3106
3107
3107 PERMS = [
3108 PERMS = [
3108 ('hg.admin', _('RhodeCode Super Administrator')),
3109 ('hg.admin', _('RhodeCode Super Administrator')),
3109
3110
3110 ('repository.none', _('Repository no access')),
3111 ('repository.none', _('Repository no access')),
3111 ('repository.read', _('Repository read access')),
3112 ('repository.read', _('Repository read access')),
3112 ('repository.write', _('Repository write access')),
3113 ('repository.write', _('Repository write access')),
3113 ('repository.admin', _('Repository admin access')),
3114 ('repository.admin', _('Repository admin access')),
3114
3115
3115 ('group.none', _('Repository group no access')),
3116 ('group.none', _('Repository group no access')),
3116 ('group.read', _('Repository group read access')),
3117 ('group.read', _('Repository group read access')),
3117 ('group.write', _('Repository group write access')),
3118 ('group.write', _('Repository group write access')),
3118 ('group.admin', _('Repository group admin access')),
3119 ('group.admin', _('Repository group admin access')),
3119
3120
3120 ('usergroup.none', _('User group no access')),
3121 ('usergroup.none', _('User group no access')),
3121 ('usergroup.read', _('User group read access')),
3122 ('usergroup.read', _('User group read access')),
3122 ('usergroup.write', _('User group write access')),
3123 ('usergroup.write', _('User group write access')),
3123 ('usergroup.admin', _('User group admin access')),
3124 ('usergroup.admin', _('User group admin access')),
3124
3125
3125 ('branch.none', _('Branch no permissions')),
3126 ('branch.none', _('Branch no permissions')),
3126 ('branch.merge', _('Branch access by web merge')),
3127 ('branch.merge', _('Branch access by web merge')),
3127 ('branch.push', _('Branch access by push')),
3128 ('branch.push', _('Branch access by push')),
3128 ('branch.push_force', _('Branch access by push with force')),
3129 ('branch.push_force', _('Branch access by push with force')),
3129
3130
3130 ('hg.repogroup.create.false', _('Repository Group creation disabled')),
3131 ('hg.repogroup.create.false', _('Repository Group creation disabled')),
3131 ('hg.repogroup.create.true', _('Repository Group creation enabled')),
3132 ('hg.repogroup.create.true', _('Repository Group creation enabled')),
3132
3133
3133 ('hg.usergroup.create.false', _('User Group creation disabled')),
3134 ('hg.usergroup.create.false', _('User Group creation disabled')),
3134 ('hg.usergroup.create.true', _('User Group creation enabled')),
3135 ('hg.usergroup.create.true', _('User Group creation enabled')),
3135
3136
3136 ('hg.create.none', _('Repository creation disabled')),
3137 ('hg.create.none', _('Repository creation disabled')),
3137 ('hg.create.repository', _('Repository creation enabled')),
3138 ('hg.create.repository', _('Repository creation enabled')),
3138 ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
3139 ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
3139 ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
3140 ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
3140
3141
3141 ('hg.fork.none', _('Repository forking disabled')),
3142 ('hg.fork.none', _('Repository forking disabled')),
3142 ('hg.fork.repository', _('Repository forking enabled')),
3143 ('hg.fork.repository', _('Repository forking enabled')),
3143
3144
3144 ('hg.register.none', _('Registration disabled')),
3145 ('hg.register.none', _('Registration disabled')),
3145 ('hg.register.manual_activate', _('User Registration with manual account activation')),
3146 ('hg.register.manual_activate', _('User Registration with manual account activation')),
3146 ('hg.register.auto_activate', _('User Registration with automatic account activation')),
3147 ('hg.register.auto_activate', _('User Registration with automatic account activation')),
3147
3148
3148 ('hg.password_reset.enabled', _('Password reset enabled')),
3149 ('hg.password_reset.enabled', _('Password reset enabled')),
3149 ('hg.password_reset.hidden', _('Password reset hidden')),
3150 ('hg.password_reset.hidden', _('Password reset hidden')),
3150 ('hg.password_reset.disabled', _('Password reset disabled')),
3151 ('hg.password_reset.disabled', _('Password reset disabled')),
3151
3152
3152 ('hg.extern_activate.manual', _('Manual activation of external account')),
3153 ('hg.extern_activate.manual', _('Manual activation of external account')),
3153 ('hg.extern_activate.auto', _('Automatic activation of external account')),
3154 ('hg.extern_activate.auto', _('Automatic activation of external account')),
3154
3155
3155 ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
3156 ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
3156 ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
3157 ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
3157 ]
3158 ]
3158
3159
3159 # definition of system default permissions for DEFAULT user, created on
3160 # definition of system default permissions for DEFAULT user, created on
3160 # system setup
3161 # system setup
3161 DEFAULT_USER_PERMISSIONS = [
3162 DEFAULT_USER_PERMISSIONS = [
3162 # object perms
3163 # object perms
3163 'repository.read',
3164 'repository.read',
3164 'group.read',
3165 'group.read',
3165 'usergroup.read',
3166 'usergroup.read',
3166 # branch, for backward compat we need same value as before so forced pushed
3167 # branch, for backward compat we need same value as before so forced pushed
3167 'branch.push_force',
3168 'branch.push_force',
3168 # global
3169 # global
3169 'hg.create.repository',
3170 'hg.create.repository',
3170 'hg.repogroup.create.false',
3171 'hg.repogroup.create.false',
3171 'hg.usergroup.create.false',
3172 'hg.usergroup.create.false',
3172 'hg.create.write_on_repogroup.true',
3173 'hg.create.write_on_repogroup.true',
3173 'hg.fork.repository',
3174 'hg.fork.repository',
3174 'hg.register.manual_activate',
3175 'hg.register.manual_activate',
3175 'hg.password_reset.enabled',
3176 'hg.password_reset.enabled',
3176 'hg.extern_activate.auto',
3177 'hg.extern_activate.auto',
3177 'hg.inherit_default_perms.true',
3178 'hg.inherit_default_perms.true',
3178 ]
3179 ]
3179
3180
3180 # defines which permissions are more important higher the more important
3181 # Weight defines which permissions are more important.
3182 # The higher number the more important.
3183 PERM_WEIGHTS = {
3184 PERM_WEIGHTS = {
3184 'repository.none': 0,
3185 'repository.none': 0,
3185 'repository.read': 1,
3186 'repository.read': 1,
3186 'repository.write': 3,
3187 'repository.write': 3,
3187 'repository.admin': 4,
3188 'repository.admin': 4,
3188
3189
3189 'group.none': 0,
3190 'group.none': 0,
3190 'group.read': 1,
3191 'group.read': 1,
3191 'group.write': 3,
3192 'group.write': 3,
3192 'group.admin': 4,
3193 'group.admin': 4,
3193
3194
3194 'usergroup.none': 0,
3195 'usergroup.none': 0,
3195 'usergroup.read': 1,
3196 'usergroup.read': 1,
3196 'usergroup.write': 3,
3197 'usergroup.write': 3,
3197 'usergroup.admin': 4,
3198 'usergroup.admin': 4,
3198
3199
3199 'branch.none': 0,
3200 'branch.none': 0,
3200 'branch.merge': 1,
3201 'branch.merge': 1,
3201 'branch.push': 3,
3202 'branch.push': 3,
3202 'branch.push_force': 4,
3203 'branch.push_force': 4,
3203
3204
3204 'hg.repogroup.create.false': 0,
3205 'hg.repogroup.create.false': 0,
3205 'hg.repogroup.create.true': 1,
3206 'hg.repogroup.create.true': 1,
3206
3207
3207 'hg.usergroup.create.false': 0,
3208 'hg.usergroup.create.false': 0,
3208 'hg.usergroup.create.true': 1,
3209 'hg.usergroup.create.true': 1,
3209
3210
3210 'hg.fork.none': 0,
3211 'hg.fork.none': 0,
3211 'hg.fork.repository': 1,
3212 'hg.fork.repository': 1,
3212 'hg.create.none': 0,
3213 'hg.create.none': 0,
3213 'hg.create.repository': 1
3214 'hg.create.repository': 1
3214 }
3215 }
3215
3216
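PERM_WEIGHTS comes into play when several grants for the same object have to be merged; the higher weight generally wins. A hypothetical helper sketching that idea (not part of the model):

def strongest_permission(perm_names):
    # e.g. strongest_permission(['repository.read', 'repository.write'])
    #      -> 'repository.write'
    return max(perm_names, key=lambda p: Permission.PERM_WEIGHTS.get(p, -1))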
3216 permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3217 permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3217 permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
3218 permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
3218 permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
3219 permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
3219
3220
3220 def __unicode__(self):
3221 def __unicode__(self):
3221 return u"<%s('%s:%s')>" % (
3222 return u"<%s('%s:%s')>" % (
3222 self.__class__.__name__, self.permission_id, self.permission_name
3223 self.__class__.__name__, self.permission_id, self.permission_name
3223 )
3224 )
3224
3225
3225 @classmethod
3226 @classmethod
3226 def get_by_key(cls, key):
3227 def get_by_key(cls, key):
3227 return cls.query().filter(cls.permission_name == key).scalar()
3228 return cls.query().filter(cls.permission_name == key).scalar()
3228
3229
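A minimal lookup sketch for the classmethod above; the key is one of the canonical names listed in PERMS:

write_perm = Permission.get_by_key('repository.write')  # Permission row, or None if missing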
3229 @classmethod
3230 @classmethod
3230 def get_default_repo_perms(cls, user_id, repo_id=None):
3231 def get_default_repo_perms(cls, user_id, repo_id=None):
3231 q = Session().query(UserRepoToPerm, Repository, Permission)\
3232 q = Session().query(UserRepoToPerm, Repository, Permission)\
3232 .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
3233 .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
3233 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
3234 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
3234 .filter(UserRepoToPerm.user_id == user_id)
3235 .filter(UserRepoToPerm.user_id == user_id)
3235 if repo_id:
3236 if repo_id:
3236 q = q.filter(UserRepoToPerm.repository_id == repo_id)
3237 q = q.filter(UserRepoToPerm.repository_id == repo_id)
3237 return q.all()
3238 return q.all()
3238
3239
3239 @classmethod
3240 @classmethod
3240 def get_default_repo_branch_perms(cls, user_id, repo_id=None):
3241 def get_default_repo_branch_perms(cls, user_id, repo_id=None):
3241 q = Session().query(UserToRepoBranchPermission, UserRepoToPerm, Permission) \
3242 q = Session().query(UserToRepoBranchPermission, UserRepoToPerm, Permission) \
3242 .join(
3243 .join(
3243 Permission,
3244 Permission,
3244 UserToRepoBranchPermission.permission_id == Permission.permission_id) \
3245 UserToRepoBranchPermission.permission_id == Permission.permission_id) \
3245 .join(
3246 .join(
3246 UserRepoToPerm,
3247 UserRepoToPerm,
3247 UserToRepoBranchPermission.rule_to_perm_id == UserRepoToPerm.repo_to_perm_id) \
3248 UserToRepoBranchPermission.rule_to_perm_id == UserRepoToPerm.repo_to_perm_id) \
3248 .filter(UserRepoToPerm.user_id == user_id)
3249 .filter(UserRepoToPerm.user_id == user_id)
3249
3250
3250 if repo_id:
3251 if repo_id:
3251 q = q.filter(UserToRepoBranchPermission.repository_id == repo_id)
3252 q = q.filter(UserToRepoBranchPermission.repository_id == repo_id)
3252 return q.order_by(UserToRepoBranchPermission.rule_order).all()
3253 return q.order_by(UserToRepoBranchPermission.rule_order).all()
3253
3254
3254 @classmethod
3255 @classmethod
3255 def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
3256 def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
3256 q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
3257 q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
3257 .join(
3258 .join(
3258 Permission,
3259 Permission,
3259 UserGroupRepoToPerm.permission_id == Permission.permission_id)\
3260 UserGroupRepoToPerm.permission_id == Permission.permission_id)\
3260 .join(
3261 .join(
3261 Repository,
3262 Repository,
3262 UserGroupRepoToPerm.repository_id == Repository.repo_id)\
3263 UserGroupRepoToPerm.repository_id == Repository.repo_id)\
3263 .join(
3264 .join(
3264 UserGroup,
3265 UserGroup,
3265 UserGroupRepoToPerm.users_group_id ==
3266 UserGroupRepoToPerm.users_group_id ==
3266 UserGroup.users_group_id)\
3267 UserGroup.users_group_id)\
3267 .join(
3268 .join(
3268 UserGroupMember,
3269 UserGroupMember,
3269 UserGroupRepoToPerm.users_group_id ==
3270 UserGroupRepoToPerm.users_group_id ==
3270 UserGroupMember.users_group_id)\
3271 UserGroupMember.users_group_id)\
3271 .filter(
3272 .filter(
3272 UserGroupMember.user_id == user_id,
3273 UserGroupMember.user_id == user_id,
3273 UserGroup.users_group_active == true())
3274 UserGroup.users_group_active == true())
3274 if repo_id:
3275 if repo_id:
3275 q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
3276 q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
3276 return q.all()
3277 return q.all()
3277
3278
3278 @classmethod
3279 @classmethod
3279 def get_default_repo_branch_perms_from_user_group(cls, user_id, repo_id=None):
3280 def get_default_repo_branch_perms_from_user_group(cls, user_id, repo_id=None):
3280 q = Session().query(UserGroupToRepoBranchPermission, UserGroupRepoToPerm, Permission) \
3281 q = Session().query(UserGroupToRepoBranchPermission, UserGroupRepoToPerm, Permission) \
3281 .join(
3282 .join(
3282 Permission,
3283 Permission,
3283 UserGroupToRepoBranchPermission.permission_id == Permission.permission_id) \
3284 UserGroupToRepoBranchPermission.permission_id == Permission.permission_id) \
3284 .join(
3285 .join(
3285 UserGroupRepoToPerm,
3286 UserGroupRepoToPerm,
3286 UserGroupToRepoBranchPermission.rule_to_perm_id == UserGroupRepoToPerm.users_group_to_perm_id) \
3287 UserGroupToRepoBranchPermission.rule_to_perm_id == UserGroupRepoToPerm.users_group_to_perm_id) \
3287 .join(
3288 .join(
3288 UserGroup,
3289 UserGroup,
3289 UserGroupRepoToPerm.users_group_id == UserGroup.users_group_id) \
3290 UserGroupRepoToPerm.users_group_id == UserGroup.users_group_id) \
3290 .join(
3291 .join(
3291 UserGroupMember,
3292 UserGroupMember,
3292 UserGroupRepoToPerm.users_group_id == UserGroupMember.users_group_id) \
3293 UserGroupRepoToPerm.users_group_id == UserGroupMember.users_group_id) \
3293 .filter(
3294 .filter(
3294 UserGroupMember.user_id == user_id,
3295 UserGroupMember.user_id == user_id,
3295 UserGroup.users_group_active == true())
3296 UserGroup.users_group_active == true())
3296
3297
3297 if repo_id:
3298 if repo_id:
3298 q = q.filter(UserGroupToRepoBranchPermission.repository_id == repo_id)
3299 q = q.filter(UserGroupToRepoBranchPermission.repository_id == repo_id)
3299 return q.order_by(UserGroupToRepoBranchPermission.rule_order).all()
3300 return q.order_by(UserGroupToRepoBranchPermission.rule_order).all()
3300
3301
3301 @classmethod
3302 @classmethod
3302 def get_default_group_perms(cls, user_id, repo_group_id=None):
3303 def get_default_group_perms(cls, user_id, repo_group_id=None):
3303 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
3304 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
3304 .join(
3305 .join(
3305 Permission,
3306 Permission,
3306 UserRepoGroupToPerm.permission_id == Permission.permission_id)\
3307 UserRepoGroupToPerm.permission_id == Permission.permission_id)\
3307 .join(
3308 .join(
3308 RepoGroup,
3309 RepoGroup,
3309 UserRepoGroupToPerm.group_id == RepoGroup.group_id)\
3310 UserRepoGroupToPerm.group_id == RepoGroup.group_id)\
3310 .filter(UserRepoGroupToPerm.user_id == user_id)
3311 .filter(UserRepoGroupToPerm.user_id == user_id)
3311 if repo_group_id:
3312 if repo_group_id:
3312 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
3313 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
3313 return q.all()
3314 return q.all()
3314
3315
3315 @classmethod
3316 @classmethod
3316 def get_default_group_perms_from_user_group(
3317 def get_default_group_perms_from_user_group(
3317 cls, user_id, repo_group_id=None):
3318 cls, user_id, repo_group_id=None):
3318 q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
3319 q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
3319 .join(
3320 .join(
3320 Permission,
3321 Permission,
3321 UserGroupRepoGroupToPerm.permission_id ==
3322 UserGroupRepoGroupToPerm.permission_id ==
3322 Permission.permission_id)\
3323 Permission.permission_id)\
3323 .join(
3324 .join(
3324 RepoGroup,
3325 RepoGroup,
3325 UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
3326 UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
3326 .join(
3327 .join(
3327 UserGroup,
3328 UserGroup,
3328 UserGroupRepoGroupToPerm.users_group_id ==
3329 UserGroupRepoGroupToPerm.users_group_id ==
3329 UserGroup.users_group_id)\
3330 UserGroup.users_group_id)\
3330 .join(
3331 .join(
3331 UserGroupMember,
3332 UserGroupMember,
3332 UserGroupRepoGroupToPerm.users_group_id ==
3333 UserGroupRepoGroupToPerm.users_group_id ==
3333 UserGroupMember.users_group_id)\
3334 UserGroupMember.users_group_id)\
3334 .filter(
3335 .filter(
3335 UserGroupMember.user_id == user_id,
3336 UserGroupMember.user_id == user_id,
3336 UserGroup.users_group_active == true())
3337 UserGroup.users_group_active == true())
3337 if repo_group_id:
3338 if repo_group_id:
3338 q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
3339 q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
3339 return q.all()
3340 return q.all()
3340
3341
3341 @classmethod
3342 @classmethod
3342 def get_default_user_group_perms(cls, user_id, user_group_id=None):
3343 def get_default_user_group_perms(cls, user_id, user_group_id=None):
3343 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
3344 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
3344 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
3345 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
3345 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
3346 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
3346 .filter(UserUserGroupToPerm.user_id == user_id)
3347 .filter(UserUserGroupToPerm.user_id == user_id)
3347 if user_group_id:
3348 if user_group_id:
3348 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
3349 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
3349 return q.all()
3350 return q.all()
3350
3351
3351 @classmethod
3352 @classmethod
3352 def get_default_user_group_perms_from_user_group(
3353 def get_default_user_group_perms_from_user_group(
3353 cls, user_id, user_group_id=None):
3354 cls, user_id, user_group_id=None):
3354 TargetUserGroup = aliased(UserGroup, name='target_user_group')
3355 TargetUserGroup = aliased(UserGroup, name='target_user_group')
3355 q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
3356 q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
3356 .join(
3357 .join(
3357 Permission,
3358 Permission,
3358 UserGroupUserGroupToPerm.permission_id ==
3359 UserGroupUserGroupToPerm.permission_id ==
3359 Permission.permission_id)\
3360 Permission.permission_id)\
3360 .join(
3361 .join(
3361 TargetUserGroup,
3362 TargetUserGroup,
3362 UserGroupUserGroupToPerm.target_user_group_id ==
3363 UserGroupUserGroupToPerm.target_user_group_id ==
3363 TargetUserGroup.users_group_id)\
3364 TargetUserGroup.users_group_id)\
3364 .join(
3365 .join(
3365 UserGroup,
3366 UserGroup,
3366 UserGroupUserGroupToPerm.user_group_id ==
3367 UserGroupUserGroupToPerm.user_group_id ==
3367 UserGroup.users_group_id)\
3368 UserGroup.users_group_id)\
3368 .join(
3369 .join(
3369 UserGroupMember,
3370 UserGroupMember,
3370 UserGroupUserGroupToPerm.user_group_id ==
3371 UserGroupUserGroupToPerm.user_group_id ==
3371 UserGroupMember.users_group_id)\
3372 UserGroupMember.users_group_id)\
3372 .filter(
3373 .filter(
3373 UserGroupMember.user_id == user_id,
3374 UserGroupMember.user_id == user_id,
3374 UserGroup.users_group_active == true())
3375 UserGroup.users_group_active == true())
3375 if user_group_id:
3376 if user_group_id:
3376 q = q.filter(
3377 q = q.filter(
3377 UserGroupUserGroupToPerm.user_group_id == user_group_id)
3378 UserGroupUserGroupToPerm.user_group_id == user_group_id)
3378
3379
3379 return q.all()
3380 return q.all()
3380
3381
3381
3382
3382 class UserRepoToPerm(Base, BaseModel):
3383 class UserRepoToPerm(Base, BaseModel):
3383 __tablename__ = 'repo_to_perm'
3384 __tablename__ = 'repo_to_perm'
3384 __table_args__ = (
3385 __table_args__ = (
3385 UniqueConstraint('user_id', 'repository_id', 'permission_id'),
3386 UniqueConstraint('user_id', 'repository_id', 'permission_id'),
3386 base_table_args
3387 base_table_args
3387 )
3388 )
3388
3389
3389 repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3390 repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3390 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3391 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3391 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3392 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3392 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
3393 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
3393
3394
3394 user = relationship('User')
3395 user = relationship('User')
3395 repository = relationship('Repository')
3396 repository = relationship('Repository')
3396 permission = relationship('Permission')
3397 permission = relationship('Permission')
3397
3398
3398 branch_perm_entry = relationship('UserToRepoBranchPermission', cascade="all, delete-orphan", lazy='joined')
3399 branch_perm_entry = relationship('UserToRepoBranchPermission', cascade="all, delete-orphan", lazy='joined')
3399
3400
3400 @classmethod
3401 @classmethod
3401 def create(cls, user, repository, permission):
3402 def create(cls, user, repository, permission):
3402 n = cls()
3403 n = cls()
3403 n.user = user
3404 n.user = user
3404 n.repository = repository
3405 n.repository = repository
3405 n.permission = permission
3406 n.permission = permission
3406 Session().add(n)
3407 Session().add(n)
3407 return n
3408 return n
3408
3409
3409 def __unicode__(self):
3410 def __unicode__(self):
3410 return u'<%s => %s >' % (self.user, self.repository)
3411 return u'<%s => %s >' % (self.user, self.repository)
3411
3412
3412
3413
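The *ToPerm association models below follow the same create() pattern as UserRepoToPerm above: build the row, add it to the session, and leave the commit to the caller. A hedged sketch with placeholder objects:

# some_user and some_repo stand in for real User / Repository instances
perm = Permission.get_by_key('repository.read')
UserRepoToPerm.create(user=some_user, repository=some_repo, permission=perm)
Session().commit()  # create() only does Session().add(); committing is up to the caller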
3413 class UserUserGroupToPerm(Base, BaseModel):
3414 class UserUserGroupToPerm(Base, BaseModel):
3414 __tablename__ = 'user_user_group_to_perm'
3415 __tablename__ = 'user_user_group_to_perm'
3415 __table_args__ = (
3416 __table_args__ = (
3416 UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
3417 UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
3417 base_table_args
3418 base_table_args
3418 )
3419 )
3419
3420
3420 user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3421 user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3421 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3422 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3422 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3423 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3423 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3424 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3424
3425
3425 user = relationship('User')
3426 user = relationship('User')
3426 user_group = relationship('UserGroup')
3427 user_group = relationship('UserGroup')
3427 permission = relationship('Permission')
3428 permission = relationship('Permission')
3428
3429
3429 @classmethod
3430 @classmethod
3430 def create(cls, user, user_group, permission):
3431 def create(cls, user, user_group, permission):
3431 n = cls()
3432 n = cls()
3432 n.user = user
3433 n.user = user
3433 n.user_group = user_group
3434 n.user_group = user_group
3434 n.permission = permission
3435 n.permission = permission
3435 Session().add(n)
3436 Session().add(n)
3436 return n
3437 return n
3437
3438
3438 def __unicode__(self):
3439 def __unicode__(self):
3439 return u'<%s => %s >' % (self.user, self.user_group)
3440 return u'<%s => %s >' % (self.user, self.user_group)
3440
3441
3441
3442
3442 class UserToPerm(Base, BaseModel):
3443 class UserToPerm(Base, BaseModel):
3443 __tablename__ = 'user_to_perm'
3444 __tablename__ = 'user_to_perm'
3444 __table_args__ = (
3445 __table_args__ = (
3445 UniqueConstraint('user_id', 'permission_id'),
3446 UniqueConstraint('user_id', 'permission_id'),
3446 base_table_args
3447 base_table_args
3447 )
3448 )
3448
3449
3449 user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3450 user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3450 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3451 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3451 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3452 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3452
3453
3453 user = relationship('User')
3454 user = relationship('User')
3454 permission = relationship('Permission', lazy='joined')
3455 permission = relationship('Permission', lazy='joined')
3455
3456
3456 def __unicode__(self):
3457 def __unicode__(self):
3457 return u'<%s => %s >' % (self.user, self.permission)
3458 return u'<%s => %s >' % (self.user, self.permission)
3458
3459
3459
3460
3460 class UserGroupRepoToPerm(Base, BaseModel):
3461 class UserGroupRepoToPerm(Base, BaseModel):
3461 __tablename__ = 'users_group_repo_to_perm'
3462 __tablename__ = 'users_group_repo_to_perm'
3462 __table_args__ = (
3463 __table_args__ = (
3463 UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
3464 UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
3464 base_table_args
3465 base_table_args
3465 )
3466 )
3466
3467
3467 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3468 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3468 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3469 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3469 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3470 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3470 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
3471 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
3471
3472
3472 users_group = relationship('UserGroup')
3473 users_group = relationship('UserGroup')
3473 permission = relationship('Permission')
3474 permission = relationship('Permission')
3474 repository = relationship('Repository')
3475 repository = relationship('Repository')
3475 user_group_branch_perms = relationship('UserGroupToRepoBranchPermission', cascade='all')
3476 user_group_branch_perms = relationship('UserGroupToRepoBranchPermission', cascade='all')
3476
3477
3477 @classmethod
3478 @classmethod
3478 def create(cls, users_group, repository, permission):
3479 def create(cls, users_group, repository, permission):
3479 n = cls()
3480 n = cls()
3480 n.users_group = users_group
3481 n.users_group = users_group
3481 n.repository = repository
3482 n.repository = repository
3482 n.permission = permission
3483 n.permission = permission
3483 Session().add(n)
3484 Session().add(n)
3484 return n
3485 return n
3485
3486
3486 def __unicode__(self):
3487 def __unicode__(self):
3487 return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
3488 return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
3488
3489
3489
3490
3490 class UserGroupUserGroupToPerm(Base, BaseModel):
3491 class UserGroupUserGroupToPerm(Base, BaseModel):
3491 __tablename__ = 'user_group_user_group_to_perm'
3492 __tablename__ = 'user_group_user_group_to_perm'
3492 __table_args__ = (
3493 __table_args__ = (
3493 UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
3494 UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
3494 CheckConstraint('target_user_group_id != user_group_id'),
3495 CheckConstraint('target_user_group_id != user_group_id'),
3495 base_table_args
3496 base_table_args
3496 )
3497 )
3497
3498
3498 user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3499 user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3499 target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3500 target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3500 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3501 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3501 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3502 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3502
3503
3503 target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
3504 target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
3504 user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
3505 user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
3505 permission = relationship('Permission')
3506 permission = relationship('Permission')
3506
3507
3507 @classmethod
3508 @classmethod
3508 def create(cls, target_user_group, user_group, permission):
3509 def create(cls, target_user_group, user_group, permission):
3509 n = cls()
3510 n = cls()
3510 n.target_user_group = target_user_group
3511 n.target_user_group = target_user_group
3511 n.user_group = user_group
3512 n.user_group = user_group
3512 n.permission = permission
3513 n.permission = permission
3513 Session().add(n)
3514 Session().add(n)
3514 return n
3515 return n
3515
3516
3516 def __unicode__(self):
3517 def __unicode__(self):
3517 return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
3518 return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
3518
3519
3519
3520
3520 class UserGroupToPerm(Base, BaseModel):
3521 class UserGroupToPerm(Base, BaseModel):
3521 __tablename__ = 'users_group_to_perm'
3522 __tablename__ = 'users_group_to_perm'
3522 __table_args__ = (
3523 __table_args__ = (
3523 UniqueConstraint('users_group_id', 'permission_id',),
3524 UniqueConstraint('users_group_id', 'permission_id',),
3524 base_table_args
3525 base_table_args
3525 )
3526 )
3526
3527
3527 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3528 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3528 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3529 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3529 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3530 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3530
3531
3531 users_group = relationship('UserGroup')
3532 users_group = relationship('UserGroup')
3532 permission = relationship('Permission')
3533 permission = relationship('Permission')
3533
3534
3534
3535
3535 class UserRepoGroupToPerm(Base, BaseModel):
3536 class UserRepoGroupToPerm(Base, BaseModel):
3536 __tablename__ = 'user_repo_group_to_perm'
3537 __tablename__ = 'user_repo_group_to_perm'
3537 __table_args__ = (
3538 __table_args__ = (
3538 UniqueConstraint('user_id', 'group_id', 'permission_id'),
3539 UniqueConstraint('user_id', 'group_id', 'permission_id'),
3539 base_table_args
3540 base_table_args
3540 )
3541 )
3541
3542
3542 group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3543 group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3543 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3544 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3544 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3545 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3545 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3546 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3546
3547
3547 user = relationship('User')
3548 user = relationship('User')
3548 group = relationship('RepoGroup')
3549 group = relationship('RepoGroup')
3549 permission = relationship('Permission')
3550 permission = relationship('Permission')
3550
3551
3551 @classmethod
3552 @classmethod
3552 def create(cls, user, repository_group, permission):
3553 def create(cls, user, repository_group, permission):
3553 n = cls()
3554 n = cls()
3554 n.user = user
3555 n.user = user
3555 n.group = repository_group
3556 n.group = repository_group
3556 n.permission = permission
3557 n.permission = permission
3557 Session().add(n)
3558 Session().add(n)
3558 return n
3559 return n
3559
3560
3560
3561
3561 class UserGroupRepoGroupToPerm(Base, BaseModel):
3562 class UserGroupRepoGroupToPerm(Base, BaseModel):
3562 __tablename__ = 'users_group_repo_group_to_perm'
3563 __tablename__ = 'users_group_repo_group_to_perm'
3563 __table_args__ = (
3564 __table_args__ = (
3564 UniqueConstraint('users_group_id', 'group_id'),
3565 UniqueConstraint('users_group_id', 'group_id'),
3565 base_table_args
3566 base_table_args
3566 )
3567 )
3567
3568
3568 users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3569 users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3569 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3570 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3570 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3571 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3571 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3572 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3572
3573
3573 users_group = relationship('UserGroup')
3574 users_group = relationship('UserGroup')
3574 permission = relationship('Permission')
3575 permission = relationship('Permission')
3575 group = relationship('RepoGroup')
3576 group = relationship('RepoGroup')
3576
3577
3577 @classmethod
3578 @classmethod
3578 def create(cls, user_group, repository_group, permission):
3579 def create(cls, user_group, repository_group, permission):
3579 n = cls()
3580 n = cls()
3580 n.users_group = user_group
3581 n.users_group = user_group
3581 n.group = repository_group
3582 n.group = repository_group
3582 n.permission = permission
3583 n.permission = permission
3583 Session().add(n)
3584 Session().add(n)
3584 return n
3585 return n
3585
3586
3586 def __unicode__(self):
3587 def __unicode__(self):
3587 return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
3588 return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
3588
3589
3589
3590
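# Editor's illustrative sketch (not part of the original model code): the
# create() factories above only add() the new permission row to the session,
# so the caller is responsible for committing. `user_group`, `repo_group` and
# `permission` are assumed to be existing UserGroup / RepoGroup / Permission rows.
def _example_grant_user_group_permission(user_group, repo_group, permission):
    perm_row = UserGroupRepoGroupToPerm.create(user_group, repo_group, permission)
    Session().commit()  # persist the row added by create()
    return perm_row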
3590 class Statistics(Base, BaseModel):
3591 class Statistics(Base, BaseModel):
3591 __tablename__ = 'statistics'
3592 __tablename__ = 'statistics'
3592 __table_args__ = (
3593 __table_args__ = (
3593 base_table_args
3594 base_table_args
3594 )
3595 )
3595
3596
3596 stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3597 stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3597 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
3598 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
3598 stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
3599 stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
3599 commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
3600 commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
3600 commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
3601 commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
3601 languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data
3602 languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data
3602
3603
3603 repository = relationship('Repository', single_parent=True)
3604 repository = relationship('Repository', single_parent=True)
3604
3605
3605
3606
3606 class UserFollowing(Base, BaseModel):
3607 class UserFollowing(Base, BaseModel):
3607 __tablename__ = 'user_followings'
3608 __tablename__ = 'user_followings'
3608 __table_args__ = (
3609 __table_args__ = (
3609 UniqueConstraint('user_id', 'follows_repository_id'),
3610 UniqueConstraint('user_id', 'follows_repository_id'),
3610 UniqueConstraint('user_id', 'follows_user_id'),
3611 UniqueConstraint('user_id', 'follows_user_id'),
3611 base_table_args
3612 base_table_args
3612 )
3613 )
3613
3614
3614 user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3615 user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3615 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3616 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3616 follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
3617 follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
3617 follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
3618 follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
3618 follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
3619 follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
3619
3620
3620 user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
3621 user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
3621
3622
3622 follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
3623 follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
3623 follows_repository = relationship('Repository', order_by='Repository.repo_name')
3624 follows_repository = relationship('Repository', order_by='Repository.repo_name')
3624
3625
3625 @classmethod
3626 @classmethod
3626 def get_repo_followers(cls, repo_id):
3627 def get_repo_followers(cls, repo_id):
3627 return cls.query().filter(cls.follows_repo_id == repo_id)
3628 return cls.query().filter(cls.follows_repo_id == repo_id)
3628
3629
3629
3630
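# Editor's illustrative sketch (not original code): get_repo_followers() above
# returns a query object, so callers can keep composing it, e.g. to count followers.
def _example_count_repo_followers(repo_id):
    return UserFollowing.get_repo_followers(repo_id).count()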
3630 class CacheKey(Base, BaseModel):
3631 class CacheKey(Base, BaseModel):
3631 __tablename__ = 'cache_invalidation'
3632 __tablename__ = 'cache_invalidation'
3632 __table_args__ = (
3633 __table_args__ = (
3633 UniqueConstraint('cache_key'),
3634 UniqueConstraint('cache_key'),
3634 Index('key_idx', 'cache_key'),
3635 Index('key_idx', 'cache_key'),
3635 base_table_args,
3636 base_table_args,
3636 )
3637 )
3637
3638
3638 CACHE_TYPE_FEED = 'FEED'
3639 CACHE_TYPE_FEED = 'FEED'
3639
3640
3640 # namespaces used to register process/thread aware caches
3641 # namespaces used to register process/thread aware caches
3641 REPO_INVALIDATION_NAMESPACE = 'repo_cache:{repo_id}'
3642 REPO_INVALIDATION_NAMESPACE = 'repo_cache:{repo_id}'
3642 SETTINGS_INVALIDATION_NAMESPACE = 'system_settings'
3643 SETTINGS_INVALIDATION_NAMESPACE = 'system_settings'
3643
3644
3644 cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3645 cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3645 cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
3646 cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
3646 cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
3647 cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
3647 cache_state_uid = Column("cache_state_uid", String(255), nullable=True, unique=None, default=None)
3648 cache_state_uid = Column("cache_state_uid", String(255), nullable=True, unique=None, default=None)
3648 cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
3649 cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
3649
3650
3650 def __init__(self, cache_key, cache_args='', cache_state_uid=None):
3651 def __init__(self, cache_key, cache_args='', cache_state_uid=None):
3651 self.cache_key = cache_key
3652 self.cache_key = cache_key
3652 self.cache_args = cache_args
3653 self.cache_args = cache_args
3653 self.cache_active = False
3654 self.cache_active = False
3654 # first key should be the same for all entries, since all workers should share it
3655 # first key should be the same for all entries, since all workers should share it
3655 self.cache_state_uid = cache_state_uid or self.generate_new_state_uid()
3656 self.cache_state_uid = cache_state_uid or self.generate_new_state_uid()
3656
3657
3657 def __unicode__(self):
3658 def __unicode__(self):
3658 return u"<%s('%s:%s[%s]')>" % (
3659 return u"<%s('%s:%s[%s]')>" % (
3659 self.__class__.__name__,
3660 self.__class__.__name__,
3660 self.cache_id, self.cache_key, self.cache_active)
3661 self.cache_id, self.cache_key, self.cache_active)
3661
3662
3662 def _cache_key_partition(self):
3663 def _cache_key_partition(self):
3663 prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
3664 prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
3664 return prefix, repo_name, suffix
3665 return prefix, repo_name, suffix
3665
3666
3666 def get_prefix(self):
3667 def get_prefix(self):
3667 """
3668 """
3668 Try to extract the prefix from an existing cache key. The key may consist
3669 Try to extract the prefix from an existing cache key. The key may consist
3669 of prefix, repo_name and suffix.
3670 of prefix, repo_name and suffix.
3670 """
3671 """
3671 # this returns prefix, repo_name, suffix
3672 # this returns prefix, repo_name, suffix
3672 return self._cache_key_partition()[0]
3673 return self._cache_key_partition()[0]
3673
3674
3674 def get_suffix(self):
3675 def get_suffix(self):
3675 """
3676 """
3676 Get the suffix that might have been used in _get_cache_key to
3677 Get the suffix that might have been used in _get_cache_key to
3677 generate self.cache_key. Only used for informational purposes
3678 generate self.cache_key. Only used for informational purposes
3678 in repo_edit.mako.
3679 in repo_edit.mako.
3679 """
3680 """
3680 # prefix, repo_name, suffix
3681 # prefix, repo_name, suffix
3681 return self._cache_key_partition()[2]
3682 return self._cache_key_partition()[2]
3682
3683
3683 @classmethod
3684 @classmethod
3684 def generate_new_state_uid(cls, based_on=None):
3685 def generate_new_state_uid(cls, based_on=None):
3685 if based_on:
3686 if based_on:
3686 return str(uuid.uuid5(uuid.NAMESPACE_URL, safe_str(based_on)))
3687 return str(uuid.uuid5(uuid.NAMESPACE_URL, safe_str(based_on)))
3687 else:
3688 else:
3688 return str(uuid.uuid4())
3689 return str(uuid.uuid4())
3689
3690
3690 @classmethod
3691 @classmethod
3691 def delete_all_cache(cls):
3692 def delete_all_cache(cls):
3692 """
3693 """
3693 Delete all cache keys from database.
3694 Delete all cache keys from database.
3694 Should only be run when all instances are down and all entries
3695 Should only be run when all instances are down and all entries
3695 thus stale.
3696 thus stale.
3696 """
3697 """
3697 cls.query().delete()
3698 cls.query().delete()
3698 Session().commit()
3699 Session().commit()
3699
3700
3700 @classmethod
3701 @classmethod
3701 def set_invalidate(cls, cache_uid, delete=False):
3702 def set_invalidate(cls, cache_uid, delete=False):
3702 """
3703 """
3703 Mark all caches of a repo as invalid in the database.
3704 Mark all caches of a repo as invalid in the database.
3704 """
3705 """
3705
3706
3706 try:
3707 try:
3707 qry = Session().query(cls).filter(cls.cache_args == cache_uid)
3708 qry = Session().query(cls).filter(cls.cache_args == cache_uid)
3708 if delete:
3709 if delete:
3709 qry.delete()
3710 qry.delete()
3710 log.debug('cache objects deleted for cache args %s',
3711 log.debug('cache objects deleted for cache args %s',
3711 safe_str(cache_uid))
3712 safe_str(cache_uid))
3712 else:
3713 else:
3713 qry.update({"cache_active": False,
3714 qry.update({"cache_active": False,
3714 "cache_state_uid": cls.generate_new_state_uid()})
3715 "cache_state_uid": cls.generate_new_state_uid()})
3715 log.debug('cache objects marked as invalid for cache args %s',
3716 log.debug('cache objects marked as invalid for cache args %s',
3716 safe_str(cache_uid))
3717 safe_str(cache_uid))
3717
3718
3718 Session().commit()
3719 Session().commit()
3719 except Exception:
3720 except Exception:
3720 log.exception(
3721 log.exception(
3721 'Cache key invalidation failed for cache args %s',
3722 'Cache key invalidation failed for cache args %s',
3722 safe_str(cache_uid))
3723 safe_str(cache_uid))
3723 Session().rollback()
3724 Session().rollback()
3724
3725
3725 @classmethod
3726 @classmethod
3726 def get_active_cache(cls, cache_key):
3727 def get_active_cache(cls, cache_key):
3727 inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
3728 inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
3728 if inv_obj:
3729 if inv_obj:
3729 return inv_obj
3730 return inv_obj
3730 return None
3731 return None
3731
3732
3732 @classmethod
3733 @classmethod
3733 def get_namespace_map(cls, namespace):
3734 def get_namespace_map(cls, namespace):
3734 return {
3735 return {
3735 x.cache_key: x
3736 x.cache_key: x
3736 for x in cls.query().filter(cls.cache_args == namespace)}
3737 for x in cls.query().filter(cls.cache_args == namespace)}
3737
3738
3738
3739
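# Editor's illustrative sketch (not original code): a typical invalidation flow
# using the CacheKey API above. `repo_id` is a hypothetical example value.
def _example_invalidate_repo_caches(repo_id, delete=False):
    # build the per-repo namespace key and mark all matching entries inactive
    invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=repo_id)
    CacheKey.set_invalidate(invalidation_namespace, delete=delete)
    # the namespace map can then be inspected to see the refreshed state uids
    return CacheKey.get_namespace_map(invalidation_namespace)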
3739 class ChangesetComment(Base, BaseModel):
3740 class ChangesetComment(Base, BaseModel):
3740 __tablename__ = 'changeset_comments'
3741 __tablename__ = 'changeset_comments'
3741 __table_args__ = (
3742 __table_args__ = (
3742 Index('cc_revision_idx', 'revision'),
3743 Index('cc_revision_idx', 'revision'),
3743 base_table_args,
3744 base_table_args,
3744 )
3745 )
3745
3746
3746 COMMENT_OUTDATED = u'comment_outdated'
3747 COMMENT_OUTDATED = u'comment_outdated'
3747 COMMENT_TYPE_NOTE = u'note'
3748 COMMENT_TYPE_NOTE = u'note'
3748 COMMENT_TYPE_TODO = u'todo'
3749 COMMENT_TYPE_TODO = u'todo'
3749 COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]
3750 COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]
3750
3751
3751 OP_IMMUTABLE = u'immutable'
3752 OP_IMMUTABLE = u'immutable'
3752 OP_CHANGEABLE = u'changeable'
3753 OP_CHANGEABLE = u'changeable'
3753
3754
3754 comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
3755 comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
3755 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3756 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3756 revision = Column('revision', String(40), nullable=True)
3757 revision = Column('revision', String(40), nullable=True)
3757 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3758 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3758 pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
3759 pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
3759 line_no = Column('line_no', Unicode(10), nullable=True)
3760 line_no = Column('line_no', Unicode(10), nullable=True)
3760 hl_lines = Column('hl_lines', Unicode(512), nullable=True)
3761 hl_lines = Column('hl_lines', Unicode(512), nullable=True)
3761 f_path = Column('f_path', Unicode(1000), nullable=True)
3762 f_path = Column('f_path', Unicode(1000), nullable=True)
3762 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
3763 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
3763 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
3764 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
3764 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3765 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3765 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3766 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3766 renderer = Column('renderer', Unicode(64), nullable=True)
3767 renderer = Column('renderer', Unicode(64), nullable=True)
3767 display_state = Column('display_state', Unicode(128), nullable=True)
3768 display_state = Column('display_state', Unicode(128), nullable=True)
3768 immutable_state = Column('immutable_state', Unicode(128), nullable=True, default=OP_CHANGEABLE)
3769 immutable_state = Column('immutable_state', Unicode(128), nullable=True, default=OP_CHANGEABLE)
3769
3770
3770 comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
3771 comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
3771 resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)
3772 resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)
3772
3773
3773 resolved_comment = relationship('ChangesetComment', remote_side=comment_id, back_populates='resolved_by')
3774 resolved_comment = relationship('ChangesetComment', remote_side=comment_id, back_populates='resolved_by')
3774 resolved_by = relationship('ChangesetComment', back_populates='resolved_comment')
3775 resolved_by = relationship('ChangesetComment', back_populates='resolved_comment')
3775
3776
3776 author = relationship('User', lazy='select')
3777 author = relationship('User', lazy='select')
3777 repo = relationship('Repository')
3778 repo = relationship('Repository')
3778 status_change = relationship('ChangesetStatus', cascade="all, delete-orphan", lazy='select')
3779 status_change = relationship('ChangesetStatus', cascade="all, delete-orphan", lazy='select')
3779 pull_request = relationship('PullRequest', lazy='select')
3780 pull_request = relationship('PullRequest', lazy='select')
3780 pull_request_version = relationship('PullRequestVersion', lazy='select')
3781 pull_request_version = relationship('PullRequestVersion', lazy='select')
3781 history = relationship('ChangesetCommentHistory', cascade='all, delete-orphan', lazy='select', order_by='ChangesetCommentHistory.version')
3782 history = relationship('ChangesetCommentHistory', cascade='all, delete-orphan', lazy='select', order_by='ChangesetCommentHistory.version')
3782
3783
3783 @classmethod
3784 @classmethod
3784 def get_users(cls, revision=None, pull_request_id=None):
3785 def get_users(cls, revision=None, pull_request_id=None):
3785 """
3786 """
3786 Returns the users associated with this ChangesetComment, i.e. those
3787 Returns the users associated with this ChangesetComment, i.e. those
3787 who actually commented.
3788 who actually commented.
3788
3789
3789 :param cls:
3790 :param cls:
3790 :param revision:
3791 :param revision:
3791 """
3792 """
3792 q = Session().query(User)\
3793 q = Session().query(User)\
3793 .join(ChangesetComment.author)
3794 .join(ChangesetComment.author)
3794 if revision:
3795 if revision:
3795 q = q.filter(cls.revision == revision)
3796 q = q.filter(cls.revision == revision)
3796 elif pull_request_id:
3797 elif pull_request_id:
3797 q = q.filter(cls.pull_request_id == pull_request_id)
3798 q = q.filter(cls.pull_request_id == pull_request_id)
3798 return q.all()
3799 return q.all()
3799
3800
3800 @classmethod
3801 @classmethod
3801 def get_index_from_version(cls, pr_version, versions):
3802 def get_index_from_version(cls, pr_version, versions):
3802 num_versions = [x.pull_request_version_id for x in versions]
3803 num_versions = [x.pull_request_version_id for x in versions]
3803 try:
3804 try:
3804 return num_versions.index(pr_version) + 1
3805 return num_versions.index(pr_version) + 1
3805 except (IndexError, ValueError):
3806 except (IndexError, ValueError):
3806 return
3807 return
3807
3808
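# Editor's note (sketch, not original code): get_index_from_version() maps a
# pull request version id to its 1-based position in `versions`; e.g. for
# version ids [10, 12, 15] and pr_version=12 it returns 2, and None when the
# id is not found.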
3808 @property
3809 @property
3809 def outdated(self):
3810 def outdated(self):
3810 return self.display_state == self.COMMENT_OUTDATED
3811 return self.display_state == self.COMMENT_OUTDATED
3811
3812
3812 @property
3813 @property
3813 def outdated_js(self):
3814 def outdated_js(self):
3814 return json.dumps(self.display_state == self.COMMENT_OUTDATED)
3815 return json.dumps(self.display_state == self.COMMENT_OUTDATED)
3815
3816
3816 @property
3817 @property
3817 def immutable(self):
3818 def immutable(self):
3818 return self.immutable_state == self.OP_IMMUTABLE
3819 return self.immutable_state == self.OP_IMMUTABLE
3819
3820
3820 def outdated_at_version(self, version):
3821 def outdated_at_version(self, version):
3821 """
3822 """
3822 Checks if the comment is outdated for the given pull request version.
3823 Checks if the comment is outdated for the given pull request version.
3823 """
3824 """
3824 def version_check():
3825 def version_check():
3825 return self.pull_request_version_id and self.pull_request_version_id != version
3826 return self.pull_request_version_id and self.pull_request_version_id != version
3826
3827
3827 if self.is_inline:
3828 if self.is_inline:
3828 return self.outdated and version_check()
3829 return self.outdated and version_check()
3829 else:
3830 else:
3830 # general comments don't have .outdated set; also comments on the latest version don't carry a version id
3831 # general comments don't have .outdated set; also comments on the latest version don't carry a version id
3831 return version_check()
3832 return version_check()
3832
3833
3833 def outdated_at_version_js(self, version):
3834 def outdated_at_version_js(self, version):
3834 """
3835 """
3835 Checks if the comment is outdated for the given pull request version.
3836 Checks if the comment is outdated for the given pull request version.
3836 """
3837 """
3837 return json.dumps(self.outdated_at_version(version))
3838 return json.dumps(self.outdated_at_version(version))
3838
3839
3839 def older_than_version(self, version):
3840 def older_than_version(self, version):
3840 """
3841 """
3841 Checks if the comment was made on an earlier version than the given one.
3842 Checks if the comment was made on an earlier version than the given one.
3842 """
3843 """
3843 if version is None:
3844 if version is None:
3844 return self.pull_request_version != version
3845 return self.pull_request_version != version
3845
3846
3846 return self.pull_request_version < version
3847 return self.pull_request_version < version
3847
3848
3848 def older_than_version_js(self, version):
3849 def older_than_version_js(self, version):
3849 """
3850 """
3850 Checks if the comment was made on an earlier version than the given one.
3851 Checks if the comment was made on an earlier version than the given one.
3851 """
3852 """
3852 return json.dumps(self.older_than_version(version))
3853 return json.dumps(self.older_than_version(version))
3853
3854
3854 @property
3855 @property
3855 def commit_id(self):
3856 def commit_id(self):
3856 """New style naming to stop using .revision"""
3857 """New style naming to stop using .revision"""
3857 return self.revision
3858 return self.revision
3858
3859
3859 @property
3860 @property
3860 def resolved(self):
3861 def resolved(self):
3861 return self.resolved_by[0] if self.resolved_by else None
3862 return self.resolved_by[0] if self.resolved_by else None
3862
3863
3863 @property
3864 @property
3864 def is_todo(self):
3865 def is_todo(self):
3865 return self.comment_type == self.COMMENT_TYPE_TODO
3866 return self.comment_type == self.COMMENT_TYPE_TODO
3866
3867
3867 @property
3868 @property
3868 def is_inline(self):
3869 def is_inline(self):
3869 if self.line_no and self.f_path:
3870 if self.line_no and self.f_path:
3870 return True
3871 return True
3871 return False
3872 return False
3872
3873
3873 @property
3874 @property
3874 def last_version(self):
3875 def last_version(self):
3875 version = 0
3876 version = 0
3876 if self.history:
3877 if self.history:
3877 version = self.history[-1].version
3878 version = self.history[-1].version
3878 return version
3879 return version
3879
3880
3880 def get_index_version(self, versions):
3881 def get_index_version(self, versions):
3881 return self.get_index_from_version(
3882 return self.get_index_from_version(
3882 self.pull_request_version_id, versions)
3883 self.pull_request_version_id, versions)
3883
3884
3884 @property
3885 @property
3885 def review_status(self):
3886 def review_status(self):
3886 if self.status_change:
3887 if self.status_change:
3887 return self.status_change[0].status
3888 return self.status_change[0].status
3888
3889
3889 @property
3890 @property
3890 def review_status_lbl(self):
3891 def review_status_lbl(self):
3891 if self.status_change:
3892 if self.status_change:
3892 return self.status_change[0].status_lbl
3893 return self.status_change[0].status_lbl
3893
3894
3894 def __repr__(self):
3895 def __repr__(self):
3895 if self.comment_id:
3896 if self.comment_id:
3896 return '<DB:Comment #%s>' % self.comment_id
3897 return '<DB:Comment #%s>' % self.comment_id
3897 else:
3898 else:
3898 return '<DB:Comment at %#x>' % id(self)
3899 return '<DB:Comment at %#x>' % id(self)
3899
3900
3900 def get_api_data(self):
3901 def get_api_data(self):
3901 comment = self
3902 comment = self
3902
3903
3903 data = {
3904 data = {
3904 'comment_id': comment.comment_id,
3905 'comment_id': comment.comment_id,
3905 'comment_type': comment.comment_type,
3906 'comment_type': comment.comment_type,
3906 'comment_text': comment.text,
3907 'comment_text': comment.text,
3907 'comment_status': comment.status_change,
3908 'comment_status': comment.status_change,
3908 'comment_f_path': comment.f_path,
3909 'comment_f_path': comment.f_path,
3909 'comment_lineno': comment.line_no,
3910 'comment_lineno': comment.line_no,
3910 'comment_author': comment.author,
3911 'comment_author': comment.author,
3911 'comment_created_on': comment.created_on,
3912 'comment_created_on': comment.created_on,
3912 'comment_resolved_by': self.resolved,
3913 'comment_resolved_by': self.resolved,
3913 'comment_commit_id': comment.revision,
3914 'comment_commit_id': comment.revision,
3914 'comment_pull_request_id': comment.pull_request_id,
3915 'comment_pull_request_id': comment.pull_request_id,
3915 'comment_last_version': self.last_version
3916 'comment_last_version': self.last_version
3916 }
3917 }
3917 return data
3918 return data
3918
3919
3919 def __json__(self):
3920 def __json__(self):
3920 data = dict()
3921 data = dict()
3921 data.update(self.get_api_data())
3922 data.update(self.get_api_data())
3922 return data
3923 return data
3923
3924
3924
3925
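# Editor's illustrative sketch (not original code): how the display-related
# flags defined on ChangesetComment above are typically combined. `comment` is
# assumed to be a ChangesetComment and `at_version` a pull request version id.
def _example_comment_display_flags(comment, at_version):
    return {
        'inline': comment.is_inline,
        'outdated': comment.outdated_at_version(at_version),
        'resolved': bool(comment.resolved),
        'todo': comment.is_todo,
    }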
3925 class ChangesetCommentHistory(Base, BaseModel):
3926 class ChangesetCommentHistory(Base, BaseModel):
3926 __tablename__ = 'changeset_comments_history'
3927 __tablename__ = 'changeset_comments_history'
3927 __table_args__ = (
3928 __table_args__ = (
3928 Index('cch_comment_id_idx', 'comment_id'),
3929 Index('cch_comment_id_idx', 'comment_id'),
3929 base_table_args,
3930 base_table_args,
3930 )
3931 )
3931
3932
3932 comment_history_id = Column('comment_history_id', Integer(), nullable=False, primary_key=True)
3933 comment_history_id = Column('comment_history_id', Integer(), nullable=False, primary_key=True)
3933 comment_id = Column('comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=False)
3934 comment_id = Column('comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=False)
3934 version = Column("version", Integer(), nullable=False, default=0)
3935 version = Column("version", Integer(), nullable=False, default=0)
3935 created_by_user_id = Column('created_by_user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
3936 created_by_user_id = Column('created_by_user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
3936 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
3937 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
3937 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3938 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3938 deleted = Column('deleted', Boolean(), default=False)
3939 deleted = Column('deleted', Boolean(), default=False)
3939
3940
3940 author = relationship('User', lazy='joined')
3941 author = relationship('User', lazy='joined')
3941 comment = relationship('ChangesetComment', cascade="all, delete")
3942 comment = relationship('ChangesetComment', cascade="all, delete")
3942
3943
3943 @classmethod
3944 @classmethod
3944 def get_version(cls, comment_id):
3945 def get_version(cls, comment_id):
3945 q = Session().query(ChangesetCommentHistory).filter(
3946 q = Session().query(ChangesetCommentHistory).filter(
3946 ChangesetCommentHistory.comment_id == comment_id).order_by(ChangesetCommentHistory.version.desc())
3947 ChangesetCommentHistory.comment_id == comment_id).order_by(ChangesetCommentHistory.version.desc())
3947 if q.count() == 0:
3948 if q.count() == 0:
3948 return 1
3949 return 1
3949 elif q.count() >= q[0].version:
3950 elif q.count() >= q[0].version:
3950 return q.count() + 1
3951 return q.count() + 1
3951 else:
3952 else:
3952 return q[0].version + 1
3953 return q[0].version + 1
3953
3954
3954
3955
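# Editor's illustrative sketch (not original code): versioning a new history row
# for an edited comment via get_version(). `comment` and `user` are assumed to be
# existing ChangesetComment / User instances; the caller commits the session.
def _example_record_comment_edit(comment, user, text_snapshot):
    history = ChangesetCommentHistory()
    history.comment_id = comment.comment_id
    history.version = ChangesetCommentHistory.get_version(comment.comment_id)
    history.created_by_user_id = user.user_id
    history.text = text_snapshot
    Session().add(history)
    return history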
3955 class ChangesetStatus(Base, BaseModel):
3956 class ChangesetStatus(Base, BaseModel):
3956 __tablename__ = 'changeset_statuses'
3957 __tablename__ = 'changeset_statuses'
3957 __table_args__ = (
3958 __table_args__ = (
3958 Index('cs_revision_idx', 'revision'),
3959 Index('cs_revision_idx', 'revision'),
3959 Index('cs_version_idx', 'version'),
3960 Index('cs_version_idx', 'version'),
3960 UniqueConstraint('repo_id', 'revision', 'version'),
3961 UniqueConstraint('repo_id', 'revision', 'version'),
3961 base_table_args
3962 base_table_args
3962 )
3963 )
3963
3964
3964 STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
3965 STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
3965 STATUS_APPROVED = 'approved'
3966 STATUS_APPROVED = 'approved'
3966 STATUS_REJECTED = 'rejected'
3967 STATUS_REJECTED = 'rejected'
3967 STATUS_UNDER_REVIEW = 'under_review'
3968 STATUS_UNDER_REVIEW = 'under_review'
3968
3969
3969 STATUSES = [
3970 STATUSES = [
3970 (STATUS_NOT_REVIEWED, _("Not Reviewed")), # (no icon) and default
3971 (STATUS_NOT_REVIEWED, _("Not Reviewed")), # (no icon) and default
3971 (STATUS_APPROVED, _("Approved")),
3972 (STATUS_APPROVED, _("Approved")),
3972 (STATUS_REJECTED, _("Rejected")),
3973 (STATUS_REJECTED, _("Rejected")),
3973 (STATUS_UNDER_REVIEW, _("Under Review")),
3974 (STATUS_UNDER_REVIEW, _("Under Review")),
3974 ]
3975 ]
3975
3976
3976 changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
3977 changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
3977 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3978 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3978 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
3979 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
3979 revision = Column('revision', String(40), nullable=False)
3980 revision = Column('revision', String(40), nullable=False)
3980 status = Column('status', String(128), nullable=False, default=DEFAULT)
3981 status = Column('status', String(128), nullable=False, default=DEFAULT)
3981 changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
3982 changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
3982 modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
3983 modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
3983 version = Column('version', Integer(), nullable=False, default=0)
3984 version = Column('version', Integer(), nullable=False, default=0)
3984 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3985 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3985
3986
3986 author = relationship('User', lazy='select')
3987 author = relationship('User', lazy='select')
3987 repo = relationship('Repository', lazy='select')
3988 repo = relationship('Repository', lazy='select')
3988 comment = relationship('ChangesetComment', lazy='select')
3989 comment = relationship('ChangesetComment', lazy='select')
3989 pull_request = relationship('PullRequest', lazy='select')
3990 pull_request = relationship('PullRequest', lazy='select')
3990
3991
3991 def __unicode__(self):
3992 def __unicode__(self):
3992 return u"<%s('%s[v%s]:%s')>" % (
3993 return u"<%s('%s[v%s]:%s')>" % (
3993 self.__class__.__name__,
3994 self.__class__.__name__,
3994 self.status, self.version, self.author
3995 self.status, self.version, self.author
3995 )
3996 )
3996
3997
3997 @classmethod
3998 @classmethod
3998 def get_status_lbl(cls, value):
3999 def get_status_lbl(cls, value):
3999 return dict(cls.STATUSES).get(value)
4000 return dict(cls.STATUSES).get(value)
4000
4001
4001 @property
4002 @property
4002 def status_lbl(self):
4003 def status_lbl(self):
4003 return ChangesetStatus.get_status_lbl(self.status)
4004 return ChangesetStatus.get_status_lbl(self.status)
4004
4005
4005 def get_api_data(self):
4006 def get_api_data(self):
4006 status = self
4007 status = self
4007 data = {
4008 data = {
4008 'status_id': status.changeset_status_id,
4009 'status_id': status.changeset_status_id,
4009 'status': status.status,
4010 'status': status.status,
4010 }
4011 }
4011 return data
4012 return data
4012
4013
4013 def __json__(self):
4014 def __json__(self):
4014 data = dict()
4015 data = dict()
4015 data.update(self.get_api_data())
4016 data.update(self.get_api_data())
4016 return data
4017 return data
4017
4018
4018
4019
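# Editor's illustrative sketch (not original code): resolving a human-readable
# label for a raw review status value via the STATUSES mapping above.
def _example_status_label(raw_status=ChangesetStatus.STATUS_APPROVED):
    return ChangesetStatus.get_status_lbl(raw_status)  # e.g. "Approved"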
4019 class _SetState(object):
4020 class _SetState(object):
4020 """
4021 """
4021 Context manager allowing changing the state for sensitive operations such as
4022 Context manager allowing changing the state for sensitive operations such as
4022 a pull request update or merge
4023 a pull request update or merge
4023 """
4024 """
4024
4025
4025 def __init__(self, pull_request, pr_state, back_state=None):
4026 def __init__(self, pull_request, pr_state, back_state=None):
4026 self._pr = pull_request
4027 self._pr = pull_request
4027 self._org_state = back_state or pull_request.pull_request_state
4028 self._org_state = back_state or pull_request.pull_request_state
4028 self._pr_state = pr_state
4029 self._pr_state = pr_state
4029 self._current_state = None
4030 self._current_state = None
4030
4031
4031 def __enter__(self):
4032 def __enter__(self):
4032 log.debug('StateLock: entering set state context of pr %s, setting state to: `%s`',
4033 log.debug('StateLock: entering set state context of pr %s, setting state to: `%s`',
4033 self._pr, self._pr_state)
4034 self._pr, self._pr_state)
4034 self.set_pr_state(self._pr_state)
4035 self.set_pr_state(self._pr_state)
4035 return self
4036 return self
4036
4037
4037 def __exit__(self, exc_type, exc_val, exc_tb):
4038 def __exit__(self, exc_type, exc_val, exc_tb):
4038 if exc_val is not None:
4039 if exc_val is not None:
4039 log.error(traceback.format_exc(exc_tb))
4040 log.error(traceback.format_exc(exc_tb))
4040 return None
4041 return None
4041
4042
4042 self.set_pr_state(self._org_state)
4043 self.set_pr_state(self._org_state)
4043 log.debug('StateLock: exiting set state context of pr %s, setting state to: `%s`',
4044 log.debug('StateLock: exiting set state context of pr %s, setting state to: `%s`',
4044 self._pr, self._org_state)
4045 self._pr, self._org_state)
4045
4046
4046 @property
4047 @property
4047 def state(self):
4048 def state(self):
4048 return self._current_state
4049 return self._current_state
4049
4050
4050 def set_pr_state(self, pr_state):
4051 def set_pr_state(self, pr_state):
4051 try:
4052 try:
4052 self._pr.pull_request_state = pr_state
4053 self._pr.pull_request_state = pr_state
4053 Session().add(self._pr)
4054 Session().add(self._pr)
4054 Session().commit()
4055 Session().commit()
4055 self._current_state = pr_state
4056 self._current_state = pr_state
4056 except Exception:
4057 except Exception:
4057 log.exception('Failed to set PullRequest %s state to %s', self._pr, pr_state)
4058 log.exception('Failed to set PullRequest %s state to %s', self._pr, pr_state)
4058 raise
4059 raise
4059
4060
4060
4061
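# Editor's illustrative sketch (not original code), mirroring the usage shown in
# set_state() below: temporarily move a pull request into the `updating` state
# while a sensitive operation runs; _SetState restores the previous state on exit.
# `pull_request` and `update_fn` are assumed to be supplied by the caller.
def _example_update_under_state_lock(pull_request, update_fn):
    with pull_request.set_state(PullRequest.STATE_UPDATING):
        return update_fn(pull_request)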
4061 class _PullRequestBase(BaseModel):
4062 class _PullRequestBase(BaseModel):
4062 """
4063 """
4063 Common attributes of pull request and version entries.
4064 Common attributes of pull request and version entries.
4064 """
4065 """
4065
4066
4066 # .status values
4067 # .status values
4067 STATUS_NEW = u'new'
4068 STATUS_NEW = u'new'
4068 STATUS_OPEN = u'open'
4069 STATUS_OPEN = u'open'
4069 STATUS_CLOSED = u'closed'
4070 STATUS_CLOSED = u'closed'
4070
4071
4071 # available states
4072 # available states
4072 STATE_CREATING = u'creating'
4073 STATE_CREATING = u'creating'
4073 STATE_UPDATING = u'updating'
4074 STATE_UPDATING = u'updating'
4074 STATE_MERGING = u'merging'
4075 STATE_MERGING = u'merging'
4075 STATE_CREATED = u'created'
4076 STATE_CREATED = u'created'
4076
4077
4077 title = Column('title', Unicode(255), nullable=True)
4078 title = Column('title', Unicode(255), nullable=True)
4078 description = Column(
4079 description = Column(
4079 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
4080 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
4080 nullable=True)
4081 nullable=True)
4081 description_renderer = Column('description_renderer', Unicode(64), nullable=True)
4082 description_renderer = Column('description_renderer', Unicode(64), nullable=True)
4082
4083
4083 # new/open/closed status of pull request (not approve/reject/etc)
4084 # new/open/closed status of pull request (not approve/reject/etc)
4084 status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
4085 status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
4085 created_on = Column(
4086 created_on = Column(
4086 'created_on', DateTime(timezone=False), nullable=False,
4087 'created_on', DateTime(timezone=False), nullable=False,
4087 default=datetime.datetime.now)
4088 default=datetime.datetime.now)
4088 updated_on = Column(
4089 updated_on = Column(
4089 'updated_on', DateTime(timezone=False), nullable=False,
4090 'updated_on', DateTime(timezone=False), nullable=False,
4090 default=datetime.datetime.now)
4091 default=datetime.datetime.now)
4091
4092
4092 pull_request_state = Column("pull_request_state", String(255), nullable=True)
4093 pull_request_state = Column("pull_request_state", String(255), nullable=True)
4093
4094
4094 @declared_attr
4095 @declared_attr
4095 def user_id(cls):
4096 def user_id(cls):
4096 return Column(
4097 return Column(
4097 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
4098 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
4098 unique=None)
4099 unique=None)
4099
4100
4100 # 500 revisions max
4101 # 500 revisions max
4101 _revisions = Column(
4102 _revisions = Column(
4102 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
4103 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
4103
4104
4104 common_ancestor_id = Column('common_ancestor_id', Unicode(255), nullable=True)
4105 common_ancestor_id = Column('common_ancestor_id', Unicode(255), nullable=True)
4105
4106
4106 @declared_attr
4107 @declared_attr
4107 def source_repo_id(cls):
4108 def source_repo_id(cls):
4108 # TODO: dan: rename column to source_repo_id
4109 # TODO: dan: rename column to source_repo_id
4109 return Column(
4110 return Column(
4110 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
4111 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
4111 nullable=False)
4112 nullable=False)
4112
4113
4113 _source_ref = Column('org_ref', Unicode(255), nullable=False)
4114 _source_ref = Column('org_ref', Unicode(255), nullable=False)
4114
4115
4115 @hybrid_property
4116 @hybrid_property
4116 def source_ref(self):
4117 def source_ref(self):
4117 return self._source_ref
4118 return self._source_ref
4118
4119
4119 @source_ref.setter
4120 @source_ref.setter
4120 def source_ref(self, val):
4121 def source_ref(self, val):
4121 parts = (val or '').split(':')
4122 parts = (val or '').split(':')
4122 if len(parts) != 3:
4123 if len(parts) != 3:
4123 raise ValueError(
4124 raise ValueError(
4124 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
4125 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
4125 self._source_ref = safe_unicode(val)
4126 self._source_ref = safe_unicode(val)
4126
4127
4127 _target_ref = Column('other_ref', Unicode(255), nullable=False)
4128 _target_ref = Column('other_ref', Unicode(255), nullable=False)
4128
4129
4129 @hybrid_property
4130 @hybrid_property
4130 def target_ref(self):
4131 def target_ref(self):
4131 return self._target_ref
4132 return self._target_ref
4132
4133
4133 @target_ref.setter
4134 @target_ref.setter
4134 def target_ref(self, val):
4135 def target_ref(self, val):
4135 parts = (val or '').split(':')
4136 parts = (val or '').split(':')
4136 if len(parts) != 3:
4137 if len(parts) != 3:
4137 raise ValueError(
4138 raise ValueError(
4138 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
4139 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
4139 self._target_ref = safe_unicode(val)
4140 self._target_ref = safe_unicode(val)
4140
4141
4141 @declared_attr
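# Editor's note (sketch, not original code): source_ref / target_ref strings use
# the three-part "type:name:commit_id" form checked by the setters above, e.g.
# u'branch:default:0123456789abcdef...'; the unicode_to_reference() helper further
# below splits on ':' to build a Reference tuple and reference_to_unicode() joins it back.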
4142 @declared_attr
4142 def target_repo_id(cls):
4143 def target_repo_id(cls):
4143 # TODO: dan: rename column to target_repo_id
4144 # TODO: dan: rename column to target_repo_id
4144 return Column(
4145 return Column(
4145 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
4146 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
4146 nullable=False)
4147 nullable=False)
4147
4148
4148 _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)
4149 _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)
4149
4150
4150 # TODO: dan: rename column to last_merge_source_rev
4151 # TODO: dan: rename column to last_merge_source_rev
4151 _last_merge_source_rev = Column(
4152 _last_merge_source_rev = Column(
4152 'last_merge_org_rev', String(40), nullable=True)
4153 'last_merge_org_rev', String(40), nullable=True)
4153 # TODO: dan: rename column to last_merge_target_rev
4154 # TODO: dan: rename column to last_merge_target_rev
4154 _last_merge_target_rev = Column(
4155 _last_merge_target_rev = Column(
4155 'last_merge_other_rev', String(40), nullable=True)
4156 'last_merge_other_rev', String(40), nullable=True)
4156 _last_merge_status = Column('merge_status', Integer(), nullable=True)
4157 _last_merge_status = Column('merge_status', Integer(), nullable=True)
4157 last_merge_metadata = Column(
4158 last_merge_metadata = Column(
4158 'last_merge_metadata', MutationObj.as_mutable(
4159 'last_merge_metadata', MutationObj.as_mutable(
4159 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4160 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4160
4161
4161 merge_rev = Column('merge_rev', String(40), nullable=True)
4162 merge_rev = Column('merge_rev', String(40), nullable=True)
4162
4163
4163 reviewer_data = Column(
4164 reviewer_data = Column(
4164 'reviewer_data_json', MutationObj.as_mutable(
4165 'reviewer_data_json', MutationObj.as_mutable(
4165 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4166 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4166
4167
4167 @property
4168 @property
4168 def reviewer_data_json(self):
4169 def reviewer_data_json(self):
4169 return json.dumps(self.reviewer_data)
4170 return json.dumps(self.reviewer_data)
4170
4171
4171 @property
4172 @property
4172 def last_merge_metadata_parsed(self):
4173 def last_merge_metadata_parsed(self):
4173 metadata = {}
4174 metadata = {}
4174 if not self.last_merge_metadata:
4175 if not self.last_merge_metadata:
4175 return metadata
4176 return metadata
4176
4177
4177 if hasattr(self.last_merge_metadata, 'de_coerce'):
4178 if hasattr(self.last_merge_metadata, 'de_coerce'):
4178 for k, v in self.last_merge_metadata.de_coerce().items():
4179 for k, v in self.last_merge_metadata.de_coerce().items():
4179 if k in ['target_ref', 'source_ref']:
4180 if k in ['target_ref', 'source_ref']:
4180 metadata[k] = Reference(v['type'], v['name'], v['commit_id'])
4181 metadata[k] = Reference(v['type'], v['name'], v['commit_id'])
4181 else:
4182 else:
4182 if hasattr(v, 'de_coerce'):
4183 if hasattr(v, 'de_coerce'):
4183 metadata[k] = v.de_coerce()
4184 metadata[k] = v.de_coerce()
4184 else:
4185 else:
4185 metadata[k] = v
4186 metadata[k] = v
4186 return metadata
4187 return metadata
4187
4188
4188 @property
4189 @property
4189 def work_in_progress(self):
4190 def work_in_progress(self):
4190 """checks if pull request is work in progress by checking the title"""
4191 """checks if pull request is work in progress by checking the title"""
4191 title = self.title.upper()
4192 title = self.title.upper()
4192 if re.match(r'^(\[WIP\]\s*|WIP:\s*|WIP\s+)', title):
4193 if re.match(r'^(\[WIP\]\s*|WIP:\s*|WIP\s+)', title):
4193 return True
4194 return True
4194 return False
4195 return False
4195
4196
4196 @hybrid_property
4197 @hybrid_property
4197 def description_safe(self):
4198 def description_safe(self):
4198 from rhodecode.lib import helpers as h
4199 from rhodecode.lib import helpers as h
4199 return h.escape(self.description)
4200 return h.escape(self.description)
4200
4201
4201 @hybrid_property
4202 @hybrid_property
4202 def revisions(self):
4203 def revisions(self):
4203 return self._revisions.split(':') if self._revisions else []
4204 return self._revisions.split(':') if self._revisions else []
4204
4205
4205 @revisions.setter
4206 @revisions.setter
4206 def revisions(self, val):
4207 def revisions(self, val):
4207 self._revisions = u':'.join(val)
4208 self._revisions = u':'.join(val)
4208
4209
4209 @hybrid_property
4210 @hybrid_property
4210 def last_merge_status(self):
4211 def last_merge_status(self):
4211 return safe_int(self._last_merge_status)
4212 return safe_int(self._last_merge_status)
4212
4213
4213 @last_merge_status.setter
4214 @last_merge_status.setter
4214 def last_merge_status(self, val):
4215 def last_merge_status(self, val):
4215 self._last_merge_status = val
4216 self._last_merge_status = val
4216
4217
4217 @declared_attr
4218 @declared_attr
4218 def author(cls):
4219 def author(cls):
4219 return relationship('User', lazy='joined')
4220 return relationship('User', lazy='joined')
4220
4221
4221 @declared_attr
4222 @declared_attr
4222 def source_repo(cls):
4223 def source_repo(cls):
4223 return relationship(
4224 return relationship(
4224 'Repository',
4225 'Repository',
4225 primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)
4226 primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)
4226
4227
4227 @property
4228 @property
4228 def source_ref_parts(self):
4229 def source_ref_parts(self):
4229 return self.unicode_to_reference(self.source_ref)
4230 return self.unicode_to_reference(self.source_ref)
4230
4231
4231 @declared_attr
4232 @declared_attr
4232 def target_repo(cls):
4233 def target_repo(cls):
4233 return relationship(
4234 return relationship(
4234 'Repository',
4235 'Repository',
4235 primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)
4236 primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)
4236
4237
4237 @property
4238 @property
4238 def target_ref_parts(self):
4239 def target_ref_parts(self):
4239 return self.unicode_to_reference(self.target_ref)
4240 return self.unicode_to_reference(self.target_ref)
4240
4241
4241 @property
4242 @property
4242 def shadow_merge_ref(self):
4243 def shadow_merge_ref(self):
4243 return self.unicode_to_reference(self._shadow_merge_ref)
4244 return self.unicode_to_reference(self._shadow_merge_ref)
4244
4245
4245 @shadow_merge_ref.setter
4246 @shadow_merge_ref.setter
4246 def shadow_merge_ref(self, ref):
4247 def shadow_merge_ref(self, ref):
4247 self._shadow_merge_ref = self.reference_to_unicode(ref)
4248 self._shadow_merge_ref = self.reference_to_unicode(ref)
4248
4249
4249 @staticmethod
4250 @staticmethod
4250 def unicode_to_reference(raw):
4251 def unicode_to_reference(raw):
4251 """
4252 Convert a unicode (or string) to a reference object.
4253 If unicode evaluates to False it returns None.
4254 """
4255 if raw:
4256 refs = raw.split(':')
4257 return Reference(*refs)
4258 else:
4259 return None
4260
4252 return unicode_to_reference(raw)
4253
4261 @staticmethod
4254 @staticmethod
4262 def reference_to_unicode(ref):
4255 def reference_to_unicode(ref):
4263 """
4264 Convert a reference object to unicode.
4265 If reference is None it returns None.
4266 """
4267 if ref:
4268 return u':'.join(ref)
4269 else:
4270 return None
4271
4256 return reference_to_unicode(ref)
4257
4272 def get_api_data(self, with_merge_state=True):
4258 def get_api_data(self, with_merge_state=True):
4273 from rhodecode.model.pull_request import PullRequestModel
4259 from rhodecode.model.pull_request import PullRequestModel
4274
4260
4275 pull_request = self
4261 pull_request = self
4276 if with_merge_state:
4262 if with_merge_state:
4277 merge_response, merge_status, msg = \
4263 merge_response, merge_status, msg = \
4278 PullRequestModel().merge_status(pull_request)
4264 PullRequestModel().merge_status(pull_request)
4279 merge_state = {
4265 merge_state = {
4280 'status': merge_status,
4266 'status': merge_status,
4281 'message': safe_unicode(msg),
4267 'message': safe_unicode(msg),
4282 }
4268 }
4283 else:
4269 else:
4284 merge_state = {'status': 'not_available',
4270 merge_state = {'status': 'not_available',
4285 'message': 'not_available'}
4271 'message': 'not_available'}
4286
4272
4287 merge_data = {
4273 merge_data = {
4288 'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
4274 'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
4289 'reference': (
4275 'reference': (
4290 pull_request.shadow_merge_ref._asdict()
4276 pull_request.shadow_merge_ref._asdict()
4291 if pull_request.shadow_merge_ref else None),
4277 if pull_request.shadow_merge_ref else None),
4292 }
4278 }
4293
4279
4294 data = {
4280 data = {
4295 'pull_request_id': pull_request.pull_request_id,
4281 'pull_request_id': pull_request.pull_request_id,
4296 'url': PullRequestModel().get_url(pull_request),
4282 'url': PullRequestModel().get_url(pull_request),
4297 'title': pull_request.title,
4283 'title': pull_request.title,
4298 'description': pull_request.description,
4284 'description': pull_request.description,
4299 'status': pull_request.status,
4285 'status': pull_request.status,
4300 'state': pull_request.pull_request_state,
4286 'state': pull_request.pull_request_state,
4301 'created_on': pull_request.created_on,
4287 'created_on': pull_request.created_on,
4302 'updated_on': pull_request.updated_on,
4288 'updated_on': pull_request.updated_on,
4303 'commit_ids': pull_request.revisions,
4289 'commit_ids': pull_request.revisions,
4304 'review_status': pull_request.calculated_review_status(),
4290 'review_status': pull_request.calculated_review_status(),
4305 'mergeable': merge_state,
4291 'mergeable': merge_state,
4306 'source': {
4292 'source': {
4307 'clone_url': pull_request.source_repo.clone_url(),
4293 'clone_url': pull_request.source_repo.clone_url(),
4308 'repository': pull_request.source_repo.repo_name,
4294 'repository': pull_request.source_repo.repo_name,
4309 'reference': {
4295 'reference': {
4310 'name': pull_request.source_ref_parts.name,
4296 'name': pull_request.source_ref_parts.name,
4311 'type': pull_request.source_ref_parts.type,
4297 'type': pull_request.source_ref_parts.type,
4312 'commit_id': pull_request.source_ref_parts.commit_id,
4298 'commit_id': pull_request.source_ref_parts.commit_id,
4313 },
4299 },
4314 },
4300 },
4315 'target': {
4301 'target': {
4316 'clone_url': pull_request.target_repo.clone_url(),
4302 'clone_url': pull_request.target_repo.clone_url(),
4317 'repository': pull_request.target_repo.repo_name,
4303 'repository': pull_request.target_repo.repo_name,
4318 'reference': {
4304 'reference': {
4319 'name': pull_request.target_ref_parts.name,
4305 'name': pull_request.target_ref_parts.name,
4320 'type': pull_request.target_ref_parts.type,
4306 'type': pull_request.target_ref_parts.type,
4321 'commit_id': pull_request.target_ref_parts.commit_id,
4307 'commit_id': pull_request.target_ref_parts.commit_id,
4322 },
4308 },
4323 },
4309 },
4324 'merge': merge_data,
4310 'merge': merge_data,
4325 'author': pull_request.author.get_api_data(include_secrets=False,
4311 'author': pull_request.author.get_api_data(include_secrets=False,
4326 details='basic'),
4312 details='basic'),
4327 'reviewers': [
4313 'reviewers': [
4328 {
4314 {
4329 'user': reviewer.get_api_data(include_secrets=False,
4315 'user': reviewer.get_api_data(include_secrets=False,
4330 details='basic'),
4316 details='basic'),
4331 'reasons': reasons,
4317 'reasons': reasons,
4332 'review_status': st[0][1].status if st else 'not_reviewed',
4318 'review_status': st[0][1].status if st else 'not_reviewed',
4333 }
4319 }
4334 for obj, reviewer, reasons, mandatory, st in
4320 for obj, reviewer, reasons, mandatory, st in
4335 pull_request.reviewers_statuses()
4321 pull_request.reviewers_statuses()
4336 ]
4322 ]
4337 }
4323 }
4338
4324
4339 return data
4325 return data
4340
4326
4341 def set_state(self, pull_request_state, final_state=None):
4327 def set_state(self, pull_request_state, final_state=None):
4342 """
4328 """
4343 # goes from initial state to updating to initial state.
4329 # goes from initial state to updating to initial state.
4344 # initial state can be changed by specifying back_state=
4330 # initial state can be changed by specifying back_state=
4345 with pull_request_obj.set_state(PullRequest.STATE_UPDATING):
4331 with pull_request_obj.set_state(PullRequest.STATE_UPDATING):
4346 pull_request.merge()
4332 pull_request.merge()
4347
4333
4348 :param pull_request_state: temporary state to set while the block runs
4334 :param pull_request_state: temporary state to set while the block runs
4349 :param final_state: optional state to restore on exit; defaults to the state at entry
4335 :param final_state: optional state to restore on exit; defaults to the state at entry
4350
4336
4351 """
4337 """
4352
4338
4353 return _SetState(self, pull_request_state, back_state=final_state)
4339 return _SetState(self, pull_request_state, back_state=final_state)
4354
4340
4355
4341
class PullRequest(Base, _PullRequestBase):
    __tablename__ = 'pull_requests'
    __table_args__ = (
        base_table_args,
    )
    LATEST_VER = 'latest'

    pull_request_id = Column(
        'pull_request_id', Integer(), nullable=False, primary_key=True)

    def __repr__(self):
        if self.pull_request_id:
            return '<DB:PullRequest #%s>' % self.pull_request_id
        else:
            return '<DB:PullRequest at %#x>' % id(self)

    reviewers = relationship('PullRequestReviewers', cascade="all, delete-orphan")
    statuses = relationship('ChangesetStatus', cascade="all, delete-orphan")
    comments = relationship('ChangesetComment', cascade="all, delete-orphan")
    versions = relationship('PullRequestVersion', cascade="all, delete-orphan",
                            lazy='dynamic')

    @classmethod
    def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
                              internal_methods=None):

        class PullRequestDisplay(object):
            """
            Special object wrapper for showing PullRequest data via Versions
            It mimics PR object as close as possible. This is read only object
            just for display
            """

            def __init__(self, attrs, internal=None):
                self.attrs = attrs
                # internal have priority over the given ones via attrs
                self.internal = internal or ['versions']

            def __getattr__(self, item):
                if item in self.internal:
                    return getattr(self, item)
                try:
                    return self.attrs[item]
                except KeyError:
                    raise AttributeError(
                        '%s object has no attribute %s' % (self, item))

            def __repr__(self):
                return '<DB:PullRequestDisplay #%s>' % self.attrs.get('pull_request_id')

            def versions(self):
                return pull_request_obj.versions.order_by(
                    PullRequestVersion.pull_request_version_id).all()

            def is_closed(self):
                return pull_request_obj.is_closed()

            def is_state_changing(self):
                return pull_request_obj.is_state_changing()

            @property
            def pull_request_version_id(self):
                return getattr(pull_request_obj, 'pull_request_version_id', None)

            @property
            def pull_request_last_version(self):
                return pull_request_obj.pull_request_last_version

        attrs = StrictAttributeDict(pull_request_obj.get_api_data(with_merge_state=False))

        attrs.author = StrictAttributeDict(
            pull_request_obj.author.get_api_data())
        if pull_request_obj.target_repo:
            attrs.target_repo = StrictAttributeDict(
                pull_request_obj.target_repo.get_api_data())
            attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url

        if pull_request_obj.source_repo:
            attrs.source_repo = StrictAttributeDict(
                pull_request_obj.source_repo.get_api_data())
            attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url

        attrs.source_ref_parts = pull_request_obj.source_ref_parts
        attrs.target_ref_parts = pull_request_obj.target_ref_parts
        attrs.revisions = pull_request_obj.revisions
        attrs.common_ancestor_id = pull_request_obj.common_ancestor_id
        attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
        attrs.reviewer_data = org_pull_request_obj.reviewer_data
        attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json

        return PullRequestDisplay(attrs, internal=internal_methods)

    def is_closed(self):
        return self.status == self.STATUS_CLOSED

    def is_state_changing(self):
        return self.pull_request_state != PullRequest.STATE_CREATED

    def __json__(self):
        return {
            'revisions': self.revisions,
            'versions': self.versions_count
        }

    def calculated_review_status(self):
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().calculated_review_status(self)

    def reviewers_statuses(self):
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().reviewers_statuses(self)

    def get_pull_request_reviewers(self, role=None):
        qry = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)
        if role:
            qry = qry.filter(PullRequestReviewers.role == role)

        return qry.all()

    @property
    def reviewers_count(self):
        qry = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
            .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER)
        return qry.count()

    @property
    def observers_count(self):
        qry = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
            .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER)
        return qry.count()

    def observers(self):
        qry = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
            .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER)\
            .all()

        for entry in qry:
            yield entry, entry.user

    @property
    def workspace_id(self):
        from rhodecode.model.pull_request import PullRequestModel
        return PullRequestModel()._workspace_id(self)

    def get_shadow_repo(self):
        workspace_id = self.workspace_id
        shadow_repository_path = self.target_repo.get_shadow_repository_path(workspace_id)
        if os.path.isdir(shadow_repository_path):
            vcs_obj = self.target_repo.scm_instance()
            return vcs_obj.get_shadow_instance(shadow_repository_path)

    @property
    def versions_count(self):
        """
        Return the number of versions this PR has, e.g. a PR that has been
        updated once will have 2 versions
        """
        return self.versions.count() + 1

    @property
    def pull_request_last_version(self):
        return self.versions_count

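# Usage sketch (illustrative only): reviewers and observers share the
# PullRequestReviewers table and differ only by the `role` column, e.g.
#
#     pr = PullRequest.get(pull_request_id)   # hypothetical lookup
#     reviewer_rows = pr.get_pull_request_reviewers(
#         role=PullRequestReviewers.ROLE_REVIEWER)
#     for entry, user in pr.observers():
#         ...  # `entry` is the PullRequestReviewers row, `user` its User
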
class PullRequestVersion(Base, _PullRequestBase):
    __tablename__ = 'pull_request_versions'
    __table_args__ = (
        base_table_args,
    )

    pull_request_version_id = Column(
        'pull_request_version_id', Integer(), nullable=False, primary_key=True)
    pull_request_id = Column(
        'pull_request_id', Integer(),
        ForeignKey('pull_requests.pull_request_id'), nullable=False)
    pull_request = relationship('PullRequest')

    def __repr__(self):
        if self.pull_request_version_id:
            return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
        else:
            return '<DB:PullRequestVersion at %#x>' % id(self)

    @property
    def reviewers(self):
        return self.pull_request.reviewers

    @property
    def versions(self):
        return self.pull_request.versions

    def is_closed(self):
        # calculate from original
        return self.pull_request.status == self.STATUS_CLOSED

    def is_state_changing(self):
        return self.pull_request.pull_request_state != PullRequest.STATE_CREATED

    def calculated_review_status(self):
        return self.pull_request.calculated_review_status()

    def reviewers_statuses(self):
        return self.pull_request.reviewers_statuses()

    def observer(self):
        return self.pull_request.observers()

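# Note (illustrative): a PullRequestVersion is a historical snapshot; the
# status helpers above deliberately delegate to the live pull request, so e.g.
#
#     version.calculated_review_status()
#
# is equivalent to version.pull_request.calculated_review_status().
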
class PullRequestReviewers(Base, BaseModel):
    __tablename__ = 'pull_request_reviewers'
    __table_args__ = (
        base_table_args,
    )
    ROLE_REVIEWER = u'reviewer'
    ROLE_OBSERVER = u'observer'
    ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]

    @hybrid_property
    def reasons(self):
        if not self._reasons:
            return []
        return self._reasons

    @reasons.setter
    def reasons(self, val):
        val = val or []
        if any(not isinstance(x, compat.string_types) for x in val):
            raise Exception('invalid reasons type, must be list of strings')
        self._reasons = val

    pull_requests_reviewers_id = Column(
        'pull_requests_reviewers_id', Integer(), nullable=False,
        primary_key=True)
    pull_request_id = Column(
        "pull_request_id", Integer(),
        ForeignKey('pull_requests.pull_request_id'), nullable=False)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
    _reasons = Column(
        'reason', MutationList.as_mutable(
            JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))

    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)

    user = relationship('User')
    pull_request = relationship('PullRequest')

    rule_data = Column(
        'rule_data_json',
        JsonType(dialect_map=dict(mysql=UnicodeText(16384))))

    def rule_user_group_data(self):
        """
        Returns the voting user group rule data for this reviewer
        """

        if self.rule_data and 'vote_rule' in self.rule_data:
            user_group_data = {}
            if 'rule_user_group_entry_id' in self.rule_data:
                # means a group with voting rules !
                user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
                user_group_data['name'] = self.rule_data['rule_name']
                user_group_data['vote_rule'] = self.rule_data['vote_rule']

            return user_group_data

    @classmethod
    def get_pull_request_reviewers(cls, pull_request_id, role=None):
        qry = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.pull_request_id == pull_request_id)
        if role:
            qry = qry.filter(PullRequestReviewers.role == role)

        return qry.all()

    def __unicode__(self):
        return u"<%s('id:%s')>" % (self.__class__.__name__,
                                   self.pull_requests_reviewers_id)

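# Illustrative sketch: `reasons` is a JSON-backed list and the setter only
# accepts lists of strings, e.g.
#
#     entry.reasons = ['added by repository review rule']   # stored as JSON
#     entry.reasons = [42]   # raises: invalid reasons type, must be list of strings
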
class Notification(Base, BaseModel):
    __tablename__ = 'notifications'
    __table_args__ = (
        Index('notification_type_idx', 'type'),
        base_table_args,
    )

    TYPE_CHANGESET_COMMENT = u'cs_comment'
    TYPE_MESSAGE = u'message'
    TYPE_MENTION = u'mention'
    TYPE_REGISTRATION = u'registration'
    TYPE_PULL_REQUEST = u'pull_request'
    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
    TYPE_PULL_REQUEST_UPDATE = u'pull_request_update'

    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
    subject = Column('subject', Unicode(512), nullable=True)
    body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    type_ = Column('type', Unicode(255))

    created_by_user = relationship('User')
    notifications_to_users = relationship('UserNotification', lazy='joined',
                                          cascade="all, delete-orphan")

    @property
    def recipients(self):
        return [x.user for x in UserNotification.query()\
                .filter(UserNotification.notification == self)\
                .order_by(UserNotification.user_id.asc()).all()]

    @classmethod
    def create(cls, created_by, subject, body, recipients, type_=None):
        if type_ is None:
            type_ = Notification.TYPE_MESSAGE

        notification = cls()
        notification.created_by_user = created_by
        notification.subject = subject
        notification.body = body
        notification.type_ = type_
        notification.created_on = datetime.datetime.now()

        # For each recipient link the created notification to his account
        for u in recipients:
            assoc = UserNotification()
            assoc.user_id = u.user_id
            assoc.notification = notification

            # if created_by is inside recipients mark his notification
            # as read
            if u.user_id == created_by.user_id:
                assoc.read = True
            Session().add(assoc)

        Session().add(notification)

        return notification

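# Usage sketch (illustrative; `admin_user` and `users` are hypothetical):
# create() links every recipient via UserNotification and only adds objects to
# the session, so committing is up to the caller.
#
#     notification = Notification.create(
#         created_by=admin_user, subject='Deploy finished', body='...',
#         recipients=users, type_=Notification.TYPE_MESSAGE)
#     Session().commit()
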
class UserNotification(Base, BaseModel):
    __tablename__ = 'user_to_notification'
    __table_args__ = (
        UniqueConstraint('user_id', 'notification_id'),
        base_table_args
    )

    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
    read = Column('read', Boolean, default=False)
    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)

    user = relationship('User', lazy="joined")
    notification = relationship('Notification', lazy="joined",
                                order_by=lambda: Notification.created_on.desc(),)

    def mark_as_read(self):
        self.read = True
        Session().add(self)

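# Illustrative: mark_as_read() only flips the flag and re-adds the row to the
# session; persisting it is left to the caller.
#
#     user_notification.mark_as_read()
#     Session().commit()
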
class UserNotice(Base, BaseModel):
    __tablename__ = 'user_notices'
    __table_args__ = (
        base_table_args
    )

    NOTIFICATION_TYPE_MESSAGE = 'message'
    NOTIFICATION_TYPE_NOTICE = 'notice'

    NOTIFICATION_LEVEL_INFO = 'info'
    NOTIFICATION_LEVEL_WARNING = 'warning'
    NOTIFICATION_LEVEL_ERROR = 'error'

    user_notice_id = Column('gist_id', Integer(), primary_key=True)

    notice_subject = Column('notice_subject', Unicode(512), nullable=True)
    notice_body = Column('notice_body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)

    notice_read = Column('notice_read', Boolean, default=False)

    notification_level = Column('notification_level', String(1024), default=NOTIFICATION_LEVEL_INFO)
    notification_type = Column('notification_type', String(1024), default=NOTIFICATION_TYPE_NOTICE)

    notice_created_by = Column('notice_created_by', Integer(), ForeignKey('users.user_id'), nullable=True)
    notice_created_on = Column('notice_created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'))
    user = relationship('User', lazy="joined", primaryjoin='User.user_id==UserNotice.user_id')

    @classmethod
    def create_for_user(cls, user, subject, body, notice_level=NOTIFICATION_LEVEL_INFO, allow_duplicate=False):

        if notice_level not in [cls.NOTIFICATION_LEVEL_ERROR,
                                cls.NOTIFICATION_LEVEL_WARNING,
                                cls.NOTIFICATION_LEVEL_INFO]:
            return

        from rhodecode.model.user import UserModel
        user = UserModel().get_user(user)

        new_notice = UserNotice()
        if not allow_duplicate:
            existing_msg = UserNotice().query() \
                .filter(UserNotice.user == user) \
                .filter(UserNotice.notice_body == body) \
                .filter(UserNotice.notice_read == false()) \
                .scalar()
            if existing_msg:
                log.warning('Ignoring duplicate notice for user %s', user)
                return

        new_notice.user = user
        new_notice.notice_subject = subject
        new_notice.notice_body = body
        new_notice.notification_level = notice_level
        Session().add(new_notice)
        Session().commit()

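# Usage sketch (illustrative values): unread notices with an identical body are
# skipped unless allow_duplicate=True.
#
#     UserNotice.create_for_user(
#         user, subject='Password expires soon', body='Please rotate it.',
#         notice_level=UserNotice.NOTIFICATION_LEVEL_WARNING)
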
class Gist(Base, BaseModel):
    __tablename__ = 'gists'
    __table_args__ = (
        Index('g_gist_access_id_idx', 'gist_access_id'),
        Index('g_created_on_idx', 'created_on'),
        base_table_args
    )

    GIST_PUBLIC = u'public'
    GIST_PRIVATE = u'private'
    DEFAULT_FILENAME = u'gistfile1.txt'

    ACL_LEVEL_PUBLIC = u'acl_public'
    ACL_LEVEL_PRIVATE = u'acl_private'

    gist_id = Column('gist_id', Integer(), primary_key=True)
    gist_access_id = Column('gist_access_id', Unicode(250))
    gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
    gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
    gist_expires = Column('gist_expires', Float(53), nullable=False)
    gist_type = Column('gist_type', Unicode(128), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    acl_level = Column('acl_level', Unicode(128), nullable=True)

    owner = relationship('User')

    def __repr__(self):
        return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)

    @hybrid_property
    def description_safe(self):
        from rhodecode.lib import helpers as h
        return h.escape(self.gist_description)

    @classmethod
    def get_or_404(cls, id_):
        from pyramid.httpexceptions import HTTPNotFound

        res = cls.query().filter(cls.gist_access_id == id_).scalar()
        if not res:
            raise HTTPNotFound()
        return res

    @classmethod
    def get_by_access_id(cls, gist_access_id):
        return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()

    def gist_url(self):
        from rhodecode.model.gist import GistModel
        return GistModel().get_url(self)

    @classmethod
    def base_path(cls):
        """
        Returns the base path where all gists are stored

        :param cls:
        """
        from rhodecode.model.gist import GIST_STORE_LOC
        q = Session().query(RhodeCodeUi)\
            .filter(RhodeCodeUi.ui_key == URL_SEP)
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return os.path.join(q.one().ui_value, GIST_STORE_LOC)

    def get_api_data(self):
        """
        Common function for generating gist related data for API
        """
        gist = self
        data = {
            'gist_id': gist.gist_id,
            'type': gist.gist_type,
            'access_id': gist.gist_access_id,
            'description': gist.gist_description,
            'url': gist.gist_url(),
            'expires': gist.gist_expires,
            'created_on': gist.created_on,
            'modified_at': gist.modified_at,
            'content': None,
            'acl_level': gist.acl_level,
        }
        return data

    def __json__(self):
        data = dict(
        )
        data.update(self.get_api_data())
        return data
    # SCM functions

    def scm_instance(self, **kwargs):
        """
        Get an instance of VCS Repository

        :param kwargs:
        """
        from rhodecode.model.gist import GistModel
        full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
        return get_vcs_instance(
            repo_path=safe_str(full_repo_path), create=False,
            _vcs_alias=GistModel.vcs_backend)

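# Illustrative sketch: API payloads come from get_api_data() (where 'content'
# stays None), while the backing repository is reached via scm_instance().
#
#     gist = Gist.get_by_access_id(gist_access_id)   # None when not found
#     payload = gist.get_api_data()
#     vcs_repo = gist.scm_instance()
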
class ExternalIdentity(Base, BaseModel):
    __tablename__ = 'external_identities'
    __table_args__ = (
        Index('local_user_id_idx', 'local_user_id'),
        Index('external_id_idx', 'external_id'),
        base_table_args
    )

    external_id = Column('external_id', Unicode(255), default=u'', primary_key=True)
    external_username = Column('external_username', Unicode(1024), default=u'')
    local_user_id = Column('local_user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
    provider_name = Column('provider_name', Unicode(255), default=u'', primary_key=True)
    access_token = Column('access_token', String(1024), default=u'')
    alt_token = Column('alt_token', String(1024), default=u'')
    token_secret = Column('token_secret', String(1024), default=u'')

    @classmethod
    def by_external_id_and_provider(cls, external_id, provider_name, local_user_id=None):
        """
        Returns ExternalIdentity instance based on search params

        :param external_id:
        :param provider_name:
        :return: ExternalIdentity
        """
        query = cls.query()
        query = query.filter(cls.external_id == external_id)
        query = query.filter(cls.provider_name == provider_name)
        if local_user_id:
            query = query.filter(cls.local_user_id == local_user_id)
        return query.first()

    @classmethod
    def user_by_external_id_and_provider(cls, external_id, provider_name):
        """
        Returns User instance based on search params

        :param external_id:
        :param provider_name:
        :return: User
        """
        query = User.query()
        query = query.filter(cls.external_id == external_id)
        query = query.filter(cls.provider_name == provider_name)
        query = query.filter(User.user_id == cls.local_user_id)
        return query.first()

    @classmethod
    def by_local_user_id(cls, local_user_id):
        """
        Returns all tokens for user

        :param local_user_id:
        :return: ExternalIdentity
        """
        query = cls.query()
        query = query.filter(cls.local_user_id == local_user_id)
        return query

    @classmethod
    def load_provider_plugin(cls, plugin_id):
        from rhodecode.authentication.base import loadplugin
        _plugin_id = 'egg:rhodecode-enterprise-ee#{}'.format(plugin_id)
        auth_plugin = loadplugin(_plugin_id)
        return auth_plugin

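# Usage sketch (the provider name and external id below are made up):
#
#     identity = ExternalIdentity.by_external_id_and_provider('12345', 'github')
#     user = ExternalIdentity.user_by_external_id_and_provider('12345', 'github')
#     tokens = ExternalIdentity.by_local_user_id(user.user_id).all()
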
class Integration(Base, BaseModel):
    __tablename__ = 'integrations'
    __table_args__ = (
        base_table_args
    )

    integration_id = Column('integration_id', Integer(), primary_key=True)
    integration_type = Column('integration_type', String(255))
    enabled = Column('enabled', Boolean(), nullable=False)
    name = Column('name', String(255), nullable=False)
    child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
                              default=False)

    settings = Column(
        'settings_json', MutationObj.as_mutable(
            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
    repo_id = Column(
        'repo_id', Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined')

    repo_group_id = Column(
        'repo_group_id', Integer(), ForeignKey('groups.group_id'),
        nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined')

    @property
    def scope(self):
        if self.repo:
            return repr(self.repo)
        if self.repo_group:
            if self.child_repos_only:
                return repr(self.repo_group) + ' (child repos only)'
            else:
                return repr(self.repo_group) + ' (recursive)'
        if self.child_repos_only:
            return 'root_repos'
        return 'global'

    def __repr__(self):
        return '<Integration(%r, %r)>' % (self.integration_type, self.scope)

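# Illustrative: `scope` is a human readable summary of where an integration
# applies - a repo, a repo group (optionally child repos only), root repos
# only, or 'global' when neither repo nor repo group is set.
#
#     integration.scope   # e.g. 'global' or 'root_repos'
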
class RepoReviewRuleUser(Base, BaseModel):
    __tablename__ = 'repo_review_rules_users'
    __table_args__ = (
        base_table_args
    )
    ROLE_REVIEWER = u'reviewer'
    ROLE_OBSERVER = u'observer'
    ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]

    repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
    repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
    user = relationship('User')

    def rule_data(self):
        return {
            'mandatory': self.mandatory,
            'role': self.role,
        }

class RepoReviewRuleUserGroup(Base, BaseModel):
    __tablename__ = 'repo_review_rules_users_groups'
    __table_args__ = (
        base_table_args
    )

    VOTE_RULE_ALL = -1
    ROLE_REVIEWER = u'reviewer'
    ROLE_OBSERVER = u'observer'
    ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]

    repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
    repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False)
    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
    vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
    users_group = relationship('UserGroup')

    def rule_data(self):
        return {
            'mandatory': self.mandatory,
            'role': self.role,
            'vote_rule': self.vote_rule
        }

    @property
    def vote_rule_label(self):
        if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
            return 'all must vote'
        else:
            return 'min. vote {}'.format(self.vote_rule)

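# Illustrative: vote_rule == VOTE_RULE_ALL (-1) means every group member must
# vote; a positive value is rendered by vote_rule_label, e.g.
#
#     rule_group.vote_rule = 2
#     rule_group.vote_rule_label   # 'min. vote 2'
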
5053 class RepoReviewRule(Base, BaseModel):
5042 class RepoReviewRule(Base, BaseModel):
5054 __tablename__ = 'repo_review_rules'
5043 __tablename__ = 'repo_review_rules'
5055 __table_args__ = (
5044 __table_args__ = (
5056 base_table_args
5045 base_table_args
5057 )
5046 )
5058
5047
5059 repo_review_rule_id = Column(
5048 repo_review_rule_id = Column(
5060 'repo_review_rule_id', Integer(), primary_key=True)
5049 'repo_review_rule_id', Integer(), primary_key=True)
5061 repo_id = Column(
5050 repo_id = Column(
5062 "repo_id", Integer(), ForeignKey('repositories.repo_id'))
5051 "repo_id", Integer(), ForeignKey('repositories.repo_id'))
5063 repo = relationship('Repository', backref='review_rules')
5052 repo = relationship('Repository', backref='review_rules')
5064
5053
5065 review_rule_name = Column('review_rule_name', String(255))
5054 review_rule_name = Column('review_rule_name', String(255))
5066 _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
5055 _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
5067 _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
5056 _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
5068 _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
5057 _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
5069
5058
5070 use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
5059 use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
5071 forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
5060 forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
5072 forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
5061 forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
5073 forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)
5062 forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)
5074
5063
5075 rule_users = relationship('RepoReviewRuleUser')
5064 rule_users = relationship('RepoReviewRuleUser')
5076 rule_user_groups = relationship('RepoReviewRuleUserGroup')
5065 rule_user_groups = relationship('RepoReviewRuleUserGroup')
5077
5066
5078 def _validate_pattern(self, value):
5067 def _validate_pattern(self, value):
5079 re.compile('^' + glob2re(value) + '$')
5068 re.compile('^' + glob2re(value) + '$')
5080
5069
5081 @hybrid_property
5070 @hybrid_property
5082 def source_branch_pattern(self):
5071 def source_branch_pattern(self):
5083 return self._branch_pattern or '*'
5072 return self._branch_pattern or '*'
5084
5073
5085 @source_branch_pattern.setter
5074 @source_branch_pattern.setter
5086 def source_branch_pattern(self, value):
5075 def source_branch_pattern(self, value):
5087 self._validate_pattern(value)
5076 self._validate_pattern(value)
5088 self._branch_pattern = value or '*'
5077 self._branch_pattern = value or '*'
5089
5078
5090 @hybrid_property
5079 @hybrid_property
5091 def target_branch_pattern(self):
5080 def target_branch_pattern(self):
5092 return self._target_branch_pattern or '*'
5081 return self._target_branch_pattern or '*'
5093
5082
5094 @target_branch_pattern.setter
5083 @target_branch_pattern.setter
5095 def target_branch_pattern(self, value):
5084 def target_branch_pattern(self, value):
5096 self._validate_pattern(value)
5085 self._validate_pattern(value)
5097 self._target_branch_pattern = value or '*'
5086 self._target_branch_pattern = value or '*'
5098
5087
5099 @hybrid_property
5088 @hybrid_property
5100 def file_pattern(self):
5089 def file_pattern(self):
5101 return self._file_pattern or '*'
5090 return self._file_pattern or '*'
5102
5091
5103 @file_pattern.setter
5092 @file_pattern.setter
5104 def file_pattern(self, value):
5093 def file_pattern(self, value):
5105 self._validate_pattern(value)
5094 self._validate_pattern(value)
5106 self._file_pattern = value or '*'
5095 self._file_pattern = value or '*'
5107
5096
5108 def matches(self, source_branch, target_branch, files_changed):
5097 def matches(self, source_branch, target_branch, files_changed):
5109 """
5098 """
5110 Check if this review rule matches a branch/files in a pull request
5099 Check if this review rule matches a branch/files in a pull request
5111
5100
5112 :param source_branch: source branch name for the commit
5101 :param source_branch: source branch name for the commit
5113 :param target_branch: target branch name for the commit
5102 :param target_branch: target branch name for the commit
5114 :param files_changed: list of file paths changed in the pull request
5103 :param files_changed: list of file paths changed in the pull request
5115 """
5104 """
5116
5105
5117 source_branch = source_branch or ''
5106 source_branch = source_branch or ''
5118 target_branch = target_branch or ''
5107 target_branch = target_branch or ''
5119 files_changed = files_changed or []
5108 files_changed = files_changed or []
5120
5109
5121 branch_matches = True
5110 branch_matches = True
5122 if source_branch or target_branch:
5111 if source_branch or target_branch:
5123 if self.source_branch_pattern == '*':
5112 if self.source_branch_pattern == '*':
5124 source_branch_match = True
5113 source_branch_match = True
5125 else:
5114 else:
5126 if self.source_branch_pattern.startswith('re:'):
5115 if self.source_branch_pattern.startswith('re:'):
5127 source_pattern = self.source_branch_pattern[3:]
5116 source_pattern = self.source_branch_pattern[3:]
5128 else:
5117 else:
5129 source_pattern = '^' + glob2re(self.source_branch_pattern) + '$'
5118 source_pattern = '^' + glob2re(self.source_branch_pattern) + '$'
5130 source_branch_regex = re.compile(source_pattern)
5119 source_branch_regex = re.compile(source_pattern)
5131 source_branch_match = bool(source_branch_regex.search(source_branch))
5120 source_branch_match = bool(source_branch_regex.search(source_branch))
5132 if self.target_branch_pattern == '*':
5121 if self.target_branch_pattern == '*':
5133 target_branch_match = True
5122 target_branch_match = True
5134 else:
5123 else:
5135 if self.target_branch_pattern.startswith('re:'):
5124 if self.target_branch_pattern.startswith('re:'):
5136 target_pattern = self.target_branch_pattern[3:]
5125 target_pattern = self.target_branch_pattern[3:]
5137 else:
5126 else:
5138 target_pattern = '^' + glob2re(self.target_branch_pattern) + '$'
5127 target_pattern = '^' + glob2re(self.target_branch_pattern) + '$'
5139 target_branch_regex = re.compile(target_pattern)
5128 target_branch_regex = re.compile(target_pattern)
5140 target_branch_match = bool(target_branch_regex.search(target_branch))
5129 target_branch_match = bool(target_branch_regex.search(target_branch))
5141
5130
5142 branch_matches = source_branch_match and target_branch_match
5131 branch_matches = source_branch_match and target_branch_match
5143
5132
5144 files_matches = True
5133 files_matches = True
5145 if self.file_pattern != '*':
5134 if self.file_pattern != '*':
5146 files_matches = False
5135 files_matches = False
5147 if self.file_pattern.startswith('re:'):
5136 if self.file_pattern.startswith('re:'):
5148 file_pattern = self.file_pattern[3:]
5137 file_pattern = self.file_pattern[3:]
5149 else:
5138 else:
5150 file_pattern = glob2re(self.file_pattern)
5139 file_pattern = glob2re(self.file_pattern)
5151 file_regex = re.compile(file_pattern)
5140 file_regex = re.compile(file_pattern)
5152 for file_data in files_changed:
5141 for file_data in files_changed:
5153 filename = file_data.get('filename')
5142 filename = file_data.get('filename')
5154
5143
5155 if file_regex.search(filename):
5144 if file_regex.search(filename):
5156 files_matches = True
5145 files_matches = True
5157 break
5146 break
5158
5147
5159 return branch_matches and files_matches
5148 return branch_matches and files_matches
5160
5149
5161 @property
5150 @property
5162 def review_users(self):
5151 def review_users(self):
5163 """ Returns the users which this rule applies to """
5152 """ Returns the users which this rule applies to """
5164
5153
5165 users = collections.OrderedDict()
5154 users = collections.OrderedDict()
5166
5155
5167 for rule_user in self.rule_users:
5156 for rule_user in self.rule_users:
5168 if rule_user.user.active:
5157 if rule_user.user.active:
5169 if rule_user.user not in users:
5158 if rule_user.user not in users:
5170 users[rule_user.user.username] = {
5159 users[rule_user.user.username] = {
5171 'user': rule_user.user,
5160 'user': rule_user.user,
5172 'source': 'user',
5161 'source': 'user',
5173 'source_data': {},
5162 'source_data': {},
5174 'data': rule_user.rule_data()
5163 'data': rule_user.rule_data()
5175 }
5164 }
5176
5165
5177 for rule_user_group in self.rule_user_groups:
5166 for rule_user_group in self.rule_user_groups:
5178 source_data = {
5167 source_data = {
5179 'user_group_id': rule_user_group.users_group.users_group_id,
5168 'user_group_id': rule_user_group.users_group.users_group_id,
5180 'name': rule_user_group.users_group.users_group_name,
5169 'name': rule_user_group.users_group.users_group_name,
5181 'members': len(rule_user_group.users_group.members)
5170 'members': len(rule_user_group.users_group.members)
5182 }
5171 }
5183 for member in rule_user_group.users_group.members:
5172 for member in rule_user_group.users_group.members:
5184 if member.user.active:
5173 if member.user.active:
5185 key = member.user.username
5174 key = member.user.username
5186 if key in users:
5175 if key in users:
5187                             # skip this member, we already have them
5176                             # skip this member, we already have them
5188                             # this prevents duplicates in multiple groups from
5177                             # this prevents duplicates in multiple groups from
5189                             # overriding the "first" matched user
5178                             # overriding the "first" matched user
5190 continue
5179 continue
5191
5180
5192 users[key] = {
5181 users[key] = {
5193 'user': member.user,
5182 'user': member.user,
5194 'source': 'user_group',
5183 'source': 'user_group',
5195 'source_data': source_data,
5184 'source_data': source_data,
5196 'data': rule_user_group.rule_data()
5185 'data': rule_user_group.rule_data()
5197 }
5186 }
5198
5187
5199 return users
5188 return users
5200
5189
5201 def user_group_vote_rule(self, user_id):
5190 def user_group_vote_rule(self, user_id):
5202
5191
5203 rules = []
5192 rules = []
5204 if not self.rule_user_groups:
5193 if not self.rule_user_groups:
5205 return rules
5194 return rules
5206
5195
5207 for user_group in self.rule_user_groups:
5196 for user_group in self.rule_user_groups:
5208 user_group_members = [x.user_id for x in user_group.users_group.members]
5197 user_group_members = [x.user_id for x in user_group.users_group.members]
5209 if user_id in user_group_members:
5198 if user_id in user_group_members:
5210 rules.append(user_group)
5199 rules.append(user_group)
5211 return rules
5200 return rules
5212
5201
5213 def __repr__(self):
5202 def __repr__(self):
5214 return '<RepoReviewerRule(id=%r, repo=%r)>' % (
5203 return '<RepoReviewerRule(id=%r, repo=%r)>' % (
5215 self.repo_review_rule_id, self.repo)
5204 self.repo_review_rule_id, self.repo)
5216
5205
5217
5206
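# A minimal, self-contained sketch of how the review-rule patterns above are evaluated:
# '*' matches everything, a 're:' prefix is used as a raw regular expression, and any
# other value is treated as a glob turned into a regex. fnmatch.translate is only a rough
# stand-in for RhodeCode's glob2re helper, and the sample pattern/branch values below are
# hypothetical; this block is illustration, not part of the model code.
import re
from fnmatch import translate

def _pattern_matches(pattern, value):
    if pattern == '*':
        return True
    if pattern.startswith('re:'):
        regex = re.compile(pattern[3:])
    else:
        # the model wraps glob2re() output in '^...$'; translate() is already end-anchored
        regex = re.compile(translate(pattern))
    return bool(regex.search(value))

assert _pattern_matches('release/*', 'release/1.2')
assert _pattern_matches('re:^feat/.+$', 'feat/login')
assert not _pattern_matches('master', 'develop')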
5218 class ScheduleEntry(Base, BaseModel):
5207 class ScheduleEntry(Base, BaseModel):
5219 __tablename__ = 'schedule_entries'
5208 __tablename__ = 'schedule_entries'
5220 __table_args__ = (
5209 __table_args__ = (
5221 UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
5210 UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
5222 UniqueConstraint('task_uid', name='s_task_uid_idx'),
5211 UniqueConstraint('task_uid', name='s_task_uid_idx'),
5223 base_table_args,
5212 base_table_args,
5224 )
5213 )
5225
5214
5226 schedule_types = ['crontab', 'timedelta', 'integer']
5215 schedule_types = ['crontab', 'timedelta', 'integer']
5227 schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)
5216 schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)
5228
5217
5229 schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
5218 schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
5230 schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
5219 schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
5231 schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)
5220 schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)
5232
5221
5233 _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
5222 _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
5234 schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))
5223 schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))
5235
5224
5236 schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
5225 schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
5237 schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)
5226 schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)
5238
5227
5239 # task
5228 # task
5240 task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
5229 task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
5241 task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
5230 task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
5242 task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
5231 task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
5243 task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))
5232 task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))
5244
5233
5245 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5234 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5246 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)
5235 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)
5247
5236
5248 @hybrid_property
5237 @hybrid_property
5249 def schedule_type(self):
5238 def schedule_type(self):
5250 return self._schedule_type
5239 return self._schedule_type
5251
5240
5252 @schedule_type.setter
5241 @schedule_type.setter
5253 def schedule_type(self, val):
5242 def schedule_type(self, val):
5254 if val not in self.schedule_types:
5243 if val not in self.schedule_types:
5255             raise ValueError('Value must be one of `{}` and got `{}`'.format(
5244             raise ValueError('Value must be one of `{}` and got `{}`'.format(
5256                 self.schedule_types, val))
5245                 self.schedule_types, val))
5257
5246
5258 self._schedule_type = val
5247 self._schedule_type = val
5259
5248
5260 @classmethod
5249 @classmethod
5261 def get_uid(cls, obj):
5250 def get_uid(cls, obj):
5262 args = obj.task_args
5251 args = obj.task_args
5263 kwargs = obj.task_kwargs
5252 kwargs = obj.task_kwargs
5264 if isinstance(args, JsonRaw):
5253 if isinstance(args, JsonRaw):
5265 try:
5254 try:
5266 args = json.loads(args)
5255 args = json.loads(args)
5267 except ValueError:
5256 except ValueError:
5268 args = tuple()
5257 args = tuple()
5269
5258
5270 if isinstance(kwargs, JsonRaw):
5259 if isinstance(kwargs, JsonRaw):
5271 try:
5260 try:
5272 kwargs = json.loads(kwargs)
5261 kwargs = json.loads(kwargs)
5273 except ValueError:
5262 except ValueError:
5274 kwargs = dict()
5263 kwargs = dict()
5275
5264
5276 dot_notation = obj.task_dot_notation
5265 dot_notation = obj.task_dot_notation
5277 val = '.'.join(map(safe_str, [
5266 val = '.'.join(map(safe_str, [
5278 sorted(dot_notation), args, sorted(kwargs.items())]))
5267 sorted(dot_notation), args, sorted(kwargs.items())]))
5279 return hashlib.sha1(val).hexdigest()
5268 return hashlib.sha1(val).hexdigest()
5280
5269
5281 @classmethod
5270 @classmethod
5282 def get_by_schedule_name(cls, schedule_name):
5271 def get_by_schedule_name(cls, schedule_name):
5283 return cls.query().filter(cls.schedule_name == schedule_name).scalar()
5272 return cls.query().filter(cls.schedule_name == schedule_name).scalar()
5284
5273
5285 @classmethod
5274 @classmethod
5286 def get_by_schedule_id(cls, schedule_id):
5275 def get_by_schedule_id(cls, schedule_id):
5287 return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()
5276 return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()
5288
5277
5289 @property
5278 @property
5290 def task(self):
5279 def task(self):
5291 return self.task_dot_notation
5280 return self.task_dot_notation
5292
5281
5293 @property
5282 @property
5294 def schedule(self):
5283 def schedule(self):
5295 from rhodecode.lib.celerylib.utils import raw_2_schedule
5284 from rhodecode.lib.celerylib.utils import raw_2_schedule
5296 schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
5285 schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
5297 return schedule
5286 return schedule
5298
5287
5299 @property
5288 @property
5300 def args(self):
5289 def args(self):
5301 try:
5290 try:
5302 return list(self.task_args or [])
5291 return list(self.task_args or [])
5303 except ValueError:
5292 except ValueError:
5304 return list()
5293 return list()
5305
5294
5306 @property
5295 @property
5307 def kwargs(self):
5296 def kwargs(self):
5308 try:
5297 try:
5309 return dict(self.task_kwargs or {})
5298 return dict(self.task_kwargs or {})
5310 except ValueError:
5299 except ValueError:
5311 return dict()
5300 return dict()
5312
5301
5313 def _as_raw(self, val):
5302 def _as_raw(self, val):
5314 if hasattr(val, 'de_coerce'):
5303 if hasattr(val, 'de_coerce'):
5315 val = val.de_coerce()
5304 val = val.de_coerce()
5316 if val:
5305 if val:
5317 val = json.dumps(val)
5306 val = json.dumps(val)
5318
5307
5319 return val
5308 return val
5320
5309
5321 @property
5310 @property
5322 def schedule_definition_raw(self):
5311 def schedule_definition_raw(self):
5323 return self._as_raw(self.schedule_definition)
5312 return self._as_raw(self.schedule_definition)
5324
5313
5325 @property
5314 @property
5326 def args_raw(self):
5315 def args_raw(self):
5327 return self._as_raw(self.task_args)
5316 return self._as_raw(self.task_args)
5328
5317
5329 @property
5318 @property
5330 def kwargs_raw(self):
5319 def kwargs_raw(self):
5331 return self._as_raw(self.task_kwargs)
5320 return self._as_raw(self.task_kwargs)
5332
5321
5333 def __repr__(self):
5322 def __repr__(self):
5334 return '<DB:ScheduleEntry({}:{})>'.format(
5323 return '<DB:ScheduleEntry({}:{})>'.format(
5335 self.schedule_entry_id, self.schedule_name)
5324 self.schedule_entry_id, self.schedule_name)
5336
5325
5337
5326
5338 @event.listens_for(ScheduleEntry, 'before_update')
5327 @event.listens_for(ScheduleEntry, 'before_update')
5339 def update_task_uid(mapper, connection, target):
5328 def update_task_uid(mapper, connection, target):
5340 target.task_uid = ScheduleEntry.get_uid(target)
5329 target.task_uid = ScheduleEntry.get_uid(target)
5341
5330
5342
5331
5343 @event.listens_for(ScheduleEntry, 'before_insert')
5332 @event.listens_for(ScheduleEntry, 'before_insert')
5344 def set_task_uid(mapper, connection, target):
5333 def set_task_uid(mapper, connection, target):
5345 target.task_uid = ScheduleEntry.get_uid(target)
5334 target.task_uid = ScheduleEntry.get_uid(target)
5346
5335
5347
5336
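# ScheduleEntry.get_uid() above derives a stable SHA1 from the task dot-notation plus its
# args/kwargs, and the before_insert/before_update listeners recompute it so the unique
# 's_task_uid_idx' constraint rejects duplicate schedules for the same task call. A
# self-contained mirror of that idea follows; the task path and arguments are hypothetical
# and this block is illustration, not part of the model code.
import hashlib

def _task_uid(task_dot_notation, task_args, task_kwargs):
    # same recipe as get_uid(): join the (sorted) pieces into one string and hash it
    raw = '.'.join(map(str, [
        sorted(task_dot_notation), task_args, sorted(task_kwargs.items())]))
    return hashlib.sha1(raw.encode('utf8')).hexdigest()

print(_task_uid('rhodecode.lib.celerylib.tasks.send_email', ['admin'], {'retry': True}))
# -> a deterministic 40-character hex digest; the same inputs always give the same uid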
5348 class _BaseBranchPerms(BaseModel):
5337 class _BaseBranchPerms(BaseModel):
5349 @classmethod
5338 @classmethod
5350 def compute_hash(cls, value):
5339 def compute_hash(cls, value):
5351 return sha1_safe(value)
5340 return sha1_safe(value)
5352
5341
5353 @hybrid_property
5342 @hybrid_property
5354 def branch_pattern(self):
5343 def branch_pattern(self):
5355 return self._branch_pattern or '*'
5344 return self._branch_pattern or '*'
5356
5345
5357 @hybrid_property
5346 @hybrid_property
5358 def branch_hash(self):
5347 def branch_hash(self):
5359 return self._branch_hash
5348 return self._branch_hash
5360
5349
5361 def _validate_glob(self, value):
5350 def _validate_glob(self, value):
5362 re.compile('^' + glob2re(value) + '$')
5351 re.compile('^' + glob2re(value) + '$')
5363
5352
5364 @branch_pattern.setter
5353 @branch_pattern.setter
5365 def branch_pattern(self, value):
5354 def branch_pattern(self, value):
5366 self._validate_glob(value)
5355 self._validate_glob(value)
5367 self._branch_pattern = value or '*'
5356 self._branch_pattern = value or '*'
5368         # set the hash when setting the branch pattern
5357         # set the hash when setting the branch pattern
5369 self._branch_hash = self.compute_hash(self._branch_pattern)
5358 self._branch_hash = self.compute_hash(self._branch_pattern)
5370
5359
5371 def matches(self, branch):
5360 def matches(self, branch):
5372 """
5361 """
5373         Check if the given branch matches this entry
5362         Check if the given branch matches this entry
5374
5363
5375 :param branch: branch name for the commit
5364 :param branch: branch name for the commit
5376 """
5365 """
5377
5366
5378 branch = branch or ''
5367 branch = branch or ''
5379
5368
5380 branch_matches = True
5369 branch_matches = True
5381 if branch:
5370 if branch:
5382 branch_regex = re.compile('^' + glob2re(self.branch_pattern) + '$')
5371 branch_regex = re.compile('^' + glob2re(self.branch_pattern) + '$')
5383 branch_matches = bool(branch_regex.search(branch))
5372 branch_matches = bool(branch_regex.search(branch))
5384
5373
5385 return branch_matches
5374 return branch_matches
5386
5375
5387
5376
5388 class UserToRepoBranchPermission(Base, _BaseBranchPerms):
5377 class UserToRepoBranchPermission(Base, _BaseBranchPerms):
5389 __tablename__ = 'user_to_repo_branch_permissions'
5378 __tablename__ = 'user_to_repo_branch_permissions'
5390 __table_args__ = (
5379 __table_args__ = (
5391 base_table_args
5380 base_table_args
5392 )
5381 )
5393
5382
5394 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
5383 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
5395
5384
5396 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
5385 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
5397 repo = relationship('Repository', backref='user_branch_perms')
5386 repo = relationship('Repository', backref='user_branch_perms')
5398
5387
5399 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
5388 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
5400 permission = relationship('Permission')
5389 permission = relationship('Permission')
5401
5390
5402 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('repo_to_perm.repo_to_perm_id'), nullable=False, unique=None, default=None)
5391 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('repo_to_perm.repo_to_perm_id'), nullable=False, unique=None, default=None)
5403 user_repo_to_perm = relationship('UserRepoToPerm')
5392 user_repo_to_perm = relationship('UserRepoToPerm')
5404
5393
5405 rule_order = Column('rule_order', Integer(), nullable=False)
5394 rule_order = Column('rule_order', Integer(), nullable=False)
5406 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*') # glob
5395 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*') # glob
5407 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
5396 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
5408
5397
5409 def __unicode__(self):
5398 def __unicode__(self):
5410 return u'<UserBranchPermission(%s => %r)>' % (
5399 return u'<UserBranchPermission(%s => %r)>' % (
5411 self.user_repo_to_perm, self.branch_pattern)
5400 self.user_repo_to_perm, self.branch_pattern)
5412
5401
5413
5402
5414 class UserGroupToRepoBranchPermission(Base, _BaseBranchPerms):
5403 class UserGroupToRepoBranchPermission(Base, _BaseBranchPerms):
5415 __tablename__ = 'user_group_to_repo_branch_permissions'
5404 __tablename__ = 'user_group_to_repo_branch_permissions'
5416 __table_args__ = (
5405 __table_args__ = (
5417 base_table_args
5406 base_table_args
5418 )
5407 )
5419
5408
5420 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
5409 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
5421
5410
5422 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
5411 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
5423 repo = relationship('Repository', backref='user_group_branch_perms')
5412 repo = relationship('Repository', backref='user_group_branch_perms')
5424
5413
5425 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
5414 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
5426 permission = relationship('Permission')
5415 permission = relationship('Permission')
5427
5416
5428 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('users_group_repo_to_perm.users_group_to_perm_id'), nullable=False, unique=None, default=None)
5417 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('users_group_repo_to_perm.users_group_to_perm_id'), nullable=False, unique=None, default=None)
5429 user_group_repo_to_perm = relationship('UserGroupRepoToPerm')
5418 user_group_repo_to_perm = relationship('UserGroupRepoToPerm')
5430
5419
5431 rule_order = Column('rule_order', Integer(), nullable=False)
5420 rule_order = Column('rule_order', Integer(), nullable=False)
5432 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*') # glob
5421 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*') # glob
5433 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
5422 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
5434
5423
5435 def __unicode__(self):
5424 def __unicode__(self):
5436 return u'<UserBranchPermission(%s => %r)>' % (
5425 return u'<UserBranchPermission(%s => %r)>' % (
5437 self.user_group_repo_to_perm, self.branch_pattern)
5426 self.user_group_repo_to_perm, self.branch_pattern)
5438
5427
5439
5428
5440 class UserBookmark(Base, BaseModel):
5429 class UserBookmark(Base, BaseModel):
5441 __tablename__ = 'user_bookmarks'
5430 __tablename__ = 'user_bookmarks'
5442 __table_args__ = (
5431 __table_args__ = (
5443 UniqueConstraint('user_id', 'bookmark_repo_id'),
5432 UniqueConstraint('user_id', 'bookmark_repo_id'),
5444 UniqueConstraint('user_id', 'bookmark_repo_group_id'),
5433 UniqueConstraint('user_id', 'bookmark_repo_group_id'),
5445 UniqueConstraint('user_id', 'bookmark_position'),
5434 UniqueConstraint('user_id', 'bookmark_position'),
5446 base_table_args
5435 base_table_args
5447 )
5436 )
5448
5437
5449 user_bookmark_id = Column("user_bookmark_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
5438 user_bookmark_id = Column("user_bookmark_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
5450 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
5439 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
5451 position = Column("bookmark_position", Integer(), nullable=False)
5440 position = Column("bookmark_position", Integer(), nullable=False)
5452 title = Column("bookmark_title", String(255), nullable=True, unique=None, default=None)
5441 title = Column("bookmark_title", String(255), nullable=True, unique=None, default=None)
5453 redirect_url = Column("bookmark_redirect_url", String(10240), nullable=True, unique=None, default=None)
5442 redirect_url = Column("bookmark_redirect_url", String(10240), nullable=True, unique=None, default=None)
5454 created_on = Column("created_on", DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5443 created_on = Column("created_on", DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5455
5444
5456 bookmark_repo_id = Column("bookmark_repo_id", Integer(), ForeignKey("repositories.repo_id"), nullable=True, unique=None, default=None)
5445 bookmark_repo_id = Column("bookmark_repo_id", Integer(), ForeignKey("repositories.repo_id"), nullable=True, unique=None, default=None)
5457 bookmark_repo_group_id = Column("bookmark_repo_group_id", Integer(), ForeignKey("groups.group_id"), nullable=True, unique=None, default=None)
5446 bookmark_repo_group_id = Column("bookmark_repo_group_id", Integer(), ForeignKey("groups.group_id"), nullable=True, unique=None, default=None)
5458
5447
5459 user = relationship("User")
5448 user = relationship("User")
5460
5449
5461 repository = relationship("Repository")
5450 repository = relationship("Repository")
5462 repository_group = relationship("RepoGroup")
5451 repository_group = relationship("RepoGroup")
5463
5452
5464 @classmethod
5453 @classmethod
5465 def get_by_position_for_user(cls, position, user_id):
5454 def get_by_position_for_user(cls, position, user_id):
5466 return cls.query() \
5455 return cls.query() \
5467 .filter(UserBookmark.user_id == user_id) \
5456 .filter(UserBookmark.user_id == user_id) \
5468 .filter(UserBookmark.position == position).scalar()
5457 .filter(UserBookmark.position == position).scalar()
5469
5458
5470 @classmethod
5459 @classmethod
5471 def get_bookmarks_for_user(cls, user_id, cache=True):
5460 def get_bookmarks_for_user(cls, user_id, cache=True):
5472 bookmarks = cls.query() \
5461 bookmarks = cls.query() \
5473 .filter(UserBookmark.user_id == user_id) \
5462 .filter(UserBookmark.user_id == user_id) \
5474 .options(joinedload(UserBookmark.repository)) \
5463 .options(joinedload(UserBookmark.repository)) \
5475 .options(joinedload(UserBookmark.repository_group)) \
5464 .options(joinedload(UserBookmark.repository_group)) \
5476 .order_by(UserBookmark.position.asc())
5465 .order_by(UserBookmark.position.asc())
5477
5466
5478 if cache:
5467 if cache:
5479 bookmarks = bookmarks.options(
5468 bookmarks = bookmarks.options(
5480 FromCache("sql_cache_short", "get_user_{}_bookmarks".format(user_id))
5469 FromCache("sql_cache_short", "get_user_{}_bookmarks".format(user_id))
5481 )
5470 )
5482
5471
5483 return bookmarks.all()
5472 return bookmarks.all()
5484
5473
5485 def __unicode__(self):
5474 def __unicode__(self):
5486 return u'<UserBookmark(%s @ %r)>' % (self.position, self.redirect_url)
5475 return u'<UserBookmark(%s @ %r)>' % (self.position, self.redirect_url)
5487
5476
5488
5477
5489 class FileStore(Base, BaseModel):
5478 class FileStore(Base, BaseModel):
5490 __tablename__ = 'file_store'
5479 __tablename__ = 'file_store'
5491 __table_args__ = (
5480 __table_args__ = (
5492 base_table_args
5481 base_table_args
5493 )
5482 )
5494
5483
5495 file_store_id = Column('file_store_id', Integer(), primary_key=True)
5484 file_store_id = Column('file_store_id', Integer(), primary_key=True)
5496 file_uid = Column('file_uid', String(1024), nullable=False)
5485 file_uid = Column('file_uid', String(1024), nullable=False)
5497 file_display_name = Column('file_display_name', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), nullable=True)
5486 file_display_name = Column('file_display_name', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), nullable=True)
5498 file_description = Column('file_description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=True)
5487 file_description = Column('file_description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=True)
5499 file_org_name = Column('file_org_name', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=False)
5488 file_org_name = Column('file_org_name', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=False)
5500
5489
5501 # sha256 hash
5490 # sha256 hash
5502 file_hash = Column('file_hash', String(512), nullable=False)
5491 file_hash = Column('file_hash', String(512), nullable=False)
5503 file_size = Column('file_size', BigInteger(), nullable=False)
5492 file_size = Column('file_size', BigInteger(), nullable=False)
5504
5493
5505 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5494 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5506 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True)
5495 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True)
5507 accessed_count = Column('accessed_count', Integer(), default=0)
5496 accessed_count = Column('accessed_count', Integer(), default=0)
5508
5497
5509 enabled = Column('enabled', Boolean(), nullable=False, default=True)
5498 enabled = Column('enabled', Boolean(), nullable=False, default=True)
5510
5499
5511 # if repo/repo_group reference is set, check for permissions
5500 # if repo/repo_group reference is set, check for permissions
5512 check_acl = Column('check_acl', Boolean(), nullable=False, default=True)
5501 check_acl = Column('check_acl', Boolean(), nullable=False, default=True)
5513
5502
5514     # hidden marks an attachment that should not be shown in the artifact listing
5503     # hidden marks an attachment that should not be shown in the artifact listing
5515 hidden = Column('hidden', Boolean(), nullable=False, default=False)
5504 hidden = Column('hidden', Boolean(), nullable=False, default=False)
5516
5505
5517 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
5506 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
5518 upload_user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.user_id')
5507 upload_user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.user_id')
5519
5508
5520 file_metadata = relationship('FileStoreMetadata', lazy='joined')
5509 file_metadata = relationship('FileStoreMetadata', lazy='joined')
5521
5510
5522     # scope limited to a user which the requester has access to
5511     # scope limited to a user which the requester has access to
5523 scope_user_id = Column(
5512 scope_user_id = Column(
5524 'scope_user_id', Integer(), ForeignKey('users.user_id'),
5513 'scope_user_id', Integer(), ForeignKey('users.user_id'),
5525 nullable=True, unique=None, default=None)
5514 nullable=True, unique=None, default=None)
5526 user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.scope_user_id')
5515 user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.scope_user_id')
5527
5516
5528     # scope limited to a user group which the requester has access to
5517     # scope limited to a user group which the requester has access to
5529 scope_user_group_id = Column(
5518 scope_user_group_id = Column(
5530 'scope_user_group_id', Integer(), ForeignKey('users_groups.users_group_id'),
5519 'scope_user_group_id', Integer(), ForeignKey('users_groups.users_group_id'),
5531 nullable=True, unique=None, default=None)
5520 nullable=True, unique=None, default=None)
5532 user_group = relationship('UserGroup', lazy='joined')
5521 user_group = relationship('UserGroup', lazy='joined')
5533
5522
5534     # scope limited to a repo which the requester has access to
5523     # scope limited to a repo which the requester has access to
5535 scope_repo_id = Column(
5524 scope_repo_id = Column(
5536 'scope_repo_id', Integer(), ForeignKey('repositories.repo_id'),
5525 'scope_repo_id', Integer(), ForeignKey('repositories.repo_id'),
5537 nullable=True, unique=None, default=None)
5526 nullable=True, unique=None, default=None)
5538 repo = relationship('Repository', lazy='joined')
5527 repo = relationship('Repository', lazy='joined')
5539
5528
5540     # scope limited to a repo group which the requester has access to
5529     # scope limited to a repo group which the requester has access to
5541 scope_repo_group_id = Column(
5530 scope_repo_group_id = Column(
5542 'scope_repo_group_id', Integer(), ForeignKey('groups.group_id'),
5531 'scope_repo_group_id', Integer(), ForeignKey('groups.group_id'),
5543 nullable=True, unique=None, default=None)
5532 nullable=True, unique=None, default=None)
5544 repo_group = relationship('RepoGroup', lazy='joined')
5533 repo_group = relationship('RepoGroup', lazy='joined')
5545
5534
5546 @classmethod
5535 @classmethod
5547 def get_by_store_uid(cls, file_store_uid, safe=False):
5536 def get_by_store_uid(cls, file_store_uid, safe=False):
5548 if safe:
5537 if safe:
5549 return FileStore.query().filter(FileStore.file_uid == file_store_uid).first()
5538 return FileStore.query().filter(FileStore.file_uid == file_store_uid).first()
5550 else:
5539 else:
5551 return FileStore.query().filter(FileStore.file_uid == file_store_uid).scalar()
5540 return FileStore.query().filter(FileStore.file_uid == file_store_uid).scalar()
5552
5541
5553 @classmethod
5542 @classmethod
5554 def create(cls, file_uid, filename, file_hash, file_size, file_display_name='',
5543 def create(cls, file_uid, filename, file_hash, file_size, file_display_name='',
5555 file_description='', enabled=True, hidden=False, check_acl=True,
5544 file_description='', enabled=True, hidden=False, check_acl=True,
5556 user_id=None, scope_user_id=None, scope_repo_id=None, scope_repo_group_id=None):
5545 user_id=None, scope_user_id=None, scope_repo_id=None, scope_repo_group_id=None):
5557
5546
5558 store_entry = FileStore()
5547 store_entry = FileStore()
5559 store_entry.file_uid = file_uid
5548 store_entry.file_uid = file_uid
5560 store_entry.file_display_name = file_display_name
5549 store_entry.file_display_name = file_display_name
5561 store_entry.file_org_name = filename
5550 store_entry.file_org_name = filename
5562 store_entry.file_size = file_size
5551 store_entry.file_size = file_size
5563 store_entry.file_hash = file_hash
5552 store_entry.file_hash = file_hash
5564 store_entry.file_description = file_description
5553 store_entry.file_description = file_description
5565
5554
5566 store_entry.check_acl = check_acl
5555 store_entry.check_acl = check_acl
5567 store_entry.enabled = enabled
5556 store_entry.enabled = enabled
5568 store_entry.hidden = hidden
5557 store_entry.hidden = hidden
5569
5558
5570 store_entry.user_id = user_id
5559 store_entry.user_id = user_id
5571 store_entry.scope_user_id = scope_user_id
5560 store_entry.scope_user_id = scope_user_id
5572 store_entry.scope_repo_id = scope_repo_id
5561 store_entry.scope_repo_id = scope_repo_id
5573 store_entry.scope_repo_group_id = scope_repo_group_id
5562 store_entry.scope_repo_group_id = scope_repo_group_id
5574
5563
5575 return store_entry
5564 return store_entry
5576
5565
5577 @classmethod
5566 @classmethod
5578 def store_metadata(cls, file_store_id, args, commit=True):
5567 def store_metadata(cls, file_store_id, args, commit=True):
5579 file_store = FileStore.get(file_store_id)
5568 file_store = FileStore.get(file_store_id)
5580 if file_store is None:
5569 if file_store is None:
5581 return
5570 return
5582
5571
5583 for section, key, value, value_type in args:
5572 for section, key, value, value_type in args:
5584 has_key = FileStoreMetadata().query() \
5573 has_key = FileStoreMetadata().query() \
5585 .filter(FileStoreMetadata.file_store_id == file_store.file_store_id) \
5574 .filter(FileStoreMetadata.file_store_id == file_store.file_store_id) \
5586 .filter(FileStoreMetadata.file_store_meta_section == section) \
5575 .filter(FileStoreMetadata.file_store_meta_section == section) \
5587 .filter(FileStoreMetadata.file_store_meta_key == key) \
5576 .filter(FileStoreMetadata.file_store_meta_key == key) \
5588 .scalar()
5577 .scalar()
5589 if has_key:
5578 if has_key:
5590 msg = 'key `{}` already defined under section `{}` for this file.'\
5579 msg = 'key `{}` already defined under section `{}` for this file.'\
5591 .format(key, section)
5580 .format(key, section)
5592 raise ArtifactMetadataDuplicate(msg, err_section=section, err_key=key)
5581 raise ArtifactMetadataDuplicate(msg, err_section=section, err_key=key)
5593
5582
5594 # NOTE(marcink): raises ArtifactMetadataBadValueType
5583 # NOTE(marcink): raises ArtifactMetadataBadValueType
5595 FileStoreMetadata.valid_value_type(value_type)
5584 FileStoreMetadata.valid_value_type(value_type)
5596
5585
5597 meta_entry = FileStoreMetadata()
5586 meta_entry = FileStoreMetadata()
5598 meta_entry.file_store = file_store
5587 meta_entry.file_store = file_store
5599 meta_entry.file_store_meta_section = section
5588 meta_entry.file_store_meta_section = section
5600 meta_entry.file_store_meta_key = key
5589 meta_entry.file_store_meta_key = key
5601 meta_entry.file_store_meta_value_type = value_type
5590 meta_entry.file_store_meta_value_type = value_type
5602 meta_entry.file_store_meta_value = value
5591 meta_entry.file_store_meta_value = value
5603
5592
5604 Session().add(meta_entry)
5593 Session().add(meta_entry)
5605
5594
5606 try:
5595 try:
5607 if commit:
5596 if commit:
5608 Session().commit()
5597 Session().commit()
5609 except IntegrityError:
5598 except IntegrityError:
5610 Session().rollback()
5599 Session().rollback()
5611 raise ArtifactMetadataDuplicate('Duplicate section/key found for this file.')
5600 raise ArtifactMetadataDuplicate('Duplicate section/key found for this file.')
5612
5601
5613 @classmethod
5602 @classmethod
5614 def bump_access_counter(cls, file_uid, commit=True):
5603 def bump_access_counter(cls, file_uid, commit=True):
5615 FileStore().query()\
5604 FileStore().query()\
5616 .filter(FileStore.file_uid == file_uid)\
5605 .filter(FileStore.file_uid == file_uid)\
5617 .update({FileStore.accessed_count: (FileStore.accessed_count + 1),
5606 .update({FileStore.accessed_count: (FileStore.accessed_count + 1),
5618 FileStore.accessed_on: datetime.datetime.now()})
5607 FileStore.accessed_on: datetime.datetime.now()})
5619 if commit:
5608 if commit:
5620 Session().commit()
5609 Session().commit()
5621
5610
5622 def __json__(self):
5611 def __json__(self):
5623 data = {
5612 data = {
5624 'filename': self.file_display_name,
5613 'filename': self.file_display_name,
5625 'filename_org': self.file_org_name,
5614 'filename_org': self.file_org_name,
5626 'file_uid': self.file_uid,
5615 'file_uid': self.file_uid,
5627 'description': self.file_description,
5616 'description': self.file_description,
5628 'hidden': self.hidden,
5617 'hidden': self.hidden,
5629 'size': self.file_size,
5618 'size': self.file_size,
5630 'created_on': self.created_on,
5619 'created_on': self.created_on,
5631 'uploaded_by': self.upload_user.get_api_data(details='basic'),
5620 'uploaded_by': self.upload_user.get_api_data(details='basic'),
5632 'downloaded_times': self.accessed_count,
5621 'downloaded_times': self.accessed_count,
5633 'sha256': self.file_hash,
5622 'sha256': self.file_hash,
5634 'metadata': self.file_metadata,
5623 'metadata': self.file_metadata,
5635 }
5624 }
5636
5625
5637 return data
5626 return data
5638
5627
5639 def __repr__(self):
5628 def __repr__(self):
5640 return '<FileStore({})>'.format(self.file_store_id)
5629 return '<FileStore({})>'.format(self.file_store_id)
5641
5630
5642
5631
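# A rough usage sketch of FileStore.create() and FileStore.store_metadata() above. It is
# kept as a comment because it assumes an initialized RhodeCode environment (database
# session, artifact backend) and is not runnable standalone; the uid/hash values are fake.
#
#   from rhodecode.model.db import FileStore
#   from rhodecode.model.meta import Session
#
#   entry = FileStore.create(
#       file_uid='fake-uid-1234', filename='report.pdf',
#       file_hash='0' * 64, file_size=1024,
#       file_description='monthly report', user_id=2)
#   Session().add(entry)    # create() only builds the object, it does not add/commit it
#   Session().commit()
#
#   # metadata is a list of (section, key, value, value_type) tuples; value_type must be
#   # one of FileStoreMetadata.SETTINGS_TYPES below (e.g. 'unicode', 'int', 'bool')
#   FileStore.store_metadata(
#       entry.file_store_id, [('report', 'period', '2020-06', 'unicode')])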
5643 class FileStoreMetadata(Base, BaseModel):
5632 class FileStoreMetadata(Base, BaseModel):
5644 __tablename__ = 'file_store_metadata'
5633 __tablename__ = 'file_store_metadata'
5645 __table_args__ = (
5634 __table_args__ = (
5646 UniqueConstraint('file_store_id', 'file_store_meta_section_hash', 'file_store_meta_key_hash'),
5635 UniqueConstraint('file_store_id', 'file_store_meta_section_hash', 'file_store_meta_key_hash'),
5647 Index('file_store_meta_section_idx', 'file_store_meta_section', mysql_length=255),
5636 Index('file_store_meta_section_idx', 'file_store_meta_section', mysql_length=255),
5648 Index('file_store_meta_key_idx', 'file_store_meta_key', mysql_length=255),
5637 Index('file_store_meta_key_idx', 'file_store_meta_key', mysql_length=255),
5649 base_table_args
5638 base_table_args
5650 )
5639 )
5651 SETTINGS_TYPES = {
5640 SETTINGS_TYPES = {
5652 'str': safe_str,
5641 'str': safe_str,
5653 'int': safe_int,
5642 'int': safe_int,
5654 'unicode': safe_unicode,
5643 'unicode': safe_unicode,
5655 'bool': str2bool,
5644 'bool': str2bool,
5656 'list': functools.partial(aslist, sep=',')
5645 'list': functools.partial(aslist, sep=',')
5657 }
5646 }
5658
5647
5659 file_store_meta_id = Column(
5648 file_store_meta_id = Column(
5660 "file_store_meta_id", Integer(), nullable=False, unique=True, default=None,
5649 "file_store_meta_id", Integer(), nullable=False, unique=True, default=None,
5661 primary_key=True)
5650 primary_key=True)
5662 _file_store_meta_section = Column(
5651 _file_store_meta_section = Column(
5663 "file_store_meta_section", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
5652 "file_store_meta_section", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
5664 nullable=True, unique=None, default=None)
5653 nullable=True, unique=None, default=None)
5665 _file_store_meta_section_hash = Column(
5654 _file_store_meta_section_hash = Column(
5666 "file_store_meta_section_hash", String(255),
5655 "file_store_meta_section_hash", String(255),
5667 nullable=True, unique=None, default=None)
5656 nullable=True, unique=None, default=None)
5668 _file_store_meta_key = Column(
5657 _file_store_meta_key = Column(
5669 "file_store_meta_key", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
5658 "file_store_meta_key", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
5670 nullable=True, unique=None, default=None)
5659 nullable=True, unique=None, default=None)
5671 _file_store_meta_key_hash = Column(
5660 _file_store_meta_key_hash = Column(
5672 "file_store_meta_key_hash", String(255), nullable=True, unique=None, default=None)
5661 "file_store_meta_key_hash", String(255), nullable=True, unique=None, default=None)
5673 _file_store_meta_value = Column(
5662 _file_store_meta_value = Column(
5674 "file_store_meta_value", UnicodeText().with_variant(UnicodeText(20480), 'mysql'),
5663 "file_store_meta_value", UnicodeText().with_variant(UnicodeText(20480), 'mysql'),
5675 nullable=True, unique=None, default=None)
5664 nullable=True, unique=None, default=None)
5676 _file_store_meta_value_type = Column(
5665 _file_store_meta_value_type = Column(
5677 "file_store_meta_value_type", String(255), nullable=True, unique=None,
5666 "file_store_meta_value_type", String(255), nullable=True, unique=None,
5678 default='unicode')
5667 default='unicode')
5679
5668
5680 file_store_id = Column(
5669 file_store_id = Column(
5681 'file_store_id', Integer(), ForeignKey('file_store.file_store_id'),
5670 'file_store_id', Integer(), ForeignKey('file_store.file_store_id'),
5682 nullable=True, unique=None, default=None)
5671 nullable=True, unique=None, default=None)
5683
5672
5684 file_store = relationship('FileStore', lazy='joined')
5673 file_store = relationship('FileStore', lazy='joined')
5685
5674
5686 @classmethod
5675 @classmethod
5687 def valid_value_type(cls, value):
5676 def valid_value_type(cls, value):
5688 if value.split('.')[0] not in cls.SETTINGS_TYPES:
5677 if value.split('.')[0] not in cls.SETTINGS_TYPES:
5689 raise ArtifactMetadataBadValueType(
5678 raise ArtifactMetadataBadValueType(
5690 'value_type must be one of %s got %s' % (cls.SETTINGS_TYPES.keys(), value))
5679 'value_type must be one of %s got %s' % (cls.SETTINGS_TYPES.keys(), value))
5691
5680
5692 @hybrid_property
5681 @hybrid_property
5693 def file_store_meta_section(self):
5682 def file_store_meta_section(self):
5694 return self._file_store_meta_section
5683 return self._file_store_meta_section
5695
5684
5696 @file_store_meta_section.setter
5685 @file_store_meta_section.setter
5697 def file_store_meta_section(self, value):
5686 def file_store_meta_section(self, value):
5698 self._file_store_meta_section = value
5687 self._file_store_meta_section = value
5699 self._file_store_meta_section_hash = _hash_key(value)
5688 self._file_store_meta_section_hash = _hash_key(value)
5700
5689
5701 @hybrid_property
5690 @hybrid_property
5702 def file_store_meta_key(self):
5691 def file_store_meta_key(self):
5703 return self._file_store_meta_key
5692 return self._file_store_meta_key
5704
5693
5705 @file_store_meta_key.setter
5694 @file_store_meta_key.setter
5706 def file_store_meta_key(self, value):
5695 def file_store_meta_key(self, value):
5707 self._file_store_meta_key = value
5696 self._file_store_meta_key = value
5708 self._file_store_meta_key_hash = _hash_key(value)
5697 self._file_store_meta_key_hash = _hash_key(value)
5709
5698
5710 @hybrid_property
5699 @hybrid_property
5711 def file_store_meta_value(self):
5700 def file_store_meta_value(self):
5712 val = self._file_store_meta_value
5701 val = self._file_store_meta_value
5713
5702
5714 if self._file_store_meta_value_type:
5703 if self._file_store_meta_value_type:
5715 # e.g unicode.encrypted == unicode
5704 # e.g unicode.encrypted == unicode
5716 _type = self._file_store_meta_value_type.split('.')[0]
5705 _type = self._file_store_meta_value_type.split('.')[0]
5717             # decode the encrypted value if it's an encrypted field type
5706             # decode the encrypted value if it's an encrypted field type
5718 if '.encrypted' in self._file_store_meta_value_type:
5707 if '.encrypted' in self._file_store_meta_value_type:
5719 cipher = EncryptedTextValue()
5708 cipher = EncryptedTextValue()
5720 val = safe_unicode(cipher.process_result_value(val, None))
5709 val = safe_unicode(cipher.process_result_value(val, None))
5721 # do final type conversion
5710 # do final type conversion
5722 converter = self.SETTINGS_TYPES.get(_type) or self.SETTINGS_TYPES['unicode']
5711 converter = self.SETTINGS_TYPES.get(_type) or self.SETTINGS_TYPES['unicode']
5723 val = converter(val)
5712 val = converter(val)
5724
5713
5725 return val
5714 return val
5726
5715
5727 @file_store_meta_value.setter
5716 @file_store_meta_value.setter
5728 def file_store_meta_value(self, val):
5717 def file_store_meta_value(self, val):
5729 val = safe_unicode(val)
5718 val = safe_unicode(val)
5730 # encode the encrypted value
5719 # encode the encrypted value
5731 if '.encrypted' in self.file_store_meta_value_type:
5720 if '.encrypted' in self.file_store_meta_value_type:
5732 cipher = EncryptedTextValue()
5721 cipher = EncryptedTextValue()
5733 val = safe_unicode(cipher.process_bind_param(val, None))
5722 val = safe_unicode(cipher.process_bind_param(val, None))
5734 self._file_store_meta_value = val
5723 self._file_store_meta_value = val
5735
5724
5736 @hybrid_property
5725 @hybrid_property
5737 def file_store_meta_value_type(self):
5726 def file_store_meta_value_type(self):
5738 return self._file_store_meta_value_type
5727 return self._file_store_meta_value_type
5739
5728
5740 @file_store_meta_value_type.setter
5729 @file_store_meta_value_type.setter
5741 def file_store_meta_value_type(self, val):
5730 def file_store_meta_value_type(self, val):
5742 # e.g unicode.encrypted
5731 # e.g unicode.encrypted
5743 self.valid_value_type(val)
5732 self.valid_value_type(val)
5744 self._file_store_meta_value_type = val
5733 self._file_store_meta_value_type = val
5745
5734
5746 def __json__(self):
5735 def __json__(self):
5747 data = {
5736 data = {
5748 'artifact': self.file_store.file_uid,
5737 'artifact': self.file_store.file_uid,
5749 'section': self.file_store_meta_section,
5738 'section': self.file_store_meta_section,
5750 'key': self.file_store_meta_key,
5739 'key': self.file_store_meta_key,
5751 'value': self.file_store_meta_value,
5740 'value': self.file_store_meta_value,
5752 }
5741 }
5753
5742
5754 return data
5743 return data
5755
5744
5756 def __repr__(self):
5745 def __repr__(self):
5757         return '<%s[%s]%s=>%s>' % (self.__class__.__name__, self.file_store_meta_section,
5746         return '<%s[%s]%s=>%s>' % (self.__class__.__name__, self.file_store_meta_section,
5758 self.file_store_meta_key, self.file_store_meta_value)
5747 self.file_store_meta_key, self.file_store_meta_value)
5759
5748
5760
5749
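# How file_store_meta_value_type is interpreted above: the part before the first '.'
# selects the converter from SETTINGS_TYPES, and a '.encrypted' suffix additionally
# routes the value through EncryptedTextValue on read/write. A tiny self-contained
# mirror with a hypothetical value_type (illustration only, not part of the model code):
_value_type = 'int.encrypted'
_base_type = _value_type.split('.')[0]       # 'int'  -> value converted with safe_int
_encrypted = '.encrypted' in _value_type     # True   -> value stored encrypted
print(_base_type, _encrypted)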
5761 class DbMigrateVersion(Base, BaseModel):
5750 class DbMigrateVersion(Base, BaseModel):
5762 __tablename__ = 'db_migrate_version'
5751 __tablename__ = 'db_migrate_version'
5763 __table_args__ = (
5752 __table_args__ = (
5764 base_table_args,
5753 base_table_args,
5765 )
5754 )
5766
5755
5767 repository_id = Column('repository_id', String(250), primary_key=True)
5756 repository_id = Column('repository_id', String(250), primary_key=True)
5768 repository_path = Column('repository_path', Text)
5757 repository_path = Column('repository_path', Text)
5769 version = Column('version', Integer)
5758 version = Column('version', Integer)
5770
5759
5771 @classmethod
5760 @classmethod
5772 def set_version(cls, version):
5761 def set_version(cls, version):
5773 """
5762 """
5774 Helper for forcing a different version, usually for debugging purposes via ishell.
5763 Helper for forcing a different version, usually for debugging purposes via ishell.
5775 """
5764 """
5776 ver = DbMigrateVersion.query().first()
5765 ver = DbMigrateVersion.query().first()
5777 ver.version = version
5766 ver.version = version
5778 Session().commit()
5767 Session().commit()
5779
5768
5780
5769
5781 class DbSession(Base, BaseModel):
5770 class DbSession(Base, BaseModel):
5782 __tablename__ = 'db_session'
5771 __tablename__ = 'db_session'
5783 __table_args__ = (
5772 __table_args__ = (
5784 base_table_args,
5773 base_table_args,
5785 )
5774 )
5786
5775
5787 def __repr__(self):
5776 def __repr__(self):
5788 return '<DB:DbSession({})>'.format(self.id)
5777 return '<DB:DbSession({})>'.format(self.id)
5789
5778
5790 id = Column('id', Integer())
5779 id = Column('id', Integer())
5791 namespace = Column('namespace', String(255), primary_key=True)
5780 namespace = Column('namespace', String(255), primary_key=True)
5792 accessed = Column('accessed', DateTime, nullable=False)
5781 accessed = Column('accessed', DateTime, nullable=False)
5793 created = Column('created', DateTime, nullable=False)
5782 created = Column('created', DateTime, nullable=False)
5794 data = Column('data', PickleType, nullable=False)
5783 data = Column('data', PickleType, nullable=False)
@@ -1,2237 +1,2237 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2012-2020 RhodeCode GmbH
3 # Copyright (C) 2012-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 pull request model for RhodeCode
23 pull request model for RhodeCode
24 """
24 """
25
25
26
26
27 import json
27 import json
28 import logging
28 import logging
29 import os
29 import os
30
30
31 import datetime
31 import datetime
32 import urllib
32 import urllib
33 import collections
33 import collections
34
34
35 from pyramid import compat
35 from pyramid import compat
36 from pyramid.threadlocal import get_current_request
36 from pyramid.threadlocal import get_current_request
37
37
38 from rhodecode.lib.vcs.nodes import FileNode
38 from rhodecode.lib.vcs.nodes import FileNode
39 from rhodecode.translation import lazy_ugettext
39 from rhodecode.translation import lazy_ugettext
40 from rhodecode.lib import helpers as h, hooks_utils, diffs
40 from rhodecode.lib import helpers as h, hooks_utils, diffs
41 from rhodecode.lib import audit_logger
41 from rhodecode.lib import audit_logger
42 from rhodecode.lib.compat import OrderedDict
42 from rhodecode.lib.compat import OrderedDict
43 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
43 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
44 from rhodecode.lib.markup_renderer import (
44 from rhodecode.lib.markup_renderer import (
45 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
45 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
46 from rhodecode.lib.utils2 import (
46 from rhodecode.lib.utils2 import (
47 safe_unicode, safe_str, md5_safe, AttributeDict, safe_int,
47 safe_unicode, safe_str, md5_safe, AttributeDict, safe_int,
48 get_current_rhodecode_user)
48 get_current_rhodecode_user)
49 from rhodecode.lib.vcs.backends.base import (
49 from rhodecode.lib.vcs.backends.base import (
50 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason,
50 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason,
51 TargetRefMissing, SourceRefMissing)
51 TargetRefMissing, SourceRefMissing)
52 from rhodecode.lib.vcs.conf import settings as vcs_settings
52 from rhodecode.lib.vcs.conf import settings as vcs_settings
53 from rhodecode.lib.vcs.exceptions import (
53 from rhodecode.lib.vcs.exceptions import (
54 CommitDoesNotExistError, EmptyRepositoryError)
54 CommitDoesNotExistError, EmptyRepositoryError)
55 from rhodecode.model import BaseModel
55 from rhodecode.model import BaseModel
56 from rhodecode.model.changeset_status import ChangesetStatusModel
56 from rhodecode.model.changeset_status import ChangesetStatusModel
57 from rhodecode.model.comment import CommentsModel
57 from rhodecode.model.comment import CommentsModel
58 from rhodecode.model.db import (
58 from rhodecode.model.db import (
59 or_, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
59 or_, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
60 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User)
60 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User)
61 from rhodecode.model.meta import Session
61 from rhodecode.model.meta import Session
62 from rhodecode.model.notification import NotificationModel, \
62 from rhodecode.model.notification import NotificationModel, \
63 EmailNotificationModel
63 EmailNotificationModel
64 from rhodecode.model.scm import ScmModel
64 from rhodecode.model.scm import ScmModel
65 from rhodecode.model.settings import VcsSettingsModel
65 from rhodecode.model.settings import VcsSettingsModel
66
66
67
67
68 log = logging.getLogger(__name__)
68 log = logging.getLogger(__name__)
69
69
70
70
71 # Data structure to hold the response data when updating commits during a pull
71 # Data structure to hold the response data when updating commits during a pull
72 # request update.
72 # request update.
73 class UpdateResponse(object):
73 class UpdateResponse(object):
74
74
75 def __init__(self, executed, reason, new, old, common_ancestor_id,
75 def __init__(self, executed, reason, new, old, common_ancestor_id,
76 commit_changes, source_changed, target_changed):
76 commit_changes, source_changed, target_changed):
77
77
78 self.executed = executed
78 self.executed = executed
79 self.reason = reason
79 self.reason = reason
80 self.new = new
80 self.new = new
81 self.old = old
81 self.old = old
82 self.common_ancestor_id = common_ancestor_id
82 self.common_ancestor_id = common_ancestor_id
83 self.changes = commit_changes
83 self.changes = commit_changes
84 self.source_changed = source_changed
84 self.source_changed = source_changed
85 self.target_changed = target_changed
85 self.target_changed = target_changed
86
86
87
87
88 def get_diff_info(
88 def get_diff_info(
89 source_repo, source_ref, target_repo, target_ref, get_authors=False,
89 source_repo, source_ref, target_repo, target_ref, get_authors=False,
90 get_commit_authors=True):
90 get_commit_authors=True):
91 """
91 """
92     Calculates detailed diff information for use when previewing the creation of a pull request.
92     Calculates detailed diff information for use when previewing the creation of a pull request.
93     This is also used for the default reviewers logic.
93     This is also used for the default reviewers logic.
94 """
94 """
95
95
96 source_scm = source_repo.scm_instance()
96 source_scm = source_repo.scm_instance()
97 target_scm = target_repo.scm_instance()
97 target_scm = target_repo.scm_instance()
98
98
99 ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm)
99 ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm)
100 if not ancestor_id:
100 if not ancestor_id:
101 raise ValueError(
101 raise ValueError(
102 'cannot calculate diff info without a common ancestor. '
102 'cannot calculate diff info without a common ancestor. '
103 'Make sure both repositories are related, and have a common forking commit.')
103 'Make sure both repositories are related, and have a common forking commit.')
104
104
105     # the case here is that we want a simple diff without incoming commits,
105     # the case here is that we want a simple diff without incoming commits,
106 # previewing what will be merged based only on commits in the source.
106 # previewing what will be merged based only on commits in the source.
107 log.debug('Using ancestor %s as source_ref instead of %s',
107 log.debug('Using ancestor %s as source_ref instead of %s',
108 ancestor_id, source_ref)
108 ancestor_id, source_ref)
109
109
110 # source of changes now is the common ancestor
110 # source of changes now is the common ancestor
111 source_commit = source_scm.get_commit(commit_id=ancestor_id)
111 source_commit = source_scm.get_commit(commit_id=ancestor_id)
112     # the target commit becomes the source ref, as it is the last commit;
112     # the target commit becomes the source ref, as it is the last commit;
113     # for diff generation this logic gives the proper diff
113     # for diff generation this logic gives the proper diff
114 target_commit = source_scm.get_commit(commit_id=source_ref)
114 target_commit = source_scm.get_commit(commit_id=source_ref)
115
115
116 vcs_diff = \
116 vcs_diff = \
117 source_scm.get_diff(commit1=source_commit, commit2=target_commit,
117 source_scm.get_diff(commit1=source_commit, commit2=target_commit,
118 ignore_whitespace=False, context=3)
118 ignore_whitespace=False, context=3)
119
119
120 diff_processor = diffs.DiffProcessor(
120 diff_processor = diffs.DiffProcessor(
121 vcs_diff, format='newdiff', diff_limit=None,
121 vcs_diff, format='newdiff', diff_limit=None,
122 file_limit=None, show_full_diff=True)
122 file_limit=None, show_full_diff=True)
123
123
124 _parsed = diff_processor.prepare()
124 _parsed = diff_processor.prepare()
125
125
126 all_files = []
126 all_files = []
127 all_files_changes = []
127 all_files_changes = []
128 changed_lines = {}
128 changed_lines = {}
129 stats = [0, 0]
129 stats = [0, 0]
130 for f in _parsed:
130 for f in _parsed:
131 all_files.append(f['filename'])
131 all_files.append(f['filename'])
132 all_files_changes.append({
132 all_files_changes.append({
133 'filename': f['filename'],
133 'filename': f['filename'],
134 'stats': f['stats']
134 'stats': f['stats']
135 })
135 })
136 stats[0] += f['stats']['added']
136 stats[0] += f['stats']['added']
137 stats[1] += f['stats']['deleted']
137 stats[1] += f['stats']['deleted']
138
138
139 changed_lines[f['filename']] = []
139 changed_lines[f['filename']] = []
140 if len(f['chunks']) < 2:
140 if len(f['chunks']) < 2:
141 continue
141 continue
142 # first line is "context" information
142 # first line is "context" information
143 for chunks in f['chunks'][1:]:
143 for chunks in f['chunks'][1:]:
144 for chunk in chunks['lines']:
144 for chunk in chunks['lines']:
145 if chunk['action'] not in ('del', 'mod'):
145 if chunk['action'] not in ('del', 'mod'):
146 continue
146 continue
147 changed_lines[f['filename']].append(chunk['old_lineno'])
147 changed_lines[f['filename']].append(chunk['old_lineno'])
148
148
149 commit_authors = []
149 commit_authors = []
150 user_counts = {}
150 user_counts = {}
151 email_counts = {}
151 email_counts = {}
152 author_counts = {}
152 author_counts = {}
153 _commit_cache = {}
153 _commit_cache = {}
154
154
155 commits = []
155 commits = []
156 if get_commit_authors:
156 if get_commit_authors:
157 log.debug('Obtaining commit authors from set of commits')
157 log.debug('Obtaining commit authors from set of commits')
158 _compare_data = target_scm.compare(
158 _compare_data = target_scm.compare(
159 target_ref, source_ref, source_scm, merge=True,
159 target_ref, source_ref, source_scm, merge=True,
160 pre_load=["author", "date", "message"]
160 pre_load=["author", "date", "message"]
161 )
161 )
162
162
163 for commit in _compare_data:
163 for commit in _compare_data:
            # NOTE(marcink): we serialize here, so we don't produce more vcsserver calls
            # when the data returned from this function is later JSON-serialized
            serialized_commit = dict(
                author=commit.author,
                date=commit.date,
                message=commit.message,
                commit_id=commit.raw_id,
                raw_id=commit.raw_id
            )
            commits.append(serialized_commit)
            user = User.get_from_cs_author(serialized_commit['author'])
            if user and user not in commit_authors:
                commit_authors.append(user)

    # lines
    if get_authors:
        log.debug('Calculating authors of changed files')
        target_commit = source_repo.get_commit(ancestor_id)

        for fname, lines in changed_lines.items():

            try:
                node = target_commit.get_node(fname, pre_load=["is_binary"])
            except Exception:
                log.exception("Failed to load node with path %s", fname)
                continue

            if not isinstance(node, FileNode):
                continue

            # NOTE(marcink): for binary node we don't do annotation, just use last author
            if node.is_binary:
                author = node.last_commit.author
                email = node.last_commit.author_email

                user = User.get_from_cs_author(author)
                if user:
                    user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
                author_counts[author] = author_counts.get(author, 0) + 1
                email_counts[email] = email_counts.get(email, 0) + 1

                continue

            for annotation in node.annotate:
                line_no, commit_id, get_commit_func, line_text = annotation
                if line_no in lines:
                    if commit_id not in _commit_cache:
                        _commit_cache[commit_id] = get_commit_func()
                    commit = _commit_cache[commit_id]
                    author = commit.author
                    email = commit.author_email
                    user = User.get_from_cs_author(author)
                    if user:
                        user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
                    author_counts[author] = author_counts.get(author, 0) + 1
                    email_counts[email] = email_counts.get(email, 0) + 1

    log.debug('Default reviewers processing finished')

    return {
        'commits': commits,
        'files': all_files_changes,
        'stats': stats,
        'ancestor': ancestor_id,
        # original authors of modified files
        'original_authors': {
            'users': user_counts,
            'authors': author_counts,
            'emails': email_counts,
        },
        'commit_authors': commit_authors
    }
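
# Example (illustrative sketch, hypothetical names): assuming the dict returned
# above is bound to `diff_info` by a caller, it might be consumed like this:
#
#   added, deleted = diff_info['stats']
#   changed_files = [f['filename'] for f in diff_info['files']]
#   candidate_reviewers = diff_info['commit_authors']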


class PullRequestModel(BaseModel):

    cls = PullRequest

    DIFF_CONTEXT = diffs.DEFAULT_CONTEXT

    UPDATE_STATUS_MESSAGES = {
        UpdateFailureReason.NONE: lazy_ugettext(
            'Pull request update successful.'),
        UpdateFailureReason.UNKNOWN: lazy_ugettext(
            'Pull request update failed because of an unknown error.'),
        UpdateFailureReason.NO_CHANGE: lazy_ugettext(
            'No update needed because the source and target have not changed.'),
        UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
            'Pull request cannot be updated because the reference type is '
            'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
        UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
            'This pull request cannot be updated because the target '
            'reference is missing.'),
        UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
            'This pull request cannot be updated because the source '
            'reference is missing.'),
    }
    REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
    UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
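
    # Example (illustrative sketch, hypothetical variable names): the messages
    # above are looked up by the failure reason carried in the response of an
    # update, e.g. when rendering a flash message:
    #
    #   resp = PullRequestModel().update_commits(pull_request, updating_user)
    #   msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]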

    def __get_pull_request(self, pull_request):
        return self._get_instance((
            PullRequest, PullRequestVersion), pull_request)

    def _check_perms(self, perms, pull_request, user, api=False):
        if not api:
            return h.HasRepoPermissionAny(*perms)(
                user=user, repo_name=pull_request.target_repo.repo_name)
        else:
            return h.HasRepoPermissionAnyApi(*perms)(
                user=user, repo_name=pull_request.target_repo.repo_name)

    def check_user_read(self, pull_request, user, api=False):
        _perms = ('repository.admin', 'repository.write', 'repository.read',)
        return self._check_perms(_perms, pull_request, user, api)

    def check_user_merge(self, pull_request, user, api=False):
        _perms = ('repository.admin', 'repository.write', 'hg.admin',)
        return self._check_perms(_perms, pull_request, user, api)

    def check_user_update(self, pull_request, user, api=False):
        owner = user.user_id == pull_request.user_id
        return self.check_user_merge(pull_request, user, api) or owner

    def check_user_delete(self, pull_request, user):
        owner = user.user_id == pull_request.user_id
        _perms = ('repository.admin',)
        return self._check_perms(_perms, pull_request, user) or owner

    def is_user_reviewer(self, pull_request, user):
        return user.user_id in [
            x.user_id for x in
            pull_request.get_pull_request_reviewers(PullRequestReviewers.ROLE_REVIEWER)
            if x.user
        ]

    def check_user_change_status(self, pull_request, user, api=False):
        return self.check_user_update(pull_request, user, api) \
            or self.is_user_reviewer(pull_request, user)

    def check_user_comment(self, pull_request, user):
        owner = user.user_id == pull_request.user_id
        return self.check_user_read(pull_request, user) or owner

    def get(self, pull_request):
        return self.__get_pull_request(pull_request)
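
    # Example (illustrative sketch, hypothetical names): a view guarding the
    # merge controls could combine these helpers roughly like this:
    #
    #   model = PullRequestModel()
    #   if model.check_user_merge(pull_request, auth_user, api=False):
    #       ...  # render the merge button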

    def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
                               statuses=None, opened_by=None, order_by=None,
                               order_dir='desc', only_created=False):
        repo = None
        if repo_name:
            repo = self._get_repo(repo_name)

        q = PullRequest.query()

        if search_q:
            like_expression = u'%{}%'.format(safe_unicode(search_q))
            q = q.join(User)
            q = q.filter(or_(
                cast(PullRequest.pull_request_id, String).ilike(like_expression),
                User.username.ilike(like_expression),
                PullRequest.title.ilike(like_expression),
                PullRequest.description.ilike(like_expression),
            ))

        # source or target
        if repo and source:
            q = q.filter(PullRequest.source_repo == repo)
        elif repo:
            q = q.filter(PullRequest.target_repo == repo)

        # closed,opened
        if statuses:
            q = q.filter(PullRequest.status.in_(statuses))

        # opened by filter
        if opened_by:
            q = q.filter(PullRequest.user_id.in_(opened_by))

        # only get those that are in "created" state
        if only_created:
            q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)

        if order_by:
            order_map = {
                'name_raw': PullRequest.pull_request_id,
                'id': PullRequest.pull_request_id,
                'title': PullRequest.title,
                'updated_on_raw': PullRequest.updated_on,
                'target_repo': PullRequest.target_repo_id
            }
            if order_dir == 'asc':
                q = q.order_by(order_map[order_by].asc())
            else:
                q = q.order_by(order_map[order_by].desc())

        return q
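
    # Example (illustrative sketch, hypothetical repository name): the free-text
    # search matches the PR id, author username, title and description; combined
    # with ordering it could be used as:
    #
    #   q = self._prepare_get_all_query(
    #       'some-repo', search_q='fix', order_by='updated_on_raw', order_dir='asc')
    #   results = q.all()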

    def count_all(self, repo_name, search_q=None, source=False, statuses=None,
                  opened_by=None):
        """
        Count the number of pull requests for a specific repository.

        :param repo_name: target or source repo
        :param search_q: filter by text
        :param source: boolean flag to specify if repo_name refers to source
        :param statuses: list of pull request statuses
        :param opened_by: author user of the pull request
        :returns: int number of pull requests
        """
        q = self._prepare_get_all_query(
            repo_name, search_q=search_q, source=source, statuses=statuses,
            opened_by=opened_by)

        return q.count()

    def get_all(self, repo_name, search_q=None, source=False, statuses=None,
                opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
        """
        Get all pull requests for a specific repository.

        :param repo_name: target or source repo
        :param search_q: filter by text
        :param source: boolean flag to specify if repo_name refers to source
        :param statuses: list of pull request statuses
        :param opened_by: author user of the pull request
        :param offset: pagination offset
        :param length: length of returned list
        :param order_by: order of the returned list
        :param order_dir: 'asc' or 'desc' ordering direction
        :returns: list of pull requests
        """
        q = self._prepare_get_all_query(
            repo_name, search_q=search_q, source=source, statuses=statuses,
            opened_by=opened_by, order_by=order_by, order_dir=order_dir)

        if length:
            pull_requests = q.limit(length).offset(offset).all()
        else:
            pull_requests = q.all()

        return pull_requests
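
    # Example (illustrative sketch, hypothetical repository name): fetching the
    # second page of 20 matching pull requests, plus the total for pagination:
    #
    #   model = PullRequestModel()
    #   total = model.count_all('some-repo', search_q='bugfix')
    #   page = model.get_all('some-repo', search_q='bugfix',
    #                        offset=20, length=20, order_by='updated_on_raw')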

    def count_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
                              opened_by=None):
        """
        Count the number of pull requests for a specific repository that are
        awaiting review.

        :param repo_name: target or source repo
        :param search_q: filter by text
        :param source: boolean flag to specify if repo_name refers to source
        :param statuses: list of pull request statuses
        :param opened_by: author user of the pull request
        :returns: int number of pull requests
        """
        pull_requests = self.get_awaiting_review(
            repo_name, search_q=search_q, source=source, statuses=statuses, opened_by=opened_by)

        return len(pull_requests)

    def get_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
                            opened_by=None, offset=0, length=None,
                            order_by=None, order_dir='desc'):
        """
        Get all pull requests for a specific repository that are awaiting
        review.

        :param repo_name: target or source repo
        :param search_q: filter by text
        :param source: boolean flag to specify if repo_name refers to source
        :param statuses: list of pull request statuses
        :param opened_by: author user of the pull request
        :param offset: pagination offset
        :param length: length of returned list
        :param order_by: order of the returned list
        :param order_dir: 'asc' or 'desc' ordering direction
        :returns: list of pull requests
        """
        pull_requests = self.get_all(
            repo_name, search_q=search_q, source=source, statuses=statuses,
            opened_by=opened_by, order_by=order_by, order_dir=order_dir)

        _filtered_pull_requests = []
        for pr in pull_requests:
            status = pr.calculated_review_status()
            if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
                          ChangesetStatus.STATUS_UNDER_REVIEW]:
                _filtered_pull_requests.append(pr)
        if length:
            return _filtered_pull_requests[offset:offset+length]
        else:
            return _filtered_pull_requests
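
    # Note (illustrative, hypothetical repository name): the review-status filter
    # above runs in Python after the database query, so offset/length slicing is
    # applied to the already filtered list, e.g.:
    #
    #   waiting = PullRequestModel().get_awaiting_review('some-repo', length=10)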

    def count_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
                                 opened_by=None, user_id=None):
        """
        Count the number of pull requests for a specific repository that are
        awaiting review from a specific user.

        :param repo_name: target or source repo
        :param search_q: filter by text
        :param source: boolean flag to specify if repo_name refers to source
        :param statuses: list of pull request statuses
        :param opened_by: author user of the pull request
        :param user_id: reviewer user of the pull request
        :returns: int number of pull requests
        """
        pull_requests = self.get_awaiting_my_review(
            repo_name, search_q=search_q, source=source, statuses=statuses,
            opened_by=opened_by, user_id=user_id)

        return len(pull_requests)

    def get_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
                               opened_by=None, user_id=None, offset=0,
                               length=None, order_by=None, order_dir='desc'):
        """
        Get all pull requests for a specific repository that are awaiting
        review from a specific user.

        :param repo_name: target or source repo
        :param search_q: filter by text
        :param source: boolean flag to specify if repo_name refers to source
        :param statuses: list of pull request statuses
        :param opened_by: author user of the pull request
        :param user_id: reviewer user of the pull request
        :param offset: pagination offset
        :param length: length of returned list
        :param order_by: order of the returned list
        :param order_dir: 'asc' or 'desc' ordering direction
        :returns: list of pull requests
        """
        pull_requests = self.get_all(
            repo_name, search_q=search_q, source=source, statuses=statuses,
            opened_by=opened_by, order_by=order_by, order_dir=order_dir)

        _my = PullRequestModel().get_not_reviewed(user_id)
        my_participation = []
        for pr in pull_requests:
            if pr in _my:
                my_participation.append(pr)
        _filtered_pull_requests = my_participation
        if length:
            return _filtered_pull_requests[offset:offset+length]
        else:
            return _filtered_pull_requests

    def get_not_reviewed(self, user_id):
        return [
            x.pull_request for x in PullRequestReviewers.query().filter(
                PullRequestReviewers.user_id == user_id).all()
        ]
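
    # Example (illustrative sketch, hypothetical names): pull requests waiting
    # for a given reviewer can be counted per repository like this:
    #
    #   todo = PullRequestModel().count_awaiting_my_review(
    #       'some-repo', user_id=auth_user.user_id)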

    def _prepare_participating_query(self, user_id=None, statuses=None, query='',
                                     order_by=None, order_dir='desc'):
        q = PullRequest.query()
        if user_id:
            reviewers_subquery = Session().query(
                PullRequestReviewers.pull_request_id).filter(
                PullRequestReviewers.user_id == user_id).subquery()
            user_filter = or_(
                PullRequest.user_id == user_id,
                PullRequest.pull_request_id.in_(reviewers_subquery)
            )
            q = PullRequest.query().filter(user_filter)

        # closed,opened
        if statuses:
            q = q.filter(PullRequest.status.in_(statuses))

        if query:
            like_expression = u'%{}%'.format(safe_unicode(query))
            q = q.join(User)
            q = q.filter(or_(
                cast(PullRequest.pull_request_id, String).ilike(like_expression),
                User.username.ilike(like_expression),
                PullRequest.title.ilike(like_expression),
                PullRequest.description.ilike(like_expression),
            ))
        if order_by:
            order_map = {
                'name_raw': PullRequest.pull_request_id,
                'title': PullRequest.title,
                'updated_on_raw': PullRequest.updated_on,
                'target_repo': PullRequest.target_repo_id
            }
            if order_dir == 'asc':
                q = q.order_by(order_map[order_by].asc())
            else:
                q = q.order_by(order_map[order_by].desc())

        return q

    def count_im_participating_in(self, user_id=None, statuses=None, query=''):
        q = self._prepare_participating_query(user_id, statuses=statuses, query=query)
        return q.count()

    def get_im_participating_in(
            self, user_id=None, statuses=None, query='', offset=0,
            length=None, order_by=None, order_dir='desc'):
        """
        Get all pull requests that I'm participating in, or that I have opened.
        """

        q = self._prepare_participating_query(
            user_id, statuses=statuses, query=query, order_by=order_by,
            order_dir=order_dir)

        if length:
            pull_requests = q.limit(length).offset(offset).all()
        else:
            pull_requests = q.all()

        return pull_requests

    def get_versions(self, pull_request):
        """
        returns versions of a pull request sorted by version ID, ascending
        """
        return PullRequestVersion.query()\
            .filter(PullRequestVersion.pull_request == pull_request)\
            .order_by(PullRequestVersion.pull_request_version_id.asc())\
            .all()

    def get_pr_version(self, pull_request_id, version=None):
        at_version = None

        if version and version == 'latest':
            pull_request_ver = PullRequest.get(pull_request_id)
            pull_request_obj = pull_request_ver
            _org_pull_request_obj = pull_request_obj
            at_version = 'latest'
        elif version:
            pull_request_ver = PullRequestVersion.get_or_404(version)
            pull_request_obj = pull_request_ver
            _org_pull_request_obj = pull_request_ver.pull_request
            at_version = pull_request_ver.pull_request_version_id
        else:
            _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
                pull_request_id)

        pull_request_display_obj = PullRequest.get_pr_display_object(
            pull_request_obj, _org_pull_request_obj)

        return _org_pull_request_obj, pull_request_obj, \
            pull_request_display_obj, at_version

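    # Example (illustrative sketch, hypothetical names): callers typically
    # unpack the returned 4-tuple, e.g. when rendering a PR at a given version:
    #
    #   pull_request_latest, pull_request_at_ver, display_obj, at_version = \
    #       PullRequestModel().get_pr_version(pr_id, version=version)
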
    def create(self, created_by, source_repo, source_ref, target_repo,
               target_ref, revisions, reviewers, observers, title, description=None,
               common_ancestor_id=None,
               description_renderer=None,
               reviewer_data=None, translator=None, auth_user=None):
        translator = translator or get_current_request().translate

        created_by_user = self._get_user(created_by)
        auth_user = auth_user or created_by_user.AuthUser()
        source_repo = self._get_repo(source_repo)
        target_repo = self._get_repo(target_repo)

        pull_request = PullRequest()
        pull_request.source_repo = source_repo
        pull_request.source_ref = source_ref
        pull_request.target_repo = target_repo
        pull_request.target_ref = target_ref
        pull_request.revisions = revisions
        pull_request.title = title
        pull_request.description = description
        pull_request.description_renderer = description_renderer
        pull_request.author = created_by_user
        pull_request.reviewer_data = reviewer_data
        pull_request.pull_request_state = pull_request.STATE_CREATING
        pull_request.common_ancestor_id = common_ancestor_id

        Session().add(pull_request)
        Session().flush()

        reviewer_ids = set()
        # members / reviewers
        for reviewer_object in reviewers:
            user_id, reasons, mandatory, role, rules = reviewer_object
            user = self._get_user(user_id)

            # skip duplicates
            if user.user_id in reviewer_ids:
                continue

            reviewer_ids.add(user.user_id)

            reviewer = PullRequestReviewers()
            reviewer.user = user
            reviewer.pull_request = pull_request
            reviewer.reasons = reasons
            reviewer.mandatory = mandatory
            reviewer.role = role

            # NOTE(marcink): pick only first rule for now
            rule_id = list(rules)[0] if rules else None
            rule = RepoReviewRule.get(rule_id) if rule_id else None
            if rule:
                review_group = rule.user_group_vote_rule(user_id)
                # we check if this particular reviewer is a member of a voting group
                if review_group:
                    # NOTE(marcink):
                    # the user can be a member of more than one group, but we pick
                    # the first one, same as the default reviewers algorithm
                    review_group = review_group[0]

                    rule_data = {
                        'rule_name':
                            rule.review_rule_name,
                        'rule_user_group_entry_id':
                            review_group.repo_review_rule_users_group_id,
                        'rule_user_group_name':
                            review_group.users_group.users_group_name,
                        'rule_user_group_members':
                            [x.user.username for x in review_group.users_group.members],
                        'rule_user_group_members_id':
                            [x.user.user_id for x in review_group.users_group.members],
                    }
                    # e.g {'vote_rule': -1, 'mandatory': True}
                    rule_data.update(review_group.rule_data())

                    reviewer.rule_data = rule_data

            Session().add(reviewer)
            Session().flush()

        for observer_object in observers:
            user_id, reasons, mandatory, role, rules = observer_object
            user = self._get_user(user_id)

            # skip duplicates from reviewers
            if user.user_id in reviewer_ids:
                continue

            #reviewer_ids.add(user.user_id)

            observer = PullRequestReviewers()
            observer.user = user
            observer.pull_request = pull_request
            observer.reasons = reasons
            observer.mandatory = mandatory
            observer.role = role

            # NOTE(marcink): pick only first rule for now
            rule_id = list(rules)[0] if rules else None
            rule = RepoReviewRule.get(rule_id) if rule_id else None
            if rule:
                # TODO(marcink): do we need this for observers ??
                pass

            Session().add(observer)
            Session().flush()

        # Set approval status to "Under Review" for all commits which are
        # part of this pull request.
        ChangesetStatusModel().set_status(
            repo=target_repo,
            status=ChangesetStatus.STATUS_UNDER_REVIEW,
            user=created_by_user,
            pull_request=pull_request
        )
        # we commit early at this point because the queries above take row locks.
        # Because of that we need to commit and finish the transaction before the
        # validate call below, which for large repos can take a long time and
        # would otherwise hold those row locks for just as long.
        Session().commit()

        # prepare workspace, and run initial merge simulation. Set state during that
        # operation
        pull_request = PullRequest.get(pull_request.pull_request_id)

        # set the state to merging for the merge simulation; once it finishes, move
        # to created to mark that the simulation worked fine
        with pull_request.set_state(PullRequest.STATE_MERGING,
                                    final_state=PullRequest.STATE_CREATED) as state_obj:
            MergeCheck.validate(
                pull_request, auth_user=auth_user, translator=translator)

        self.notify_reviewers(pull_request, reviewer_ids, created_by_user)
        self.trigger_pull_request_hook(pull_request, created_by_user, 'create')

        creation_data = pull_request.get_api_data(with_merge_state=False)
        self._log_audit_action(
            'repo.pull_request.create', {'data': creation_data},
            auth_user, pull_request)

        return pull_request
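
    # Example (illustrative sketch, hypothetical values): each entry in the
    # `reviewers` / `observers` lists above is a 5-tuple of
    # (user_id, reasons, mandatory, role, rules), e.g.:
    #
    #   reviewers = [(2, ['added manually'], True,
    #                 PullRequestReviewers.ROLE_REVIEWER, [])]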

    def trigger_pull_request_hook(self, pull_request, user, action, data=None):
        pull_request = self.__get_pull_request(pull_request)
        target_scm = pull_request.target_repo.scm_instance()
        if action == 'create':
            trigger_hook = hooks_utils.trigger_create_pull_request_hook
        elif action == 'merge':
            trigger_hook = hooks_utils.trigger_merge_pull_request_hook
        elif action == 'close':
            trigger_hook = hooks_utils.trigger_close_pull_request_hook
        elif action == 'review_status_change':
            trigger_hook = hooks_utils.trigger_review_pull_request_hook
        elif action == 'update':
            trigger_hook = hooks_utils.trigger_update_pull_request_hook
        elif action == 'comment':
            trigger_hook = hooks_utils.trigger_comment_pull_request_hook
        elif action == 'comment_edit':
            trigger_hook = hooks_utils.trigger_comment_pull_request_edit_hook
        else:
            return

        log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s',
                  pull_request, action, trigger_hook)
        trigger_hook(
            username=user.username,
            repo_name=pull_request.target_repo.repo_name,
            repo_type=target_scm.alias,
            pull_request=pull_request,
            data=data)

    def _get_commit_ids(self, pull_request):
        """
        Return the commit ids of the merged pull request.

        This method does not yet deal correctly with the lack of autoupdates
        nor with implicit target updates.
        For example: if a commit in the source repo is already in the target it
        will be reported anyway.
        """
        merge_rev = pull_request.merge_rev
        if merge_rev is None:
            raise ValueError('This pull request was not merged yet')

        commit_ids = list(pull_request.revisions)
        if merge_rev not in commit_ids:
            commit_ids.append(merge_rev)

        return commit_ids

    def merge_repo(self, pull_request, user, extras):
        log.debug("Merging pull request %s", pull_request.pull_request_id)
        extras['user_agent'] = 'internal-merge'
        merge_state = self._merge_pull_request(pull_request, user, extras)
        if merge_state.executed:
            log.debug("Merge was successful, updating the pull request comments.")
            self._comment_and_close_pr(pull_request, user, merge_state)

            self._log_audit_action(
                'repo.pull_request.merge',
                {'merge_state': merge_state.__dict__},
                user, pull_request)

        else:
            log.warn("Merge failed, not updating the pull request.")
        return merge_state

    def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
        target_vcs = pull_request.target_repo.scm_instance()
        source_vcs = pull_request.source_repo.scm_instance()

        message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
            pr_id=pull_request.pull_request_id,
            pr_title=pull_request.title,
            source_repo=source_vcs.name,
            source_ref_name=pull_request.source_ref_parts.name,
            target_repo=target_vcs.name,
            target_ref_name=pull_request.target_ref_parts.name,
        )

        workspace_id = self._workspace_id(pull_request)
        repo_id = pull_request.target_repo.repo_id
        use_rebase = self._use_rebase_for_merging(pull_request)
        close_branch = self._close_branch_before_merging(pull_request)
        user_name = self._user_name_for_merging(pull_request, user)

        target_ref = self._refresh_reference(
            pull_request.target_ref_parts, target_vcs)

        callback_daemon, extras = prepare_callback_daemon(
            extras, protocol=vcs_settings.HOOKS_PROTOCOL,
            host=vcs_settings.HOOKS_HOST,
            use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)

        with callback_daemon:
            # TODO: johbo: Implement a clean way to run a config_override
            # for a single call.
            target_vcs.config.set(
                'rhodecode', 'RC_SCM_DATA', json.dumps(extras))

            merge_state = target_vcs.merge(
                repo_id, workspace_id, target_ref, source_vcs,
                pull_request.source_ref_parts,
                user_name=user_name, user_email=user.email,
                message=message, use_rebase=use_rebase,
                close_branch=close_branch)
        return merge_state
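
    # Note (illustrative): MERGE_MESSAGE_TMPL is formatted with the keyword
    # arguments shown above (pr_id, pr_title, source_repo, source_ref_name,
    # target_repo, target_ref_name); a custom merge_msg passed to this method
    # can use the same placeholders, e.g. (hypothetical template):
    #
    #   merge_msg = u'PR !{pr_id} ({pr_title}) -> {target_repo}:{target_ref_name}'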

    def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
        pull_request.merge_rev = merge_state.merge_ref.commit_id
        pull_request.updated_on = datetime.datetime.now()
        close_msg = close_msg or 'Pull request merged and closed'

        CommentsModel().create(
            text=safe_unicode(close_msg),
            repo=pull_request.target_repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            f_path=None,
            line_no=None,
            closing_pr=True
        )

        Session().add(pull_request)
        Session().flush()
        # TODO: paris: replace invalidation with less radical solution
        ScmModel().mark_for_invalidation(
            pull_request.target_repo.repo_name)
        self.trigger_pull_request_hook(pull_request, user, 'merge')

    def has_valid_update_type(self, pull_request):
        source_ref_type = pull_request.source_ref_parts.type
        return source_ref_type in self.REF_TYPES

    def get_flow_commits(self, pull_request):

        # source repo
        source_ref_name = pull_request.source_ref_parts.name
        source_ref_type = pull_request.source_ref_parts.type
        source_ref_id = pull_request.source_ref_parts.commit_id
        source_repo = pull_request.source_repo.scm_instance()

        try:
            if source_ref_type in self.REF_TYPES:
                source_commit = source_repo.get_commit(source_ref_name)
            else:
                source_commit = source_repo.get_commit(source_ref_id)
        except CommitDoesNotExistError:
            raise SourceRefMissing()

        # target repo
        target_ref_name = pull_request.target_ref_parts.name
        target_ref_type = pull_request.target_ref_parts.type
        target_ref_id = pull_request.target_ref_parts.commit_id
        target_repo = pull_request.target_repo.scm_instance()

        try:
            if target_ref_type in self.REF_TYPES:
                target_commit = target_repo.get_commit(target_ref_name)
            else:
                target_commit = target_repo.get_commit(target_ref_id)
        except CommitDoesNotExistError:
            raise TargetRefMissing()

        return source_commit, target_commit
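
    # Example (illustrative sketch): update_commits() below relies on this to
    # resolve the current head of each side; for a branch-based pull request both
    # refs are resolved by name, so new commits on the branches are picked up:
    #
    #   source_commit, target_commit = self.get_flow_commits(pull_request)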
918
918
919 def update_commits(self, pull_request, updating_user):
919 def update_commits(self, pull_request, updating_user):
920 """
920 """
921 Get the updated list of commits for the pull request
921 Get the updated list of commits for the pull request
922 and return the new pull request version and the list
922 and return the new pull request version and the list
923 of commits processed by this update action
923 of commits processed by this update action
924
924
925 updating_user is the user_object who triggered the update
925 updating_user is the user_object who triggered the update
926 """
926 """
927 pull_request = self.__get_pull_request(pull_request)
927 pull_request = self.__get_pull_request(pull_request)
928 source_ref_type = pull_request.source_ref_parts.type
928 source_ref_type = pull_request.source_ref_parts.type
929 source_ref_name = pull_request.source_ref_parts.name
929 source_ref_name = pull_request.source_ref_parts.name
930 source_ref_id = pull_request.source_ref_parts.commit_id
930 source_ref_id = pull_request.source_ref_parts.commit_id
931
931
932 target_ref_type = pull_request.target_ref_parts.type
932 target_ref_type = pull_request.target_ref_parts.type
933 target_ref_name = pull_request.target_ref_parts.name
933 target_ref_name = pull_request.target_ref_parts.name
934 target_ref_id = pull_request.target_ref_parts.commit_id
934 target_ref_id = pull_request.target_ref_parts.commit_id
935
935
936 if not self.has_valid_update_type(pull_request):
936 if not self.has_valid_update_type(pull_request):
937 log.debug("Skipping update of pull request %s due to ref type: %s",
937 log.debug("Skipping update of pull request %s due to ref type: %s",
938 pull_request, source_ref_type)
938 pull_request, source_ref_type)
939 return UpdateResponse(
939 return UpdateResponse(
940 executed=False,
940 executed=False,
941 reason=UpdateFailureReason.WRONG_REF_TYPE,
941 reason=UpdateFailureReason.WRONG_REF_TYPE,
942 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
942 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
943 source_changed=False, target_changed=False)
943 source_changed=False, target_changed=False)
944
944
945 try:
945 try:
946 source_commit, target_commit = self.get_flow_commits(pull_request)
946 source_commit, target_commit = self.get_flow_commits(pull_request)
947 except SourceRefMissing:
947 except SourceRefMissing:
948 return UpdateResponse(
948 return UpdateResponse(
949 executed=False,
949 executed=False,
950 reason=UpdateFailureReason.MISSING_SOURCE_REF,
950 reason=UpdateFailureReason.MISSING_SOURCE_REF,
951 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
951 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
952 source_changed=False, target_changed=False)
952 source_changed=False, target_changed=False)
953 except TargetRefMissing:
953 except TargetRefMissing:
954 return UpdateResponse(
954 return UpdateResponse(
955 executed=False,
955 executed=False,
956 reason=UpdateFailureReason.MISSING_TARGET_REF,
956 reason=UpdateFailureReason.MISSING_TARGET_REF,
957 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
957 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
958 source_changed=False, target_changed=False)
958 source_changed=False, target_changed=False)
959
959
960 source_changed = source_ref_id != source_commit.raw_id
960 source_changed = source_ref_id != source_commit.raw_id
961 target_changed = target_ref_id != target_commit.raw_id
961 target_changed = target_ref_id != target_commit.raw_id
962
962
963 if not (source_changed or target_changed):
963 if not (source_changed or target_changed):
964 log.debug("Nothing changed in pull request %s", pull_request)
964 log.debug("Nothing changed in pull request %s", pull_request)
965 return UpdateResponse(
965 return UpdateResponse(
966 executed=False,
966 executed=False,
967 reason=UpdateFailureReason.NO_CHANGE,
967 reason=UpdateFailureReason.NO_CHANGE,
968 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
968 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
969 source_changed=source_changed, target_changed=target_changed)
969 source_changed=source_changed, target_changed=target_changed)
970
970
971 change_in_found = 'target repo' if target_changed else 'source repo'
971 change_in_found = 'target repo' if target_changed else 'source repo'
972 log.debug('Updating pull request because of change in %s detected',
972 log.debug('Updating pull request because of change in %s detected',
973 change_in_found)
973 change_in_found)
974
974
975 # An update is needed; if the source changed we create a new version,
975 # An update is needed; if the source changed we create a new version,
976 # otherwise we just update the existing pull request in place
976 # otherwise we just update the existing pull request in place
977 if source_changed:
977 if source_changed:
978 pull_request_version = self._create_version_from_snapshot(pull_request)
978 pull_request_version = self._create_version_from_snapshot(pull_request)
979 self._link_comments_to_version(pull_request_version)
979 self._link_comments_to_version(pull_request_version)
980 else:
980 else:
981 try:
981 try:
982 ver = pull_request.versions[-1]
982 ver = pull_request.versions[-1]
983 except IndexError:
983 except IndexError:
984 ver = None
984 ver = None
985
985
986 pull_request.pull_request_version_id = \
986 pull_request.pull_request_version_id = \
987 ver.pull_request_version_id if ver else None
987 ver.pull_request_version_id if ver else None
988 pull_request_version = pull_request
988 pull_request_version = pull_request
989
989
990 source_repo = pull_request.source_repo.scm_instance()
990 source_repo = pull_request.source_repo.scm_instance()
991 target_repo = pull_request.target_repo.scm_instance()
991 target_repo = pull_request.target_repo.scm_instance()
992
992
993 # re-compute commit ids
993 # re-compute commit ids
994 old_commit_ids = pull_request.revisions
994 old_commit_ids = pull_request.revisions
995 pre_load = ["author", "date", "message", "branch"]
995 pre_load = ["author", "date", "message", "branch"]
996 commit_ranges = target_repo.compare(
996 commit_ranges = target_repo.compare(
997 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
997 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
998 pre_load=pre_load)
998 pre_load=pre_load)
999
999
1000 target_ref = target_commit.raw_id
1000 target_ref = target_commit.raw_id
1001 source_ref = source_commit.raw_id
1001 source_ref = source_commit.raw_id
1002 ancestor_commit_id = target_repo.get_common_ancestor(
1002 ancestor_commit_id = target_repo.get_common_ancestor(
1003 target_ref, source_ref, source_repo)
1003 target_ref, source_ref, source_repo)
1004
1004
1005 if not ancestor_commit_id:
1005 if not ancestor_commit_id:
1006 raise ValueError(
1006 raise ValueError(
1007 'cannot calculate diff info without a common ancestor. '
1007 'cannot calculate diff info without a common ancestor. '
1008 'Make sure both repositories are related, and have a common forking commit.')
1008 'Make sure both repositories are related, and have a common forking commit.')
1009
1009
1010 pull_request.common_ancestor_id = ancestor_commit_id
1010 pull_request.common_ancestor_id = ancestor_commit_id
1011
1011
1012 pull_request.source_ref = '%s:%s:%s' % (
1012 pull_request.source_ref = '%s:%s:%s' % (
1013 source_ref_type, source_ref_name, source_commit.raw_id)
1013 source_ref_type, source_ref_name, source_commit.raw_id)
1014 pull_request.target_ref = '%s:%s:%s' % (
1014 pull_request.target_ref = '%s:%s:%s' % (
1015 target_ref_type, target_ref_name, ancestor_commit_id)
1015 target_ref_type, target_ref_name, ancestor_commit_id)
1016
1016
1017 pull_request.revisions = [
1017 pull_request.revisions = [
1018 commit.raw_id for commit in reversed(commit_ranges)]
1018 commit.raw_id for commit in reversed(commit_ranges)]
1019 pull_request.updated_on = datetime.datetime.now()
1019 pull_request.updated_on = datetime.datetime.now()
1020 Session().add(pull_request)
1020 Session().add(pull_request)
1021 new_commit_ids = pull_request.revisions
1021 new_commit_ids = pull_request.revisions
1022
1022
1023 old_diff_data, new_diff_data = self._generate_update_diffs(
1023 old_diff_data, new_diff_data = self._generate_update_diffs(
1024 pull_request, pull_request_version)
1024 pull_request, pull_request_version)
1025
1025
1026 # calculate commit and file changes
1026 # calculate commit and file changes
1027 commit_changes = self._calculate_commit_id_changes(
1027 commit_changes = self._calculate_commit_id_changes(
1028 old_commit_ids, new_commit_ids)
1028 old_commit_ids, new_commit_ids)
1029 file_changes = self._calculate_file_changes(
1029 file_changes = self._calculate_file_changes(
1030 old_diff_data, new_diff_data)
1030 old_diff_data, new_diff_data)
1031
1031
1032 # set comments as outdated if DIFFS changed
1032 # set comments as outdated if DIFFS changed
1033 CommentsModel().outdate_comments(
1033 CommentsModel().outdate_comments(
1034 pull_request, old_diff_data=old_diff_data,
1034 pull_request, old_diff_data=old_diff_data,
1035 new_diff_data=new_diff_data)
1035 new_diff_data=new_diff_data)
1036
1036
1037 valid_commit_changes = (commit_changes.added or commit_changes.removed)
1037 valid_commit_changes = (commit_changes.added or commit_changes.removed)
1038 file_node_changes = (
1038 file_node_changes = (
1039 file_changes.added or file_changes.modified or file_changes.removed)
1039 file_changes.added or file_changes.modified or file_changes.removed)
1040 pr_has_changes = valid_commit_changes or file_node_changes
1040 pr_has_changes = valid_commit_changes or file_node_changes
1041
1041
1042 # Add an automatic comment to the pull request if
1042 # Add an automatic comment to the pull request if
1043 # anything has changed
1043 # anything has changed
1044 if pr_has_changes:
1044 if pr_has_changes:
1045 update_comment = CommentsModel().create(
1045 update_comment = CommentsModel().create(
1046 text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes),
1046 text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes),
1047 repo=pull_request.target_repo,
1047 repo=pull_request.target_repo,
1048 user=pull_request.author,
1048 user=pull_request.author,
1049 pull_request=pull_request,
1049 pull_request=pull_request,
1050 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
1050 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
1051
1051
1052 # Update status to "Under Review" for added commits
1052 # Update status to "Under Review" for added commits
1053 for commit_id in commit_changes.added:
1053 for commit_id in commit_changes.added:
1054 ChangesetStatusModel().set_status(
1054 ChangesetStatusModel().set_status(
1055 repo=pull_request.source_repo,
1055 repo=pull_request.source_repo,
1056 status=ChangesetStatus.STATUS_UNDER_REVIEW,
1056 status=ChangesetStatus.STATUS_UNDER_REVIEW,
1057 comment=update_comment,
1057 comment=update_comment,
1058 user=pull_request.author,
1058 user=pull_request.author,
1059 pull_request=pull_request,
1059 pull_request=pull_request,
1060 revision=commit_id)
1060 revision=commit_id)
1061
1061
1062 # send update email to users
1062 # send update email to users
1063 try:
1063 try:
1064 self.notify_users(pull_request=pull_request, updating_user=updating_user,
1064 self.notify_users(pull_request=pull_request, updating_user=updating_user,
1065 ancestor_commit_id=ancestor_commit_id,
1065 ancestor_commit_id=ancestor_commit_id,
1066 commit_changes=commit_changes,
1066 commit_changes=commit_changes,
1067 file_changes=file_changes)
1067 file_changes=file_changes)
1068 except Exception:
1068 except Exception:
1069 log.exception('Failed to send email notification to users')
1069 log.exception('Failed to send email notification to users')
1070
1070
1071 log.debug(
1071 log.debug(
1072 'Updated pull request %s, added_ids: %s, common_ids: %s, '
1072 'Updated pull request %s, added_ids: %s, common_ids: %s, '
1073 'removed_ids: %s', pull_request.pull_request_id,
1073 'removed_ids: %s', pull_request.pull_request_id,
1074 commit_changes.added, commit_changes.common, commit_changes.removed)
1074 commit_changes.added, commit_changes.common, commit_changes.removed)
1075 log.debug(
1075 log.debug(
1076 'Updated pull request with the following file changes: %s',
1076 'Updated pull request with the following file changes: %s',
1077 file_changes)
1077 file_changes)
1078
1078
1079 log.info(
1079 log.info(
1080 "Updated pull request %s from commit %s to commit %s, "
1080 "Updated pull request %s from commit %s to commit %s, "
1081 "stored new version %s of this pull request.",
1081 "stored new version %s of this pull request.",
1082 pull_request.pull_request_id, source_ref_id,
1082 pull_request.pull_request_id, source_ref_id,
1083 pull_request.source_ref_parts.commit_id,
1083 pull_request.source_ref_parts.commit_id,
1084 pull_request_version.pull_request_version_id)
1084 pull_request_version.pull_request_version_id)
1085 Session().commit()
1085 Session().commit()
1086 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
1086 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
1087
1087
1088 return UpdateResponse(
1088 return UpdateResponse(
1089 executed=True, reason=UpdateFailureReason.NONE,
1089 executed=True, reason=UpdateFailureReason.NONE,
1090 old=pull_request, new=pull_request_version,
1090 old=pull_request, new=pull_request_version,
1091 common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes,
1091 common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes,
1092 source_changed=source_changed, target_changed=target_changed)
1092 source_changed=source_changed, target_changed=target_changed)
1093
1093
1094 def _create_version_from_snapshot(self, pull_request):
1094 def _create_version_from_snapshot(self, pull_request):
1095 version = PullRequestVersion()
1095 version = PullRequestVersion()
1096 version.title = pull_request.title
1096 version.title = pull_request.title
1097 version.description = pull_request.description
1097 version.description = pull_request.description
1098 version.status = pull_request.status
1098 version.status = pull_request.status
1099 version.pull_request_state = pull_request.pull_request_state
1099 version.pull_request_state = pull_request.pull_request_state
1100 version.created_on = datetime.datetime.now()
1100 version.created_on = datetime.datetime.now()
1101 version.updated_on = pull_request.updated_on
1101 version.updated_on = pull_request.updated_on
1102 version.user_id = pull_request.user_id
1102 version.user_id = pull_request.user_id
1103 version.source_repo = pull_request.source_repo
1103 version.source_repo = pull_request.source_repo
1104 version.source_ref = pull_request.source_ref
1104 version.source_ref = pull_request.source_ref
1105 version.target_repo = pull_request.target_repo
1105 version.target_repo = pull_request.target_repo
1106 version.target_ref = pull_request.target_ref
1106 version.target_ref = pull_request.target_ref
1107
1107
1108 version._last_merge_source_rev = pull_request._last_merge_source_rev
1108 version._last_merge_source_rev = pull_request._last_merge_source_rev
1109 version._last_merge_target_rev = pull_request._last_merge_target_rev
1109 version._last_merge_target_rev = pull_request._last_merge_target_rev
1110 version.last_merge_status = pull_request.last_merge_status
1110 version.last_merge_status = pull_request.last_merge_status
1111 version.last_merge_metadata = pull_request.last_merge_metadata
1111 version.last_merge_metadata = pull_request.last_merge_metadata
1112 version.shadow_merge_ref = pull_request.shadow_merge_ref
1112 version.shadow_merge_ref = pull_request.shadow_merge_ref
1113 version.merge_rev = pull_request.merge_rev
1113 version.merge_rev = pull_request.merge_rev
1114 version.reviewer_data = pull_request.reviewer_data
1114 version.reviewer_data = pull_request.reviewer_data
1115
1115
1116 version.revisions = pull_request.revisions
1116 version.revisions = pull_request.revisions
1117 version.common_ancestor_id = pull_request.common_ancestor_id
1117 version.common_ancestor_id = pull_request.common_ancestor_id
1118 version.pull_request = pull_request
1118 version.pull_request = pull_request
1119 Session().add(version)
1119 Session().add(version)
1120 Session().flush()
1120 Session().flush()
1121
1121
1122 return version
1122 return version
1123
1123
1124 def _generate_update_diffs(self, pull_request, pull_request_version):
1124 def _generate_update_diffs(self, pull_request, pull_request_version):
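# Builds the "old" diff from the last stored pull request version and the
# "new" diff from the current refs, so that comment outdating and
# _calculate_file_changes can compare the two.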
1125
1125
1126 diff_context = (
1126 diff_context = (
1127 self.DIFF_CONTEXT +
1127 self.DIFF_CONTEXT +
1128 CommentsModel.needed_extra_diff_context())
1128 CommentsModel.needed_extra_diff_context())
1129 hide_whitespace_changes = False
1129 hide_whitespace_changes = False
1130 source_repo = pull_request_version.source_repo
1130 source_repo = pull_request_version.source_repo
1131 source_ref_id = pull_request_version.source_ref_parts.commit_id
1131 source_ref_id = pull_request_version.source_ref_parts.commit_id
1132 target_ref_id = pull_request_version.target_ref_parts.commit_id
1132 target_ref_id = pull_request_version.target_ref_parts.commit_id
1133 old_diff = self._get_diff_from_pr_or_version(
1133 old_diff = self._get_diff_from_pr_or_version(
1134 source_repo, source_ref_id, target_ref_id,
1134 source_repo, source_ref_id, target_ref_id,
1135 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1135 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1136
1136
1137 source_repo = pull_request.source_repo
1137 source_repo = pull_request.source_repo
1138 source_ref_id = pull_request.source_ref_parts.commit_id
1138 source_ref_id = pull_request.source_ref_parts.commit_id
1139 target_ref_id = pull_request.target_ref_parts.commit_id
1139 target_ref_id = pull_request.target_ref_parts.commit_id
1140
1140
1141 new_diff = self._get_diff_from_pr_or_version(
1141 new_diff = self._get_diff_from_pr_or_version(
1142 source_repo, source_ref_id, target_ref_id,
1142 source_repo, source_ref_id, target_ref_id,
1143 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1143 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1144
1144
1145 old_diff_data = diffs.DiffProcessor(old_diff)
1145 old_diff_data = diffs.DiffProcessor(old_diff)
1146 old_diff_data.prepare()
1146 old_diff_data.prepare()
1147 new_diff_data = diffs.DiffProcessor(new_diff)
1147 new_diff_data = diffs.DiffProcessor(new_diff)
1148 new_diff_data.prepare()
1148 new_diff_data.prepare()
1149
1149
1150 return old_diff_data, new_diff_data
1150 return old_diff_data, new_diff_data
1151
1151
1152 def _link_comments_to_version(self, pull_request_version):
1152 def _link_comments_to_version(self, pull_request_version):
1153 """
1153 """
1154 Link all unlinked comments of this pull request to the given version.
1154 Link all unlinked comments of this pull request to the given version.
1155
1155
1156 :param pull_request_version: The `PullRequestVersion` to which
1156 :param pull_request_version: The `PullRequestVersion` to which
1157 the comments shall be linked.
1157 the comments shall be linked.
1158
1158
1159 """
1159 """
1160 pull_request = pull_request_version.pull_request
1160 pull_request = pull_request_version.pull_request
1161 comments = ChangesetComment.query()\
1161 comments = ChangesetComment.query()\
1162 .filter(
1162 .filter(
1163 # TODO: johbo: Should we query for the repo at all here?
1163 # TODO: johbo: Should we query for the repo at all here?
1164 # Pending decision on how comments of PRs are to be related
1164 # Pending decision on how comments of PRs are to be related
1165 # to either the source repo, the target repo or no repo at all.
1165 # to either the source repo, the target repo or no repo at all.
1166 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
1166 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
1167 ChangesetComment.pull_request == pull_request,
1167 ChangesetComment.pull_request == pull_request,
1168 ChangesetComment.pull_request_version == None)\
1168 ChangesetComment.pull_request_version == None)\
1169 .order_by(ChangesetComment.comment_id.asc())
1169 .order_by(ChangesetComment.comment_id.asc())
1170
1170
1171 # TODO: johbo: Find out why this breaks if it is done in a bulk
1171 # TODO: johbo: Find out why this breaks if it is done in a bulk
1172 # operation.
1172 # operation.
1173 for comment in comments:
1173 for comment in comments:
1174 comment.pull_request_version_id = (
1174 comment.pull_request_version_id = (
1175 pull_request_version.pull_request_version_id)
1175 pull_request_version.pull_request_version_id)
1176 Session().add(comment)
1176 Session().add(comment)
1177
1177
1178 def _calculate_commit_id_changes(self, old_ids, new_ids):
1178 def _calculate_commit_id_changes(self, old_ids, new_ids):
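# Worked example with hypothetical ids: old_ids=['a', 'b'], new_ids=['b', 'c']
# gives added=['c'], common=['b'], removed=['a'], total=['b', 'c'].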
1179 added = [x for x in new_ids if x not in old_ids]
1179 added = [x for x in new_ids if x not in old_ids]
1180 common = [x for x in new_ids if x in old_ids]
1180 common = [x for x in new_ids if x in old_ids]
1181 removed = [x for x in old_ids if x not in new_ids]
1181 removed = [x for x in old_ids if x not in new_ids]
1182 total = new_ids
1182 total = new_ids
1183 return ChangeTuple(added, common, removed, total)
1183 return ChangeTuple(added, common, removed, total)
1184
1184
1185 def _calculate_file_changes(self, old_diff_data, new_diff_data):
1185 def _calculate_file_changes(self, old_diff_data, new_diff_data):
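# Classifies files by comparing md5 hashes of each file's raw diff between the
# old and new diff data: hash only in new -> added/removed (based on the diff
# operation), differing hash -> modified, hash only in old -> removed.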
1186
1186
1187 old_files = OrderedDict()
1187 old_files = OrderedDict()
1188 for diff_data in old_diff_data.parsed_diff:
1188 for diff_data in old_diff_data.parsed_diff:
1189 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
1189 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
1190
1190
1191 added_files = []
1191 added_files = []
1192 modified_files = []
1192 modified_files = []
1193 removed_files = []
1193 removed_files = []
1194 for diff_data in new_diff_data.parsed_diff:
1194 for diff_data in new_diff_data.parsed_diff:
1195 new_filename = diff_data['filename']
1195 new_filename = diff_data['filename']
1196 new_hash = md5_safe(diff_data['raw_diff'])
1196 new_hash = md5_safe(diff_data['raw_diff'])
1197
1197
1198 old_hash = old_files.get(new_filename)
1198 old_hash = old_files.get(new_filename)
1199 if not old_hash:
1199 if not old_hash:
1200 # file is not present in the old diff, so we have to figure out the
1200 # file is not present in the old diff, so we have to figure out the
1201 # operation (ADD/REMOVE) from the parsed diff stats
1201 # operation (ADD/REMOVE) from the parsed diff stats
1202 operations_dict = diff_data['stats']['ops']
1202 operations_dict = diff_data['stats']['ops']
1203 if diffs.DEL_FILENODE in operations_dict:
1203 if diffs.DEL_FILENODE in operations_dict:
1204 removed_files.append(new_filename)
1204 removed_files.append(new_filename)
1205 else:
1205 else:
1206 added_files.append(new_filename)
1206 added_files.append(new_filename)
1207 else:
1207 else:
1208 if new_hash != old_hash:
1208 if new_hash != old_hash:
1209 modified_files.append(new_filename)
1209 modified_files.append(new_filename)
1210 # now remove a file from old, since we have seen it already
1210 # now remove a file from old, since we have seen it already
1211 del old_files[new_filename]
1211 del old_files[new_filename]
1212
1212
1213 # removed files are those present in the old diff but not in the NEW one;
1213 # removed files are those present in the old diff but not in the NEW one;
1214 # since we delete old entries that are also present in the new diff, any
1214 # since we delete old entries that are also present in the new diff, any
1215 # left-overs are the removed files
1215 # left-overs are the removed files
1216 removed_files.extend(old_files.keys())
1216 removed_files.extend(old_files.keys())
1217
1217
1218 return FileChangeTuple(added_files, modified_files, removed_files)
1218 return FileChangeTuple(added_files, modified_files, removed_files)
1219
1219
1220 def _render_update_message(self, ancestor_commit_id, changes, file_changes):
1220 def _render_update_message(self, ancestor_commit_id, changes, file_changes):
1221 """
1221 """
1222 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
1222 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
1223 so it always looks the same regardless of which default
1223 so it always looks the same regardless of which default
1224 renderer the system is using.
1224 renderer the system is using.
1225
1225
1226 :param ancestor_commit_id: ancestor raw_id
1226 :param ancestor_commit_id: ancestor raw_id
1227 :param changes: changes named tuple
1227 :param changes: changes named tuple
1228 :param file_changes: file changes named tuple
1228 :param file_changes: file changes named tuple
1229
1229
1230 """
1230 """
1231 new_status = ChangesetStatus.get_status_lbl(
1231 new_status = ChangesetStatus.get_status_lbl(
1232 ChangesetStatus.STATUS_UNDER_REVIEW)
1232 ChangesetStatus.STATUS_UNDER_REVIEW)
1233
1233
1234 changed_files = (
1234 changed_files = (
1235 file_changes.added + file_changes.modified + file_changes.removed)
1235 file_changes.added + file_changes.modified + file_changes.removed)
1236
1236
1237 params = {
1237 params = {
1238 'under_review_label': new_status,
1238 'under_review_label': new_status,
1239 'added_commits': changes.added,
1239 'added_commits': changes.added,
1240 'removed_commits': changes.removed,
1240 'removed_commits': changes.removed,
1241 'changed_files': changed_files,
1241 'changed_files': changed_files,
1242 'added_files': file_changes.added,
1242 'added_files': file_changes.added,
1243 'modified_files': file_changes.modified,
1243 'modified_files': file_changes.modified,
1244 'removed_files': file_changes.removed,
1244 'removed_files': file_changes.removed,
1245 'ancestor_commit_id': ancestor_commit_id
1245 'ancestor_commit_id': ancestor_commit_id
1246 }
1246 }
1247 renderer = RstTemplateRenderer()
1247 renderer = RstTemplateRenderer()
1248 return renderer.render('pull_request_update.mako', **params)
1248 return renderer.render('pull_request_update.mako', **params)
1249
1249
1250 def edit(self, pull_request, title, description, description_renderer, user):
1250 def edit(self, pull_request, title, description, description_renderer, user):
1251 pull_request = self.__get_pull_request(pull_request)
1251 pull_request = self.__get_pull_request(pull_request)
1252 old_data = pull_request.get_api_data(with_merge_state=False)
1252 old_data = pull_request.get_api_data(with_merge_state=False)
1253 if pull_request.is_closed():
1253 if pull_request.is_closed():
1254 raise ValueError('This pull request is closed')
1254 raise ValueError('This pull request is closed')
1255 if title:
1255 if title:
1256 pull_request.title = title
1256 pull_request.title = title
1257 pull_request.description = description
1257 pull_request.description = description
1258 pull_request.updated_on = datetime.datetime.now()
1258 pull_request.updated_on = datetime.datetime.now()
1259 pull_request.description_renderer = description_renderer
1259 pull_request.description_renderer = description_renderer
1260 Session().add(pull_request)
1260 Session().add(pull_request)
1261 self._log_audit_action(
1261 self._log_audit_action(
1262 'repo.pull_request.edit', {'old_data': old_data},
1262 'repo.pull_request.edit', {'old_data': old_data},
1263 user, pull_request)
1263 user, pull_request)
1264
1264
1265 def update_reviewers(self, pull_request, reviewer_data, user):
1265 def update_reviewers(self, pull_request, reviewer_data, user):
1266 """
1266 """
1267 Update the reviewers in the pull request
1267 Update the reviewers in the pull request
1268
1268
1269 :param pull_request: the pr to update
1269 :param pull_request: the pr to update
1270 :param reviewer_data: list of tuples
1270 :param reviewer_data: list of tuples
1271 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1271 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1272 :param user: current user who triggers this action
1272 :param user: current user who triggers this action
1273 """
1273 """
1274
1274
1275 pull_request = self.__get_pull_request(pull_request)
1275 pull_request = self.__get_pull_request(pull_request)
1276 if pull_request.is_closed():
1276 if pull_request.is_closed():
1277 raise ValueError('This pull request is closed')
1277 raise ValueError('This pull request is closed')
1278
1278
1279 reviewers = {}
1279 reviewers = {}
1280 for user_id, reasons, mandatory, role, rules in reviewer_data:
1280 for user_id, reasons, mandatory, role, rules in reviewer_data:
1281 if isinstance(user_id, (int, compat.string_types)):
1281 if isinstance(user_id, (int, compat.string_types)):
1282 user_id = self._get_user(user_id).user_id
1282 user_id = self._get_user(user_id).user_id
1283 reviewers[user_id] = {
1283 reviewers[user_id] = {
1284 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1284 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1285
1285
1286 reviewers_ids = set(reviewers.keys())
1286 reviewers_ids = set(reviewers.keys())
1287 current_reviewers = PullRequestReviewers.get_pull_request_reviewers(
1287 current_reviewers = PullRequestReviewers.get_pull_request_reviewers(
1288 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_REVIEWER)
1288 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_REVIEWER)
1289
1289
1290 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1290 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1291
1291
1292 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1292 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1293 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1293 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1294
1294
1295 log.debug("Adding %s reviewers", ids_to_add)
1295 log.debug("Adding %s reviewers", ids_to_add)
1296 log.debug("Removing %s reviewers", ids_to_remove)
1296 log.debug("Removing %s reviewers", ids_to_remove)
1297 changed = False
1297 changed = False
1298 added_audit_reviewers = []
1298 added_audit_reviewers = []
1299 removed_audit_reviewers = []
1299 removed_audit_reviewers = []
1300
1300
1301 for uid in ids_to_add:
1301 for uid in ids_to_add:
1302 changed = True
1302 changed = True
1303 _usr = self._get_user(uid)
1303 _usr = self._get_user(uid)
1304 reviewer = PullRequestReviewers()
1304 reviewer = PullRequestReviewers()
1305 reviewer.user = _usr
1305 reviewer.user = _usr
1306 reviewer.pull_request = pull_request
1306 reviewer.pull_request = pull_request
1307 reviewer.reasons = reviewers[uid]['reasons']
1307 reviewer.reasons = reviewers[uid]['reasons']
1308 # NOTE(marcink): mandatory shouldn't be changed now
1308 # NOTE(marcink): mandatory shouldn't be changed now
1309 # reviewer.mandatory = reviewers[uid]['reasons']
1309 # reviewer.mandatory = reviewers[uid]['reasons']
1310 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1310 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1311 reviewer.role = PullRequestReviewers.ROLE_REVIEWER
1311 reviewer.role = PullRequestReviewers.ROLE_REVIEWER
1312 Session().add(reviewer)
1312 Session().add(reviewer)
1313 added_audit_reviewers.append(reviewer.get_dict())
1313 added_audit_reviewers.append(reviewer.get_dict())
1314
1314
1315 for uid in ids_to_remove:
1315 for uid in ids_to_remove:
1316 changed = True
1316 changed = True
1317 # NOTE(marcink): we fetch "ALL" reviewers objects using .all().
1317 # NOTE(marcink): we fetch "ALL" reviewers objects using .all().
1318 # This is an edge case that handles previous state of having the same reviewer twice.
1318 # This is an edge case that handles previous state of having the same reviewer twice.
1319 # This CAN happen due to the lack of DB-level uniqueness constraints
1319 # This CAN happen due to the lack of DB-level uniqueness constraints
1320 reviewers = PullRequestReviewers.query()\
1320 reviewers = PullRequestReviewers.query()\
1321 .filter(PullRequestReviewers.user_id == uid,
1321 .filter(PullRequestReviewers.user_id == uid,
1322 PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER,
1322 PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER,
1323 PullRequestReviewers.pull_request == pull_request)\
1323 PullRequestReviewers.pull_request == pull_request)\
1324 .all()
1324 .all()
1325
1325
1326 for obj in reviewers:
1326 for obj in reviewers:
1327 removed_audit_reviewers.append(obj.get_dict())
1327 removed_audit_reviewers.append(obj.get_dict())
1328 Session().delete(obj)
1328 Session().delete(obj)
1329
1329
1330 if changed:
1330 if changed:
1331 Session().expire_all()
1331 Session().expire_all()
1332 pull_request.updated_on = datetime.datetime.now()
1332 pull_request.updated_on = datetime.datetime.now()
1333 Session().add(pull_request)
1333 Session().add(pull_request)
1334
1334
1335 # finally store audit logs
1335 # finally store audit logs
1336 for user_data in added_audit_reviewers:
1336 for user_data in added_audit_reviewers:
1337 self._log_audit_action(
1337 self._log_audit_action(
1338 'repo.pull_request.reviewer.add', {'data': user_data},
1338 'repo.pull_request.reviewer.add', {'data': user_data},
1339 user, pull_request)
1339 user, pull_request)
1340 for user_data in removed_audit_reviewers:
1340 for user_data in removed_audit_reviewers:
1341 self._log_audit_action(
1341 self._log_audit_action(
1342 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1342 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1343 user, pull_request)
1343 user, pull_request)
1344
1344
1345 self.notify_reviewers(pull_request, ids_to_add, user.get_instance())
1345 self.notify_reviewers(pull_request, ids_to_add, user)
1346 return ids_to_add, ids_to_remove
1346 return ids_to_add, ids_to_remove
1347
1347
1348 def update_observers(self, pull_request, observer_data, user):
1348 def update_observers(self, pull_request, observer_data, user):
1349 """
1349 """
1350 Update the observers in the pull request
1350 Update the observers in the pull request
1351
1351
1352 :param pull_request: the pr to update
1352 :param pull_request: the pr to update
1353 :param observer_data: list of tuples
1353 :param observer_data: list of tuples
1354 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1354 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1355 :param user: current user who triggers this action
1355 :param user: current user who triggers this action
1356 """
1356 """
1357 pull_request = self.__get_pull_request(pull_request)
1357 pull_request = self.__get_pull_request(pull_request)
1358 if pull_request.is_closed():
1358 if pull_request.is_closed():
1359 raise ValueError('This pull request is closed')
1359 raise ValueError('This pull request is closed')
1360
1360
1361 observers = {}
1361 observers = {}
1362 for user_id, reasons, mandatory, role, rules in observer_data:
1362 for user_id, reasons, mandatory, role, rules in observer_data:
1363 if isinstance(user_id, (int, compat.string_types)):
1363 if isinstance(user_id, (int, compat.string_types)):
1364 user_id = self._get_user(user_id).user_id
1364 user_id = self._get_user(user_id).user_id
1365 observers[user_id] = {
1365 observers[user_id] = {
1366 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1366 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1367
1367
1368 observers_ids = set(observers.keys())
1368 observers_ids = set(observers.keys())
1369 current_observers = PullRequestReviewers.get_pull_request_reviewers(
1369 current_observers = PullRequestReviewers.get_pull_request_reviewers(
1370 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_OBSERVER)
1370 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_OBSERVER)
1371
1371
1372 current_observers_ids = set([x.user.user_id for x in current_observers])
1372 current_observers_ids = set([x.user.user_id for x in current_observers])
1373
1373
1374 ids_to_add = observers_ids.difference(current_observers_ids)
1374 ids_to_add = observers_ids.difference(current_observers_ids)
1375 ids_to_remove = current_observers_ids.difference(observers_ids)
1375 ids_to_remove = current_observers_ids.difference(observers_ids)
1376
1376
1377 log.debug("Adding %s observer", ids_to_add)
1377 log.debug("Adding %s observer", ids_to_add)
1378 log.debug("Removing %s observer", ids_to_remove)
1378 log.debug("Removing %s observer", ids_to_remove)
1379 changed = False
1379 changed = False
1380 added_audit_observers = []
1380 added_audit_observers = []
1381 removed_audit_observers = []
1381 removed_audit_observers = []
1382
1382
1383 for uid in ids_to_add:
1383 for uid in ids_to_add:
1384 changed = True
1384 changed = True
1385 _usr = self._get_user(uid)
1385 _usr = self._get_user(uid)
1386 observer = PullRequestReviewers()
1386 observer = PullRequestReviewers()
1387 observer.user = _usr
1387 observer.user = _usr
1388 observer.pull_request = pull_request
1388 observer.pull_request = pull_request
1389 observer.reasons = observers[uid]['reasons']
1389 observer.reasons = observers[uid]['reasons']
1390 # NOTE(marcink): mandatory shouldn't be changed now
1390 # NOTE(marcink): mandatory shouldn't be changed now
1391 # observer.mandatory = observer[uid]['reasons']
1391 # observer.mandatory = observer[uid]['reasons']
1392
1392
1393 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1393 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1394 observer.role = PullRequestReviewers.ROLE_OBSERVER
1394 observer.role = PullRequestReviewers.ROLE_OBSERVER
1395 Session().add(observer)
1395 Session().add(observer)
1396 added_audit_observers.append(observer.get_dict())
1396 added_audit_observers.append(observer.get_dict())
1397
1397
1398 for uid in ids_to_remove:
1398 for uid in ids_to_remove:
1399 changed = True
1399 changed = True
1400 # NOTE(marcink): we fetch "ALL" reviewers objects using .all().
1400 # NOTE(marcink): we fetch "ALL" reviewers objects using .all().
1401 # This is an edge case that handles previous state of having the same reviewer twice.
1401 # This is an edge case that handles previous state of having the same reviewer twice.
1402 # This CAN happen due to the lack of DB-level uniqueness constraints
1402 # This CAN happen due to the lack of DB-level uniqueness constraints
1403 observers = PullRequestReviewers.query()\
1403 observers = PullRequestReviewers.query()\
1404 .filter(PullRequestReviewers.user_id == uid,
1404 .filter(PullRequestReviewers.user_id == uid,
1405 PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER,
1405 PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER,
1406 PullRequestReviewers.pull_request == pull_request)\
1406 PullRequestReviewers.pull_request == pull_request)\
1407 .all()
1407 .all()
1408
1408
1409 for obj in observers:
1409 for obj in observers:
1410 removed_audit_observers.append(obj.get_dict())
1410 removed_audit_observers.append(obj.get_dict())
1411 Session().delete(obj)
1411 Session().delete(obj)
1412
1412
1413 if changed:
1413 if changed:
1414 Session().expire_all()
1414 Session().expire_all()
1415 pull_request.updated_on = datetime.datetime.now()
1415 pull_request.updated_on = datetime.datetime.now()
1416 Session().add(pull_request)
1416 Session().add(pull_request)
1417
1417
1418 # finally store audit logs
1418 # finally store audit logs
1419 for user_data in added_audit_observers:
1419 for user_data in added_audit_observers:
1420 self._log_audit_action(
1420 self._log_audit_action(
1421 'repo.pull_request.observer.add', {'data': user_data},
1421 'repo.pull_request.observer.add', {'data': user_data},
1422 user, pull_request)
1422 user, pull_request)
1423 for user_data in removed_audit_observers:
1423 for user_data in removed_audit_observers:
1424 self._log_audit_action(
1424 self._log_audit_action(
1425 'repo.pull_request.observer.delete', {'old_data': user_data},
1425 'repo.pull_request.observer.delete', {'old_data': user_data},
1426 user, pull_request)
1426 user, pull_request)
1427
1427
1428 self.notify_observers(pull_request, ids_to_add, user.get_instance())
1428 self.notify_observers(pull_request, ids_to_add, user)
1429 return ids_to_add, ids_to_remove
1429 return ids_to_add, ids_to_remove
1430
1430
1431 def get_url(self, pull_request, request=None, permalink=False):
1431 def get_url(self, pull_request, request=None, permalink=False):
1432 if not request:
1432 if not request:
1433 request = get_current_request()
1433 request = get_current_request()
1434
1434
1435 if permalink:
1435 if permalink:
1436 return request.route_url(
1436 return request.route_url(
1437 'pull_requests_global',
1437 'pull_requests_global',
1438 pull_request_id=pull_request.pull_request_id,)
1438 pull_request_id=pull_request.pull_request_id,)
1439 else:
1439 else:
1440 return request.route_url('pullrequest_show',
1440 return request.route_url('pullrequest_show',
1441 repo_name=safe_str(pull_request.target_repo.repo_name),
1441 repo_name=safe_str(pull_request.target_repo.repo_name),
1442 pull_request_id=pull_request.pull_request_id,)
1442 pull_request_id=pull_request.pull_request_id,)
1443
1443
1444 def get_shadow_clone_url(self, pull_request, request=None):
1444 def get_shadow_clone_url(self, pull_request, request=None):
1445 """
1445 """
1446 Returns a qualified url pointing to the shadow repository. If this pull
1446 Returns a qualified url pointing to the shadow repository. If this pull
1447 request is closed there is no shadow repository and ``None`` will be
1447 request is closed there is no shadow repository and ``None`` will be
1448 returned.
1448 returned.
1449 """
1449 """
1450 if pull_request.is_closed():
1450 if pull_request.is_closed():
1451 return None
1451 return None
1452 else:
1452 else:
1453 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1453 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1454 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1454 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1455
1455
1456 def _notify_reviewers(self, pull_request, user_ids, role, user):
1456 def _notify_reviewers(self, pull_request, user_ids, role, user):
1457 # notification to reviewers/observers
1457 # notification to reviewers/observers
1458 if not user_ids:
1458 if not user_ids:
1459 return
1459 return
1460
1460
1461 log.debug('Notify following %s users about pull-request %s', role, user_ids)
1461 log.debug('Notify following %s users about pull-request %s', role, user_ids)
1462
1462
1463 pull_request_obj = pull_request
1463 pull_request_obj = pull_request
1464 # get the current participants of this pull request
1464 # get the current participants of this pull request
1465 recipients = user_ids
1465 recipients = user_ids
1466 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1466 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1467
1467
1468 pr_source_repo = pull_request_obj.source_repo
1468 pr_source_repo = pull_request_obj.source_repo
1469 pr_target_repo = pull_request_obj.target_repo
1469 pr_target_repo = pull_request_obj.target_repo
1470
1470
1471 pr_url = h.route_url('pullrequest_show',
1471 pr_url = h.route_url('pullrequest_show',
1472 repo_name=pr_target_repo.repo_name,
1472 repo_name=pr_target_repo.repo_name,
1473 pull_request_id=pull_request_obj.pull_request_id,)
1473 pull_request_id=pull_request_obj.pull_request_id,)
1474
1474
1475 # set some variables for email notification
1475 # set some variables for email notification
1476 pr_target_repo_url = h.route_url(
1476 pr_target_repo_url = h.route_url(
1477 'repo_summary', repo_name=pr_target_repo.repo_name)
1477 'repo_summary', repo_name=pr_target_repo.repo_name)
1478
1478
1479 pr_source_repo_url = h.route_url(
1479 pr_source_repo_url = h.route_url(
1480 'repo_summary', repo_name=pr_source_repo.repo_name)
1480 'repo_summary', repo_name=pr_source_repo.repo_name)
1481
1481
1482 # pull request specifics
1482 # pull request specifics
1483 pull_request_commits = [
1483 pull_request_commits = [
1484 (x.raw_id, x.message)
1484 (x.raw_id, x.message)
1485 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1485 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1486
1486
1487 current_rhodecode_user = user
1487 current_rhodecode_user = user
1488 kwargs = {
1488 kwargs = {
1489 'user': current_rhodecode_user,
1489 'user': current_rhodecode_user,
1490 'pull_request_author': pull_request.author,
1490 'pull_request_author': pull_request.author,
1491 'pull_request': pull_request_obj,
1491 'pull_request': pull_request_obj,
1492 'pull_request_commits': pull_request_commits,
1492 'pull_request_commits': pull_request_commits,
1493
1493
1494 'pull_request_target_repo': pr_target_repo,
1494 'pull_request_target_repo': pr_target_repo,
1495 'pull_request_target_repo_url': pr_target_repo_url,
1495 'pull_request_target_repo_url': pr_target_repo_url,
1496
1496
1497 'pull_request_source_repo': pr_source_repo,
1497 'pull_request_source_repo': pr_source_repo,
1498 'pull_request_source_repo_url': pr_source_repo_url,
1498 'pull_request_source_repo_url': pr_source_repo_url,
1499
1499
1500 'pull_request_url': pr_url,
1500 'pull_request_url': pr_url,
1501 'thread_ids': [pr_url],
1501 'thread_ids': [pr_url],
1502 'user_role': role
1502 'user_role': role
1503 }
1503 }
1504
1504
1505 # pre-generate the subject for notification itself
1505 # pre-generate the subject for notification itself
1506 (subject, _e, body_plaintext) = EmailNotificationModel().render_email(
1506 (subject, _e, body_plaintext) = EmailNotificationModel().render_email(
1507 notification_type, **kwargs)
1507 notification_type, **kwargs)
1508
1508
1509 # create notification objects, and emails
1509 # create notification objects, and emails
1510 NotificationModel().create(
1510 NotificationModel().create(
1511 created_by=current_rhodecode_user,
1511 created_by=current_rhodecode_user,
1512 notification_subject=subject,
1512 notification_subject=subject,
1513 notification_body=body_plaintext,
1513 notification_body=body_plaintext,
1514 notification_type=notification_type,
1514 notification_type=notification_type,
1515 recipients=recipients,
1515 recipients=recipients,
1516 email_kwargs=kwargs,
1516 email_kwargs=kwargs,
1517 )
1517 )
1518
1518
1519 def notify_reviewers(self, pull_request, reviewers_ids, user):
1519 def notify_reviewers(self, pull_request, reviewers_ids, user):
1520 return self._notify_reviewers(pull_request, reviewers_ids,
1520 return self._notify_reviewers(pull_request, reviewers_ids,
1521 PullRequestReviewers.ROLE_REVIEWER, user)
1521 PullRequestReviewers.ROLE_REVIEWER, user)
1522
1522
1523 def notify_observers(self, pull_request, observers_ids, user):
1523 def notify_observers(self, pull_request, observers_ids, user):
1524 return self._notify_reviewers(pull_request, observers_ids,
1524 return self._notify_reviewers(pull_request, observers_ids,
1525 PullRequestReviewers.ROLE_OBSERVER, user)
1525 PullRequestReviewers.ROLE_OBSERVER, user)
1526
1526
1527 def notify_users(self, pull_request, updating_user, ancestor_commit_id,
1527 def notify_users(self, pull_request, updating_user, ancestor_commit_id,
1528 commit_changes, file_changes):
1528 commit_changes, file_changes):
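# Sends the "pull request update" notification/email to the users attached to
# the PR (as returned by get_pull_request_reviewers()), excluding the user who
# performed the update.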
1529
1529
1530 updating_user_id = updating_user.user_id
1530 updating_user_id = updating_user.user_id
1531 reviewers = set([x.user.user_id for x in pull_request.get_pull_request_reviewers()])
1531 reviewers = set([x.user.user_id for x in pull_request.get_pull_request_reviewers()])
1532 # NOTE(marcink): send notification to all other users except to
1532 # NOTE(marcink): send notification to all other users except to
1533 # person who updated the PR
1533 # person who updated the PR
1534 recipients = reviewers.difference(set([updating_user_id]))
1534 recipients = reviewers.difference(set([updating_user_id]))
1535
1535
1536 log.debug('Notify following recipients about pull-request update %s', recipients)
1536 log.debug('Notify following recipients about pull-request update %s', recipients)
1537
1537
1538 pull_request_obj = pull_request
1538 pull_request_obj = pull_request
1539
1539
1540 # send email about the update
1540 # send email about the update
1541 changed_files = (
1541 changed_files = (
1542 file_changes.added + file_changes.modified + file_changes.removed)
1542 file_changes.added + file_changes.modified + file_changes.removed)
1543
1543
1544 pr_source_repo = pull_request_obj.source_repo
1544 pr_source_repo = pull_request_obj.source_repo
1545 pr_target_repo = pull_request_obj.target_repo
1545 pr_target_repo = pull_request_obj.target_repo
1546
1546
1547 pr_url = h.route_url('pullrequest_show',
1547 pr_url = h.route_url('pullrequest_show',
1548 repo_name=pr_target_repo.repo_name,
1548 repo_name=pr_target_repo.repo_name,
1549 pull_request_id=pull_request_obj.pull_request_id,)
1549 pull_request_id=pull_request_obj.pull_request_id,)
1550
1550
1551 # set some variables for email notification
1551 # set some variables for email notification
1552 pr_target_repo_url = h.route_url(
1552 pr_target_repo_url = h.route_url(
1553 'repo_summary', repo_name=pr_target_repo.repo_name)
1553 'repo_summary', repo_name=pr_target_repo.repo_name)
1554
1554
1555 pr_source_repo_url = h.route_url(
1555 pr_source_repo_url = h.route_url(
1556 'repo_summary', repo_name=pr_source_repo.repo_name)
1556 'repo_summary', repo_name=pr_source_repo.repo_name)
1557
1557
1558 email_kwargs = {
1558 email_kwargs = {
1559 'date': datetime.datetime.now(),
1559 'date': datetime.datetime.now(),
1560 'updating_user': updating_user,
1560 'updating_user': updating_user,
1561
1561
1562 'pull_request': pull_request_obj,
1562 'pull_request': pull_request_obj,
1563
1563
1564 'pull_request_target_repo': pr_target_repo,
1564 'pull_request_target_repo': pr_target_repo,
1565 'pull_request_target_repo_url': pr_target_repo_url,
1565 'pull_request_target_repo_url': pr_target_repo_url,
1566
1566
1567 'pull_request_source_repo': pr_source_repo,
1567 'pull_request_source_repo': pr_source_repo,
1568 'pull_request_source_repo_url': pr_source_repo_url,
1568 'pull_request_source_repo_url': pr_source_repo_url,
1569
1569
1570 'pull_request_url': pr_url,
1570 'pull_request_url': pr_url,
1571
1571
1572 'ancestor_commit_id': ancestor_commit_id,
1572 'ancestor_commit_id': ancestor_commit_id,
1573 'added_commits': commit_changes.added,
1573 'added_commits': commit_changes.added,
1574 'removed_commits': commit_changes.removed,
1574 'removed_commits': commit_changes.removed,
1575 'changed_files': changed_files,
1575 'changed_files': changed_files,
1576 'added_files': file_changes.added,
1576 'added_files': file_changes.added,
1577 'modified_files': file_changes.modified,
1577 'modified_files': file_changes.modified,
1578 'removed_files': file_changes.removed,
1578 'removed_files': file_changes.removed,
1579 'thread_ids': [pr_url],
1579 'thread_ids': [pr_url],
1580 }
1580 }
1581
1581
1582 (subject, _e, body_plaintext) = EmailNotificationModel().render_email(
1582 (subject, _e, body_plaintext) = EmailNotificationModel().render_email(
1583 EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE, **email_kwargs)
1583 EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE, **email_kwargs)
1584
1584
1585 # create notification objects, and emails
1585 # create notification objects, and emails
1586 NotificationModel().create(
1586 NotificationModel().create(
1587 created_by=updating_user,
1587 created_by=updating_user,
1588 notification_subject=subject,
1588 notification_subject=subject,
1589 notification_body=body_plaintext,
1589 notification_body=body_plaintext,
1590 notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE,
1590 notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE,
1591 recipients=recipients,
1591 recipients=recipients,
1592 email_kwargs=email_kwargs,
1592 email_kwargs=email_kwargs,
1593 )
1593 )
1594
1594
1595 def delete(self, pull_request, user=None):
1595 def delete(self, pull_request, user=None):
1596 if not user:
1596 if not user:
1597 user = getattr(get_current_rhodecode_user(), 'username', None)
1597 user = getattr(get_current_rhodecode_user(), 'username', None)
1598
1598
1599 pull_request = self.__get_pull_request(pull_request)
1599 pull_request = self.__get_pull_request(pull_request)
1600 old_data = pull_request.get_api_data(with_merge_state=False)
1600 old_data = pull_request.get_api_data(with_merge_state=False)
1601 self._cleanup_merge_workspace(pull_request)
1601 self._cleanup_merge_workspace(pull_request)
1602 self._log_audit_action(
1602 self._log_audit_action(
1603 'repo.pull_request.delete', {'old_data': old_data},
1603 'repo.pull_request.delete', {'old_data': old_data},
1604 user, pull_request)
1604 user, pull_request)
1605 Session().delete(pull_request)
1605 Session().delete(pull_request)
1606
1606
1607 def close_pull_request(self, pull_request, user):
1607 def close_pull_request(self, pull_request, user):
1608 pull_request = self.__get_pull_request(pull_request)
1608 pull_request = self.__get_pull_request(pull_request)
1609 self._cleanup_merge_workspace(pull_request)
1609 self._cleanup_merge_workspace(pull_request)
1610 pull_request.status = PullRequest.STATUS_CLOSED
1610 pull_request.status = PullRequest.STATUS_CLOSED
1611 pull_request.updated_on = datetime.datetime.now()
1611 pull_request.updated_on = datetime.datetime.now()
1612 Session().add(pull_request)
1612 Session().add(pull_request)
1613 self.trigger_pull_request_hook(pull_request, pull_request.author, 'close')
1613 self.trigger_pull_request_hook(pull_request, pull_request.author, 'close')
1614
1614
1615 pr_data = pull_request.get_api_data(with_merge_state=False)
1615 pr_data = pull_request.get_api_data(with_merge_state=False)
1616 self._log_audit_action(
1616 self._log_audit_action(
1617 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1617 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1618
1618
1619 def close_pull_request_with_comment(
1619 def close_pull_request_with_comment(
1620 self, pull_request, user, repo, message=None, auth_user=None):
1620 self, pull_request, user, repo, message=None, auth_user=None):
1621
1621
1622 pull_request_review_status = pull_request.calculated_review_status()
1622 pull_request_review_status = pull_request.calculated_review_status()
1623
1623
1624 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1624 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1625 # approved only if we have voting consent
1625 # approved only if we have voting consent
1626 status = ChangesetStatus.STATUS_APPROVED
1626 status = ChangesetStatus.STATUS_APPROVED
1627 else:
1627 else:
1628 status = ChangesetStatus.STATUS_REJECTED
1628 status = ChangesetStatus.STATUS_REJECTED
1629 status_lbl = ChangesetStatus.get_status_lbl(status)
1629 status_lbl = ChangesetStatus.get_status_lbl(status)
1630
1630
1631 default_message = (
1631 default_message = (
1632 'Closing with status change {transition_icon} {status}.'
1632 'Closing with status change {transition_icon} {status}.'
1633 ).format(transition_icon='>', status=status_lbl)
1633 ).format(transition_icon='>', status=status_lbl)
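# With the hardcoded transition icon and, for example, an 'Approved' label,
# the default message reads: "Closing with status change > Approved."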
1634 text = message or default_message
1634 text = message or default_message
1635
1635
1636 # create a comment, and link it to new status
1636 # create a comment, and link it to new status
1637 comment = CommentsModel().create(
1637 comment = CommentsModel().create(
1638 text=text,
1638 text=text,
1639 repo=repo.repo_id,
1639 repo=repo.repo_id,
1640 user=user.user_id,
1640 user=user.user_id,
1641 pull_request=pull_request.pull_request_id,
1641 pull_request=pull_request.pull_request_id,
1642 status_change=status_lbl,
1642 status_change=status_lbl,
1643 status_change_type=status,
1643 status_change_type=status,
1644 closing_pr=True,
1644 closing_pr=True,
1645 auth_user=auth_user,
1645 auth_user=auth_user,
1646 )
1646 )
1647
1647
1648 # calculate old status before we change it
1648 # calculate old status before we change it
1649 old_calculated_status = pull_request.calculated_review_status()
1649 old_calculated_status = pull_request.calculated_review_status()
1650 ChangesetStatusModel().set_status(
1650 ChangesetStatusModel().set_status(
1651 repo.repo_id,
1651 repo.repo_id,
1652 status,
1652 status,
1653 user.user_id,
1653 user.user_id,
1654 comment=comment,
1654 comment=comment,
1655 pull_request=pull_request.pull_request_id
1655 pull_request=pull_request.pull_request_id
1656 )
1656 )
1657
1657
1658 Session().flush()
1658 Session().flush()
1659
1659
1660 self.trigger_pull_request_hook(pull_request, user, 'comment',
1660 self.trigger_pull_request_hook(pull_request, user, 'comment',
1661 data={'comment': comment})
1661 data={'comment': comment})
1662
1662
1663 # we now calculate the status of the pull request again, and based on that
1663 # we now calculate the status of the pull request again, and based on that
1664 # calculation trigger a status change. This can happen when a
1664 # calculation trigger a status change. This can happen when a
1665 # non-reviewer admin closes a PR: their vote doesn't change the
1665 # non-reviewer admin closes a PR: their vote doesn't change the
1666 # status, while a reviewer's vote might.
1666 # status, while a reviewer's vote might.
1667 calculated_status = pull_request.calculated_review_status()
1667 calculated_status = pull_request.calculated_review_status()
1668 if old_calculated_status != calculated_status:
1668 if old_calculated_status != calculated_status:
1669 self.trigger_pull_request_hook(pull_request, user, 'review_status_change',
1669 self.trigger_pull_request_hook(pull_request, user, 'review_status_change',
1670 data={'status': calculated_status})
1670 data={'status': calculated_status})
1671
1671
1672 # finally close the PR
1672 # finally close the PR
1673 PullRequestModel().close_pull_request(pull_request.pull_request_id, user)
1673 PullRequestModel().close_pull_request(pull_request.pull_request_id, user)
1674
1674
1675 return comment, status
1675 return comment, status
1676
1676
1677 def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False):
1677 def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False):
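# Returns a 3-tuple: (merge_response or None, merge_possible, status message).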
1678 _ = translator or get_current_request().translate
1678 _ = translator or get_current_request().translate
1679
1679
1680 if not self._is_merge_enabled(pull_request):
1680 if not self._is_merge_enabled(pull_request):
1681 return None, False, _('Server-side pull request merging is disabled.')
1681 return None, False, _('Server-side pull request merging is disabled.')
1682
1682
1683 if pull_request.is_closed():
1683 if pull_request.is_closed():
1684 return None, False, _('This pull request is closed.')
1684 return None, False, _('This pull request is closed.')
1685
1685
1686 merge_possible, msg = self._check_repo_requirements(
1686 merge_possible, msg = self._check_repo_requirements(
1687 target=pull_request.target_repo, source=pull_request.source_repo,
1687 target=pull_request.target_repo, source=pull_request.source_repo,
1688 translator=_)
1688 translator=_)
1689 if not merge_possible:
1689 if not merge_possible:
1690 return None, merge_possible, msg
1690 return None, merge_possible, msg
1691
1691
1692 try:
1692 try:
1693 merge_response = self._try_merge(
1693 merge_response = self._try_merge(
1694 pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh)
1694 pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh)
1695 log.debug("Merge response: %s", merge_response)
1695 log.debug("Merge response: %s", merge_response)
1696 return merge_response, merge_response.possible, merge_response.merge_status_message
1696 return merge_response, merge_response.possible, merge_response.merge_status_message
1697 except NotImplementedError:
1697 except NotImplementedError:
1698 return None, False, _('Pull request merging is not supported.')
1698 return None, False, _('Pull request merging is not supported.')
1699
1699
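# A minimal sketch, not part of RhodeCode itself, of how a caller might consume
# the (merge_response, possible, message) triple returned by merge_status();
# `model` is assumed to be a PullRequestModel and `pull_request` a pull request
# loaded elsewhere in a request context.
def describe_merge_state(model, pull_request):
    merge_response, possible, message = model.merge_status(pull_request)
    if not possible:
        # merge_response may be None when merging is disabled, the pull
        # request is closed, or the backend cannot merge server-side
        return 'cannot merge: {}'.format(message)
    return 'merge possible: {}'.format(message)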
1700 def _check_repo_requirements(self, target, source, translator):
1700 def _check_repo_requirements(self, target, source, translator):
1701 """
1701 """
1702 Check if `target` and `source` have compatible requirements.
1702 Check if `target` and `source` have compatible requirements.
1703
1703
1704 Currently this is just checking for largefiles.
1704 Currently this is just checking for largefiles.
1705 """
1705 """
1706 _ = translator
1706 _ = translator
1707 target_has_largefiles = self._has_largefiles(target)
1707 target_has_largefiles = self._has_largefiles(target)
1708 source_has_largefiles = self._has_largefiles(source)
1708 source_has_largefiles = self._has_largefiles(source)
1709 merge_possible = True
1709 merge_possible = True
1710 message = u''
1710 message = u''
1711
1711
1712 if target_has_largefiles != source_has_largefiles:
1712 if target_has_largefiles != source_has_largefiles:
1713 merge_possible = False
1713 merge_possible = False
1714 if source_has_largefiles:
1714 if source_has_largefiles:
1715 message = _(
1715 message = _(
1716 'Target repository large files support is disabled.')
1716 'Target repository large files support is disabled.')
1717 else:
1717 else:
1718 message = _(
1718 message = _(
1719 'Source repository large files support is disabled.')
1719 'Source repository large files support is disabled.')
1720
1720
1721 return merge_possible, message
1721 return merge_possible, message
1722
1722
1723 def _has_largefiles(self, repo):
1723 def _has_largefiles(self, repo):
1724 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1724 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1725 'extensions', 'largefiles')
1725 'extensions', 'largefiles')
1726 return largefiles_ui and largefiles_ui[0].active
1726 return largefiles_ui and largefiles_ui[0].active
1727
1727
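# Illustrative only: the largefiles compatibility rule enforced by
# _check_repo_requirements() above, reduced to a pure function over booleans.
def largefiles_compatible(target_has_largefiles, source_has_largefiles):
    if target_has_largefiles == source_has_largefiles:
        return True, ''
    if source_has_largefiles:
        return False, 'Target repository large files support is disabled.'
    return False, 'Source repository large files support is disabled.'

assert largefiles_compatible(True, True) == (True, '')
assert largefiles_compatible(False, True) == (
    False, 'Target repository large files support is disabled.')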
1728 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1728 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1729 """
1729 """
1730 Try to merge the pull request and return the merge status.
1730 Try to merge the pull request and return the merge status.
1731 """
1731 """
1732 log.debug(
1732 log.debug(
1733 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1733 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1734 pull_request.pull_request_id, force_shadow_repo_refresh)
1734 pull_request.pull_request_id, force_shadow_repo_refresh)
1735 target_vcs = pull_request.target_repo.scm_instance()
1735 target_vcs = pull_request.target_repo.scm_instance()
1736 # Refresh the target reference.
1736 # Refresh the target reference.
1737 try:
1737 try:
1738 target_ref = self._refresh_reference(
1738 target_ref = self._refresh_reference(
1739 pull_request.target_ref_parts, target_vcs)
1739 pull_request.target_ref_parts, target_vcs)
1740 except CommitDoesNotExistError:
1740 except CommitDoesNotExistError:
1741 merge_state = MergeResponse(
1741 merge_state = MergeResponse(
1742 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1742 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1743 metadata={'target_ref': pull_request.target_ref_parts})
1743 metadata={'target_ref': pull_request.target_ref_parts})
1744 return merge_state
1744 return merge_state
1745
1745
1746 target_locked = pull_request.target_repo.locked
1746 target_locked = pull_request.target_repo.locked
1747 if target_locked and target_locked[0]:
1747 if target_locked and target_locked[0]:
1748 locked_by = 'user:{}'.format(target_locked[0])
1748 locked_by = 'user:{}'.format(target_locked[0])
1749 log.debug("The target repository is locked by %s.", locked_by)
1749 log.debug("The target repository is locked by %s.", locked_by)
1750 merge_state = MergeResponse(
1750 merge_state = MergeResponse(
1751 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1751 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1752 metadata={'locked_by': locked_by})
1752 metadata={'locked_by': locked_by})
1753 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1753 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1754 pull_request, target_ref):
1754 pull_request, target_ref):
1755 log.debug("Refreshing the merge status of the repository.")
1755 log.debug("Refreshing the merge status of the repository.")
1756 merge_state = self._refresh_merge_state(
1756 merge_state = self._refresh_merge_state(
1757 pull_request, target_vcs, target_ref)
1757 pull_request, target_vcs, target_ref)
1758 else:
1758 else:
1759 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1759 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1760 metadata = {
1760 metadata = {
1761 'unresolved_files': '',
1761 'unresolved_files': '',
1762 'target_ref': pull_request.target_ref_parts,
1762 'target_ref': pull_request.target_ref_parts,
1763 'source_ref': pull_request.source_ref_parts,
1763 'source_ref': pull_request.source_ref_parts,
1764 }
1764 }
1765 if pull_request.last_merge_metadata:
1765 if pull_request.last_merge_metadata:
1766 metadata.update(pull_request.last_merge_metadata_parsed)
1766 metadata.update(pull_request.last_merge_metadata_parsed)
1767
1767
1768 if not possible and target_ref.type == 'branch':
1768 if not possible and target_ref.type == 'branch':
1769 # NOTE(marcink): case for mercurial multiple heads on branch
1769 # NOTE(marcink): case for mercurial multiple heads on branch
1770 heads = target_vcs._heads(target_ref.name)
1770 heads = target_vcs._heads(target_ref.name)
1771 if len(heads) != 1:
1771 if len(heads) != 1:
1772 heads = ',\n'.join(target_vcs._heads(target_ref.name))
1772 heads = ',\n'.join(target_vcs._heads(target_ref.name))
1773 metadata.update({
1773 metadata.update({
1774 'heads': heads
1774 'heads': heads
1775 })
1775 })
1776
1776
1777 merge_state = MergeResponse(
1777 merge_state = MergeResponse(
1778 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1778 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1779
1779
1780 return merge_state
1780 return merge_state
1781
1781
1782 def _refresh_reference(self, reference, vcs_repository):
1782 def _refresh_reference(self, reference, vcs_repository):
1783 if reference.type in self.UPDATABLE_REF_TYPES:
1783 if reference.type in self.UPDATABLE_REF_TYPES:
1784 name_or_id = reference.name
1784 name_or_id = reference.name
1785 else:
1785 else:
1786 name_or_id = reference.commit_id
1786 name_or_id = reference.commit_id
1787
1787
1788 refreshed_commit = vcs_repository.get_commit(name_or_id)
1788 refreshed_commit = vcs_repository.get_commit(name_or_id)
1789 refreshed_reference = Reference(
1789 refreshed_reference = Reference(
1790 reference.type, reference.name, refreshed_commit.raw_id)
1790 reference.type, reference.name, refreshed_commit.raw_id)
1791 return refreshed_reference
1791 return refreshed_reference
1792
1792
1793 def _needs_merge_state_refresh(self, pull_request, target_reference):
1793 def _needs_merge_state_refresh(self, pull_request, target_reference):
1794 return not(
1794 return not(
1795 pull_request.revisions and
1795 pull_request.revisions and
1796 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1796 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1797 target_reference.commit_id == pull_request._last_merge_target_rev)
1797 target_reference.commit_id == pull_request._last_merge_target_rev)
1798
1798
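# A small restatement of the caching rule in _needs_merge_state_refresh():
# the stored merge state is reused only while the newest source revision and
# the refreshed target commit still match what the last dry-run merge
# recorded. The revision ids below are invented.
def needs_refresh(revisions, last_source_rev, target_commit_id, last_target_rev):
    return not (
        revisions and
        revisions[0] == last_source_rev and
        target_commit_id == last_target_rev)

assert needs_refresh(['abc'], 'abc', 'fff', 'fff') is False
assert needs_refresh(['abc'], 'abc', 'fff', 'old') is True
assert needs_refresh([], None, 'fff', 'fff') is True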
1799 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1799 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1800 workspace_id = self._workspace_id(pull_request)
1800 workspace_id = self._workspace_id(pull_request)
1801 source_vcs = pull_request.source_repo.scm_instance()
1801 source_vcs = pull_request.source_repo.scm_instance()
1802 repo_id = pull_request.target_repo.repo_id
1802 repo_id = pull_request.target_repo.repo_id
1803 use_rebase = self._use_rebase_for_merging(pull_request)
1803 use_rebase = self._use_rebase_for_merging(pull_request)
1804 close_branch = self._close_branch_before_merging(pull_request)
1804 close_branch = self._close_branch_before_merging(pull_request)
1805 merge_state = target_vcs.merge(
1805 merge_state = target_vcs.merge(
1806 repo_id, workspace_id,
1806 repo_id, workspace_id,
1807 target_reference, source_vcs, pull_request.source_ref_parts,
1807 target_reference, source_vcs, pull_request.source_ref_parts,
1808 dry_run=True, use_rebase=use_rebase,
1808 dry_run=True, use_rebase=use_rebase,
1809 close_branch=close_branch)
1809 close_branch=close_branch)
1810
1810
1811 # Do not store the response if there was an unknown error.
1811 # Do not store the response if there was an unknown error.
1812 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1812 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1813 pull_request._last_merge_source_rev = \
1813 pull_request._last_merge_source_rev = \
1814 pull_request.source_ref_parts.commit_id
1814 pull_request.source_ref_parts.commit_id
1815 pull_request._last_merge_target_rev = target_reference.commit_id
1815 pull_request._last_merge_target_rev = target_reference.commit_id
1816 pull_request.last_merge_status = merge_state.failure_reason
1816 pull_request.last_merge_status = merge_state.failure_reason
1817 pull_request.last_merge_metadata = merge_state.metadata
1817 pull_request.last_merge_metadata = merge_state.metadata
1818
1818
1819 pull_request.shadow_merge_ref = merge_state.merge_ref
1819 pull_request.shadow_merge_ref = merge_state.merge_ref
1820 Session().add(pull_request)
1820 Session().add(pull_request)
1821 Session().commit()
1821 Session().commit()
1822
1822
1823 return merge_state
1823 return merge_state
1824
1824
1825 def _workspace_id(self, pull_request):
1825 def _workspace_id(self, pull_request):
1826 workspace_id = 'pr-%s' % pull_request.pull_request_id
1826 workspace_id = 'pr-%s' % pull_request.pull_request_id
1827 return workspace_id
1827 return workspace_id
1828
1828
1829 def generate_repo_data(self, repo, commit_id=None, branch=None,
1829 def generate_repo_data(self, repo, commit_id=None, branch=None,
1830 bookmark=None, translator=None):
1830 bookmark=None, translator=None):
1831 from rhodecode.model.repo import RepoModel
1831 from rhodecode.model.repo import RepoModel
1832
1832
1833 all_refs, selected_ref = \
1833 all_refs, selected_ref = \
1834 self._get_repo_pullrequest_sources(
1834 self._get_repo_pullrequest_sources(
1835 repo.scm_instance(), commit_id=commit_id,
1835 repo.scm_instance(), commit_id=commit_id,
1836 branch=branch, bookmark=bookmark, translator=translator)
1836 branch=branch, bookmark=bookmark, translator=translator)
1837
1837
1838 refs_select2 = []
1838 refs_select2 = []
1839 for element in all_refs:
1839 for element in all_refs:
1840 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1840 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1841 refs_select2.append({'text': element[1], 'children': children})
1841 refs_select2.append({'text': element[1], 'children': children})
1842
1842
1843 return {
1843 return {
1844 'user': {
1844 'user': {
1845 'user_id': repo.user.user_id,
1845 'user_id': repo.user.user_id,
1846 'username': repo.user.username,
1846 'username': repo.user.username,
1847 'firstname': repo.user.first_name,
1847 'firstname': repo.user.first_name,
1848 'lastname': repo.user.last_name,
1848 'lastname': repo.user.last_name,
1849 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1849 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1850 },
1850 },
1851 'name': repo.repo_name,
1851 'name': repo.repo_name,
1852 'link': RepoModel().get_url(repo),
1852 'link': RepoModel().get_url(repo),
1853 'description': h.chop_at_smart(repo.description_safe, '\n'),
1853 'description': h.chop_at_smart(repo.description_safe, '\n'),
1854 'refs': {
1854 'refs': {
1855 'all_refs': all_refs,
1855 'all_refs': all_refs,
1856 'selected_ref': selected_ref,
1856 'selected_ref': selected_ref,
1857 'select2_refs': refs_select2
1857 'select2_refs': refs_select2
1858 }
1858 }
1859 }
1859 }
1860
1860
1861 def generate_pullrequest_title(self, source, source_ref, target):
1861 def generate_pullrequest_title(self, source, source_ref, target):
1862 return u'{source}#{at_ref} to {target}'.format(
1862 return u'{source}#{at_ref} to {target}'.format(
1863 source=source,
1863 source=source,
1864 at_ref=source_ref,
1864 at_ref=source_ref,
1865 target=target,
1865 target=target,
1866 )
1866 )
1867
1867
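# Worked example of the default title format produced by
# generate_pullrequest_title(); the repository names and source ref are
# invented for illustration.
title = u'{source}#{at_ref} to {target}'.format(
    source='acme/feature-repo', at_ref='new-login', target='acme/main-repo')
assert title == u'acme/feature-repo#new-login to acme/main-repo'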
1868 def _cleanup_merge_workspace(self, pull_request):
1868 def _cleanup_merge_workspace(self, pull_request):
1869 # Merging related cleanup
1869 # Merging related cleanup
1870 repo_id = pull_request.target_repo.repo_id
1870 repo_id = pull_request.target_repo.repo_id
1871 target_scm = pull_request.target_repo.scm_instance()
1871 target_scm = pull_request.target_repo.scm_instance()
1872 workspace_id = self._workspace_id(pull_request)
1872 workspace_id = self._workspace_id(pull_request)
1873
1873
1874 try:
1874 try:
1875 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1875 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1876 except NotImplementedError:
1876 except NotImplementedError:
1877 pass
1877 pass
1878
1878
1879 def _get_repo_pullrequest_sources(
1879 def _get_repo_pullrequest_sources(
1880 self, repo, commit_id=None, branch=None, bookmark=None,
1880 self, repo, commit_id=None, branch=None, bookmark=None,
1881 translator=None):
1881 translator=None):
1882 """
1882 """
1883 Return a structure with the repo's interesting commits, suitable for
1883 Return a structure with the repo's interesting commits, suitable for
1884 the selectors in the pull request controller
1884 the selectors in the pull request controller
1885
1885
1886 :param commit_id: a commit that must be present in the list
1886 :param commit_id: a commit that must be present in the list
1887 and selected by default
1887 and selected by default
1888 :param branch: a branch that must be in the list and selected
1888 :param branch: a branch that must be in the list and selected
1889 by default - even if closed
1889 by default - even if closed
1890 :param bookmark: a bookmark that must be in the list and selected
1890 :param bookmark: a bookmark that must be in the list and selected
1891 """
1891 """
1892 _ = translator or get_current_request().translate
1892 _ = translator or get_current_request().translate
1893
1893
1894 commit_id = safe_str(commit_id) if commit_id else None
1894 commit_id = safe_str(commit_id) if commit_id else None
1895 branch = safe_unicode(branch) if branch else None
1895 branch = safe_unicode(branch) if branch else None
1896 bookmark = safe_unicode(bookmark) if bookmark else None
1896 bookmark = safe_unicode(bookmark) if bookmark else None
1897
1897
1898 selected = None
1898 selected = None
1899
1899
1900 # order matters: first source that has commit_id in it will be selected
1900 # order matters: first source that has commit_id in it will be selected
1901 sources = []
1901 sources = []
1902 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1902 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1903 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1903 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1904
1904
1905 if commit_id:
1905 if commit_id:
1906 ref_commit = (h.short_id(commit_id), commit_id)
1906 ref_commit = (h.short_id(commit_id), commit_id)
1907 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1907 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1908
1908
1909 sources.append(
1909 sources.append(
1910 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1910 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1911 )
1911 )
1912
1912
1913 groups = []
1913 groups = []
1914
1914
1915 for group_key, ref_list, group_name, match in sources:
1915 for group_key, ref_list, group_name, match in sources:
1916 group_refs = []
1916 group_refs = []
1917 for ref_name, ref_id in ref_list:
1917 for ref_name, ref_id in ref_list:
1918 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
1918 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
1919 group_refs.append((ref_key, ref_name))
1919 group_refs.append((ref_key, ref_name))
1920
1920
1921 if not selected:
1921 if not selected:
1922 if set([commit_id, match]) & set([ref_id, ref_name]):
1922 if set([commit_id, match]) & set([ref_id, ref_name]):
1923 selected = ref_key
1923 selected = ref_key
1924
1924
1925 if group_refs:
1925 if group_refs:
1926 groups.append((group_refs, group_name))
1926 groups.append((group_refs, group_name))
1927
1927
1928 if not selected:
1928 if not selected:
1929 ref = commit_id or branch or bookmark
1929 ref = commit_id or branch or bookmark
1930 if ref:
1930 if ref:
1931 raise CommitDoesNotExistError(
1931 raise CommitDoesNotExistError(
1932 u'No commit refs could be found matching: {}'.format(ref))
1932 u'No commit refs could be found matching: {}'.format(ref))
1933 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1933 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1934 selected = u'branch:{}:{}'.format(
1934 selected = u'branch:{}:{}'.format(
1935 safe_unicode(repo.DEFAULT_BRANCH_NAME),
1935 safe_unicode(repo.DEFAULT_BRANCH_NAME),
1936 safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
1936 safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
1937 )
1937 )
1938 elif repo.commit_ids:
1938 elif repo.commit_ids:
1939 # make the user select in this case
1939 # make the user select in this case
1940 selected = None
1940 selected = None
1941 else:
1941 else:
1942 raise EmptyRepositoryError()
1942 raise EmptyRepositoryError()
1943 return groups, selected
1943 return groups, selected
1944
1944
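# Shape sketch of the (groups, selected) value returned by
# _get_repo_pullrequest_sources(): each group is (list_of_refs, group_label)
# and each ref is (ref_key, ref_name) where ref_key follows
# "<type>:<name>:<commit_id>". Names and hashes below are invented.
groups = [
    ([(u'book:sprint-42:1a2b3c', u'sprint-42')], u'Bookmarks'),
    ([(u'branch:default:4d5e6f', u'default')], u'Branches'),
]
selected = u'branch:default:4d5e6f'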
1945 def get_diff(self, source_repo, source_ref_id, target_ref_id,
1945 def get_diff(self, source_repo, source_ref_id, target_ref_id,
1946 hide_whitespace_changes, diff_context):
1946 hide_whitespace_changes, diff_context):
1947
1947
1948 return self._get_diff_from_pr_or_version(
1948 return self._get_diff_from_pr_or_version(
1949 source_repo, source_ref_id, target_ref_id,
1949 source_repo, source_ref_id, target_ref_id,
1950 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1950 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1951
1951
1952 def _get_diff_from_pr_or_version(
1952 def _get_diff_from_pr_or_version(
1953 self, source_repo, source_ref_id, target_ref_id,
1953 self, source_repo, source_ref_id, target_ref_id,
1954 hide_whitespace_changes, diff_context):
1954 hide_whitespace_changes, diff_context):
1955
1955
1956 target_commit = source_repo.get_commit(
1956 target_commit = source_repo.get_commit(
1957 commit_id=safe_str(target_ref_id))
1957 commit_id=safe_str(target_ref_id))
1958 source_commit = source_repo.get_commit(
1958 source_commit = source_repo.get_commit(
1959 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
1959 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
1960 if isinstance(source_repo, Repository):
1960 if isinstance(source_repo, Repository):
1961 vcs_repo = source_repo.scm_instance()
1961 vcs_repo = source_repo.scm_instance()
1962 else:
1962 else:
1963 vcs_repo = source_repo
1963 vcs_repo = source_repo
1964
1964
1965 # TODO: johbo: In the context of an update, we cannot reach
1965 # TODO: johbo: In the context of an update, we cannot reach
1966 # the old commit anymore with our normal mechanisms. It needs
1966 # the old commit anymore with our normal mechanisms. It needs
1967 # some sort of special support in the vcs layer to avoid this
1967 # some sort of special support in the vcs layer to avoid this
1968 # workaround.
1968 # workaround.
1969 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1969 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1970 vcs_repo.alias == 'git'):
1970 vcs_repo.alias == 'git'):
1971 source_commit.raw_id = safe_str(source_ref_id)
1971 source_commit.raw_id = safe_str(source_ref_id)
1972
1972
1973 log.debug('calculating diff between '
1973 log.debug('calculating diff between '
1974 'source_ref:%s and target_ref:%s for repo `%s`',
1974 'source_ref:%s and target_ref:%s for repo `%s`',
1975 target_ref_id, source_ref_id,
1975 target_ref_id, source_ref_id,
1976 safe_unicode(vcs_repo.path))
1976 safe_unicode(vcs_repo.path))
1977
1977
1978 vcs_diff = vcs_repo.get_diff(
1978 vcs_diff = vcs_repo.get_diff(
1979 commit1=target_commit, commit2=source_commit,
1979 commit1=target_commit, commit2=source_commit,
1980 ignore_whitespace=hide_whitespace_changes, context=diff_context)
1980 ignore_whitespace=hide_whitespace_changes, context=diff_context)
1981 return vcs_diff
1981 return vcs_diff
1982
1982
1983 def _is_merge_enabled(self, pull_request):
1983 def _is_merge_enabled(self, pull_request):
1984 return self._get_general_setting(
1984 return self._get_general_setting(
1985 pull_request, 'rhodecode_pr_merge_enabled')
1985 pull_request, 'rhodecode_pr_merge_enabled')
1986
1986
1987 def _use_rebase_for_merging(self, pull_request):
1987 def _use_rebase_for_merging(self, pull_request):
1988 repo_type = pull_request.target_repo.repo_type
1988 repo_type = pull_request.target_repo.repo_type
1989 if repo_type == 'hg':
1989 if repo_type == 'hg':
1990 return self._get_general_setting(
1990 return self._get_general_setting(
1991 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1991 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1992 elif repo_type == 'git':
1992 elif repo_type == 'git':
1993 return self._get_general_setting(
1993 return self._get_general_setting(
1994 pull_request, 'rhodecode_git_use_rebase_for_merging')
1994 pull_request, 'rhodecode_git_use_rebase_for_merging')
1995
1995
1996 return False
1996 return False
1997
1997
1998 def _user_name_for_merging(self, pull_request, user):
1998 def _user_name_for_merging(self, pull_request, user):
1999 env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
1999 env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
2000 if env_user_name_attr and hasattr(user, env_user_name_attr):
2000 if env_user_name_attr and hasattr(user, env_user_name_attr):
2001 user_name_attr = env_user_name_attr
2001 user_name_attr = env_user_name_attr
2002 else:
2002 else:
2003 user_name_attr = 'short_contact'
2003 user_name_attr = 'short_contact'
2004
2004
2005 user_name = getattr(user, user_name_attr)
2005 user_name = getattr(user, user_name_attr)
2006 return user_name
2006 return user_name
2007
2007
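# Stand-alone illustration of the RC_MERGE_USER_NAME_ATTR override used by
# _user_name_for_merging(): if the environment variable names an attribute
# that exists on the user object, that attribute supplies the merge author
# name, otherwise 'short_contact' is used. FakeUser is a made-up stand-in.
import os

class FakeUser(object):
    username = 'jane'
    short_contact = 'Jane Doe <jane@example.com>'

os.environ['RC_MERGE_USER_NAME_ATTR'] = 'username'
user = FakeUser()
env_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
user_name_attr = env_attr if env_attr and hasattr(user, env_attr) else 'short_contact'
assert getattr(user, user_name_attr) == 'jane'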
2008 def _close_branch_before_merging(self, pull_request):
2008 def _close_branch_before_merging(self, pull_request):
2009 repo_type = pull_request.target_repo.repo_type
2009 repo_type = pull_request.target_repo.repo_type
2010 if repo_type == 'hg':
2010 if repo_type == 'hg':
2011 return self._get_general_setting(
2011 return self._get_general_setting(
2012 pull_request, 'rhodecode_hg_close_branch_before_merging')
2012 pull_request, 'rhodecode_hg_close_branch_before_merging')
2013 elif repo_type == 'git':
2013 elif repo_type == 'git':
2014 return self._get_general_setting(
2014 return self._get_general_setting(
2015 pull_request, 'rhodecode_git_close_branch_before_merging')
2015 pull_request, 'rhodecode_git_close_branch_before_merging')
2016
2016
2017 return False
2017 return False
2018
2018
2019 def _get_general_setting(self, pull_request, settings_key, default=False):
2019 def _get_general_setting(self, pull_request, settings_key, default=False):
2020 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
2020 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
2021 settings = settings_model.get_general_settings()
2021 settings = settings_model.get_general_settings()
2022 return settings.get(settings_key, default)
2022 return settings.get(settings_key, default)
2023
2023
2024 def _log_audit_action(self, action, action_data, user, pull_request):
2024 def _log_audit_action(self, action, action_data, user, pull_request):
2025 audit_logger.store(
2025 audit_logger.store(
2026 action=action,
2026 action=action,
2027 action_data=action_data,
2027 action_data=action_data,
2028 user=user,
2028 user=user,
2029 repo=pull_request.target_repo)
2029 repo=pull_request.target_repo)
2030
2030
2031 def get_reviewer_functions(self):
2031 def get_reviewer_functions(self):
2032 """
2032 """
2033 Fetches the functions used to validate and fetch default reviewers.
2033 Fetches the functions used to validate and fetch default reviewers.
2034 If available we use the EE package, else we fall back to the CE
2034 If available we use the EE package, else we fall back to the CE
2035 package functions.
2035 package functions.
2036 """
2036 """
2037 try:
2037 try:
2038 from rc_reviewers.utils import get_default_reviewers_data
2038 from rc_reviewers.utils import get_default_reviewers_data
2039 from rc_reviewers.utils import validate_default_reviewers
2039 from rc_reviewers.utils import validate_default_reviewers
2040 from rc_reviewers.utils import validate_observers
2040 from rc_reviewers.utils import validate_observers
2041 except ImportError:
2041 except ImportError:
2042 from rhodecode.apps.repository.utils import get_default_reviewers_data
2042 from rhodecode.apps.repository.utils import get_default_reviewers_data
2043 from rhodecode.apps.repository.utils import validate_default_reviewers
2043 from rhodecode.apps.repository.utils import validate_default_reviewers
2044 from rhodecode.apps.repository.utils import validate_observers
2044 from rhodecode.apps.repository.utils import validate_observers
2045
2045
2046 return get_default_reviewers_data, validate_default_reviewers, validate_observers
2046 return get_default_reviewers_data, validate_default_reviewers, validate_observers
2047
2047
2048
2048
2049 class MergeCheck(object):
2049 class MergeCheck(object):
2050 """
2050 """
2051 Performs merge checks and returns a check object which stores information
2051 Performs merge checks and returns a check object which stores information
2052 about merge errors and merge conditions
2052 about merge errors and merge conditions
2053 """
2053 """
2054 TODO_CHECK = 'todo'
2054 TODO_CHECK = 'todo'
2055 PERM_CHECK = 'perm'
2055 PERM_CHECK = 'perm'
2056 REVIEW_CHECK = 'review'
2056 REVIEW_CHECK = 'review'
2057 MERGE_CHECK = 'merge'
2057 MERGE_CHECK = 'merge'
2058 WIP_CHECK = 'wip'
2058 WIP_CHECK = 'wip'
2059
2059
2060 def __init__(self):
2060 def __init__(self):
2061 self.review_status = None
2061 self.review_status = None
2062 self.merge_possible = None
2062 self.merge_possible = None
2063 self.merge_msg = ''
2063 self.merge_msg = ''
2064 self.merge_response = None
2064 self.merge_response = None
2065 self.failed = None
2065 self.failed = None
2066 self.errors = []
2066 self.errors = []
2067 self.error_details = OrderedDict()
2067 self.error_details = OrderedDict()
2068 self.source_commit = AttributeDict()
2068 self.source_commit = AttributeDict()
2069 self.target_commit = AttributeDict()
2069 self.target_commit = AttributeDict()
2070
2070
2071 def __repr__(self):
2071 def __repr__(self):
2072 return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format(
2072 return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format(
2073 self.merge_possible, self.failed, self.errors)
2073 self.merge_possible, self.failed, self.errors)
2074
2074
2075 def push_error(self, error_type, message, error_key, details):
2075 def push_error(self, error_type, message, error_key, details):
2076 self.failed = True
2076 self.failed = True
2077 self.errors.append([error_type, message])
2077 self.errors.append([error_type, message])
2078 self.error_details[error_key] = dict(
2078 self.error_details[error_key] = dict(
2079 details=details,
2079 details=details,
2080 error_type=error_type,
2080 error_type=error_type,
2081 message=message
2081 message=message
2082 )
2082 )
2083
2083
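# Minimal stand-in showing the error-accumulation pattern of
# MergeCheck.push_error(): the check is flagged as failed, the error is kept
# in a flat list and additionally indexed under its check key.
from collections import OrderedDict

class _Check(object):
    def __init__(self):
        self.failed = None
        self.errors = []
        self.error_details = OrderedDict()

    def push_error(self, error_type, message, error_key, details):
        self.failed = True
        self.errors.append([error_type, message])
        self.error_details[error_key] = dict(
            details=details, error_type=error_type, message=message)

check = _Check()
check.push_error('warning', 'Pull request reviewer approval is pending.', 'review', None)
assert check.failed is True
assert check.error_details['review']['error_type'] == 'warning'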
2084 @classmethod
2084 @classmethod
2085 def validate(cls, pull_request, auth_user, translator, fail_early=False,
2085 def validate(cls, pull_request, auth_user, translator, fail_early=False,
2086 force_shadow_repo_refresh=False):
2086 force_shadow_repo_refresh=False):
2087 _ = translator
2087 _ = translator
2088 merge_check = cls()
2088 merge_check = cls()
2089
2089
2090 # title has WIP:
2090 # title has WIP:
2091 if pull_request.work_in_progress:
2091 if pull_request.work_in_progress:
2092 log.debug("MergeCheck: cannot merge, title has wip: marker.")
2092 log.debug("MergeCheck: cannot merge, title has wip: marker.")
2093
2093
2094 msg = _('WIP marker in title prevents an accidental merge.')
2094 msg = _('WIP marker in title prevents an accidental merge.')
2095 merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
2095 merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
2096 if fail_early:
2096 if fail_early:
2097 return merge_check
2097 return merge_check
2098
2098
2099 # permissions to merge
2099 # permissions to merge
2100 user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user)
2100 user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user)
2101 if not user_allowed_to_merge:
2101 if not user_allowed_to_merge:
2102 log.debug("MergeCheck: cannot merge, approval is pending.")
2102 log.debug("MergeCheck: cannot merge, approval is pending.")
2103
2103
2104 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
2104 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
2105 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2105 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2106 if fail_early:
2106 if fail_early:
2107 return merge_check
2107 return merge_check
2108
2108
2109 # permission to merge into the target branch
2109 # permission to merge into the target branch
2110 target_commit_id = pull_request.target_ref_parts.commit_id
2110 target_commit_id = pull_request.target_ref_parts.commit_id
2111 if pull_request.target_ref_parts.type == 'branch':
2111 if pull_request.target_ref_parts.type == 'branch':
2112 branch_name = pull_request.target_ref_parts.name
2112 branch_name = pull_request.target_ref_parts.name
2113 else:
2113 else:
2114 # for mercurial we can always figure out the branch from the commit
2114 # for mercurial we can always figure out the branch from the commit
2115 # in case of a bookmark
2115 # in case of a bookmark
2116 target_commit = pull_request.target_repo.get_commit(target_commit_id)
2116 target_commit = pull_request.target_repo.get_commit(target_commit_id)
2117 branch_name = target_commit.branch
2117 branch_name = target_commit.branch
2118
2118
2119 rule, branch_perm = auth_user.get_rule_and_branch_permission(
2119 rule, branch_perm = auth_user.get_rule_and_branch_permission(
2120 pull_request.target_repo.repo_name, branch_name)
2120 pull_request.target_repo.repo_name, branch_name)
2121 if branch_perm and branch_perm == 'branch.none':
2121 if branch_perm and branch_perm == 'branch.none':
2122 msg = _('Target branch `{}` changes rejected by rule {}.').format(
2122 msg = _('Target branch `{}` changes rejected by rule {}.').format(
2123 branch_name, rule)
2123 branch_name, rule)
2124 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2124 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2125 if fail_early:
2125 if fail_early:
2126 return merge_check
2126 return merge_check
2127
2127
2128 # review status, must be always present
2128 # review status, must be always present
2129 review_status = pull_request.calculated_review_status()
2129 review_status = pull_request.calculated_review_status()
2130 merge_check.review_status = review_status
2130 merge_check.review_status = review_status
2131
2131
2132 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
2132 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
2133 if not status_approved:
2133 if not status_approved:
2134 log.debug("MergeCheck: cannot merge, approval is pending.")
2134 log.debug("MergeCheck: cannot merge, approval is pending.")
2135
2135
2136 msg = _('Pull request reviewer approval is pending.')
2136 msg = _('Pull request reviewer approval is pending.')
2137
2137
2138 merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
2138 merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
2139
2139
2140 if fail_early:
2140 if fail_early:
2141 return merge_check
2141 return merge_check
2142
2142
2143 # left over TODOs
2143 # left over TODOs
2144 todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
2144 todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
2145 if todos:
2145 if todos:
2146 log.debug("MergeCheck: cannot merge, {} "
2146 log.debug("MergeCheck: cannot merge, {} "
2147 "unresolved TODOs left.".format(len(todos)))
2147 "unresolved TODOs left.".format(len(todos)))
2148
2148
2149 if len(todos) == 1:
2149 if len(todos) == 1:
2150 msg = _('Cannot merge, {} TODO still not resolved.').format(
2150 msg = _('Cannot merge, {} TODO still not resolved.').format(
2151 len(todos))
2151 len(todos))
2152 else:
2152 else:
2153 msg = _('Cannot merge, {} TODOs still not resolved.').format(
2153 msg = _('Cannot merge, {} TODOs still not resolved.').format(
2154 len(todos))
2154 len(todos))
2155
2155
2156 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
2156 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
2157
2157
2158 if fail_early:
2158 if fail_early:
2159 return merge_check
2159 return merge_check
2160
2160
2161 # merge possible, here is the filesystem simulation + shadow repo
2161 # merge possible, here is the filesystem simulation + shadow repo
2162 merge_response, merge_status, msg = PullRequestModel().merge_status(
2162 merge_response, merge_status, msg = PullRequestModel().merge_status(
2163 pull_request, translator=translator,
2163 pull_request, translator=translator,
2164 force_shadow_repo_refresh=force_shadow_repo_refresh)
2164 force_shadow_repo_refresh=force_shadow_repo_refresh)
2165
2165
2166 merge_check.merge_possible = merge_status
2166 merge_check.merge_possible = merge_status
2167 merge_check.merge_msg = msg
2167 merge_check.merge_msg = msg
2168 merge_check.merge_response = merge_response
2168 merge_check.merge_response = merge_response
2169
2169
2170 source_ref_id = pull_request.source_ref_parts.commit_id
2170 source_ref_id = pull_request.source_ref_parts.commit_id
2171 target_ref_id = pull_request.target_ref_parts.commit_id
2171 target_ref_id = pull_request.target_ref_parts.commit_id
2172
2172
2173 try:
2173 try:
2174 source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request)
2174 source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request)
2175 merge_check.source_commit.changed = source_ref_id != source_commit.raw_id
2175 merge_check.source_commit.changed = source_ref_id != source_commit.raw_id
2176 merge_check.source_commit.ref_spec = pull_request.source_ref_parts
2176 merge_check.source_commit.ref_spec = pull_request.source_ref_parts
2177 merge_check.source_commit.current_raw_id = source_commit.raw_id
2177 merge_check.source_commit.current_raw_id = source_commit.raw_id
2178 merge_check.source_commit.previous_raw_id = source_ref_id
2178 merge_check.source_commit.previous_raw_id = source_ref_id
2179
2179
2180 merge_check.target_commit.changed = target_ref_id != target_commit.raw_id
2180 merge_check.target_commit.changed = target_ref_id != target_commit.raw_id
2181 merge_check.target_commit.ref_spec = pull_request.target_ref_parts
2181 merge_check.target_commit.ref_spec = pull_request.target_ref_parts
2182 merge_check.target_commit.current_raw_id = target_commit.raw_id
2182 merge_check.target_commit.current_raw_id = target_commit.raw_id
2183 merge_check.target_commit.previous_raw_id = target_ref_id
2183 merge_check.target_commit.previous_raw_id = target_ref_id
2184 except (SourceRefMissing, TargetRefMissing):
2184 except (SourceRefMissing, TargetRefMissing):
2185 pass
2185 pass
2186
2186
2187 if not merge_status:
2187 if not merge_status:
2188 log.debug("MergeCheck: cannot merge, pull request merge not possible.")
2188 log.debug("MergeCheck: cannot merge, pull request merge not possible.")
2189 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
2189 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
2190
2190
2191 if fail_early:
2191 if fail_early:
2192 return merge_check
2192 return merge_check
2193
2193
2194 log.debug('MergeCheck: is failed: %s', merge_check.failed)
2194 log.debug('MergeCheck: is failed: %s', merge_check.failed)
2195 return merge_check
2195 return merge_check
2196
2196
2197 @classmethod
2197 @classmethod
2198 def get_merge_conditions(cls, pull_request, translator):
2198 def get_merge_conditions(cls, pull_request, translator):
2199 _ = translator
2199 _ = translator
2200 merge_details = {}
2200 merge_details = {}
2201
2201
2202 model = PullRequestModel()
2202 model = PullRequestModel()
2203 use_rebase = model._use_rebase_for_merging(pull_request)
2203 use_rebase = model._use_rebase_for_merging(pull_request)
2204
2204
2205 if use_rebase:
2205 if use_rebase:
2206 merge_details['merge_strategy'] = dict(
2206 merge_details['merge_strategy'] = dict(
2207 details={},
2207 details={},
2208 message=_('Merge strategy: rebase')
2208 message=_('Merge strategy: rebase')
2209 )
2209 )
2210 else:
2210 else:
2211 merge_details['merge_strategy'] = dict(
2211 merge_details['merge_strategy'] = dict(
2212 details={},
2212 details={},
2213 message=_('Merge strategy: explicit merge commit')
2213 message=_('Merge strategy: explicit merge commit')
2214 )
2214 )
2215
2215
2216 close_branch = model._close_branch_before_merging(pull_request)
2216 close_branch = model._close_branch_before_merging(pull_request)
2217 if close_branch:
2217 if close_branch:
2218 repo_type = pull_request.target_repo.repo_type
2218 repo_type = pull_request.target_repo.repo_type
2219 close_msg = ''
2219 close_msg = ''
2220 if repo_type == 'hg':
2220 if repo_type == 'hg':
2221 close_msg = _('Source branch will be closed before the merge.')
2221 close_msg = _('Source branch will be closed before the merge.')
2222 elif repo_type == 'git':
2222 elif repo_type == 'git':
2223 close_msg = _('Source branch will be deleted after the merge.')
2223 close_msg = _('Source branch will be deleted after the merge.')
2224
2224
2225 merge_details['close_branch'] = dict(
2225 merge_details['close_branch'] = dict(
2226 details={},
2226 details={},
2227 message=close_msg
2227 message=close_msg
2228 )
2228 )
2229
2229
2230 return merge_details
2230 return merge_details
2231
2231
2232
2232
2233 ChangeTuple = collections.namedtuple(
2233 ChangeTuple = collections.namedtuple(
2234 'ChangeTuple', ['added', 'common', 'removed', 'total'])
2234 'ChangeTuple', ['added', 'common', 'removed', 'total'])
2235
2235
2236 FileChangeTuple = collections.namedtuple(
2236 FileChangeTuple = collections.namedtuple(
2237 'FileChangeTuple', ['added', 'modified', 'removed'])
2237 'FileChangeTuple', ['added', 'modified', 'removed'])
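# Construction example for the namedtuples defined above; the commit hashes
# and file names are invented for illustration.
import collections

ChangeTuple = collections.namedtuple(
    'ChangeTuple', ['added', 'common', 'removed', 'total'])
FileChangeTuple = collections.namedtuple(
    'FileChangeTuple', ['added', 'modified', 'removed'])

commit_changes = ChangeTuple(
    added=['abc123'], common=['def456'], removed=[], total=['abc123', 'def456'])
file_changes = FileChangeTuple(added=['docs/readme.rst'], modified=[], removed=[])
assert commit_changes.added == ['abc123']
assert file_changes.removed == []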
@@ -1,35 +1,40 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2020 RhodeCode GmbH
3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import colander
21 import colander
22 from rhodecode.model.validation_schema import validators, preparers, types
22 from rhodecode.model.validation_schema import validators, preparers, types
23
23
24 DEFAULT_ROLE = 'reviewer'
25 VALID_ROLES = ['reviewer', 'observer']
26
24
27
25 class ReviewerSchema(colander.MappingSchema):
28 class ReviewerSchema(colander.MappingSchema):
26 username = colander.SchemaNode(types.StrOrIntType())
29 username = colander.SchemaNode(types.StrOrIntType())
27 reasons = colander.SchemaNode(colander.List(), missing=['no reason specified'])
30 reasons = colander.SchemaNode(colander.List(), missing=['no reason specified'])
28 mandatory = colander.SchemaNode(colander.Boolean(), missing=False)
31 mandatory = colander.SchemaNode(colander.Boolean(), missing=False)
29 rules = colander.SchemaNode(colander.List(), missing=[])
32 rules = colander.SchemaNode(colander.List(), missing=[])
33 role = colander.SchemaNode(colander.String(), missing=DEFAULT_ROLE,
34 validator=colander.OneOf(VALID_ROLES))
30
35
31
36
32 class ReviewerListSchema(colander.SequenceSchema):
37 class ReviewerListSchema(colander.SequenceSchema):
33 reviewers = ReviewerSchema()
38 reviewers = ReviewerSchema()
34
39
35
40
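# Usage sketch for the reviewer schema change above: the new `role` field
# defaults to 'reviewer' and only accepts the values listed in VALID_ROLES.
# To stay self-contained this sketch swaps the RhodeCode-specific
# types.StrOrIntType() for colander.String(); it is illustrative, not the
# exact RhodeCode schema module.
import colander

DEFAULT_ROLE = 'reviewer'
VALID_ROLES = ['reviewer', 'observer']

class ReviewerSchema(colander.MappingSchema):
    username = colander.SchemaNode(colander.String())
    role = colander.SchemaNode(colander.String(), missing=DEFAULT_ROLE,
                               validator=colander.OneOf(VALID_ROLES))

schema = ReviewerSchema()
assert schema.deserialize({'username': 'jane'})['role'] == 'reviewer'
assert schema.deserialize({'username': 'jane', 'role': 'observer'})['role'] == 'observer'
# an unknown role such as 'owner' would raise colander.Invalid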
@@ -1,114 +1,115 b''
1 <!DOCTYPE html>
1 <!DOCTYPE html>
2 <html xmlns="http://www.w3.org/1999/xhtml">
2 <html xmlns="http://www.w3.org/1999/xhtml">
3 <head>
3 <head>
4 <title>Error - 502 Bad Gateway</title>
4 <title>Error - 502 Bad Gateway</title>
5 <link id="favicon" rel="shortcut icon" type="image/png" href="data:image/png;base64,AAABAAIAEBAAAAEAIABoBAAAJgAAACAgAAABACAAqBAAAI4EAAAoAAAAEAAAACAAAAABACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAALMiIiK1OTk5ADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMEsLCz/SUlJ/xUVFcM3NzcAMzMzADMzMwAzMzMAMzMzADMzMwAAAAAAAAAAAAAAAAAAAAAAAAAAAgAAAM8eHh7/8/Pz//39/f9BQUH/Dw8P0DY2NgMzMzMAMzMzADMzMwAzMzMAAAAAAAAAAAAAAAAAAAAAAAAAAMcjIyP/8vLy////////////9/f3/0RERf8REhTINzc3ADMzMwAzMzMAMzMzAAAAAAAAAAAAAAAAAAAAAKUWFhb/7Ozs///////o6Of/6Onq///////z+v//NUFh/xgaIao3NjIAMzMzADMzMwAAAAAAAAAAAAAAAGgAAAD/0dHR///////o5+X/7O/2/+v5///j5Oj//+ic/92wMv83MB3/Jys2ajMzMwAzMzMAAAAAAAAAABYAAAD4kZGR///////p6er/7Pf//+jv+//my4D/6q0J9PqkAJz/zAAlXlcoeRshOf8zMzMaMzMzAAAAAAAAAACRMDAw///////09fj/6vj//+Xcwv/vtBns/7UAav+8AAP/vgAAyZUKACotNQAtLzXyMzMzsDMzMwAAAAAKAAAA8aSjov//////6PX//+fOif/2qwCH/7YAAKN7EgBsWSQAU0gqAC4wNAAqLTUANTQyZjMzM/8zMzMOAAAAUBMTEv/x8vb/9f///+nLdfL2ogAz/7kAAG5bIwAFFD81Dhs9ShskOU8qLTZMNTQyKTMzMwAzMzP/MzMzZgAAAIVJSEf//////+nRjeb4pQAV/9sAAKiFFQAADkEAMDE0UzQ0M+IzMzOOMzMzvDMzM2QzMzMAMzMz3zMzM6oAAACeXGV9////7/j/yAAe/70AALiDAAA0NTcALDJAADMzMlEzMzPVMzMzgDMzM30zMzMjMzMzADMzM8MzMzPIAAAAnWBlaf//5V86nGYAACgeAAAAAAAABgcNACsrKQA2NjYKMzMzEDMzMwwzMzMGMzMzDDMzMwAzMzPNMzMzvwAAAG0bFQv/JRgHfQAAAB4AAAAAAAAAAAAAAAADAwMAMjIyADY2NgAzMzMAMzMzADMzMxIzMzOKMzMz/zMzM3EAAAADAAAAjAAAAP8AAAD/AAAA/QAAANAAAACZAgICXzExMV82NjaZMzMz0DMzM/wzMzP/MzMz/zMzM5gzMzMAAAAAAAAAAAAAAAAAAAAAOAAAAIoAAADKAAAA9AICAv8xMTH/NjY29DMzM8ozMzOLMzMzODMzMwAzMzMAMzMzAP5/AAD8PwAA+B8AAPAPAADgBwAA4AcAAMAbAACA+QAAgf0AAIf9AAAPjAAAH5wAAD/8AAC/+QAAgYEAAPAPAAAoAAAAIAAAAEAAAAABACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgICtjExMbk2NjYAMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACwAAAM4BAQH/MDAw/zY2NtEzMzMNMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACMAAADsAAAA/wEBAf8XFxf/MDAw/zU1Ne4zMzMmMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA2AAAA/QAAAP8AAAD/eXl5/56env8ODg7/Jycn/zY2Nv8zMzM6MzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQQAAAP8AAAD/AAAA/4SEhP///////////6Ghof8QEBD/IiIi/zc3N/8zMzNFMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEEAAAD/AAAA/wAAAP+bm5v//////+/v7//u7u7//////7S0tP8VFRX/ICAg/zc3N/8zMzNFMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA3AAAA/wAAAP8AAAD/p6en///////u7u7/6urq/+rq6v/t7e3//////729vf8WFhb/ICAg/zc3N/8zMzM6MzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIQAAAPgAAAD/AAAA/6ampv//////7e3t/+rq6v/r6+v/6+vr/+rq6v/s7Oz//////729vf8UFBT/IyMj/zY2NvozMzMlMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAUAAADdAAAA/wAAAP+ampr//////+3t7f/q6ur/6+vr/+vr6//r6+v/6+vr/+rq6v/s7Oz//////7Kysf8PDw//KSkp/zU1NeAzMzMIMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAsQAAAP8AAAD/f39////////u7u7/6urq/+vr6//r6+v/6+vr/+vr6//r6+v/6+vr/+rq6f/t7e///////5ynwf8KEy3/MC8t/zQ0M7UzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAHUAAAD/AAAA/1paWv//////8vLy/+rq6v/r6+v/6+vr/+vr6//r6+v/6+vr/+vq6f/r7PD/7/f//+v3///o6Oj//+mZ/3FcJv8OGDL/NjUy/zMzM3ozMzMAMzMzA
DMzMwAzMzMAMzMzAAAAAAAAAAAAAAAAAAAAAAAAAAAuAAAA/wAAAP8sLCz///////v7+//p6en/6+vr/+vr6//r6+v/6+vr/+vq6f/r6+7/7/j//+r2///h2sf/37tY/9+iA//zpgD//74A/2BRJv8eJTn/MzMz/zMzMzIzMzMAMzMzADMzMwAzMzMAAAAAAAAAAAAAAAAAAAAAAAAAAMUAAAD/AAAA/9DQ0P//////6Ojo/+vr6//r6+v/6+vr/+vr6v/r6uv/7vX+/+v4///i2sb/4LZC/+OfAP/sngD/9K0A/fCuALz/zgBgoIMYRRAZPPUzMzP/MzMzyTMzMwAzMzMAMzMzADMzMwAAAAAAAAAAAAAAAAAAAABfAAAA/wAAAP9+fn7//////+np6f/r6+v/6+vr/+vr6//r6uj/7O/z/+36///k5OH/4btP/+WfAP/voQD/9K8AyvCwAGTvrgAQ764AAP/CAABrWSUAFyA6eTMzM/8zMzP/MzMzYzMzMwAzMzMAMzMzAAAAAAAAAAAAAAAAAAAAAN4AAAD/HR0d//r6+v/4+Pj/6urq/+vr6//r6+v/6+rp/+31///o9P//4sqI/+SjAP/unwD/9K8Aou+vACjurgAA8K8AAPayAAD/xAAA6K0FACwuNAArLjUAMzMz2jMzM/8zMzPiMzMzADMzMwAzMzMAAAAAAAAAAAAAAABgAAAA/wAAAP+dnZ3//////+jo6P/r6+v/6+rq/+zr7f/t+f//5ebi/+OzMv/rnQD/8aoAnfKxABT/ugAA/8IAAP/EAAD/wQAA/LYAAP+5AACNbhoAEh48ADU0MwAzMzNaMzMz/zMzM/8zMzNkMzMzADMzMwAAAAAAAAAAAAAAAMgAAAD/IiIi//z8/P/09PT/6+vr/+vq6f/s7fD/6/r//+TYsf/npQP/8aEAwe+tAB34uAAA/8MAAN+iBAC+jg0Apn8TAHJcIgBYSykAPDkwACcrNgAxMjQAMzMzADMzMwAzMzPYMzMz/zMzM8wzMzMAMzMzAAAAAAAAAAAwAAAA/wAAAP+Hh4f//////+np6f/r6un/7O/z/+r4///lzIf/658A+/KoAFburQAA/8EAAP+/AACCZR0AKSw2ABwkOQAWIDsAEBw8ABoiOgAjKDcALzA0ADU0MgAzMzMAMzMzADMzM3AzMzP/MzMz/zMzMzQzMzMAAAAAAAAAAHoAAAD/AAAA/9ra2v//////6+rp/+zv8//q+P//5cdy/+2eAMvyqwAP7KwAAP+/AADqrQMAUEUrAAcWPwAkKTcAMDE0ADIyMwA0MzMAMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzGTMzM/8zMzP/MzMzfzMzMwAAAAAkAAAAvgAAAP8+Pj7//////+3s6//s7fD/6vj//+fIdP/ungCa8a0AAO6uAAD+uAAA6q0DADAxMwAMGT4ANTQzCDQ0M8gzMzOOMzMzKjMzM8QzMzOQMzMz1DMzM0szMzO9MzMzSTMzMwAzMzMAMzMzvDMzM/8zMzPCMzMzJwAAAFMAAADsAAAA/3d3d///////6urq/+r5///nz4v/7p4AffGsAADvrwAA7asAAP/OAACUdRoAABBAADc1MgAzMzMAMzMzyzMzM6QzMzOFMzMzyDMzM0AzMzPXMzMzLzMzM+gzMzMcMzMzADMzMwAzMzOAMzMz/zMzM+8zMzNXAAAAegAAAP8AAAD/mJiY///////r9///6dyz/+6hAHfwqwAA768AAO2sAAD/vgAA8LQDADUzMgAmKjcAMzMzADMzMwAzMzOdMzMz4zMzM+szMzN4MzMzADMzM+UzMzPPMzMz1DMzMwAzMzMAMzMzADMzM1ozMzP/MzMz/zMzM3wAAACUAAAA/wAAAP+traz//////+ns5//uqguL8KcAAO2tAAD5tAAA/9IAAP/UAABoVCkADho8ADc2MgAzMzMAMzMzADMzM8IzMzOoMzMzdjMzM9ozMzMkMzMz5TMzM5QzMzMmMzMzADMzMwAzMzMAMzMzQjMzM/8zMzP/MzMzkwAAAJ4AAAD/AAAA/7S1tv//////7L5RtfCfAAD8uwAA/9MAAPy9AACxfQAASTgLABYhPwA+Pj0ANDQzADIyMgAzMzMGMzMzwzMzM8kzMzPNMzMzRDMzM24zMzPiMzMzADMzMyEzMzNTMzMzFDMzMwAzMzM5MzMz/zMzM/8zMzOaAAAAlAAAAP8AAAD/q7fS///80O//tgAQ/9MAAPSzAACUagAAIBcAAAAAAAAAAAAABwcHACcnJgA9PT0AOjo6ADIyMgEzMzMBMzMzATMzMwEzMzMAMzMzEDMzMwYzMzMAMzMzRjMzM1wzMzMSMzMzADMzM0IzMzP/MzMz/zMzM5MAAAB5AAAA/wAAAP+fp6r///5ZR96WAAB0VQAADgoAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA8PDwAvLy8ANjY2ADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAzMzMBMzMzATMzMwAzMzMAMzMzWzMzM/8zMzP/MzMzegAAAE0AAADmAAAA/1BDKeFvUA4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQEBAAzMzMANjY2ADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAzMzOrMzMz/zMzM+ozMzNRAAAAEgAAAKkAAAD/AAAA/wAAAPUAAACnAAAAVgAAABEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgICADExMQA2NjYAMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzDzMzM1UzMzOlMzMz9TMzM/8zMzP/MzMzrjMzMxYAAAAAAAAAMAAAAOoAAAD/AAAA/wAAAP8AAAD/AAAA/QAAAMgAAACQAAAAXgAAADEAAAAKAAAAAAAAAAACAgIAMTExADY2NgAzMzMAMzMzCTMzMzEzMzNdMzMzjzMzM8czMzP8MzMz/zMzM/8zMzP/MzMz/zMzM+wzMzMzMzMzAAAAAAAAAAAAAAAAAAAAAD0AAACaAAAA5wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAPYAAADPAAAArAICAoQxMTGDNjY2qzMzM88zMzP1MzMz/zMzM/8zMzP/MzMz/zMzM/8zMzP/MzMz6TMzM5wzMzM/MzMzADMzMwAzMzMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMgAAAHQAAACvAAAA5QAAAP8AAAD/AAAA/wAAAP8AAAD/AgIC/zExMf82Njb/MzMz/zMzM/8zMzP/MzMz/zMzM+UzMzOvMzMzdjMzMzQzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIgAAAFEAAAB7AAAAowAAAMYCAgLqMTEx6zY2NsczMzOkMzMzfDMzM1EzMzMjMzMzADMz
MwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzAP/+f////D////gf///wD///4Af//8AD//+AAf//AAD//gAAf/wAAD/8AAA/+AAAH/AAAY/wAA/P4AA/x+AA/+fAA//jwA//88Af//OAP5FxgP+FcYH/jHkB/5T4A/+N+Af///iP///5j///8YP//8HAP/wD8AAAD/8AAP//+B//">
5 <link id="favicon" rel="shortcut icon" type="image/png" href="data:image/png;base64,AAABAAIAEBAAAAEAIABoBAAAJgAAACAgAAABACAAqBAAAI4EAAAoAAAAEAAAACAAAAABACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAALMiIiK1OTk5ADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMEsLCz/SUlJ/xUVFcM3NzcAMzMzADMzMwAzMzMAMzMzADMzMwAAAAAAAAAAAAAAAAAAAAAAAAAAAgAAAM8eHh7/8/Pz//39/f9BQUH/Dw8P0DY2NgMzMzMAMzMzADMzMwAzMzMAAAAAAAAAAAAAAAAAAAAAAAAAAMcjIyP/8vLy////////////9/f3/0RERf8REhTINzc3ADMzMwAzMzMAMzMzAAAAAAAAAAAAAAAAAAAAAKUWFhb/7Ozs///////o6Of/6Onq///////z+v//NUFh/xgaIao3NjIAMzMzADMzMwAAAAAAAAAAAAAAAGgAAAD/0dHR///////o5+X/7O/2/+v5///j5Oj//+ic/92wMv83MB3/Jys2ajMzMwAzMzMAAAAAAAAAABYAAAD4kZGR///////p6er/7Pf//+jv+//my4D/6q0J9PqkAJz/zAAlXlcoeRshOf8zMzMaMzMzAAAAAAAAAACRMDAw///////09fj/6vj//+Xcwv/vtBns/7UAav+8AAP/vgAAyZUKACotNQAtLzXyMzMzsDMzMwAAAAAKAAAA8aSjov//////6PX//+fOif/2qwCH/7YAAKN7EgBsWSQAU0gqAC4wNAAqLTUANTQyZjMzM/8zMzMOAAAAUBMTEv/x8vb/9f///+nLdfL2ogAz/7kAAG5bIwAFFD81Dhs9ShskOU8qLTZMNTQyKTMzMwAzMzP/MzMzZgAAAIVJSEf//////+nRjeb4pQAV/9sAAKiFFQAADkEAMDE0UzQ0M+IzMzOOMzMzvDMzM2QzMzMAMzMz3zMzM6oAAACeXGV9////7/j/yAAe/70AALiDAAA0NTcALDJAADMzMlEzMzPVMzMzgDMzM30zMzMjMzMzADMzM8MzMzPIAAAAnWBlaf//5V86nGYAACgeAAAAAAAABgcNACsrKQA2NjYKMzMzEDMzMwwzMzMGMzMzDDMzMwAzMzPNMzMzvwAAAG0bFQv/JRgHfQAAAB4AAAAAAAAAAAAAAAADAwMAMjIyADY2NgAzMzMAMzMzADMzMxIzMzOKMzMz/zMzM3EAAAADAAAAjAAAAP8AAAD/AAAA/QAAANAAAACZAgICXzExMV82NjaZMzMz0DMzM/wzMzP/MzMz/zMzM5gzMzMAAAAAAAAAAAAAAAAAAAAAOAAAAIoAAADKAAAA9AICAv8xMTH/NjY29DMzM8ozMzOLMzMzODMzMwAzMzMAMzMzAP5/AAD8PwAA+B8AAPAPAADgBwAA4AcAAMAbAACA+QAAgf0AAIf9AAAPjAAAH5wAAD/8AAC/+QAAgYEAAPAPAAAoAAAAIAAAAEAAAAABACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgICtjExMbk2NjYAMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACwAAAM4BAQH/MDAw/zY2NtEzMzMNMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACMAAADsAAAA/wEBAf8XFxf/MDAw/zU1Ne4zMzMmMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA2AAAA/QAAAP8AAAD/eXl5/56env8ODg7/Jycn/zY2Nv8zMzM6MzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQQAAAP8AAAD/AAAA/4SEhP///////////6Ghof8QEBD/IiIi/zc3N/8zMzNFMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEEAAAD/AAAA/wAAAP+bm5v//////+/v7//u7u7//////7S0tP8VFRX/ICAg/zc3N/8zMzNFMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA3AAAA/wAAAP8AAAD/p6en///////u7u7/6urq/+rq6v/t7e3//////729vf8WFhb/ICAg/zc3N/8zMzM6MzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIQAAAPgAAAD/AAAA/6ampv//////7e3t/+rq6v/r6+v/6+vr/+rq6v/s7Oz//////729vf8UFBT/IyMj/zY2NvozMzMlMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAUAAADdAAAA/wAAAP+ampr//////+3t7f/q6ur/6+vr/+vr6//r6+v/6+vr/+rq6v/s7Oz//////7Kysf8PDw//KSkp/zU1NeAzMzMIMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAsQAAAP8AAAD/f39////////u7u7/6urq/+vr6//r6+v/6+vr/+vr6//r6+v/6+vr/+rq6f/t7e///////5ynwf8KEy3/MC8t/zQ0M7UzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAHUAAAD/AAAA/1paWv//////8vLy/+rq6v/r6+v/6+vr/+vr6//r6+v/6+vr/+vq6f/r7PD/7/f//+v3///o6Oj//+mZ/3FcJv8OGDL/NjUy/zMzM3ozMzMAMzMzA
DMzMwAzMzMAMzMzAAAAAAAAAAAAAAAAAAAAAAAAAAAuAAAA/wAAAP8sLCz///////v7+//p6en/6+vr/+vr6//r6+v/6+vr/+vq6f/r6+7/7/j//+r2///h2sf/37tY/9+iA//zpgD//74A/2BRJv8eJTn/MzMz/zMzMzIzMzMAMzMzADMzMwAzMzMAAAAAAAAAAAAAAAAAAAAAAAAAAMUAAAD/AAAA/9DQ0P//////6Ojo/+vr6//r6+v/6+vr/+vr6v/r6uv/7vX+/+v4///i2sb/4LZC/+OfAP/sngD/9K0A/fCuALz/zgBgoIMYRRAZPPUzMzP/MzMzyTMzMwAzMzMAMzMzADMzMwAAAAAAAAAAAAAAAAAAAABfAAAA/wAAAP9+fn7//////+np6f/r6+v/6+vr/+vr6//r6uj/7O/z/+36///k5OH/4btP/+WfAP/voQD/9K8AyvCwAGTvrgAQ764AAP/CAABrWSUAFyA6eTMzM/8zMzP/MzMzYzMzMwAzMzMAMzMzAAAAAAAAAAAAAAAAAAAAAN4AAAD/HR0d//r6+v/4+Pj/6urq/+vr6//r6+v/6+rp/+31///o9P//4sqI/+SjAP/unwD/9K8Aou+vACjurgAA8K8AAPayAAD/xAAA6K0FACwuNAArLjUAMzMz2jMzM/8zMzPiMzMzADMzMwAzMzMAAAAAAAAAAAAAAABgAAAA/wAAAP+dnZ3//////+jo6P/r6+v/6+rq/+zr7f/t+f//5ebi/+OzMv/rnQD/8aoAnfKxABT/ugAA/8IAAP/EAAD/wQAA/LYAAP+5AACNbhoAEh48ADU0MwAzMzNaMzMz/zMzM/8zMzNkMzMzADMzMwAAAAAAAAAAAAAAAMgAAAD/IiIi//z8/P/09PT/6+vr/+vq6f/s7fD/6/r//+TYsf/npQP/8aEAwe+tAB34uAAA/8MAAN+iBAC+jg0Apn8TAHJcIgBYSykAPDkwACcrNgAxMjQAMzMzADMzMwAzMzPYMzMz/zMzM8wzMzMAMzMzAAAAAAAAAAAwAAAA/wAAAP+Hh4f//////+np6f/r6un/7O/z/+r4///lzIf/658A+/KoAFburQAA/8EAAP+/AACCZR0AKSw2ABwkOQAWIDsAEBw8ABoiOgAjKDcALzA0ADU0MgAzMzMAMzMzADMzM3AzMzP/MzMz/zMzMzQzMzMAAAAAAAAAAHoAAAD/AAAA/9ra2v//////6+rp/+zv8//q+P//5cdy/+2eAMvyqwAP7KwAAP+/AADqrQMAUEUrAAcWPwAkKTcAMDE0ADIyMwA0MzMAMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzGTMzM/8zMzP/MzMzfzMzMwAAAAAkAAAAvgAAAP8+Pj7//////+3s6//s7fD/6vj//+fIdP/ungCa8a0AAO6uAAD+uAAA6q0DADAxMwAMGT4ANTQzCDQ0M8gzMzOOMzMzKjMzM8QzMzOQMzMz1DMzM0szMzO9MzMzSTMzMwAzMzMAMzMzvDMzM/8zMzPCMzMzJwAAAFMAAADsAAAA/3d3d///////6urq/+r5///nz4v/7p4AffGsAADvrwAA7asAAP/OAACUdRoAABBAADc1MgAzMzMAMzMzyzMzM6QzMzOFMzMzyDMzM0AzMzPXMzMzLzMzM+gzMzMcMzMzADMzMwAzMzOAMzMz/zMzM+8zMzNXAAAAegAAAP8AAAD/mJiY///////r9///6dyz/+6hAHfwqwAA768AAO2sAAD/vgAA8LQDADUzMgAmKjcAMzMzADMzMwAzMzOdMzMz4zMzM+szMzN4MzMzADMzM+UzMzPPMzMz1DMzMwAzMzMAMzMzADMzM1ozMzP/MzMz/zMzM3wAAACUAAAA/wAAAP+traz//////+ns5//uqguL8KcAAO2tAAD5tAAA/9IAAP/UAABoVCkADho8ADc2MgAzMzMAMzMzADMzM8IzMzOoMzMzdjMzM9ozMzMkMzMz5TMzM5QzMzMmMzMzADMzMwAzMzMAMzMzQjMzM/8zMzP/MzMzkwAAAJ4AAAD/AAAA/7S1tv//////7L5RtfCfAAD8uwAA/9MAAPy9AACxfQAASTgLABYhPwA+Pj0ANDQzADIyMgAzMzMGMzMzwzMzM8kzMzPNMzMzRDMzM24zMzPiMzMzADMzMyEzMzNTMzMzFDMzMwAzMzM5MzMz/zMzM/8zMzOaAAAAlAAAAP8AAAD/q7fS///80O//tgAQ/9MAAPSzAACUagAAIBcAAAAAAAAAAAAABwcHACcnJgA9PT0AOjo6ADIyMgEzMzMBMzMzATMzMwEzMzMAMzMzEDMzMwYzMzMAMzMzRjMzM1wzMzMSMzMzADMzM0IzMzP/MzMz/zMzM5MAAAB5AAAA/wAAAP+fp6r///5ZR96WAAB0VQAADgoAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA8PDwAvLy8ANjY2ADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAzMzMBMzMzATMzMwAzMzMAMzMzWzMzM/8zMzP/MzMzegAAAE0AAADmAAAA/1BDKeFvUA4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQEBAAzMzMANjY2ADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAzMzOrMzMz/zMzM+ozMzNRAAAAEgAAAKkAAAD/AAAA/wAAAPUAAACnAAAAVgAAABEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgICADExMQA2NjYAMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzDzMzM1UzMzOlMzMz9TMzM/8zMzP/MzMzrjMzMxYAAAAAAAAAMAAAAOoAAAD/AAAA/wAAAP8AAAD/AAAA/QAAAMgAAACQAAAAXgAAADEAAAAKAAAAAAAAAAACAgIAMTExADY2NgAzMzMAMzMzCTMzMzEzMzNdMzMzjzMzM8czMzP8MzMz/zMzM/8zMzP/MzMz/zMzM+wzMzMzMzMzAAAAAAAAAAAAAAAAAAAAAD0AAACaAAAA5wAAAP8AAAD/AAAA/wAAAP8AAAD/AAAA/wAAAPYAAADPAAAArAICAoQxMTGDNjY2qzMzM88zMzP1MzMz/zMzM/8zMzP/MzMz/zMzM/8zMzP/MzMz6TMzM5wzMzM/MzMzADMzMwAzMzMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMgAAAHQAAACvAAAA5QAAAP8AAAD/AAAA/wAAAP8AAAD/AgIC/zExMf82Njb/MzMz/zMzM/8zMzP/MzMz/zMzM+UzMzOvMzMzdjMzMzQzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIgAAAFEAAAB7AAAAowAAAMYCAgLqMTEx6zY2NsczMzOkMzMzfDMzM1EzMzMjMzMzADMz
MwAzMzMAMzMzADMzMwAzMzMAMzMzADMzMwAzMzMAMzMzAP/+f////D////gf///wD///4Af//8AD//+AAf//AAD//gAAf/wAAD/8AAA/+AAAH/AAAY/wAA/P4AA/x+AA/+fAA//jwA//88Af//OAP5FxgP+FcYH/jHkB/5T4A/+N+Af///iP///5j///8YP//8HAP/wD8AAAD/8AAP//+B//">
6 <meta http-equiv="Content-Type" content="text/html;charset=utf-8" />
6 <meta http-equiv="Content-Type" content="text/html;charset=utf-8" />
7 <meta name="robots" content="index, nofollow"/>
7 <meta name="robots" content="index, nofollow"/>
8 <meta http-equiv="Content-Type" content="text/html;charset=utf-8" />
8 <meta http-equiv="Content-Type" content="text/html;charset=utf-8" />
9 <style>
9 <style>
10 * {
10 * {
11 box-sizing: border-box;
11 box-sizing: border-box;
12 }
12 }
13 body {
13 body {
14 background:#eeeeee;
14 background:#eeeeee;
15 color: #323232;
15 color: #323232;
16 font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Oxygen-Sans, Ubuntu, Cantarell, "Helvetica Neue", sans-serif;
16 font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Oxygen-Sans, Ubuntu, Cantarell, "Helvetica Neue", sans-serif;
17 margin: 0 auto;
17 margin: 0 auto;
18 max-width: 1000px;
18 max-width: 1000px;
19 letter-spacing: .02em;
19 letter-spacing: .02em;
20 font-size: 13px;
20 font-size: 13px;
21 line-height: 1.41em;
21 line-height: 1.41em;
22 }
22 }
23 h1 {
23 h1 {
24 padding: 20px 0;
24 padding: 20px 0;
25 font-size: 1.54em;
25 font-size: 1.54em;
26 }
26 }
27 ul {
27 ul {
28 padding-left: 10px;
28 padding-left: 10px;
29 }
29 }
30 li {
30 li {
31 list-style-type: disc;
31 list-style-type: disc;
32 }
32 }
33 .error_message {
33 .error_message {
34 font-weight: normal;
34 font-weight: normal;
35 }
35 }
36 .logo-container {
36 .logo-container {
37 float: left;
37 float: left;
38 width: 150px;
38 width: 150px;
39 text-align: center;
39 text-align: center;
40 }
40 }
41 a {
41 a {
42 color: #427cc9;
42 color: #427cc9;
43 text-decoration: none;
43 text-decoration: none;
44 outline: none;
44 outline: none;
45 cursor: pointer;
45 cursor: pointer;
46 }
46 }
47 body {
47 body {
48 padding: 10px;
48 padding: 10px;
49 padding-top: 10%;
49 padding-top: 10%;
50
50
51 }
51 }
52 .inner-column {
52 .inner-column {
53 padding: 10px 30px;
53 padding: 10px 30px;
54 width: 33%;
54 width: 33%;
55 float: left;
55 float: left;
56 border-right: 1px solid #dbd9da;
56 border-right: 1px solid #dbd9da;
57
57
58 }
58 }
59 .inner-column:last-child {
59 .inner-column:last-child {
60 border: none;
60 border: none;
61 }
61 }
62 .side {
62 .side {
63 min-height: 220px;
63 min-height: 220px;
64 width: 150px;
64 width: 150px;
65 float: left;
65 float: left;
66 text-align: center;
66 text-align: center;
67 border-right: 1px solid #ddd;
67 border-right: 1px solid #ddd;
68 }
68 }
69 .logo {
69 .logo {
70 width: 120px;
70 width: 120px;
71 height: 150px;
71 height: 150px;
72 }
72 }
73 .main {
73 .main {
74 padding-left: 170px;
74 padding-left: 170px;
75 }
75 }
76 @media (max-width: 979px) {
76 @media (max-width: 979px) {
77 .inner-column {
77 .inner-column {
78 width: 100%;
78 width: 100%;
79 }
79 }
80 }
80 }
81 </style>
81 </style>
82
82
83 </head>
83 </head>
84 <body>
84 <body>
85 <div class="side">
85 <div class="side">
86 <img class="logo" src="data:image/png;base64,[base64-encoded PNG logo data omitted]">
86 <img class="logo" src="data:image/png;base64,[base64-encoded PNG logo data omitted]">
87 </div>
87 </div>
88 <div class="main">
88 <div class="main">
89 <h1>
89 <h1>
90 502 Bad Gateway
90 502 Bad Gateway
91 <br/>
91 <br/>
92 <span class="error_message">Backend server is unreachable</span>
92 <span class="error_message">Backend server is unreachable</span>
93 </h1>
93 </h1>
94 <div class="inner-column">
94 <div class="inner-column">
95 <h4>Possible Causes</h4>
95 <h4>Possible Causes</h4>
96 <ul>
96 <ul>
97 <li>The server is being restarted.</li>
97 <li>The server is being restarted.</li>
98 <li>The server is overloaded.</li>
98 <li>The server is overloaded.</li>
99 <li>The link may be incorrect.</li>
99 <li>The link may be incorrect.</li>
100 <li><a onclick="window.location.reload()">Reload page</a></li>
100 </ul>
101 </ul>
101 </div>
102 </div>
102 <div class="inner-column">
103 <div class="inner-column">
103 <h4>Support</h4>
104 <h4>Support</h4>
104 <p>For support, go to <a href="https://rhodecode.com/help/" target="_blank">Support</a>.
105 <p>For support, go to <a href="https://rhodecode.com/help/" target="_blank">Support</a>.
105 It may be useful to include your log file; see the log file locations <a href="https://rhodecode.com/r1/enterprise/docs/admin-system-overview/">here</a>.
106 It may be useful to include your log file; see the log file locations <a href="https://rhodecode.com/r1/enterprise/docs/admin-system-overview/">here</a>.
106 </p>
107 </p>
107 </div>
108 </div>
108 <div class="inner-column">
109 <div class="inner-column">
109 <h4>Documentation</h4>
110 <h4>Documentation</h4>
110 <p>For more information, see <a href="https://rhodecode.com/r1/enterprise/docs/">docs.rhodecode.com</a>.</p>
111 <p>For more information, see <a href="https://rhodecode.com/r1/enterprise/docs/">docs.rhodecode.com</a>.</p>
111 </div>
112 </div>
112 </div>
113 </div>
113 </body>
114 </body>
114 </html>
115 </html>
@@ -1,981 +1,981 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import mock
21 import mock
22 import pytest
22 import pytest
23 import textwrap
23 import textwrap
24
24
25 import rhodecode
25 import rhodecode
26 from rhodecode.lib.utils2 import safe_unicode
26 from rhodecode.lib.utils2 import safe_unicode
27 from rhodecode.lib.vcs.backends import get_backend
27 from rhodecode.lib.vcs.backends import get_backend
28 from rhodecode.lib.vcs.backends.base import (
28 from rhodecode.lib.vcs.backends.base import (
29 MergeResponse, MergeFailureReason, Reference)
29 MergeResponse, MergeFailureReason, Reference)
30 from rhodecode.lib.vcs.exceptions import RepositoryError
30 from rhodecode.lib.vcs.exceptions import RepositoryError
31 from rhodecode.lib.vcs.nodes import FileNode
31 from rhodecode.lib.vcs.nodes import FileNode
32 from rhodecode.model.comment import CommentsModel
32 from rhodecode.model.comment import CommentsModel
33 from rhodecode.model.db import PullRequest, Session
33 from rhodecode.model.db import PullRequest, Session
34 from rhodecode.model.pull_request import PullRequestModel
34 from rhodecode.model.pull_request import PullRequestModel
35 from rhodecode.model.user import UserModel
35 from rhodecode.model.user import UserModel
36 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
36 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
37
37
38
38
39 pytestmark = [
39 pytestmark = [
40 pytest.mark.backends("git", "hg"),
40 pytest.mark.backends("git", "hg"),
41 ]
41 ]
42
42
43
43
44 @pytest.mark.usefixtures('config_stub')
44 @pytest.mark.usefixtures('config_stub')
45 class TestPullRequestModel(object):
45 class TestPullRequestModel(object):
46
46
47 @pytest.fixture()
47 @pytest.fixture()
48 def pull_request(self, request, backend, pr_util):
48 def pull_request(self, request, backend, pr_util):
49 """
49 """
50 A pull request combined with multiples patches.
50 A pull request combined with multiples patches.
51 """
51 """
52 BackendClass = get_backend(backend.alias)
52 BackendClass = get_backend(backend.alias)
53 merge_resp = MergeResponse(
53 merge_resp = MergeResponse(
54 False, False, None, MergeFailureReason.UNKNOWN,
54 False, False, None, MergeFailureReason.UNKNOWN,
55 metadata={'exception': 'MockError'})
55 metadata={'exception': 'MockError'})
56 self.merge_patcher = mock.patch.object(
56 self.merge_patcher = mock.patch.object(
57 BackendClass, 'merge', return_value=merge_resp)
57 BackendClass, 'merge', return_value=merge_resp)
58 self.workspace_remove_patcher = mock.patch.object(
58 self.workspace_remove_patcher = mock.patch.object(
59 BackendClass, 'cleanup_merge_workspace')
59 BackendClass, 'cleanup_merge_workspace')
60
60
61 self.workspace_remove_mock = self.workspace_remove_patcher.start()
61 self.workspace_remove_mock = self.workspace_remove_patcher.start()
62 self.merge_mock = self.merge_patcher.start()
62 self.merge_mock = self.merge_patcher.start()
63 self.comment_patcher = mock.patch(
63 self.comment_patcher = mock.patch(
64 'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
64 'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
65 self.comment_patcher.start()
65 self.comment_patcher.start()
66 self.notification_patcher = mock.patch(
66 self.notification_patcher = mock.patch(
67 'rhodecode.model.notification.NotificationModel.create')
67 'rhodecode.model.notification.NotificationModel.create')
68 self.notification_patcher.start()
68 self.notification_patcher.start()
69 self.helper_patcher = mock.patch(
69 self.helper_patcher = mock.patch(
70 'rhodecode.lib.helpers.route_path')
70 'rhodecode.lib.helpers.route_path')
71 self.helper_patcher.start()
71 self.helper_patcher.start()
72
72
73 self.hook_patcher = mock.patch.object(PullRequestModel,
73 self.hook_patcher = mock.patch.object(PullRequestModel,
74 'trigger_pull_request_hook')
74 'trigger_pull_request_hook')
75 self.hook_mock = self.hook_patcher.start()
75 self.hook_mock = self.hook_patcher.start()
76
76
77 self.invalidation_patcher = mock.patch(
77 self.invalidation_patcher = mock.patch(
78 'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
78 'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
79 self.invalidation_mock = self.invalidation_patcher.start()
79 self.invalidation_mock = self.invalidation_patcher.start()
80
80
81 self.pull_request = pr_util.create_pull_request(
81 self.pull_request = pr_util.create_pull_request(
82 mergeable=True, name_suffix=u'ąć')
82 mergeable=True, name_suffix=u'ąć')
83 self.source_commit = self.pull_request.source_ref_parts.commit_id
83 self.source_commit = self.pull_request.source_ref_parts.commit_id
84 self.target_commit = self.pull_request.target_ref_parts.commit_id
84 self.target_commit = self.pull_request.target_ref_parts.commit_id
85 self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
85 self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
86 self.repo_id = self.pull_request.target_repo.repo_id
86 self.repo_id = self.pull_request.target_repo.repo_id
87
87
88 @request.addfinalizer
88 @request.addfinalizer
89 def cleanup_pull_request():
89 def cleanup_pull_request():
90 calls = [mock.call(
90 calls = [mock.call(
91 self.pull_request, self.pull_request.author, 'create')]
91 self.pull_request, self.pull_request.author, 'create')]
92 self.hook_mock.assert_has_calls(calls)
92 self.hook_mock.assert_has_calls(calls)
93
93
94 self.workspace_remove_patcher.stop()
94 self.workspace_remove_patcher.stop()
95 self.merge_patcher.stop()
95 self.merge_patcher.stop()
96 self.comment_patcher.stop()
96 self.comment_patcher.stop()
97 self.notification_patcher.stop()
97 self.notification_patcher.stop()
98 self.helper_patcher.stop()
98 self.helper_patcher.stop()
99 self.hook_patcher.stop()
99 self.hook_patcher.stop()
100 self.invalidation_patcher.stop()
100 self.invalidation_patcher.stop()
101
101
102 return self.pull_request
102 return self.pull_request
103
103
104 def test_get_all(self, pull_request):
104 def test_get_all(self, pull_request):
105 prs = PullRequestModel().get_all(pull_request.target_repo)
105 prs = PullRequestModel().get_all(pull_request.target_repo)
106 assert isinstance(prs, list)
106 assert isinstance(prs, list)
107 assert len(prs) == 1
107 assert len(prs) == 1
108
108
109 def test_count_all(self, pull_request):
109 def test_count_all(self, pull_request):
110 pr_count = PullRequestModel().count_all(pull_request.target_repo)
110 pr_count = PullRequestModel().count_all(pull_request.target_repo)
111 assert pr_count == 1
111 assert pr_count == 1
112
112
113 def test_get_awaiting_review(self, pull_request):
113 def test_get_awaiting_review(self, pull_request):
114 prs = PullRequestModel().get_awaiting_review(pull_request.target_repo)
114 prs = PullRequestModel().get_awaiting_review(pull_request.target_repo)
115 assert isinstance(prs, list)
115 assert isinstance(prs, list)
116 assert len(prs) == 1
116 assert len(prs) == 1
117
117
118 def test_count_awaiting_review(self, pull_request):
118 def test_count_awaiting_review(self, pull_request):
119 pr_count = PullRequestModel().count_awaiting_review(
119 pr_count = PullRequestModel().count_awaiting_review(
120 pull_request.target_repo)
120 pull_request.target_repo)
121 assert pr_count == 1
121 assert pr_count == 1
122
122
123 def test_get_awaiting_my_review(self, pull_request):
123 def test_get_awaiting_my_review(self, pull_request):
124 PullRequestModel().update_reviewers(
124 PullRequestModel().update_reviewers(
125 pull_request, [(pull_request.author, ['author'], False, [])],
125 pull_request, [(pull_request.author, ['author'], False, 'reviewer', [])],
126 pull_request.author)
126 pull_request.author)
127 Session().commit()
127 Session().commit()
128
128
129 prs = PullRequestModel().get_awaiting_my_review(
129 prs = PullRequestModel().get_awaiting_my_review(
130 pull_request.target_repo, user_id=pull_request.author.user_id)
130 pull_request.target_repo, user_id=pull_request.author.user_id)
131 assert isinstance(prs, list)
131 assert isinstance(prs, list)
132 assert len(prs) == 1
132 assert len(prs) == 1
133
133
134 def test_count_awaiting_my_review(self, pull_request):
134 def test_count_awaiting_my_review(self, pull_request):
135 PullRequestModel().update_reviewers(
135 PullRequestModel().update_reviewers(
136 pull_request, [(pull_request.author, ['author'], False, [])],
136 pull_request, [(pull_request.author, ['author'], False, 'reviewer', [])],
137 pull_request.author)
137 pull_request.author)
138 Session().commit()
138 Session().commit()
139
139
140 pr_count = PullRequestModel().count_awaiting_my_review(
140 pr_count = PullRequestModel().count_awaiting_my_review(
141 pull_request.target_repo, user_id=pull_request.author.user_id)
141 pull_request.target_repo, user_id=pull_request.author.user_id)
142 assert pr_count == 1
142 assert pr_count == 1
143
143
144 def test_delete_calls_cleanup_merge(self, pull_request):
144 def test_delete_calls_cleanup_merge(self, pull_request):
145 repo_id = pull_request.target_repo.repo_id
145 repo_id = pull_request.target_repo.repo_id
146 PullRequestModel().delete(pull_request, pull_request.author)
146 PullRequestModel().delete(pull_request, pull_request.author)
147 Session().commit()
147 Session().commit()
148
148
149 self.workspace_remove_mock.assert_called_once_with(
149 self.workspace_remove_mock.assert_called_once_with(
150 repo_id, self.workspace_id)
150 repo_id, self.workspace_id)
151
151
152 def test_close_calls_cleanup_and_hook(self, pull_request):
152 def test_close_calls_cleanup_and_hook(self, pull_request):
153 PullRequestModel().close_pull_request(
153 PullRequestModel().close_pull_request(
154 pull_request, pull_request.author)
154 pull_request, pull_request.author)
155 Session().commit()
155 Session().commit()
156
156
157 repo_id = pull_request.target_repo.repo_id
157 repo_id = pull_request.target_repo.repo_id
158
158
159 self.workspace_remove_mock.assert_called_once_with(
159 self.workspace_remove_mock.assert_called_once_with(
160 repo_id, self.workspace_id)
160 repo_id, self.workspace_id)
161 self.hook_mock.assert_called_with(
161 self.hook_mock.assert_called_with(
162 self.pull_request, self.pull_request.author, 'close')
162 self.pull_request, self.pull_request.author, 'close')
163
163
164 def test_merge_status(self, pull_request):
164 def test_merge_status(self, pull_request):
165 self.merge_mock.return_value = MergeResponse(
165 self.merge_mock.return_value = MergeResponse(
166 True, False, None, MergeFailureReason.NONE)
166 True, False, None, MergeFailureReason.NONE)
167
167
168 assert pull_request._last_merge_source_rev is None
168 assert pull_request._last_merge_source_rev is None
        assert pull_request._last_merge_target_rev is None
        assert pull_request.last_merge_status is None

        merge_response, status, msg = PullRequestModel().merge_status(pull_request)
        assert status is True
        assert msg == 'This pull request can be automatically merged.'
        self.merge_mock.assert_called_with(
            self.repo_id, self.workspace_id,
            pull_request.target_ref_parts,
            pull_request.source_repo.scm_instance(),
            pull_request.source_ref_parts, dry_run=True,
            use_rebase=False, close_branch=False)

        assert pull_request._last_merge_source_rev == self.source_commit
        assert pull_request._last_merge_target_rev == self.target_commit
        assert pull_request.last_merge_status is MergeFailureReason.NONE

        self.merge_mock.reset_mock()
        merge_response, status, msg = PullRequestModel().merge_status(pull_request)
        assert status is True
        assert msg == 'This pull request can be automatically merged.'
        assert self.merge_mock.called is False

    def test_merge_status_known_failure(self, pull_request):
        self.merge_mock.return_value = MergeResponse(
            False, False, None, MergeFailureReason.MERGE_FAILED,
            metadata={'unresolved_files': 'file1'})

        assert pull_request._last_merge_source_rev is None
        assert pull_request._last_merge_target_rev is None
        assert pull_request.last_merge_status is None

        merge_response, status, msg = PullRequestModel().merge_status(pull_request)
        assert status is False
        assert msg == 'This pull request cannot be merged because of merge conflicts. file1'
        self.merge_mock.assert_called_with(
            self.repo_id, self.workspace_id,
            pull_request.target_ref_parts,
            pull_request.source_repo.scm_instance(),
            pull_request.source_ref_parts, dry_run=True,
            use_rebase=False, close_branch=False)

        assert pull_request._last_merge_source_rev == self.source_commit
        assert pull_request._last_merge_target_rev == self.target_commit
        assert pull_request.last_merge_status is MergeFailureReason.MERGE_FAILED

        self.merge_mock.reset_mock()
        merge_response, status, msg = PullRequestModel().merge_status(pull_request)
        assert status is False
        assert msg == 'This pull request cannot be merged because of merge conflicts. file1'
        assert self.merge_mock.called is False

    def test_merge_status_unknown_failure(self, pull_request):
        self.merge_mock.return_value = MergeResponse(
            False, False, None, MergeFailureReason.UNKNOWN,
            metadata={'exception': 'MockError'})

        assert pull_request._last_merge_source_rev is None
        assert pull_request._last_merge_target_rev is None
        assert pull_request.last_merge_status is None

        merge_response, status, msg = PullRequestModel().merge_status(pull_request)
        assert status is False
        assert msg == (
            'This pull request cannot be merged because of an unhandled exception. '
            'MockError')
        self.merge_mock.assert_called_with(
            self.repo_id, self.workspace_id,
            pull_request.target_ref_parts,
            pull_request.source_repo.scm_instance(),
            pull_request.source_ref_parts, dry_run=True,
            use_rebase=False, close_branch=False)

        assert pull_request._last_merge_source_rev is None
        assert pull_request._last_merge_target_rev is None
        assert pull_request.last_merge_status is None

        self.merge_mock.reset_mock()
        merge_response, status, msg = PullRequestModel().merge_status(pull_request)
        assert status is False
        assert msg == (
            'This pull request cannot be merged because of an unhandled exception. '
            'MockError')
        assert self.merge_mock.called is True

    def test_merge_status_when_target_is_locked(self, pull_request):
        pull_request.target_repo.locked = [1, u'12345.50', 'lock_web']
        merge_response, status, msg = PullRequestModel().merge_status(pull_request)
        assert status is False
        assert msg == (
            'This pull request cannot be merged because the target repository '
            'is locked by user:1.')

    def test_merge_status_requirements_check_target(self, pull_request):

        def has_largefiles(self, repo):
            return repo == pull_request.source_repo

        patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
        with patcher:
            merge_response, status, msg = PullRequestModel().merge_status(pull_request)

        assert status is False
        assert msg == 'Target repository large files support is disabled.'

    def test_merge_status_requirements_check_source(self, pull_request):

        def has_largefiles(self, repo):
            return repo == pull_request.target_repo

        patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
        with patcher:
            merge_response, status, msg = PullRequestModel().merge_status(pull_request)

        assert status is False
        assert msg == 'Source repository large files support is disabled.'

    def test_merge(self, pull_request, merge_extras):
        user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
        merge_ref = Reference(
            'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
        self.merge_mock.return_value = MergeResponse(
            True, True, merge_ref, MergeFailureReason.NONE)

        merge_extras['repository'] = pull_request.target_repo.repo_name
        PullRequestModel().merge_repo(
            pull_request, pull_request.author, extras=merge_extras)
        Session().commit()

        message = (
            u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
            u'\n\n {pr_title}'.format(
                pr_id=pull_request.pull_request_id,
                source_repo=safe_unicode(
                    pull_request.source_repo.scm_instance().name),
                source_ref_name=pull_request.source_ref_parts.name,
                pr_title=safe_unicode(pull_request.title)
            )
        )
        self.merge_mock.assert_called_with(
            self.repo_id, self.workspace_id,
            pull_request.target_ref_parts,
            pull_request.source_repo.scm_instance(),
            pull_request.source_ref_parts,
            user_name=user.short_contact, user_email=user.email, message=message,
            use_rebase=False, close_branch=False
        )
        self.invalidation_mock.assert_called_once_with(
            pull_request.target_repo.repo_name)

        self.hook_mock.assert_called_with(
            self.pull_request, self.pull_request.author, 'merge')

        pull_request = PullRequest.get(pull_request.pull_request_id)
        assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'

    def test_merge_with_status_lock(self, pull_request, merge_extras):
        user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
        merge_ref = Reference(
            'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
        self.merge_mock.return_value = MergeResponse(
            True, True, merge_ref, MergeFailureReason.NONE)

        merge_extras['repository'] = pull_request.target_repo.repo_name

        with pull_request.set_state(PullRequest.STATE_UPDATING):
            assert pull_request.pull_request_state == PullRequest.STATE_UPDATING
            PullRequestModel().merge_repo(
                pull_request, pull_request.author, extras=merge_extras)
            Session().commit()

        assert pull_request.pull_request_state == PullRequest.STATE_CREATED

        message = (
            u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
            u'\n\n {pr_title}'.format(
                pr_id=pull_request.pull_request_id,
                source_repo=safe_unicode(
                    pull_request.source_repo.scm_instance().name),
                source_ref_name=pull_request.source_ref_parts.name,
                pr_title=safe_unicode(pull_request.title)
            )
        )
        self.merge_mock.assert_called_with(
            self.repo_id, self.workspace_id,
            pull_request.target_ref_parts,
            pull_request.source_repo.scm_instance(),
            pull_request.source_ref_parts,
            user_name=user.short_contact, user_email=user.email, message=message,
            use_rebase=False, close_branch=False
        )
        self.invalidation_mock.assert_called_once_with(
            pull_request.target_repo.repo_name)

        self.hook_mock.assert_called_with(
            self.pull_request, self.pull_request.author, 'merge')

        pull_request = PullRequest.get(pull_request.pull_request_id)
        assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'

    def test_merge_failed(self, pull_request, merge_extras):
        user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
        merge_ref = Reference(
            'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
        self.merge_mock.return_value = MergeResponse(
            False, False, merge_ref, MergeFailureReason.MERGE_FAILED)

        merge_extras['repository'] = pull_request.target_repo.repo_name
        PullRequestModel().merge_repo(
            pull_request, pull_request.author, extras=merge_extras)
        Session().commit()

        message = (
            u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
            u'\n\n {pr_title}'.format(
                pr_id=pull_request.pull_request_id,
                source_repo=safe_unicode(
                    pull_request.source_repo.scm_instance().name),
                source_ref_name=pull_request.source_ref_parts.name,
                pr_title=safe_unicode(pull_request.title)
            )
        )
        self.merge_mock.assert_called_with(
            self.repo_id, self.workspace_id,
            pull_request.target_ref_parts,
            pull_request.source_repo.scm_instance(),
            pull_request.source_ref_parts,
            user_name=user.short_contact, user_email=user.email, message=message,
            use_rebase=False, close_branch=False
        )

        pull_request = PullRequest.get(pull_request.pull_request_id)
        assert self.invalidation_mock.called is False
        assert pull_request.merge_rev is None

    def test_get_commit_ids(self, pull_request):
        # The PR has not been merged yet, so expect an exception
        with pytest.raises(ValueError):
            PullRequestModel()._get_commit_ids(pull_request)

        # Merge revision is in the revisions list
        pull_request.merge_rev = pull_request.revisions[0]
        commit_ids = PullRequestModel()._get_commit_ids(pull_request)
        assert commit_ids == pull_request.revisions

        # Merge revision is not in the revisions list
        pull_request.merge_rev = 'f000' * 10
        commit_ids = PullRequestModel()._get_commit_ids(pull_request)
        assert commit_ids == pull_request.revisions + [pull_request.merge_rev]

    def test_get_diff_from_pr_version(self, pull_request):
        source_repo = pull_request.source_repo
        source_ref_id = pull_request.source_ref_parts.commit_id
        target_ref_id = pull_request.target_ref_parts.commit_id
        diff = PullRequestModel()._get_diff_from_pr_or_version(
            source_repo, source_ref_id, target_ref_id,
            hide_whitespace_changes=False, diff_context=6)
        assert 'file_1' in diff.raw

    def test_generate_title_returns_unicode(self):
        title = PullRequestModel().generate_pullrequest_title(
            source='source-dummy',
            source_ref='source-ref-dummy',
            target='target-dummy',
        )
        assert type(title) == unicode

    @pytest.mark.parametrize('title, has_wip', [
        ('hello', False),
        ('hello wip', False),
        ('hello wip: xxx', False),
        ('[wip] hello', True),
        ('[wip] hello', True),
        ('wip: hello', True),
        ('wip hello', True),

    ])
    def test_wip_title_marker(self, pull_request, title, has_wip):
        pull_request.title = title
        assert pull_request.work_in_progress == has_wip


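# The integration-style tests below run PullRequestModel().merge_repo() against
# repositories created by pr_util and verify that rcextensions push hooks fire,
# that a pre-push hook can reject the merge, and that a locked target
# repository prevents it.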
@pytest.mark.usefixtures('config_stub')
class TestIntegrationMerge(object):
    @pytest.mark.parametrize('extra_config', (
        {'vcs.hooks.protocol': 'http', 'vcs.hooks.direct_calls': False},
    ))
    def test_merge_triggers_push_hooks(
            self, pr_util, user_admin, capture_rcextensions, merge_extras,
            extra_config):

        pull_request = pr_util.create_pull_request(
            approved=True, mergeable=True)
        # TODO: johbo: Needed for sqlite, try to find an automatic way for it
        merge_extras['repository'] = pull_request.target_repo.repo_name
        Session().commit()

        with mock.patch.dict(rhodecode.CONFIG, extra_config, clear=False):
            merge_state = PullRequestModel().merge_repo(
                pull_request, user_admin, extras=merge_extras)
            Session().commit()

        assert merge_state.executed
        assert '_pre_push_hook' in capture_rcextensions
        assert '_push_hook' in capture_rcextensions

    def test_merge_can_be_rejected_by_pre_push_hook(
            self, pr_util, user_admin, capture_rcextensions, merge_extras):
        pull_request = pr_util.create_pull_request(
            approved=True, mergeable=True)
        # TODO: johbo: Needed for sqlite, try to find an automatic way for it
        merge_extras['repository'] = pull_request.target_repo.repo_name
        Session().commit()

        with mock.patch('rhodecode.EXTENSIONS.PRE_PUSH_HOOK') as pre_pull:
            pre_pull.side_effect = RepositoryError("Disallow push!")
            merge_status = PullRequestModel().merge_repo(
                pull_request, user_admin, extras=merge_extras)
            Session().commit()

        assert not merge_status.executed
        assert 'pre_push' not in capture_rcextensions
        assert 'post_push' not in capture_rcextensions

    def test_merge_fails_if_target_is_locked(
            self, pr_util, user_regular, merge_extras):
        pull_request = pr_util.create_pull_request(
            approved=True, mergeable=True)
        locked_by = [user_regular.user_id + 1, 12345.50, 'lock_web']
        pull_request.target_repo.locked = locked_by
        # TODO: johbo: Check if this can work based on the database, currently
        # all data is pre-computed, that's why just updating the DB is not
        # enough.
        merge_extras['locked_by'] = locked_by
        merge_extras['repository'] = pull_request.target_repo.repo_name
        # TODO: johbo: Needed for sqlite, try to find an automatic way for it
        Session().commit()
        merge_status = PullRequestModel().merge_repo(
            pull_request, user_regular, extras=merge_extras)
        Session().commit()

        assert not merge_status.executed


@pytest.mark.parametrize('use_outdated, inlines_count, outdated_count', [
    (False, 1, 0),
    (True, 0, 1),
])
def test_outdated_comments(
        pr_util, use_outdated, inlines_count, outdated_count, config_stub):
    pull_request = pr_util.create_pull_request()
    pr_util.create_inline_comment(file_path='not_in_updated_diff')

    with outdated_comments_patcher(use_outdated) as outdated_comment_mock:
        pr_util.add_one_commit()
        assert_inline_comments(
            pull_request, visible=inlines_count, outdated=outdated_count)
        outdated_comment_mock.assert_called_with(pull_request)


@pytest.mark.parametrize('mr_type, expected_msg', [
    (MergeFailureReason.NONE,
     'This pull request can be automatically merged.'),
    (MergeFailureReason.UNKNOWN,
     'This pull request cannot be merged because of an unhandled exception. CRASH'),
    (MergeFailureReason.MERGE_FAILED,
     'This pull request cannot be merged because of merge conflicts. CONFLICT_FILE'),
    (MergeFailureReason.PUSH_FAILED,
     'This pull request could not be merged because push to target:`some-repo@merge_commit` failed.'),
    (MergeFailureReason.TARGET_IS_NOT_HEAD,
     'This pull request cannot be merged because the target `ref_name` is not a head.'),
    (MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES,
     'This pull request cannot be merged because the source contains more branches than the target.'),
    (MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
     'This pull request cannot be merged because the target `ref_name` has multiple heads: `a,b,c`.'),
    (MergeFailureReason.TARGET_IS_LOCKED,
     'This pull request cannot be merged because the target repository is locked by user:123.'),
    (MergeFailureReason.MISSING_TARGET_REF,
     'This pull request cannot be merged because the target reference `ref_name` is missing.'),
    (MergeFailureReason.MISSING_SOURCE_REF,
     'This pull request cannot be merged because the source reference `ref_name` is missing.'),
    (MergeFailureReason.SUBREPO_MERGE_FAILED,
     'This pull request cannot be merged because of conflicts related to sub repositories.'),

])
def test_merge_response_message(mr_type, expected_msg):
    merge_ref = Reference('type', 'ref_name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
    metadata = {
        'unresolved_files': 'CONFLICT_FILE',
        'exception': "CRASH",
        'target': 'some-repo',
        'merge_commit': 'merge_commit',
        'target_ref': merge_ref,
        'source_ref': merge_ref,
        'heads': ','.join(['a', 'b', 'c']),
        'locked_by': 'user:123'
    }

    merge_response = MergeResponse(True, True, merge_ref, mr_type, metadata=metadata)
    assert merge_response.merge_status_message == expected_msg


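# Individual tests override selected keys of the fixture below (for example
# 'repository' and 'locked_by') to match the repositories they create.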
@pytest.fixture()
def merge_extras(user_regular):
    """
    Context for the vcs operation when running a merge.
    """
    extras = {
        'ip': '127.0.0.1',
        'username': user_regular.username,
        'user_id': user_regular.user_id,
        'action': 'push',
        'repository': 'fake_target_repo_name',
        'scm': 'git',
        'config': 'fake_config_ini_path',
        'repo_store': '',
        'make_lock': None,
        'locked_by': [None, None, None],
        'server_url': 'http://test.example.com:5000',
        'hooks': ['push', 'pull'],
        'is_shadow_repo': False,
    }
    return extras


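# The tests below cover how inline comments behave when a pull request is
# updated with new commits: comments on unchanged lines stay visible, while
# comments on changed or removed lines are flagged as outdated.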
@pytest.mark.usefixtures('config_stub')
class TestUpdateCommentHandling(object):

    @pytest.fixture(autouse=True, scope='class')
    def enable_outdated_comments(self, request, baseapp):
        config_patch = mock.patch.dict(
            'rhodecode.CONFIG', {'rhodecode_use_outdated_comments': True})
        config_patch.start()

        @request.addfinalizer
        def cleanup():
            config_patch.stop()

    def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util):
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
            {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]},
        ]
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])
        pr_util.create_inline_comment(file_path='file_b')
        pr_util.add_one_commit(head='c')

        assert_inline_comments(pull_request, visible=1, outdated=0)

    def test_comment_stays_unflagged_on_change_above(self, pr_util):
        original_content = ''.join(
            ['line {}\n'.format(x) for x in range(1, 11)])
        updated_content = 'new_line_at_top\n' + original_content
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [FileNode('file_b', original_content)]},
            {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
        ]
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])

        with outdated_comments_patcher():
            comment = pr_util.create_inline_comment(
                line_no=u'n8', file_path='file_b')
            pr_util.add_one_commit(head='c')

        assert_inline_comments(pull_request, visible=1, outdated=0)
        assert comment.line_no == u'n9'

    def test_comment_stays_unflagged_on_change_below(self, pr_util):
        original_content = ''.join(['line {}\n'.format(x) for x in range(10)])
        updated_content = original_content + 'new_line_at_end\n'
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [FileNode('file_b', original_content)]},
            {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
        ]
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])
        pr_util.create_inline_comment(file_path='file_b')
        pr_util.add_one_commit(head='c')

        assert_inline_comments(pull_request, visible=1, outdated=0)

    @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9'])
    def test_comment_flagged_on_change_around_context(self, pr_util, line_no):
        base_lines = ['line {}\n'.format(x) for x in range(1, 13)]
        change_lines = list(base_lines)
        change_lines.insert(6, 'line 6a added\n')

        # Changes on the last line of sight
        update_lines = list(change_lines)
        update_lines[0] = 'line 1 changed\n'
        update_lines[-1] = 'line 12 changed\n'

        def file_b(lines):
            return FileNode('file_b', ''.join(lines))

        commits = [
            {'message': 'a', 'added': [file_b(base_lines)]},
            {'message': 'b', 'changed': [file_b(change_lines)]},
            {'message': 'c', 'changed': [file_b(update_lines)]},
        ]

        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])
        pr_util.create_inline_comment(line_no=line_no, file_path='file_b')

        with outdated_comments_patcher():
            pr_util.add_one_commit(head='c')
            assert_inline_comments(pull_request, visible=0, outdated=1)

    @pytest.mark.parametrize("change, content", [
        ('changed', 'changed\n'),
        ('removed', ''),
    ], ids=['changed', 'removed'])
    def test_comment_flagged_on_change(self, pr_util, change, content):
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
            {'message': 'c', change: [FileNode('file_b', content)]},
        ]
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])
        pr_util.create_inline_comment(file_path='file_b')

        with outdated_comments_patcher():
            pr_util.add_one_commit(head='c')
            assert_inline_comments(pull_request, visible=0, outdated=1)


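# The tests below verify the added/modified/removed file summary that is
# calculated between the original pull request version and its updated state.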
@pytest.mark.usefixtures('config_stub')
class TestUpdateChangedFiles(object):

    def test_no_changes_on_unchanged_diff(self, pr_util):
        commits = [
            {'message': 'a'},
            {'message': 'b',
             'added': [FileNode('file_b', 'test_content b\n')]},
            {'message': 'c',
             'added': [FileNode('file_c', 'test_content c\n')]},
        ]
        # open a PR from a to b, adding file_b
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'],
            name_suffix='per-file-review')

        # modify PR adding new file file_c
        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pull_request,
            added=['file_c'],
            modified=[],
            removed=[])

    def test_modify_and_undo_modification_diff(self, pr_util):
        commits = [
            {'message': 'a'},
            {'message': 'b',
             'added': [FileNode('file_b', 'test_content b\n')]},
            {'message': 'c',
             'changed': [FileNode('file_b', 'test_content b modified\n')]},
            {'message': 'd',
             'changed': [FileNode('file_b', 'test_content b\n')]},
        ]
        # open a PR from a to b, adding file_b
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'],
            name_suffix='per-file-review')

        # modify PR modifying file file_b
        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pull_request,
            added=[],
            modified=['file_b'],
            removed=[])

        # move the head again to d, which rolls back the change,
        # meaning we should indicate no changes
        pr_util.add_one_commit(head='d')

        assert_pr_file_changes(
            pull_request,
            added=[],
            modified=[],
            removed=[])

    def test_updated_all_files_in_pr(self, pr_util):
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [
                FileNode('file_a', 'test_content a\n'),
                FileNode('file_b', 'test_content b\n'),
                FileNode('file_c', 'test_content c\n')]},
            {'message': 'c', 'changed': [
                FileNode('file_a', 'test_content a changed\n'),
                FileNode('file_b', 'test_content b changed\n'),
                FileNode('file_c', 'test_content c changed\n')]},
        ]
        # open a PR from a to b, changing 3 files
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'],
            name_suffix='per-file-review')

        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pull_request,
            added=[],
            modified=['file_a', 'file_b', 'file_c'],
            removed=[])

    def test_updated_and_removed_all_files_in_pr(self, pr_util):
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [
                FileNode('file_a', 'test_content a\n'),
                FileNode('file_b', 'test_content b\n'),
                FileNode('file_c', 'test_content c\n')]},
            {'message': 'c', 'removed': [
                FileNode('file_a', 'test_content a changed\n'),
                FileNode('file_b', 'test_content b changed\n'),
                FileNode('file_c', 'test_content c changed\n')]},
        ]
        # open a PR from a to b, removing 3 files
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'],
            name_suffix='per-file-review')

        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pull_request,
            added=[],
            modified=[],
            removed=['file_a', 'file_b', 'file_c'])


def test_update_writes_snapshot_into_pull_request_version(pr_util, config_stub):
    model = PullRequestModel()
    pull_request = pr_util.create_pull_request()
    pr_util.update_source_repository()

    model.update_commits(pull_request, pull_request.author)

    # Expect that it has a version entry now
    assert len(model.get_versions(pull_request)) == 1


def test_update_skips_new_version_if_unchanged(pr_util, config_stub):
    pull_request = pr_util.create_pull_request()
    model = PullRequestModel()
    model.update_commits(pull_request, pull_request.author)

    # Expect that it still has no versions
    assert len(model.get_versions(pull_request)) == 0


def test_update_assigns_comments_to_the_new_version(pr_util, config_stub):
    model = PullRequestModel()
    pull_request = pr_util.create_pull_request()
    comment = pr_util.create_comment()
    pr_util.update_source_repository()

    model.update_commits(pull_request, pull_request.author)

    # Expect that the comment is linked to the pr version now
    assert comment.pull_request_version == model.get_versions(pull_request)[0]


def test_update_adds_a_comment_to_the_pull_request_about_the_change(pr_util, config_stub):
    model = PullRequestModel()
    pull_request = pr_util.create_pull_request()
    pr_util.update_source_repository()
    pr_util.update_source_repository()

    update_response = model.update_commits(pull_request, pull_request.author)

    commit_id = update_response.common_ancestor_id
    # Expect to find a new comment about the change
    expected_message = textwrap.dedent(
        """\
        Pull request updated. Auto status change to |under_review|

        .. role:: added
        .. role:: removed
        .. parsed-literal::

          Changed commits:
            * :added:`1 added`
            * :removed:`0 removed`

          Changed files:
            * `A file_2 <#a_c-{}-92ed3b5f07b4>`_

        .. |under_review| replace:: *"Under Review"*"""
    ).format(commit_id[:12])
    pull_request_comments = sorted(
        pull_request.comments, key=lambda c: c.modified_at)
    update_comment = pull_request_comments[-1]
    assert update_comment.text == expected_message


def test_create_version_from_snapshot_updates_attributes(pr_util, config_stub):
    pull_request = pr_util.create_pull_request()

    # Avoiding default values
    pull_request.status = PullRequest.STATUS_CLOSED
    pull_request._last_merge_source_rev = "0" * 40
    pull_request._last_merge_target_rev = "1" * 40
    pull_request.last_merge_status = 1
    pull_request.merge_rev = "2" * 40

    # Remember automatic values
    created_on = pull_request.created_on
    updated_on = pull_request.updated_on

    # Create a new version of the pull request
    version = PullRequestModel()._create_version_from_snapshot(pull_request)

    # Check attributes
    assert version.title == pr_util.create_parameters['title']
    assert version.description == pr_util.create_parameters['description']
    assert version.status == PullRequest.STATUS_CLOSED

    # versions get an updated created_on timestamp
    assert version.created_on != created_on

    assert version.updated_on == updated_on
    assert version.user_id == pull_request.user_id
    assert version.revisions == pr_util.create_parameters['revisions']
    assert version.source_repo == pr_util.source_repository
    assert version.source_ref == pr_util.create_parameters['source_ref']
    assert version.target_repo == pr_util.target_repository
    assert version.target_ref == pr_util.create_parameters['target_ref']
    assert version._last_merge_source_rev == pull_request._last_merge_source_rev
    assert version._last_merge_target_rev == pull_request._last_merge_target_rev
    assert version.last_merge_status == pull_request.last_merge_status
    assert version.merge_rev == pull_request.merge_rev
    assert version.pull_request == pull_request


def test_link_comments_to_version_only_updates_unlinked_comments(pr_util, config_stub):
    version1 = pr_util.create_version_of_pull_request()
    comment_linked = pr_util.create_comment(linked_to=version1)
    comment_unlinked = pr_util.create_comment()
    version2 = pr_util.create_version_of_pull_request()

    PullRequestModel()._link_comments_to_version(version2)
    Session().commit()

    # Expect that only the new comment is linked to version2
    assert (
        comment_unlinked.pull_request_version_id ==
        version2.pull_request_version_id)
    assert (
        comment_linked.pull_request_version_id ==
        version1.pull_request_version_id)
    assert (
        comment_unlinked.pull_request_version_id !=
        comment_linked.pull_request_version_id)


def test_calculate_commits():
    old_ids = [1, 2, 3]
    new_ids = [1, 3, 4, 5]
    change = PullRequestModel()._calculate_commit_id_changes(old_ids, new_ids)
    assert change.added == [4, 5]
    assert change.common == [1, 3]
    assert change.removed == [2]
    assert change.total == [1, 3, 4, 5]


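# Assertion helpers and the patcher used by the tests above; they query
# CommentsModel and PullRequestModel directly.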
947 def assert_inline_comments(pull_request, visible=None, outdated=None):
947 def assert_inline_comments(pull_request, visible=None, outdated=None):
948 if visible is not None:
948 if visible is not None:
949 inline_comments = CommentsModel().get_inline_comments(
949 inline_comments = CommentsModel().get_inline_comments(
950 pull_request.target_repo.repo_id, pull_request=pull_request)
950 pull_request.target_repo.repo_id, pull_request=pull_request)
951 inline_cnt = len(CommentsModel().get_inline_comments_as_list(
951 inline_cnt = len(CommentsModel().get_inline_comments_as_list(
952 inline_comments))
952 inline_comments))
953 assert inline_cnt == visible
953 assert inline_cnt == visible
954 if outdated is not None:
954 if outdated is not None:
955 outdated_comments = CommentsModel().get_outdated_comments(
955 outdated_comments = CommentsModel().get_outdated_comments(
956 pull_request.target_repo.repo_id, pull_request)
956 pull_request.target_repo.repo_id, pull_request)
957 assert len(outdated_comments) == outdated
957 assert len(outdated_comments) == outdated
958
958
959
959
960 def assert_pr_file_changes(
960 def assert_pr_file_changes(
961 pull_request, added=None, modified=None, removed=None):
961 pull_request, added=None, modified=None, removed=None):
962 pr_versions = PullRequestModel().get_versions(pull_request)
962 pr_versions = PullRequestModel().get_versions(pull_request)
963 # always use the first version, i.e. the original PR, to calculate changes
963 # always use the first version, i.e. the original PR, to calculate changes
964 pull_request_version = pr_versions[0]
964 pull_request_version = pr_versions[0]
965 old_diff_data, new_diff_data = PullRequestModel()._generate_update_diffs(
965 old_diff_data, new_diff_data = PullRequestModel()._generate_update_diffs(
966 pull_request, pull_request_version)
966 pull_request, pull_request_version)
967 file_changes = PullRequestModel()._calculate_file_changes(
967 file_changes = PullRequestModel()._calculate_file_changes(
968 old_diff_data, new_diff_data)
968 old_diff_data, new_diff_data)
969
969
970 assert added == file_changes.added, \
970 assert added == file_changes.added, \
971 'expected added:%s vs value:%s' % (added, file_changes.added)
971 'expected added:%s vs value:%s' % (added, file_changes.added)
972 assert modified == file_changes.modified, \
972 assert modified == file_changes.modified, \
973 'expected modified:%s vs value:%s' % (modified, file_changes.modified)
973 'expected modified:%s vs value:%s' % (modified, file_changes.modified)
974 assert removed == file_changes.removed, \
974 assert removed == file_changes.removed, \
975 'expected removed:%s vs value:%s' % (removed, file_changes.removed)
975 'expected removed:%s vs value:%s' % (removed, file_changes.removed)
976
976
977
977
978 def outdated_comments_patcher(use_outdated=True):
978 def outdated_comments_patcher(use_outdated=True):
979 return mock.patch.object(
979 return mock.patch.object(
980 CommentsModel, 'use_outdated_comments',
980 CommentsModel, 'use_outdated_comments',
981 return_value=use_outdated)
981 return_value=use_outdated)
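The helper above returns a `mock.patch.object` patcher, so it is typically used as a context manager around the code under test. A minimal usage sketch (the test body is illustrative):

    def test_with_outdated_comments_disabled(pr_util, config_stub):
        pull_request = pr_util.create_pull_request()
        with outdated_comments_patcher(use_outdated=False):
            # code under test now sees the outdated-comments feature switched off
            assert CommentsModel.use_outdated_comments(pull_request) is False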
@@ -1,1833 +1,1840 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import collections
21 import collections
22 import datetime
22 import datetime
23 import hashlib
23 import hashlib
24 import os
24 import os
25 import re
25 import re
26 import pprint
26 import pprint
27 import shutil
27 import shutil
28 import socket
28 import socket
29 import subprocess32
29 import subprocess32
30 import time
30 import time
31 import uuid
31 import uuid
32 import dateutil.tz
32 import dateutil.tz
33
33
34 import mock
34 import mock
35 import pyramid.testing
35 import pyramid.testing
36 import pytest
36 import pytest
37 import colander
37 import colander
38 import requests
38 import requests
39 import pyramid.paster
39 import pyramid.paster
40
40
41 import rhodecode
41 import rhodecode
42 from rhodecode.lib.utils2 import AttributeDict
42 from rhodecode.lib.utils2 import AttributeDict
43 from rhodecode.model.changeset_status import ChangesetStatusModel
43 from rhodecode.model.changeset_status import ChangesetStatusModel
44 from rhodecode.model.comment import CommentsModel
44 from rhodecode.model.comment import CommentsModel
45 from rhodecode.model.db import (
45 from rhodecode.model.db import (
46 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
46 PullRequest, PullRequestReviewers, Repository, RhodeCodeSetting, ChangesetStatus,
47 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
47 RepoGroup, UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
48 from rhodecode.model.meta import Session
48 from rhodecode.model.meta import Session
49 from rhodecode.model.pull_request import PullRequestModel
49 from rhodecode.model.pull_request import PullRequestModel
50 from rhodecode.model.repo import RepoModel
50 from rhodecode.model.repo import RepoModel
51 from rhodecode.model.repo_group import RepoGroupModel
51 from rhodecode.model.repo_group import RepoGroupModel
52 from rhodecode.model.user import UserModel
52 from rhodecode.model.user import UserModel
53 from rhodecode.model.settings import VcsSettingsModel
53 from rhodecode.model.settings import VcsSettingsModel
54 from rhodecode.model.user_group import UserGroupModel
54 from rhodecode.model.user_group import UserGroupModel
55 from rhodecode.model.integration import IntegrationModel
55 from rhodecode.model.integration import IntegrationModel
56 from rhodecode.integrations import integration_type_registry
56 from rhodecode.integrations import integration_type_registry
57 from rhodecode.integrations.types.base import IntegrationTypeBase
57 from rhodecode.integrations.types.base import IntegrationTypeBase
58 from rhodecode.lib.utils import repo2db_mapper
58 from rhodecode.lib.utils import repo2db_mapper
59 from rhodecode.lib.vcs.backends import get_backend
59 from rhodecode.lib.vcs.backends import get_backend
60 from rhodecode.lib.vcs.nodes import FileNode
60 from rhodecode.lib.vcs.nodes import FileNode
61 from rhodecode.tests import (
61 from rhodecode.tests import (
62 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
62 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
63 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
63 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
64 TEST_USER_REGULAR_PASS)
64 TEST_USER_REGULAR_PASS)
65 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
65 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
66 from rhodecode.tests.fixture import Fixture
66 from rhodecode.tests.fixture import Fixture
67 from rhodecode.config import utils as config_utils
67 from rhodecode.config import utils as config_utils
68
68
69
69
70 def _split_comma(value):
70 def _split_comma(value):
71 return value.split(',')
71 return value.split(',')
72
72
73
73
74 def pytest_addoption(parser):
74 def pytest_addoption(parser):
75 parser.addoption(
75 parser.addoption(
76 '--keep-tmp-path', action='store_true',
76 '--keep-tmp-path', action='store_true',
77 help="Keep the test temporary directories")
77 help="Keep the test temporary directories")
78 parser.addoption(
78 parser.addoption(
79 '--backends', action='store', type=_split_comma,
79 '--backends', action='store', type=_split_comma,
80 default=['git', 'hg', 'svn'],
80 default=['git', 'hg', 'svn'],
81 help="Select which backends to test for backend specific tests.")
81 help="Select which backends to test for backend specific tests.")
82 parser.addoption(
82 parser.addoption(
83 '--dbs', action='store', type=_split_comma,
83 '--dbs', action='store', type=_split_comma,
84 default=['sqlite'],
84 default=['sqlite'],
85 help="Select which database to test for database specific tests. "
85 help="Select which database to test for database specific tests. "
86 "Possible options are sqlite,postgres,mysql")
86 "Possible options are sqlite,postgres,mysql")
87 parser.addoption(
87 parser.addoption(
88 '--appenlight', '--ae', action='store_true',
88 '--appenlight', '--ae', action='store_true',
89 help="Track statistics in appenlight.")
89 help="Track statistics in appenlight.")
90 parser.addoption(
90 parser.addoption(
91 '--appenlight-api-key', '--ae-key',
91 '--appenlight-api-key', '--ae-key',
92 help="API key for Appenlight.")
92 help="API key for Appenlight.")
93 parser.addoption(
93 parser.addoption(
94 '--appenlight-url', '--ae-url',
94 '--appenlight-url', '--ae-url',
95 default="https://ae.rhodecode.com",
95 default="https://ae.rhodecode.com",
96 help="Appenlight service URL, defaults to https://ae.rhodecode.com")
96 help="Appenlight service URL, defaults to https://ae.rhodecode.com")
97 parser.addoption(
97 parser.addoption(
98 '--sqlite-connection-string', action='store',
98 '--sqlite-connection-string', action='store',
99 default='', help="Connection string for the dbs tests with SQLite")
99 default='', help="Connection string for the dbs tests with SQLite")
100 parser.addoption(
100 parser.addoption(
101 '--postgres-connection-string', action='store',
101 '--postgres-connection-string', action='store',
102 default='', help="Connection string for the dbs tests with Postgres")
102 default='', help="Connection string for the dbs tests with Postgres")
103 parser.addoption(
103 parser.addoption(
104 '--mysql-connection-string', action='store',
104 '--mysql-connection-string', action='store',
105 default='', help="Connection string for the dbs tests with MySQL")
105 default='', help="Connection string for the dbs tests with MySQL")
106 parser.addoption(
106 parser.addoption(
107 '--repeat', type=int, default=100,
107 '--repeat', type=int, default=100,
108 help="Number of repetitions in performance tests.")
108 help="Number of repetitions in performance tests.")
109
109
110
110
111 def pytest_configure(config):
111 def pytest_configure(config):
112 from rhodecode.config import patches
112 from rhodecode.config import patches
113
113
114
114
115 def pytest_collection_modifyitems(session, config, items):
115 def pytest_collection_modifyitems(session, config, items):
116 # nottest marked, compare nose, used for transition from nose to pytest
116 # nottest marked, compare nose, used for transition from nose to pytest
117 remaining = [
117 remaining = [
118 i for i in items if getattr(i.obj, '__test__', True)]
118 i for i in items if getattr(i.obj, '__test__', True)]
119 items[:] = remaining
119 items[:] = remaining
120
120
121 # NOTE(marcink): custom test ordering, db tests and vcs tests are slowest and should
121 # NOTE(marcink): custom test ordering, db tests and vcs tests are slowest and should
122 # be executed at the end for faster test feedback
122 # be executed at the end for faster test feedback
123 def sorter(item):
123 def sorter(item):
124 pos = 0
124 pos = 0
125 key = item._nodeid
125 key = item._nodeid
126 if key.startswith('rhodecode/tests/database'):
126 if key.startswith('rhodecode/tests/database'):
127 pos = 1
127 pos = 1
128 elif key.startswith('rhodecode/tests/vcs_operations'):
128 elif key.startswith('rhodecode/tests/vcs_operations'):
129 pos = 2
129 pos = 2
130
130
131 return pos
131 return pos
132
132
133 items.sort(key=sorter)
133 items.sort(key=sorter)
134
134
135
135
136 def pytest_generate_tests(metafunc):
136 def pytest_generate_tests(metafunc):
137
137
138 # Support test generation based on --backend parameter
138 # Support test generation based on --backend parameter
139 if 'backend_alias' in metafunc.fixturenames:
139 if 'backend_alias' in metafunc.fixturenames:
140 backends = get_backends_from_metafunc(metafunc)
140 backends = get_backends_from_metafunc(metafunc)
141 scope = None
141 scope = None
142 if not backends:
142 if not backends:
143 pytest.skip("Not enabled for any of selected backends")
143 pytest.skip("Not enabled for any of selected backends")
144
144
145 metafunc.parametrize('backend_alias', backends, scope=scope)
145 metafunc.parametrize('backend_alias', backends, scope=scope)
146
146
147 backend_mark = metafunc.definition.get_closest_marker('backends')
147 backend_mark = metafunc.definition.get_closest_marker('backends')
148 if backend_mark:
148 if backend_mark:
149 backends = get_backends_from_metafunc(metafunc)
149 backends = get_backends_from_metafunc(metafunc)
150 if not backends:
150 if not backends:
151 pytest.skip("Not enabled for any of selected backends")
151 pytest.skip("Not enabled for any of selected backends")
152
152
153
153
154 def get_backends_from_metafunc(metafunc):
154 def get_backends_from_metafunc(metafunc):
155 requested_backends = set(metafunc.config.getoption('--backends'))
155 requested_backends = set(metafunc.config.getoption('--backends'))
156 backend_mark = metafunc.definition.get_closest_marker('backends')
156 backend_mark = metafunc.definition.get_closest_marker('backends')
157 if backend_mark:
157 if backend_mark:
158 # Supported backends by this test function, created from
158 # Supported backends by this test function, created from
159 # pytest.mark.backends
159 # pytest.mark.backends
160 backends = backend_mark.args
160 backends = backend_mark.args
161 elif hasattr(metafunc.cls, 'backend_alias'):
161 elif hasattr(metafunc.cls, 'backend_alias'):
162 # Support class attribute "backend_alias", this is mainly
162 # Support class attribute "backend_alias", this is mainly
163 # for legacy reasons for tests not yet using pytest.mark.backends
163 # for legacy reasons for tests not yet using pytest.mark.backends
164 backends = [metafunc.cls.backend_alias]
164 backends = [metafunc.cls.backend_alias]
165 else:
165 else:
166 backends = metafunc.config.getoption('--backends')
166 backends = metafunc.config.getoption('--backends')
167 return requested_backends.intersection(backends)
167 return requested_backends.intersection(backends)
168
168
169
169
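A small illustration of the selection logic above, assuming a run with `--backends=git,hg` and a test marked `@pytest.mark.backends('git', 'svn')` (the concrete values are hypothetical):

    requested_backends = {'git', 'hg'}   # from --backends=git,hg
    marked_backends = ('git', 'svn')     # from @pytest.mark.backends('git', 'svn')
    # get_backends_from_metafunc() intersects both, so only 'git' gets parametrized
    assert requested_backends.intersection(marked_backends) == {'git'}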
170 @pytest.fixture(scope='session', autouse=True)
170 @pytest.fixture(scope='session', autouse=True)
171 def activate_example_rcextensions(request):
171 def activate_example_rcextensions(request):
172 """
172 """
173 Patch in an example rcextensions module which verifies passed in kwargs.
173 Patch in an example rcextensions module which verifies passed in kwargs.
174 """
174 """
175 from rhodecode.config import rcextensions
175 from rhodecode.config import rcextensions
176
176
177 old_extensions = rhodecode.EXTENSIONS
177 old_extensions = rhodecode.EXTENSIONS
178 rhodecode.EXTENSIONS = rcextensions
178 rhodecode.EXTENSIONS = rcextensions
179 rhodecode.EXTENSIONS.calls = collections.defaultdict(list)
179 rhodecode.EXTENSIONS.calls = collections.defaultdict(list)
180
180
181 @request.addfinalizer
181 @request.addfinalizer
182 def cleanup():
182 def cleanup():
183 rhodecode.EXTENSIONS = old_extensions
183 rhodecode.EXTENSIONS = old_extensions
184
184
185
185
186 @pytest.fixture()
186 @pytest.fixture()
187 def capture_rcextensions():
187 def capture_rcextensions():
188 """
188 """
189 Returns the recorded calls to entry points in rcextensions.
189 Returns the recorded calls to entry points in rcextensions.
190 """
190 """
191 calls = rhodecode.EXTENSIONS.calls
191 calls = rhodecode.EXTENSIONS.calls
192 calls.clear()
192 calls.clear()
193 # Note: At this moment, it is still the empty dict, but that will
193 # Note: At this moment, it is still the empty dict, but that will
194 # be filled during the test run and since it is a reference this
194 # be filled during the test run and since it is a reference this
195 # is enough to make it work.
195 # is enough to make it work.
196 return calls
196 return calls
197
197
198
198
199 @pytest.fixture(scope='session')
199 @pytest.fixture(scope='session')
200 def http_environ_session():
200 def http_environ_session():
201 """
201 """
202 Allow to use "http_environ" in session scope.
202 Allow to use "http_environ" in session scope.
203 """
203 """
204 return plain_http_environ()
204 return plain_http_environ()
205
205
206
206
207 def plain_http_host_stub():
207 def plain_http_host_stub():
208 """
208 """
209 Value of HTTP_HOST in the test run.
209 Value of HTTP_HOST in the test run.
210 """
210 """
211 return 'example.com:80'
211 return 'example.com:80'
212
212
213
213
214 @pytest.fixture()
214 @pytest.fixture()
215 def http_host_stub():
215 def http_host_stub():
216 """
216 """
217 Value of HTTP_HOST in the test run.
217 Value of HTTP_HOST in the test run.
218 """
218 """
219 return plain_http_host_stub()
219 return plain_http_host_stub()
220
220
221
221
222 def plain_http_host_only_stub():
222 def plain_http_host_only_stub():
223 """
223 """
224 Value of HTTP_HOST in the test run.
224 Value of HTTP_HOST in the test run.
225 """
225 """
226 return plain_http_host_stub().split(':')[0]
226 return plain_http_host_stub().split(':')[0]
227
227
228
228
229 @pytest.fixture()
229 @pytest.fixture()
230 def http_host_only_stub():
230 def http_host_only_stub():
231 """
231 """
232 Value of HTTP_HOST in the test run.
232 Value of HTTP_HOST in the test run.
233 """
233 """
234 return plain_http_host_only_stub()
234 return plain_http_host_only_stub()
235
235
236
236
237 def plain_http_environ():
237 def plain_http_environ():
238 """
238 """
239 HTTP extra environ keys.
239 HTTP extra environ keys.
240
240
241 Used by the test application as well as for setting up the pylons
241 Used by the test application as well as for setting up the pylons
242 environment. In the case of the fixture "app" it should be possible
242 environment. In the case of the fixture "app" it should be possible
243 to override this for a specific test case.
243 to override this for a specific test case.
244 """
244 """
245 return {
245 return {
246 'SERVER_NAME': plain_http_host_only_stub(),
246 'SERVER_NAME': plain_http_host_only_stub(),
247 'SERVER_PORT': plain_http_host_stub().split(':')[1],
247 'SERVER_PORT': plain_http_host_stub().split(':')[1],
248 'HTTP_HOST': plain_http_host_stub(),
248 'HTTP_HOST': plain_http_host_stub(),
249 'HTTP_USER_AGENT': 'rc-test-agent',
249 'HTTP_USER_AGENT': 'rc-test-agent',
250 'REQUEST_METHOD': 'GET'
250 'REQUEST_METHOD': 'GET'
251 }
251 }
252
252
253
253
254 @pytest.fixture()
254 @pytest.fixture()
255 def http_environ():
255 def http_environ():
256 """
256 """
257 HTTP extra environ keys.
257 HTTP extra environ keys.
258
258
259 Used by the test application as well as for setting up the pylons
259 Used by the test application as well as for setting up the pylons
260 environment. In the case of the fixture "app" it should be possible
260 environment. In the case of the fixture "app" it should be possible
261 to override this for a specific test case.
261 to override this for a specific test case.
262 """
262 """
263 return plain_http_environ()
263 return plain_http_environ()
264
264
265
265
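As the docstring notes, `http_environ` can be overridden for a specific test case. A minimal sketch of a module-local override, reusing the same keys as `plain_http_environ` (the host and port values are hypothetical):

    import pytest

    @pytest.fixture()
    def http_environ():
        # shadows the conftest fixture for tests in this module only
        return {
            'SERVER_NAME': 'override.example.com',
            'SERVER_PORT': '8888',
            'HTTP_HOST': 'override.example.com:8888',
            'HTTP_USER_AGENT': 'rc-test-agent',
            'REQUEST_METHOD': 'GET',
        }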
266 @pytest.fixture(scope='session')
266 @pytest.fixture(scope='session')
267 def baseapp(ini_config, vcsserver, http_environ_session):
267 def baseapp(ini_config, vcsserver, http_environ_session):
268 from rhodecode.lib.pyramid_utils import get_app_config
268 from rhodecode.lib.pyramid_utils import get_app_config
269 from rhodecode.config.middleware import make_pyramid_app
269 from rhodecode.config.middleware import make_pyramid_app
270
270
271 print("Using the RhodeCode configuration:{}".format(ini_config))
271 print("Using the RhodeCode configuration:{}".format(ini_config))
272 pyramid.paster.setup_logging(ini_config)
272 pyramid.paster.setup_logging(ini_config)
273
273
274 settings = get_app_config(ini_config)
274 settings = get_app_config(ini_config)
275 app = make_pyramid_app({'__file__': ini_config}, **settings)
275 app = make_pyramid_app({'__file__': ini_config}, **settings)
276
276
277 return app
277 return app
278
278
279
279
280 @pytest.fixture(scope='function')
280 @pytest.fixture(scope='function')
281 def app(request, config_stub, baseapp, http_environ):
281 def app(request, config_stub, baseapp, http_environ):
282 app = CustomTestApp(
282 app = CustomTestApp(
283 baseapp,
283 baseapp,
284 extra_environ=http_environ)
284 extra_environ=http_environ)
285 if request.cls:
285 if request.cls:
286 request.cls.app = app
286 request.cls.app = app
287 return app
287 return app
288
288
289
289
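A minimal sketch of a functional test built on the `app` fixture above (the URL is illustrative, not a guaranteed route):

    def test_home_page_renders(app, autologin_user):
        # status=200 makes webtest assert the response code for us
        response = app.get('/', status=200)
        assert response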
290 @pytest.fixture(scope='session')
290 @pytest.fixture(scope='session')
291 def app_settings(baseapp, ini_config):
291 def app_settings(baseapp, ini_config):
292 """
292 """
293 Settings dictionary used to create the app.
293 Settings dictionary used to create the app.
294
294
295 Parses the ini file and passes the result through the sanitize and apply
295 Parses the ini file and passes the result through the sanitize and apply
296 defaults mechanism in `rhodecode.config.middleware`.
296 defaults mechanism in `rhodecode.config.middleware`.
297 """
297 """
298 return baseapp.config.get_settings()
298 return baseapp.config.get_settings()
299
299
300
300
301 @pytest.fixture(scope='session')
301 @pytest.fixture(scope='session')
302 def db_connection(ini_settings):
302 def db_connection(ini_settings):
303 # Initialize the database connection.
303 # Initialize the database connection.
304 config_utils.initialize_database(ini_settings)
304 config_utils.initialize_database(ini_settings)
305
305
306
306
307 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
307 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
308
308
309
309
310 def _autologin_user(app, *args):
310 def _autologin_user(app, *args):
311 session = login_user_session(app, *args)
311 session = login_user_session(app, *args)
312 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
312 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
313 return LoginData(csrf_token, session['rhodecode_user'])
313 return LoginData(csrf_token, session['rhodecode_user'])
314
314
315
315
316 @pytest.fixture()
316 @pytest.fixture()
317 def autologin_user(app):
317 def autologin_user(app):
318 """
318 """
319 Utility fixture which makes sure that the admin user is logged in
319 Utility fixture which makes sure that the admin user is logged in
320 """
320 """
321 return _autologin_user(app)
321 return _autologin_user(app)
322
322
323
323
324 @pytest.fixture()
324 @pytest.fixture()
325 def autologin_regular_user(app):
325 def autologin_regular_user(app):
326 """
326 """
327 Utility fixture which makes sure that the regular user is logged in
327 Utility fixture which makes sure that the regular user is logged in
328 """
328 """
329 return _autologin_user(
329 return _autologin_user(
330 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
330 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
331
331
332
332
333 @pytest.fixture(scope='function')
333 @pytest.fixture(scope='function')
334 def csrf_token(request, autologin_user):
334 def csrf_token(request, autologin_user):
335 return autologin_user.csrf_token
335 return autologin_user.csrf_token
336
336
337
337
338 @pytest.fixture(scope='function')
338 @pytest.fixture(scope='function')
339 def xhr_header(request):
339 def xhr_header(request):
340 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
340 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
341
341
342
342
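A sketch of how `csrf_token` and `xhr_header` are typically combined in a POST-style test (the URL and form fields are hypothetical):

    def test_ajax_post(app, csrf_token, xhr_header):
        app.post(
            '/some/ajax/endpoint',
            params={'csrf_token': csrf_token, 'comment': 'looks good'},
            extra_environ=xhr_header,
            status=200)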
343 @pytest.fixture()
343 @pytest.fixture()
344 def real_crypto_backend(monkeypatch):
344 def real_crypto_backend(monkeypatch):
345 """
345 """
346 Switch the production crypto backend on for this test.
346 Switch the production crypto backend on for this test.
347
347
348 During the test run the crypto backend is replaced with a faster
348 During the test run the crypto backend is replaced with a faster
349 implementation based on the MD5 algorithm.
349 implementation based on the MD5 algorithm.
350 """
350 """
351 monkeypatch.setattr(rhodecode, 'is_test', False)
351 monkeypatch.setattr(rhodecode, 'is_test', False)
352
352
353
353
354 @pytest.fixture(scope='class')
354 @pytest.fixture(scope='class')
355 def index_location(request, baseapp):
355 def index_location(request, baseapp):
356 index_location = baseapp.config.get_settings()['search.location']
356 index_location = baseapp.config.get_settings()['search.location']
357 if request.cls:
357 if request.cls:
358 request.cls.index_location = index_location
358 request.cls.index_location = index_location
359 return index_location
359 return index_location
360
360
361
361
362 @pytest.fixture(scope='session', autouse=True)
362 @pytest.fixture(scope='session', autouse=True)
363 def tests_tmp_path(request):
363 def tests_tmp_path(request):
364 """
364 """
365 Create temporary directory to be used during the test session.
365 Create temporary directory to be used during the test session.
366 """
366 """
367 if not os.path.exists(TESTS_TMP_PATH):
367 if not os.path.exists(TESTS_TMP_PATH):
368 os.makedirs(TESTS_TMP_PATH)
368 os.makedirs(TESTS_TMP_PATH)
369
369
370 if not request.config.getoption('--keep-tmp-path'):
370 if not request.config.getoption('--keep-tmp-path'):
371 @request.addfinalizer
371 @request.addfinalizer
372 def remove_tmp_path():
372 def remove_tmp_path():
373 shutil.rmtree(TESTS_TMP_PATH)
373 shutil.rmtree(TESTS_TMP_PATH)
374
374
375 return TESTS_TMP_PATH
375 return TESTS_TMP_PATH
376
376
377
377
378 @pytest.fixture()
378 @pytest.fixture()
379 def test_repo_group(request):
379 def test_repo_group(request):
380 """
380 """
381 Create a temporary repository group, and destroy it after
381 Create a temporary repository group, and destroy it after
382 usage automatically
382 usage automatically
383 """
383 """
384 fixture = Fixture()
384 fixture = Fixture()
385 repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
385 repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
386 repo_group = fixture.create_repo_group(repogroupid)
386 repo_group = fixture.create_repo_group(repogroupid)
387
387
388 def _cleanup():
388 def _cleanup():
389 fixture.destroy_repo_group(repogroupid)
389 fixture.destroy_repo_group(repogroupid)
390
390
391 request.addfinalizer(_cleanup)
391 request.addfinalizer(_cleanup)
392 return repo_group
392 return repo_group
393
393
394
394
395 @pytest.fixture()
395 @pytest.fixture()
396 def test_user_group(request):
396 def test_user_group(request):
397 """
397 """
398 Create a temporary user group, and destroy it after
398 Create a temporary user group, and destroy it after
399 usage automatically
399 usage automatically
400 """
400 """
401 fixture = Fixture()
401 fixture = Fixture()
402 usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
402 usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
403 user_group = fixture.create_user_group(usergroupid)
403 user_group = fixture.create_user_group(usergroupid)
404
404
405 def _cleanup():
405 def _cleanup():
406 fixture.destroy_user_group(user_group)
406 fixture.destroy_user_group(user_group)
407
407
408 request.addfinalizer(_cleanup)
408 request.addfinalizer(_cleanup)
409 return user_group
409 return user_group
410
410
411
411
412 @pytest.fixture(scope='session')
412 @pytest.fixture(scope='session')
413 def test_repo(request):
413 def test_repo(request):
414 container = TestRepoContainer()
414 container = TestRepoContainer()
415 request.addfinalizer(container._cleanup)
415 request.addfinalizer(container._cleanup)
416 return container
416 return container
417
417
418
418
419 class TestRepoContainer(object):
419 class TestRepoContainer(object):
420 """
420 """
421 Container for test repositories which are used read only.
421 Container for test repositories which are used read only.
422
422
423 Repositories will be created on demand and re-used during the lifetime
423 Repositories will be created on demand and re-used during the lifetime
424 of this object.
424 of this object.
425
425
426 Usage to get the svn test repository "minimal"::
426 Usage to get the svn test repository "minimal"::
427
427
428 test_repo = TestRepoContainer()
428 test_repo = TestRepoContainer()
429 repo = test_repo('minimal', 'svn')
429 repo = test_repo('minimal', 'svn')
430
430
431 """
431 """
432
432
433 dump_extractors = {
433 dump_extractors = {
434 'git': utils.extract_git_repo_from_dump,
434 'git': utils.extract_git_repo_from_dump,
435 'hg': utils.extract_hg_repo_from_dump,
435 'hg': utils.extract_hg_repo_from_dump,
436 'svn': utils.extract_svn_repo_from_dump,
436 'svn': utils.extract_svn_repo_from_dump,
437 }
437 }
438
438
439 def __init__(self):
439 def __init__(self):
440 self._cleanup_repos = []
440 self._cleanup_repos = []
441 self._fixture = Fixture()
441 self._fixture = Fixture()
442 self._repos = {}
442 self._repos = {}
443
443
444 def __call__(self, dump_name, backend_alias, config=None):
444 def __call__(self, dump_name, backend_alias, config=None):
445 key = (dump_name, backend_alias)
445 key = (dump_name, backend_alias)
446 if key not in self._repos:
446 if key not in self._repos:
447 repo = self._create_repo(dump_name, backend_alias, config)
447 repo = self._create_repo(dump_name, backend_alias, config)
448 self._repos[key] = repo.repo_id
448 self._repos[key] = repo.repo_id
449 return Repository.get(self._repos[key])
449 return Repository.get(self._repos[key])
450
450
451 def _create_repo(self, dump_name, backend_alias, config):
451 def _create_repo(self, dump_name, backend_alias, config):
452 repo_name = '%s-%s' % (backend_alias, dump_name)
452 repo_name = '%s-%s' % (backend_alias, dump_name)
453 backend = get_backend(backend_alias)
453 backend = get_backend(backend_alias)
454 dump_extractor = self.dump_extractors[backend_alias]
454 dump_extractor = self.dump_extractors[backend_alias]
455 repo_path = dump_extractor(dump_name, repo_name)
455 repo_path = dump_extractor(dump_name, repo_name)
456
456
457 vcs_repo = backend(repo_path, config=config)
457 vcs_repo = backend(repo_path, config=config)
458 repo2db_mapper({repo_name: vcs_repo})
458 repo2db_mapper({repo_name: vcs_repo})
459
459
460 repo = RepoModel().get_by_repo_name(repo_name)
460 repo = RepoModel().get_by_repo_name(repo_name)
461 self._cleanup_repos.append(repo_name)
461 self._cleanup_repos.append(repo_name)
462 return repo
462 return repo
463
463
464 def _cleanup(self):
464 def _cleanup(self):
465 for repo_name in reversed(self._cleanup_repos):
465 for repo_name in reversed(self._cleanup_repos):
466 self._fixture.destroy_repo(repo_name)
466 self._fixture.destroy_repo(repo_name)
467
467
468
468
469 def backend_base(request, backend_alias, baseapp, test_repo):
469 def backend_base(request, backend_alias, baseapp, test_repo):
470 if backend_alias not in request.config.getoption('--backends'):
470 if backend_alias not in request.config.getoption('--backends'):
471 pytest.skip("Backend %s not selected." % (backend_alias, ))
471 pytest.skip("Backend %s not selected." % (backend_alias, ))
472
472
473 utils.check_xfail_backends(request.node, backend_alias)
473 utils.check_xfail_backends(request.node, backend_alias)
474 utils.check_skip_backends(request.node, backend_alias)
474 utils.check_skip_backends(request.node, backend_alias)
475
475
476 repo_name = 'vcs_test_%s' % (backend_alias, )
476 repo_name = 'vcs_test_%s' % (backend_alias, )
477 backend = Backend(
477 backend = Backend(
478 alias=backend_alias,
478 alias=backend_alias,
479 repo_name=repo_name,
479 repo_name=repo_name,
480 test_name=request.node.name,
480 test_name=request.node.name,
481 test_repo_container=test_repo)
481 test_repo_container=test_repo)
482 request.addfinalizer(backend.cleanup)
482 request.addfinalizer(backend.cleanup)
483 return backend
483 return backend
484
484
485
485
486 @pytest.fixture()
486 @pytest.fixture()
487 def backend(request, backend_alias, baseapp, test_repo):
487 def backend(request, backend_alias, baseapp, test_repo):
488 """
488 """
489 Parametrized fixture which represents a single backend implementation.
489 Parametrized fixture which represents a single backend implementation.
490
490
491 It respects the option `--backends` to focus the test run on specific
491 It respects the option `--backends` to focus the test run on specific
492 backend implementations.
492 backend implementations.
493
493
494 It also supports `pytest.mark.xfail_backends` to mark tests as failing
494 It also supports `pytest.mark.xfail_backends` to mark tests as failing
495 for specific backends. This is intended as a utility for incremental
495 for specific backends. This is intended as a utility for incremental
496 development of a new backend implementation.
496 development of a new backend implementation.
497 """
497 """
498 return backend_base(request, backend_alias, baseapp, test_repo)
498 return backend_base(request, backend_alias, baseapp, test_repo)
499
499
500
500
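A minimal sketch of a backend-parametrized test built on the `Backend` helper defined further below (the marker selection and commit count are illustrative):

    import pytest

    @pytest.mark.backends('git', 'hg')
    def test_repo_gets_commits(backend):
        repo = backend.create_repo(number_of_commits=2)
        vcs_repo = repo.scm_instance()
        assert len(vcs_repo.commit_ids) == 2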
501 @pytest.fixture()
501 @pytest.fixture()
502 def backend_git(request, baseapp, test_repo):
502 def backend_git(request, baseapp, test_repo):
503 return backend_base(request, 'git', baseapp, test_repo)
503 return backend_base(request, 'git', baseapp, test_repo)
504
504
505
505
506 @pytest.fixture()
506 @pytest.fixture()
507 def backend_hg(request, baseapp, test_repo):
507 def backend_hg(request, baseapp, test_repo):
508 return backend_base(request, 'hg', baseapp, test_repo)
508 return backend_base(request, 'hg', baseapp, test_repo)
509
509
510
510
511 @pytest.fixture()
511 @pytest.fixture()
512 def backend_svn(request, baseapp, test_repo):
512 def backend_svn(request, baseapp, test_repo):
513 return backend_base(request, 'svn', baseapp, test_repo)
513 return backend_base(request, 'svn', baseapp, test_repo)
514
514
515
515
516 @pytest.fixture()
516 @pytest.fixture()
517 def backend_random(backend_git):
517 def backend_random(backend_git):
518 """
518 """
519 Use this to express that your tests need "a backend".
519 Use this to express that your tests need "a backend".
520
520
521 A few of our tests need a backend, so that we can run the code. This
521 A few of our tests need a backend, so that we can run the code. This
522 fixture is intended to be used for such cases. It will pick one of the
522 fixture is intended to be used for such cases. It will pick one of the
523 backends and run the tests.
523 backends and run the tests.
524
524
525 The fixture `backend` would run the test multiple times for each
525 The fixture `backend` would run the test multiple times for each
526 available backend which is a pure waste of time if the test is
526 available backend which is a pure waste of time if the test is
527 independent of the backend type.
527 independent of the backend type.
528 """
528 """
529 # TODO: johbo: Change this to pick a random backend
529 # TODO: johbo: Change this to pick a random backend
530 return backend_git
530 return backend_git
531
531
532
532
533 @pytest.fixture()
533 @pytest.fixture()
534 def backend_stub(backend_git):
534 def backend_stub(backend_git):
535 """
535 """
536 Use this to express that your tests need a backend stub
536 Use this to express that your tests need a backend stub
537
537
538 TODO: mikhail: Implement a real stub logic instead of returning
538 TODO: mikhail: Implement a real stub logic instead of returning
539 a git backend
539 a git backend
540 """
540 """
541 return backend_git
541 return backend_git
542
542
543
543
544 @pytest.fixture()
544 @pytest.fixture()
545 def repo_stub(backend_stub):
545 def repo_stub(backend_stub):
546 """
546 """
547 Use this to express that your tests need a repository stub
547 Use this to express that your tests need a repository stub
548 """
548 """
549 return backend_stub.create_repo()
549 return backend_stub.create_repo()
550
550
551
551
552 class Backend(object):
552 class Backend(object):
553 """
553 """
554 Represents the test configuration for one supported backend
554 Represents the test configuration for one supported backend
555
555
556 Provides easy access to different test repositories based on
556 Provides easy access to different test repositories based on
557 `__getitem__`. Such repositories will only be created once per test
557 `__getitem__`. Such repositories will only be created once per test
558 session.
558 session.
559 """
559 """
560
560
561 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
561 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
562 _master_repo = None
562 _master_repo = None
563 _master_repo_path = ''
563 _master_repo_path = ''
564 _commit_ids = {}
564 _commit_ids = {}
565
565
566 def __init__(self, alias, repo_name, test_name, test_repo_container):
566 def __init__(self, alias, repo_name, test_name, test_repo_container):
567 self.alias = alias
567 self.alias = alias
568 self.repo_name = repo_name
568 self.repo_name = repo_name
569 self._cleanup_repos = []
569 self._cleanup_repos = []
570 self._test_name = test_name
570 self._test_name = test_name
571 self._test_repo_container = test_repo_container
571 self._test_repo_container = test_repo_container
572 # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
572 # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
573 # Fixture will survive in the end.
573 # Fixture will survive in the end.
574 self._fixture = Fixture()
574 self._fixture = Fixture()
575
575
576 def __getitem__(self, key):
576 def __getitem__(self, key):
577 return self._test_repo_container(key, self.alias)
577 return self._test_repo_container(key, self.alias)
578
578
579 def create_test_repo(self, key, config=None):
579 def create_test_repo(self, key, config=None):
580 return self._test_repo_container(key, self.alias, config)
580 return self._test_repo_container(key, self.alias, config)
581
581
582 @property
582 @property
583 def repo(self):
583 def repo(self):
584 """
584 """
585 Returns the "current" repository. This is the vcs_test repo or the
585 Returns the "current" repository. This is the vcs_test repo or the
586 last repo which has been created with `create_repo`.
586 last repo which has been created with `create_repo`.
587 """
587 """
588 from rhodecode.model.db import Repository
588 from rhodecode.model.db import Repository
589 return Repository.get_by_repo_name(self.repo_name)
589 return Repository.get_by_repo_name(self.repo_name)
590
590
591 @property
591 @property
592 def default_branch_name(self):
592 def default_branch_name(self):
593 VcsRepository = get_backend(self.alias)
593 VcsRepository = get_backend(self.alias)
594 return VcsRepository.DEFAULT_BRANCH_NAME
594 return VcsRepository.DEFAULT_BRANCH_NAME
595
595
596 @property
596 @property
597 def default_head_id(self):
597 def default_head_id(self):
598 """
598 """
599 Returns the default head id of the underlying backend.
599 Returns the default head id of the underlying backend.
600
600
601 This will be the default branch name in case the backend does have a
601 This will be the default branch name in case the backend does have a
602 default branch. In the other cases it will point to a valid head
602 default branch. In the other cases it will point to a valid head
603 which can serve as the base to create a new commit on top of it.
603 which can serve as the base to create a new commit on top of it.
604 """
604 """
605 vcsrepo = self.repo.scm_instance()
605 vcsrepo = self.repo.scm_instance()
606 head_id = (
606 head_id = (
607 vcsrepo.DEFAULT_BRANCH_NAME or
607 vcsrepo.DEFAULT_BRANCH_NAME or
608 vcsrepo.commit_ids[-1])
608 vcsrepo.commit_ids[-1])
609 return head_id
609 return head_id
610
610
611 @property
611 @property
612 def commit_ids(self):
612 def commit_ids(self):
613 """
613 """
614 Returns the list of commits for the last created repository
614 Returns the list of commits for the last created repository
615 """
615 """
616 return self._commit_ids
616 return self._commit_ids
617
617
618 def create_master_repo(self, commits):
618 def create_master_repo(self, commits):
619 """
619 """
620 Create a repository and remember it as a template.
620 Create a repository and remember it as a template.
621
621
622 This allows to easily create derived repositories to construct
622 This allows to easily create derived repositories to construct
623 more complex scenarios for diff, compare and pull requests.
623 more complex scenarios for diff, compare and pull requests.
624
624
625 Returns a commit map which maps from commit message to raw_id.
625 Returns a commit map which maps from commit message to raw_id.
626 """
626 """
627 self._master_repo = self.create_repo(commits=commits)
627 self._master_repo = self.create_repo(commits=commits)
628 self._master_repo_path = self._master_repo.repo_full_path
628 self._master_repo_path = self._master_repo.repo_full_path
629
629
630 return self._commit_ids
630 return self._commit_ids
631
631
632 def create_repo(
632 def create_repo(
633 self, commits=None, number_of_commits=0, heads=None,
633 self, commits=None, number_of_commits=0, heads=None,
634 name_suffix=u'', bare=False, **kwargs):
634 name_suffix=u'', bare=False, **kwargs):
635 """
635 """
636 Create a repository and record it for later cleanup.
636 Create a repository and record it for later cleanup.
637
637
638 :param commits: Optional. A sequence of dict instances.
638 :param commits: Optional. A sequence of dict instances.
639 Will add a commit per entry to the new repository.
639 Will add a commit per entry to the new repository.
640 :param number_of_commits: Optional. If set to a number, this number of
640 :param number_of_commits: Optional. If set to a number, this number of
641 commits will be added to the new repository.
641 commits will be added to the new repository.
642 :param heads: Optional. Can be set to a sequence of commit
642 :param heads: Optional. Can be set to a sequence of commit
643 names which shall be pulled in from the master repository.
643 names which shall be pulled in from the master repository.
644 :param name_suffix: adds special suffix to generated repo name
644 :param name_suffix: adds special suffix to generated repo name
645 :param bare: set a repo as bare (no checkout)
645 :param bare: set a repo as bare (no checkout)
646 """
646 """
647 self.repo_name = self._next_repo_name() + name_suffix
647 self.repo_name = self._next_repo_name() + name_suffix
648 repo = self._fixture.create_repo(
648 repo = self._fixture.create_repo(
649 self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
649 self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
650 self._cleanup_repos.append(repo.repo_name)
650 self._cleanup_repos.append(repo.repo_name)
651
651
652 commits = commits or [
652 commits = commits or [
653 {'message': 'Commit %s of %s' % (x, self.repo_name)}
653 {'message': 'Commit %s of %s' % (x, self.repo_name)}
654 for x in range(number_of_commits)]
654 for x in range(number_of_commits)]
655 vcs_repo = repo.scm_instance()
655 vcs_repo = repo.scm_instance()
656 vcs_repo.count()
656 vcs_repo.count()
657 self._add_commits_to_repo(vcs_repo, commits)
657 self._add_commits_to_repo(vcs_repo, commits)
658 if heads:
658 if heads:
659 self.pull_heads(repo, heads)
659 self.pull_heads(repo, heads)
660
660
661 return repo
661 return repo
662
662
663 def pull_heads(self, repo, heads):
663 def pull_heads(self, repo, heads):
664 """
664 """
665 Make sure that repo contains all commits mentioned in `heads`
665 Make sure that repo contains all commits mentioned in `heads`
666 """
666 """
667 vcsrepo = repo.scm_instance()
667 vcsrepo = repo.scm_instance()
668 vcsrepo.config.clear_section('hooks')
668 vcsrepo.config.clear_section('hooks')
669 commit_ids = [self._commit_ids[h] for h in heads]
669 commit_ids = [self._commit_ids[h] for h in heads]
670 vcsrepo.pull(self._master_repo_path, commit_ids=commit_ids)
670 vcsrepo.pull(self._master_repo_path, commit_ids=commit_ids)
671
671
672 def create_fork(self):
672 def create_fork(self):
673 repo_to_fork = self.repo_name
673 repo_to_fork = self.repo_name
674 self.repo_name = self._next_repo_name()
674 self.repo_name = self._next_repo_name()
675 repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
675 repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
676 self._cleanup_repos.append(self.repo_name)
676 self._cleanup_repos.append(self.repo_name)
677 return repo
677 return repo
678
678
679 def new_repo_name(self, suffix=u''):
679 def new_repo_name(self, suffix=u''):
680 self.repo_name = self._next_repo_name() + suffix
680 self.repo_name = self._next_repo_name() + suffix
681 self._cleanup_repos.append(self.repo_name)
681 self._cleanup_repos.append(self.repo_name)
682 return self.repo_name
682 return self.repo_name
683
683
684 def _next_repo_name(self):
684 def _next_repo_name(self):
685 return u"%s_%s" % (
685 return u"%s_%s" % (
686 self.invalid_repo_name.sub(u'_', self._test_name), len(self._cleanup_repos))
686 self.invalid_repo_name.sub(u'_', self._test_name), len(self._cleanup_repos))
687
687
688 def ensure_file(self, filename, content='Test content\n'):
688 def ensure_file(self, filename, content='Test content\n'):
689 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
689 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
690 commits = [
690 commits = [
691 {'added': [
691 {'added': [
692 FileNode(filename, content=content),
692 FileNode(filename, content=content),
693 ]},
693 ]},
694 ]
694 ]
695 self._add_commits_to_repo(self.repo.scm_instance(), commits)
695 self._add_commits_to_repo(self.repo.scm_instance(), commits)
696
696
697 def enable_downloads(self):
697 def enable_downloads(self):
698 repo = self.repo
698 repo = self.repo
699 repo.enable_downloads = True
699 repo.enable_downloads = True
700 Session().add(repo)
700 Session().add(repo)
701 Session().commit()
701 Session().commit()
702
702
703 def cleanup(self):
703 def cleanup(self):
704 for repo_name in reversed(self._cleanup_repos):
704 for repo_name in reversed(self._cleanup_repos):
705 self._fixture.destroy_repo(repo_name)
705 self._fixture.destroy_repo(repo_name)
706
706
707 def _add_commits_to_repo(self, repo, commits):
707 def _add_commits_to_repo(self, repo, commits):
708 commit_ids = _add_commits_to_repo(repo, commits)
708 commit_ids = _add_commits_to_repo(repo, commits)
709 if not commit_ids:
709 if not commit_ids:
710 return
710 return
711 self._commit_ids = commit_ids
711 self._commit_ids = commit_ids
712
712
713 # Creating refs for Git to allow fetching them from remote repository
713 # Creating refs for Git to allow fetching them from remote repository
714 if self.alias == 'git':
714 if self.alias == 'git':
715 refs = {}
715 refs = {}
716 for message in self._commit_ids:
716 for message in self._commit_ids:
717 # TODO: mikhail: do more special chars replacements
717 # TODO: mikhail: do more special chars replacements
718 ref_name = 'refs/test-refs/{}'.format(
718 ref_name = 'refs/test-refs/{}'.format(
719 message.replace(' ', ''))
719 message.replace(' ', ''))
720 refs[ref_name] = self._commit_ids[message]
720 refs[ref_name] = self._commit_ids[message]
721 self._create_refs(repo, refs)
721 self._create_refs(repo, refs)
722
722
723 def _create_refs(self, repo, refs):
723 def _create_refs(self, repo, refs):
724 for ref_name in refs:
724 for ref_name in refs:
725 repo.set_refs(ref_name, refs[ref_name])
725 repo.set_refs(ref_name, refs[ref_name])
726
726
727
727
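The `create_master_repo` / `create_repo(heads=...)` pair above is what the more involved diff, compare and pull-request scenarios build on. A minimal sketch, assuming a test body that already received the `backend` fixture (the commit messages are hypothetical):

    def test_compare_scenario(backend):
        commits = [
            {'message': 'initial'},
            {'message': 'feature-change', 'parents': ['initial']},
        ]
        commit_map = backend.create_master_repo(commits)  # maps message -> raw_id
        target = backend.create_repo(heads=['initial'])
        source = backend.create_repo(heads=['initial', 'feature-change'])
        assert set(commit_map) == {'initial', 'feature-change'}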
728 def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo):
728 def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo):
729 if backend_alias not in request.config.getoption('--backends'):
729 if backend_alias not in request.config.getoption('--backends'):
730 pytest.skip("Backend %s not selected." % (backend_alias, ))
730 pytest.skip("Backend %s not selected." % (backend_alias, ))
731
731
732 utils.check_xfail_backends(request.node, backend_alias)
732 utils.check_xfail_backends(request.node, backend_alias)
733 utils.check_skip_backends(request.node, backend_alias)
733 utils.check_skip_backends(request.node, backend_alias)
734
734
735 repo_name = 'vcs_test_%s' % (backend_alias, )
735 repo_name = 'vcs_test_%s' % (backend_alias, )
736 repo_path = os.path.join(tests_tmp_path, repo_name)
736 repo_path = os.path.join(tests_tmp_path, repo_name)
737 backend = VcsBackend(
737 backend = VcsBackend(
738 alias=backend_alias,
738 alias=backend_alias,
739 repo_path=repo_path,
739 repo_path=repo_path,
740 test_name=request.node.name,
740 test_name=request.node.name,
741 test_repo_container=test_repo)
741 test_repo_container=test_repo)
742 request.addfinalizer(backend.cleanup)
742 request.addfinalizer(backend.cleanup)
743 return backend
743 return backend
744
744
745
745
746 @pytest.fixture()
746 @pytest.fixture()
747 def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
747 def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
748 """
748 """
749 Parametrized fixture which represents a single vcs backend implementation.
749 Parametrized fixture which represents a single vcs backend implementation.
750
750
751 See the fixture `backend` for more details. This one implements the same
751 See the fixture `backend` for more details. This one implements the same
752 concept, but on vcs level. So it does not provide model instances etc.
752 concept, but on vcs level. So it does not provide model instances etc.
753
753
754 Parameters are generated dynamically, see :func:`pytest_generate_tests`
754 Parameters are generated dynamically, see :func:`pytest_generate_tests`
755 for how this works.
755 for how this works.
756 """
756 """
757 return vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo)
757 return vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo)
758
758
759
759
760 @pytest.fixture()
760 @pytest.fixture()
761 def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
761 def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
762 return vcsbackend_base(request, 'git', tests_tmp_path, baseapp, test_repo)
762 return vcsbackend_base(request, 'git', tests_tmp_path, baseapp, test_repo)
763
763
764
764
765 @pytest.fixture()
765 @pytest.fixture()
766 def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
766 def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
767 return vcsbackend_base(request, 'hg', tests_tmp_path, baseapp, test_repo)
767 return vcsbackend_base(request, 'hg', tests_tmp_path, baseapp, test_repo)
768
768
769
769
770 @pytest.fixture()
770 @pytest.fixture()
771 def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
771 def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
772 return vcsbackend_base(request, 'svn', tests_tmp_path, baseapp, test_repo)
772 return vcsbackend_base(request, 'svn', tests_tmp_path, baseapp, test_repo)
773
773
774
774
775 @pytest.fixture()
775 @pytest.fixture()
776 def vcsbackend_stub(vcsbackend_git):
776 def vcsbackend_stub(vcsbackend_git):
777 """
777 """
778 Use this to express that your test just needs a stub of a vcsbackend.
778 Use this to express that your test just needs a stub of a vcsbackend.
779
779
780 Plan is to eventually implement an in-memory stub to speed tests up.
780 Plan is to eventually implement an in-memory stub to speed tests up.
781 """
781 """
782 return vcsbackend_git
782 return vcsbackend_git
783
783
784
784
785 class VcsBackend(object):
785 class VcsBackend(object):
786 """
786 """
787 Represents the test configuration for one supported vcs backend.
787 Represents the test configuration for one supported vcs backend.
788 """
788 """
789
789
790 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
790 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
791
791
792 def __init__(self, alias, repo_path, test_name, test_repo_container):
792 def __init__(self, alias, repo_path, test_name, test_repo_container):
793 self.alias = alias
793 self.alias = alias
794 self._repo_path = repo_path
794 self._repo_path = repo_path
795 self._cleanup_repos = []
795 self._cleanup_repos = []
796 self._test_name = test_name
796 self._test_name = test_name
797 self._test_repo_container = test_repo_container
797 self._test_repo_container = test_repo_container
798
798
799 def __getitem__(self, key):
799 def __getitem__(self, key):
800 return self._test_repo_container(key, self.alias).scm_instance()
800 return self._test_repo_container(key, self.alias).scm_instance()
801
801
802 @property
802 @property
803 def repo(self):
803 def repo(self):
804 """
804 """
805 Returns the "current" repository. This is the vcs_test repo or the last
805 Returns the "current" repository. This is the vcs_test repo or the last
806 repo which has been created.
806 repo which has been created.
807 """
807 """
808 Repository = get_backend(self.alias)
808 Repository = get_backend(self.alias)
809 return Repository(self._repo_path)
809 return Repository(self._repo_path)
810
810
811 @property
811 @property
812 def backend(self):
812 def backend(self):
813 """
813 """
814 Returns the backend implementation class.
814 Returns the backend implementation class.
815 """
815 """
816 return get_backend(self.alias)
816 return get_backend(self.alias)
817
817
818 def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
818 def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
819 bare=False):
819 bare=False):
820 repo_name = self._next_repo_name()
820 repo_name = self._next_repo_name()
821 self._repo_path = get_new_dir(repo_name)
821 self._repo_path = get_new_dir(repo_name)
822 repo_class = get_backend(self.alias)
822 repo_class = get_backend(self.alias)
823 src_url = None
823 src_url = None
824 if _clone_repo:
824 if _clone_repo:
825 src_url = _clone_repo.path
825 src_url = _clone_repo.path
826 repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
826 repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
827 self._cleanup_repos.append(repo)
827 self._cleanup_repos.append(repo)
828
828
829 commits = commits or [
829 commits = commits or [
830 {'message': 'Commit %s of %s' % (x, repo_name)}
830 {'message': 'Commit %s of %s' % (x, repo_name)}
831 for x in xrange(number_of_commits)]
831 for x in xrange(number_of_commits)]
832 _add_commits_to_repo(repo, commits)
832 _add_commits_to_repo(repo, commits)
833 return repo
833 return repo
834
834
835 def clone_repo(self, repo):
835 def clone_repo(self, repo):
836 return self.create_repo(_clone_repo=repo)
836 return self.create_repo(_clone_repo=repo)
837
837
838 def cleanup(self):
838 def cleanup(self):
839 for repo in self._cleanup_repos:
839 for repo in self._cleanup_repos:
840 shutil.rmtree(repo.path)
840 shutil.rmtree(repo.path)
841
841
842 def new_repo_path(self):
842 def new_repo_path(self):
843 repo_name = self._next_repo_name()
843 repo_name = self._next_repo_name()
844 self._repo_path = get_new_dir(repo_name)
844 self._repo_path = get_new_dir(repo_name)
845 return self._repo_path
845 return self._repo_path
846
846
847 def _next_repo_name(self):
847 def _next_repo_name(self):
848 return "%s_%s" % (
848 return "%s_%s" % (
849 self.invalid_repo_name.sub('_', self._test_name),
849 self.invalid_repo_name.sub('_', self._test_name),
850 len(self._cleanup_repos))
850 len(self._cleanup_repos))
851
851
852 def add_file(self, repo, filename, content='Test content\n'):
852 def add_file(self, repo, filename, content='Test content\n'):
853 imc = repo.in_memory_commit
853 imc = repo.in_memory_commit
854 imc.add(FileNode(filename, content=content))
854 imc.add(FileNode(filename, content=content))
855 imc.commit(
855 imc.commit(
856 message=u'Automatic commit from vcsbackend fixture',
856 message=u'Automatic commit from vcsbackend fixture',
857 author=u'Automatic <automatic@rhodecode.com>')
857 author=u'Automatic <automatic@rhodecode.com>')
858
858
859 def ensure_file(self, filename, content='Test content\n'):
859 def ensure_file(self, filename, content='Test content\n'):
860 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
860 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
861 self.add_file(self.repo, filename, content)
861 self.add_file(self.repo, filename, content)
862
862
863
863
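A short sketch of the `VcsBackend` API in a test (file name and content are hypothetical):

    def test_vcs_level_repo(vcsbackend_git):
        repo = vcsbackend_git.create_repo(number_of_commits=1)
        vcsbackend_git.add_file(repo, 'setup.py', content='# setup\n')
        # a fresh handle on the same path now sees both commits
        assert len(vcsbackend_git.repo.commit_ids) == 2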
864 def _add_commits_to_repo(vcs_repo, commits):
864 def _add_commits_to_repo(vcs_repo, commits):
865 commit_ids = {}
865 commit_ids = {}
866 if not commits:
866 if not commits:
867 return commit_ids
867 return commit_ids
868
868
869 imc = vcs_repo.in_memory_commit
869 imc = vcs_repo.in_memory_commit
870 commit = None
870 commit = None
871
871
872 for idx, commit in enumerate(commits):
872 for idx, commit in enumerate(commits):
873 message = unicode(commit.get('message', 'Commit %s' % idx))
873 message = unicode(commit.get('message', 'Commit %s' % idx))
874
874
875 for node in commit.get('added', []):
875 for node in commit.get('added', []):
876 imc.add(FileNode(node.path, content=node.content))
876 imc.add(FileNode(node.path, content=node.content))
877 for node in commit.get('changed', []):
877 for node in commit.get('changed', []):
878 imc.change(FileNode(node.path, content=node.content))
878 imc.change(FileNode(node.path, content=node.content))
879 for node in commit.get('removed', []):
879 for node in commit.get('removed', []):
880 imc.remove(FileNode(node.path))
880 imc.remove(FileNode(node.path))
881
881
882 parents = [
882 parents = [
883 vcs_repo.get_commit(commit_id=commit_ids[p])
883 vcs_repo.get_commit(commit_id=commit_ids[p])
884 for p in commit.get('parents', [])]
884 for p in commit.get('parents', [])]
885
885
886 operations = ('added', 'changed', 'removed')
886 operations = ('added', 'changed', 'removed')
887 if not any((commit.get(o) for o in operations)):
887 if not any((commit.get(o) for o in operations)):
888 imc.add(FileNode('file_%s' % idx, content=message))
888 imc.add(FileNode('file_%s' % idx, content=message))
889
889
890 commit = imc.commit(
890 commit = imc.commit(
891 message=message,
891 message=message,
892 author=unicode(commit.get('author', 'Automatic <automatic@rhodecode.com>')),
892 author=unicode(commit.get('author', 'Automatic <automatic@rhodecode.com>')),
893 date=commit.get('date'),
893 date=commit.get('date'),
894 branch=commit.get('branch'),
894 branch=commit.get('branch'),
895 parents=parents)
895 parents=parents)
896
896
897 commit_ids[commit.message] = commit.raw_id
897 commit_ids[commit.message] = commit.raw_id
898
898
899 return commit_ids
899 return commit_ids
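
To make the accepted commit specification explicit: each dict may carry ``message``, ``added``, ``changed``, ``removed``, ``parents``, ``author``, ``date`` and ``branch``, and parents are looked up by the message of an earlier commit. A sketch of such a spec (``Node`` here is a stand-in; the function only reads ``.path`` and ``.content``):

# Illustrative spec for create_repo(commits=...) / _add_commits_to_repo().
class Node(object):
    def __init__(self, path, content=''):
        self.path = path
        self.content = content

commits = [
    {'message': 'init',
     'added': [Node('setup.py', 'print("hello")\n')]},
    {'message': 'tweak',
     'changed': [Node('setup.py', 'print("bye")\n')]},
    {'message': 'drop',
     'removed': [Node('setup.py')],
     'parents': ['tweak'],        # referenced by the message of a prior commit
     'branch': 'default'},
]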
900
900
901
901
902 @pytest.fixture()
902 @pytest.fixture()
903 def reposerver(request):
903 def reposerver(request):
904 """
904 """
905 Allows serving a backend repository
905 Allows serving a backend repository
906 """
906 """
907
907
908 repo_server = RepoServer()
908 repo_server = RepoServer()
909 request.addfinalizer(repo_server.cleanup)
909 request.addfinalizer(repo_server.cleanup)
910 return repo_server
910 return repo_server
911
911
912
912
913 class RepoServer(object):
913 class RepoServer(object):
914 """
914 """
915 Utility to serve a local repository for the duration of a test case.
915 Utility to serve a local repository for the duration of a test case.
916
916
917 Supports only Subversion so far.
917 Supports only Subversion so far.
918 """
918 """
919
919
920 url = None
920 url = None
921
921
922 def __init__(self):
922 def __init__(self):
923 self._cleanup_servers = []
923 self._cleanup_servers = []
924
924
925 def serve(self, vcsrepo):
925 def serve(self, vcsrepo):
926 if vcsrepo.alias != 'svn':
926 if vcsrepo.alias != 'svn':
927 raise TypeError("Backend %s not supported" % vcsrepo.alias)
927 raise TypeError("Backend %s not supported" % vcsrepo.alias)
928
928
929 proc = subprocess32.Popen(
929 proc = subprocess32.Popen(
930 ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
930 ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
931 '--root', vcsrepo.path])
931 '--root', vcsrepo.path])
932 self._cleanup_servers.append(proc)
932 self._cleanup_servers.append(proc)
933 self.url = 'svn://localhost'
933 self.url = 'svn://localhost'
934
934
935 def cleanup(self):
935 def cleanup(self):
936 for proc in self._cleanup_servers:
936 for proc in self._cleanup_servers:
937 proc.terminate()
937 proc.terminate()
938
938
939
939
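
A usage sketch for the fixture; ``backend_svn`` is assumed here to name a Subversion-backed repository fixture, since ``RepoServer.serve`` only supports svn:

# Hypothetical test: serve a Subversion test repository over svn://.
def test_svnserve_sketch(reposerver, backend_svn):
    vcsrepo = backend_svn.repo.scm_instance()   # assumed svn vcs repository
    reposerver.serve(vcsrepo)                   # spawns `svnserve -d --foreground ...`
    assert reposerver.url == 'svn://localhost'
    # a client can now check out from reposerver.url; the finalizer registered
    # in the fixture terminates the spawned svnserve processes afterwards.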
940 @pytest.fixture()
940 @pytest.fixture()
941 def pr_util(backend, request, config_stub):
941 def pr_util(backend, request, config_stub):
942 """
942 """
943 Utility for model-level and functional tests around pull requests.
943 Utility for model-level and functional tests around pull requests.
944
944
945 It gives an instance of :class:`PRTestUtility` which provides various
945 It gives an instance of :class:`PRTestUtility` which provides various
946 utility methods around one pull request.
946 utility methods around one pull request.
947
947
948 This fixture uses `backend` and inherits its parameterization.
948 This fixture uses `backend` and inherits its parameterization.
949 """
949 """
950
950
951 util = PRTestUtility(backend)
951 util = PRTestUtility(backend)
952 request.addfinalizer(util.cleanup)
952 request.addfinalizer(util.cleanup)
953
953
954 return util
954 return util
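
A hypothetical test using the fixture; the method names match ``PRTestUtility`` below:

# Sketch only: exercising pr_util inside a test function.
def test_pr_util_sketch(pr_util):
    pull_request = pr_util.create_pull_request(mergeable=True)
    assert pull_request.pull_request_id == pr_util.pull_request_id

    # grow the source branch; update_commits() is re-run internally
    new_commit_id = pr_util.add_one_commit()
    assert new_commit_id in pull_request.revisions

    # attach a general and an inline comment to the pull request
    pr_util.create_comment()
    pr_util.create_inline_comment(file_path='file_1')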
955
955
956
956
957 class PRTestUtility(object):
957 class PRTestUtility(object):
958
958
959 pull_request = None
959 pull_request = None
960 pull_request_id = None
960 pull_request_id = None
961 mergeable_patcher = None
961 mergeable_patcher = None
962 mergeable_mock = None
962 mergeable_mock = None
963 notification_patcher = None
963 notification_patcher = None
964
964
965 def __init__(self, backend):
965 def __init__(self, backend):
966 self.backend = backend
966 self.backend = backend
967
967
968 def create_pull_request(
968 def create_pull_request(
969 self, commits=None, target_head=None, source_head=None,
969 self, commits=None, target_head=None, source_head=None,
970 revisions=None, approved=False, author=None, mergeable=False,
970 revisions=None, approved=False, author=None, mergeable=False,
971 enable_notifications=True, name_suffix=u'', reviewers=None,
971 enable_notifications=True, name_suffix=u'', reviewers=None, observers=None,
972 title=u"Test", description=u"Description"):
972 title=u"Test", description=u"Description"):
973 self.set_mergeable(mergeable)
973 self.set_mergeable(mergeable)
974 if not enable_notifications:
974 if not enable_notifications:
975 # mock notification side effect
975 # mock notification side effect
976 self.notification_patcher = mock.patch(
976 self.notification_patcher = mock.patch(
977 'rhodecode.model.notification.NotificationModel.create')
977 'rhodecode.model.notification.NotificationModel.create')
978 self.notification_patcher.start()
978 self.notification_patcher.start()
979
979
980 if not self.pull_request:
980 if not self.pull_request:
981 if not commits:
981 if not commits:
982 commits = [
982 commits = [
983 {'message': 'c1'},
983 {'message': 'c1'},
984 {'message': 'c2'},
984 {'message': 'c2'},
985 {'message': 'c3'},
985 {'message': 'c3'},
986 ]
986 ]
987 target_head = 'c1'
987 target_head = 'c1'
988 source_head = 'c2'
988 source_head = 'c2'
989 revisions = ['c2']
989 revisions = ['c2']
990
990
991 self.commit_ids = self.backend.create_master_repo(commits)
991 self.commit_ids = self.backend.create_master_repo(commits)
992 self.target_repository = self.backend.create_repo(
992 self.target_repository = self.backend.create_repo(
993 heads=[target_head], name_suffix=name_suffix)
993 heads=[target_head], name_suffix=name_suffix)
994 self.source_repository = self.backend.create_repo(
994 self.source_repository = self.backend.create_repo(
995 heads=[source_head], name_suffix=name_suffix)
995 heads=[source_head], name_suffix=name_suffix)
996 self.author = author or UserModel().get_by_username(
996 self.author = author or UserModel().get_by_username(
997 TEST_USER_ADMIN_LOGIN)
997 TEST_USER_ADMIN_LOGIN)
998
998
999 model = PullRequestModel()
999 model = PullRequestModel()
1000 self.create_parameters = {
1000 self.create_parameters = {
1001 'created_by': self.author,
1001 'created_by': self.author,
1002 'source_repo': self.source_repository.repo_name,
1002 'source_repo': self.source_repository.repo_name,
1003 'source_ref': self._default_branch_reference(source_head),
1003 'source_ref': self._default_branch_reference(source_head),
1004 'target_repo': self.target_repository.repo_name,
1004 'target_repo': self.target_repository.repo_name,
1005 'target_ref': self._default_branch_reference(target_head),
1005 'target_ref': self._default_branch_reference(target_head),
1006 'revisions': [self.commit_ids[r] for r in revisions],
1006 'revisions': [self.commit_ids[r] for r in revisions],
1007 'reviewers': reviewers or self._get_reviewers(),
1007 'reviewers': reviewers or self._get_reviewers(),
1008 'observers': observers or self._get_observers(),
1008 'title': title,
1009 'title': title,
1009 'description': description,
1010 'description': description,
1010 }
1011 }
1011 self.pull_request = model.create(**self.create_parameters)
1012 self.pull_request = model.create(**self.create_parameters)
1012 assert model.get_versions(self.pull_request) == []
1013 assert model.get_versions(self.pull_request) == []
1013
1014
1014 self.pull_request_id = self.pull_request.pull_request_id
1015 self.pull_request_id = self.pull_request.pull_request_id
1015
1016
1016 if approved:
1017 if approved:
1017 self.approve()
1018 self.approve()
1018
1019
1019 Session().add(self.pull_request)
1020 Session().add(self.pull_request)
1020 Session().commit()
1021 Session().commit()
1021
1022
1022 return self.pull_request
1023 return self.pull_request
1023
1024
1024 def approve(self):
1025 def approve(self):
1025 self.create_status_votes(
1026 self.create_status_votes(
1026 ChangesetStatus.STATUS_APPROVED,
1027 ChangesetStatus.STATUS_APPROVED,
1027 *self.pull_request.reviewers)
1028 *self.pull_request.reviewers)
1028
1029
1029 def close(self):
1030 def close(self):
1030 PullRequestModel().close_pull_request(self.pull_request, self.author)
1031 PullRequestModel().close_pull_request(self.pull_request, self.author)
1031
1032
1032 def _default_branch_reference(self, commit_message):
1033 def _default_branch_reference(self, commit_message):
1033 reference = '%s:%s:%s' % (
1034 reference = '%s:%s:%s' % (
1034 'branch',
1035 'branch',
1035 self.backend.default_branch_name,
1036 self.backend.default_branch_name,
1036 self.commit_ids[commit_message])
1037 self.commit_ids[commit_message])
1037 return reference
1038 return reference
1038
1039
1039 def _get_reviewers(self):
1040 def _get_reviewers(self):
1041 role = PullRequestReviewers.ROLE_REVIEWER
1040 return [
1042 return [
1041 (TEST_USER_REGULAR_LOGIN, ['default1'], False, []),
1043 (TEST_USER_REGULAR_LOGIN, ['default1'], False, role, []),
1042 (TEST_USER_REGULAR2_LOGIN, ['default2'], False, []),
1044 (TEST_USER_REGULAR2_LOGIN, ['default2'], False, role, []),
1045 ]
1046
1047 def _get_observers(self):
1048 return [
1049
1043 ]
1050 ]
1044
1051
1045 def update_source_repository(self, head=None):
1052 def update_source_repository(self, head=None):
1046 heads = [head or 'c3']
1053 heads = [head or 'c3']
1047 self.backend.pull_heads(self.source_repository, heads=heads)
1054 self.backend.pull_heads(self.source_repository, heads=heads)
1048
1055
1049 def add_one_commit(self, head=None):
1056 def add_one_commit(self, head=None):
1050 self.update_source_repository(head=head)
1057 self.update_source_repository(head=head)
1051 old_commit_ids = set(self.pull_request.revisions)
1058 old_commit_ids = set(self.pull_request.revisions)
1052 PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
1059 PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
1053 commit_ids = set(self.pull_request.revisions)
1060 commit_ids = set(self.pull_request.revisions)
1054 new_commit_ids = commit_ids - old_commit_ids
1061 new_commit_ids = commit_ids - old_commit_ids
1055 assert len(new_commit_ids) == 1
1062 assert len(new_commit_ids) == 1
1056 return new_commit_ids.pop()
1063 return new_commit_ids.pop()
1057
1064
1058 def remove_one_commit(self):
1065 def remove_one_commit(self):
1059 assert len(self.pull_request.revisions) == 2
1066 assert len(self.pull_request.revisions) == 2
1060 source_vcs = self.source_repository.scm_instance()
1067 source_vcs = self.source_repository.scm_instance()
1061 removed_commit_id = source_vcs.commit_ids[-1]
1068 removed_commit_id = source_vcs.commit_ids[-1]
1062
1069
1063 # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
1070 # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
1064 # remove the if once that's sorted out.
1071 # remove the if once that's sorted out.
1065 if self.backend.alias == "git":
1072 if self.backend.alias == "git":
1066 kwargs = {'branch_name': self.backend.default_branch_name}
1073 kwargs = {'branch_name': self.backend.default_branch_name}
1067 else:
1074 else:
1068 kwargs = {}
1075 kwargs = {}
1069 source_vcs.strip(removed_commit_id, **kwargs)
1076 source_vcs.strip(removed_commit_id, **kwargs)
1070
1077
1071 PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
1078 PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
1072 assert len(self.pull_request.revisions) == 1
1079 assert len(self.pull_request.revisions) == 1
1073 return removed_commit_id
1080 return removed_commit_id
1074
1081
1075 def create_comment(self, linked_to=None):
1082 def create_comment(self, linked_to=None):
1076 comment = CommentsModel().create(
1083 comment = CommentsModel().create(
1077 text=u"Test comment",
1084 text=u"Test comment",
1078 repo=self.target_repository.repo_name,
1085 repo=self.target_repository.repo_name,
1079 user=self.author,
1086 user=self.author,
1080 pull_request=self.pull_request)
1087 pull_request=self.pull_request)
1081 assert comment.pull_request_version_id is None
1088 assert comment.pull_request_version_id is None
1082
1089
1083 if linked_to:
1090 if linked_to:
1084 PullRequestModel()._link_comments_to_version(linked_to)
1091 PullRequestModel()._link_comments_to_version(linked_to)
1085
1092
1086 return comment
1093 return comment
1087
1094
1088 def create_inline_comment(
1095 def create_inline_comment(
1089 self, linked_to=None, line_no=u'n1', file_path='file_1'):
1096 self, linked_to=None, line_no=u'n1', file_path='file_1'):
1090 comment = CommentsModel().create(
1097 comment = CommentsModel().create(
1091 text=u"Test comment",
1098 text=u"Test comment",
1092 repo=self.target_repository.repo_name,
1099 repo=self.target_repository.repo_name,
1093 user=self.author,
1100 user=self.author,
1094 line_no=line_no,
1101 line_no=line_no,
1095 f_path=file_path,
1102 f_path=file_path,
1096 pull_request=self.pull_request)
1103 pull_request=self.pull_request)
1097 assert comment.pull_request_version_id is None
1104 assert comment.pull_request_version_id is None
1098
1105
1099 if linked_to:
1106 if linked_to:
1100 PullRequestModel()._link_comments_to_version(linked_to)
1107 PullRequestModel()._link_comments_to_version(linked_to)
1101
1108
1102 return comment
1109 return comment
1103
1110
1104 def create_version_of_pull_request(self):
1111 def create_version_of_pull_request(self):
1105 pull_request = self.create_pull_request()
1112 pull_request = self.create_pull_request()
1106 version = PullRequestModel()._create_version_from_snapshot(
1113 version = PullRequestModel()._create_version_from_snapshot(
1107 pull_request)
1114 pull_request)
1108 return version
1115 return version
1109
1116
1110 def create_status_votes(self, status, *reviewers):
1117 def create_status_votes(self, status, *reviewers):
1111 for reviewer in reviewers:
1118 for reviewer in reviewers:
1112 ChangesetStatusModel().set_status(
1119 ChangesetStatusModel().set_status(
1113 repo=self.pull_request.target_repo,
1120 repo=self.pull_request.target_repo,
1114 status=status,
1121 status=status,
1115 user=reviewer.user_id,
1122 user=reviewer.user_id,
1116 pull_request=self.pull_request)
1123 pull_request=self.pull_request)
1117
1124
1118 def set_mergeable(self, value):
1125 def set_mergeable(self, value):
1119 if not self.mergeable_patcher:
1126 if not self.mergeable_patcher:
1120 self.mergeable_patcher = mock.patch.object(
1127 self.mergeable_patcher = mock.patch.object(
1121 VcsSettingsModel, 'get_general_settings')
1128 VcsSettingsModel, 'get_general_settings')
1122 self.mergeable_mock = self.mergeable_patcher.start()
1129 self.mergeable_mock = self.mergeable_patcher.start()
1123 self.mergeable_mock.return_value = {
1130 self.mergeable_mock.return_value = {
1124 'rhodecode_pr_merge_enabled': value}
1131 'rhodecode_pr_merge_enabled': value}
1125
1132
1126 def cleanup(self):
1133 def cleanup(self):
1127 # In case the source repository is already cleaned up, the pull
1134 # In case the source repository is already cleaned up, the pull
1128 # request will already be deleted.
1135 # request will already be deleted.
1129 pull_request = PullRequest().get(self.pull_request_id)
1136 pull_request = PullRequest().get(self.pull_request_id)
1130 if pull_request:
1137 if pull_request:
1131 PullRequestModel().delete(pull_request, pull_request.author)
1138 PullRequestModel().delete(pull_request, pull_request.author)
1132 Session().commit()
1139 Session().commit()
1133
1140
1134 if self.notification_patcher:
1141 if self.notification_patcher:
1135 self.notification_patcher.stop()
1142 self.notification_patcher.stop()
1136
1143
1137 if self.mergeable_patcher:
1144 if self.mergeable_patcher:
1138 self.mergeable_patcher.stop()
1145 self.mergeable_patcher.stop()
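
The reviewer and observer entries built in ``_get_reviewers`` / ``_get_observers`` are positional tuples; the field names below are descriptive guesses based on how the tuples are constructed above, not an authoritative schema:

# (user_name_or_id, reasons, mandatory, role, rules)
reviewers = [
    (TEST_USER_REGULAR_LOGIN, ['default1'], False,
     PullRequestReviewers.ROLE_REVIEWER, []),
    (TEST_USER_REGULAR2_LOGIN, ['default2'], False,
     PullRequestReviewers.ROLE_REVIEWER, []),
]
# observers use the same tuple shape (with an observer role) and default to an
# empty list in this fixture.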
1139
1146
1140
1147
1141 @pytest.fixture()
1148 @pytest.fixture()
1142 def user_admin(baseapp):
1149 def user_admin(baseapp):
1143 """
1150 """
1144 Provides the default admin test user as an instance of `db.User`.
1151 Provides the default admin test user as an instance of `db.User`.
1145 """
1152 """
1146 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1153 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1147 return user
1154 return user
1148
1155
1149
1156
1150 @pytest.fixture()
1157 @pytest.fixture()
1151 def user_regular(baseapp):
1158 def user_regular(baseapp):
1152 """
1159 """
1153 Provides the default regular test user as an instance of `db.User`.
1160 Provides the default regular test user as an instance of `db.User`.
1154 """
1161 """
1155 user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1162 user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1156 return user
1163 return user
1157
1164
1158
1165
1159 @pytest.fixture()
1166 @pytest.fixture()
1160 def user_util(request, db_connection):
1167 def user_util(request, db_connection):
1161 """
1168 """
1162 Provides a wired instance of `UserUtility` with integrated cleanup.
1169 Provides a wired instance of `UserUtility` with integrated cleanup.
1163 """
1170 """
1164 utility = UserUtility(test_name=request.node.name)
1171 utility = UserUtility(test_name=request.node.name)
1165 request.addfinalizer(utility.cleanup)
1172 request.addfinalizer(utility.cleanup)
1166 return utility
1173 return utility
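
A sketch of a typical ``user_util`` flow; the permission names are examples of RhodeCode permission keys, and everything created here is removed again by the registered finalizer:

# Hypothetical test using the utility defined below.
def test_user_util_sketch(user_util):
    user = user_util.create_user()
    repo = user_util.create_repo(owner=user.username, repo_type='git')
    repo_group = user_util.create_repo_group()

    user_util.grant_user_permission_to_repo(repo, user, 'repository.admin')
    user_util.grant_user_permission_to_repo_group(repo_group, user, 'group.read')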
1167
1174
1168
1175
1169 # TODO: johbo: Split this up into utilities per domain or something similar
1176 # TODO: johbo: Split this up into utilities per domain or something similar
1170 class UserUtility(object):
1177 class UserUtility(object):
1171
1178
1172 def __init__(self, test_name="test"):
1179 def __init__(self, test_name="test"):
1173 self._test_name = self._sanitize_name(test_name)
1180 self._test_name = self._sanitize_name(test_name)
1174 self.fixture = Fixture()
1181 self.fixture = Fixture()
1175 self.repo_group_ids = []
1182 self.repo_group_ids = []
1176 self.repos_ids = []
1183 self.repos_ids = []
1177 self.user_ids = []
1184 self.user_ids = []
1178 self.user_group_ids = []
1185 self.user_group_ids = []
1179 self.user_repo_permission_ids = []
1186 self.user_repo_permission_ids = []
1180 self.user_group_repo_permission_ids = []
1187 self.user_group_repo_permission_ids = []
1181 self.user_repo_group_permission_ids = []
1188 self.user_repo_group_permission_ids = []
1182 self.user_group_repo_group_permission_ids = []
1189 self.user_group_repo_group_permission_ids = []
1183 self.user_user_group_permission_ids = []
1190 self.user_user_group_permission_ids = []
1184 self.user_group_user_group_permission_ids = []
1191 self.user_group_user_group_permission_ids = []
1185 self.user_permissions = []
1192 self.user_permissions = []
1186
1193
1187 def _sanitize_name(self, name):
1194 def _sanitize_name(self, name):
1188 for char in ['[', ']']:
1195 for char in ['[', ']']:
1189 name = name.replace(char, '_')
1196 name = name.replace(char, '_')
1190 return name
1197 return name
1191
1198
1192 def create_repo_group(
1199 def create_repo_group(
1193 self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
1200 self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
1194 group_name = "{prefix}_repogroup_{count}".format(
1201 group_name = "{prefix}_repogroup_{count}".format(
1195 prefix=self._test_name,
1202 prefix=self._test_name,
1196 count=len(self.repo_group_ids))
1203 count=len(self.repo_group_ids))
1197 repo_group = self.fixture.create_repo_group(
1204 repo_group = self.fixture.create_repo_group(
1198 group_name, cur_user=owner)
1205 group_name, cur_user=owner)
1199 if auto_cleanup:
1206 if auto_cleanup:
1200 self.repo_group_ids.append(repo_group.group_id)
1207 self.repo_group_ids.append(repo_group.group_id)
1201 return repo_group
1208 return repo_group
1202
1209
1203 def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
1210 def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
1204 auto_cleanup=True, repo_type='hg', bare=False):
1211 auto_cleanup=True, repo_type='hg', bare=False):
1205 repo_name = "{prefix}_repository_{count}".format(
1212 repo_name = "{prefix}_repository_{count}".format(
1206 prefix=self._test_name,
1213 prefix=self._test_name,
1207 count=len(self.repos_ids))
1214 count=len(self.repos_ids))
1208
1215
1209 repository = self.fixture.create_repo(
1216 repository = self.fixture.create_repo(
1210 repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
1217 repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
1211 if auto_cleanup:
1218 if auto_cleanup:
1212 self.repos_ids.append(repository.repo_id)
1219 self.repos_ids.append(repository.repo_id)
1213 return repository
1220 return repository
1214
1221
1215 def create_user(self, auto_cleanup=True, **kwargs):
1222 def create_user(self, auto_cleanup=True, **kwargs):
1216 user_name = "{prefix}_user_{count}".format(
1223 user_name = "{prefix}_user_{count}".format(
1217 prefix=self._test_name,
1224 prefix=self._test_name,
1218 count=len(self.user_ids))
1225 count=len(self.user_ids))
1219 user = self.fixture.create_user(user_name, **kwargs)
1226 user = self.fixture.create_user(user_name, **kwargs)
1220 if auto_cleanup:
1227 if auto_cleanup:
1221 self.user_ids.append(user.user_id)
1228 self.user_ids.append(user.user_id)
1222 return user
1229 return user
1223
1230
1224 def create_additional_user_email(self, user, email):
1231 def create_additional_user_email(self, user, email):
1225 uem = self.fixture.create_additional_user_email(user=user, email=email)
1232 uem = self.fixture.create_additional_user_email(user=user, email=email)
1226 return uem
1233 return uem
1227
1234
1228 def create_user_with_group(self):
1235 def create_user_with_group(self):
1229 user = self.create_user()
1236 user = self.create_user()
1230 user_group = self.create_user_group(members=[user])
1237 user_group = self.create_user_group(members=[user])
1231 return user, user_group
1238 return user, user_group
1232
1239
1233 def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
1240 def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
1234 auto_cleanup=True, **kwargs):
1241 auto_cleanup=True, **kwargs):
1235 group_name = "{prefix}_usergroup_{count}".format(
1242 group_name = "{prefix}_usergroup_{count}".format(
1236 prefix=self._test_name,
1243 prefix=self._test_name,
1237 count=len(self.user_group_ids))
1244 count=len(self.user_group_ids))
1238 user_group = self.fixture.create_user_group(
1245 user_group = self.fixture.create_user_group(
1239 group_name, cur_user=owner, **kwargs)
1246 group_name, cur_user=owner, **kwargs)
1240
1247
1241 if auto_cleanup:
1248 if auto_cleanup:
1242 self.user_group_ids.append(user_group.users_group_id)
1249 self.user_group_ids.append(user_group.users_group_id)
1243 if members:
1250 if members:
1244 for user in members:
1251 for user in members:
1245 UserGroupModel().add_user_to_group(user_group, user)
1252 UserGroupModel().add_user_to_group(user_group, user)
1246 return user_group
1253 return user_group
1247
1254
1248 def grant_user_permission(self, user_name, permission_name):
1255 def grant_user_permission(self, user_name, permission_name):
1249 self.inherit_default_user_permissions(user_name, False)
1256 self.inherit_default_user_permissions(user_name, False)
1250 self.user_permissions.append((user_name, permission_name))
1257 self.user_permissions.append((user_name, permission_name))
1251
1258
1252 def grant_user_permission_to_repo_group(
1259 def grant_user_permission_to_repo_group(
1253 self, repo_group, user, permission_name):
1260 self, repo_group, user, permission_name):
1254 permission = RepoGroupModel().grant_user_permission(
1261 permission = RepoGroupModel().grant_user_permission(
1255 repo_group, user, permission_name)
1262 repo_group, user, permission_name)
1256 self.user_repo_group_permission_ids.append(
1263 self.user_repo_group_permission_ids.append(
1257 (repo_group.group_id, user.user_id))
1264 (repo_group.group_id, user.user_id))
1258 return permission
1265 return permission
1259
1266
1260 def grant_user_group_permission_to_repo_group(
1267 def grant_user_group_permission_to_repo_group(
1261 self, repo_group, user_group, permission_name):
1268 self, repo_group, user_group, permission_name):
1262 permission = RepoGroupModel().grant_user_group_permission(
1269 permission = RepoGroupModel().grant_user_group_permission(
1263 repo_group, user_group, permission_name)
1270 repo_group, user_group, permission_name)
1264 self.user_group_repo_group_permission_ids.append(
1271 self.user_group_repo_group_permission_ids.append(
1265 (repo_group.group_id, user_group.users_group_id))
1272 (repo_group.group_id, user_group.users_group_id))
1266 return permission
1273 return permission
1267
1274
1268 def grant_user_permission_to_repo(
1275 def grant_user_permission_to_repo(
1269 self, repo, user, permission_name):
1276 self, repo, user, permission_name):
1270 permission = RepoModel().grant_user_permission(
1277 permission = RepoModel().grant_user_permission(
1271 repo, user, permission_name)
1278 repo, user, permission_name)
1272 self.user_repo_permission_ids.append(
1279 self.user_repo_permission_ids.append(
1273 (repo.repo_id, user.user_id))
1280 (repo.repo_id, user.user_id))
1274 return permission
1281 return permission
1275
1282
1276 def grant_user_group_permission_to_repo(
1283 def grant_user_group_permission_to_repo(
1277 self, repo, user_group, permission_name):
1284 self, repo, user_group, permission_name):
1278 permission = RepoModel().grant_user_group_permission(
1285 permission = RepoModel().grant_user_group_permission(
1279 repo, user_group, permission_name)
1286 repo, user_group, permission_name)
1280 self.user_group_repo_permission_ids.append(
1287 self.user_group_repo_permission_ids.append(
1281 (repo.repo_id, user_group.users_group_id))
1288 (repo.repo_id, user_group.users_group_id))
1282 return permission
1289 return permission
1283
1290
1284 def grant_user_permission_to_user_group(
1291 def grant_user_permission_to_user_group(
1285 self, target_user_group, user, permission_name):
1292 self, target_user_group, user, permission_name):
1286 permission = UserGroupModel().grant_user_permission(
1293 permission = UserGroupModel().grant_user_permission(
1287 target_user_group, user, permission_name)
1294 target_user_group, user, permission_name)
1288 self.user_user_group_permission_ids.append(
1295 self.user_user_group_permission_ids.append(
1289 (target_user_group.users_group_id, user.user_id))
1296 (target_user_group.users_group_id, user.user_id))
1290 return permission
1297 return permission
1291
1298
1292 def grant_user_group_permission_to_user_group(
1299 def grant_user_group_permission_to_user_group(
1293 self, target_user_group, user_group, permission_name):
1300 self, target_user_group, user_group, permission_name):
1294 permission = UserGroupModel().grant_user_group_permission(
1301 permission = UserGroupModel().grant_user_group_permission(
1295 target_user_group, user_group, permission_name)
1302 target_user_group, user_group, permission_name)
1296 self.user_group_user_group_permission_ids.append(
1303 self.user_group_user_group_permission_ids.append(
1297 (target_user_group.users_group_id, user_group.users_group_id))
1304 (target_user_group.users_group_id, user_group.users_group_id))
1298 return permission
1305 return permission
1299
1306
1300 def revoke_user_permission(self, user_name, permission_name):
1307 def revoke_user_permission(self, user_name, permission_name):
1301 self.inherit_default_user_permissions(user_name, True)
1308 self.inherit_default_user_permissions(user_name, True)
1302 UserModel().revoke_perm(user_name, permission_name)
1309 UserModel().revoke_perm(user_name, permission_name)
1303
1310
1304 def inherit_default_user_permissions(self, user_name, value):
1311 def inherit_default_user_permissions(self, user_name, value):
1305 user = UserModel().get_by_username(user_name)
1312 user = UserModel().get_by_username(user_name)
1306 user.inherit_default_permissions = value
1313 user.inherit_default_permissions = value
1307 Session().add(user)
1314 Session().add(user)
1308 Session().commit()
1315 Session().commit()
1309
1316
1310 def cleanup(self):
1317 def cleanup(self):
1311 self._cleanup_permissions()
1318 self._cleanup_permissions()
1312 self._cleanup_repos()
1319 self._cleanup_repos()
1313 self._cleanup_repo_groups()
1320 self._cleanup_repo_groups()
1314 self._cleanup_user_groups()
1321 self._cleanup_user_groups()
1315 self._cleanup_users()
1322 self._cleanup_users()
1316
1323
1317 def _cleanup_permissions(self):
1324 def _cleanup_permissions(self):
1318 if self.user_permissions:
1325 if self.user_permissions:
1319 for user_name, permission_name in self.user_permissions:
1326 for user_name, permission_name in self.user_permissions:
1320 self.revoke_user_permission(user_name, permission_name)
1327 self.revoke_user_permission(user_name, permission_name)
1321
1328
1322 for permission in self.user_repo_permission_ids:
1329 for permission in self.user_repo_permission_ids:
1323 RepoModel().revoke_user_permission(*permission)
1330 RepoModel().revoke_user_permission(*permission)
1324
1331
1325 for permission in self.user_group_repo_permission_ids:
1332 for permission in self.user_group_repo_permission_ids:
1326 RepoModel().revoke_user_group_permission(*permission)
1333 RepoModel().revoke_user_group_permission(*permission)
1327
1334
1328 for permission in self.user_repo_group_permission_ids:
1335 for permission in self.user_repo_group_permission_ids:
1329 RepoGroupModel().revoke_user_permission(*permission)
1336 RepoGroupModel().revoke_user_permission(*permission)
1330
1337
1331 for permission in self.user_group_repo_group_permission_ids:
1338 for permission in self.user_group_repo_group_permission_ids:
1332 RepoGroupModel().revoke_user_group_permission(*permission)
1339 RepoGroupModel().revoke_user_group_permission(*permission)
1333
1340
1334 for permission in self.user_user_group_permission_ids:
1341 for permission in self.user_user_group_permission_ids:
1335 UserGroupModel().revoke_user_permission(*permission)
1342 UserGroupModel().revoke_user_permission(*permission)
1336
1343
1337 for permission in self.user_group_user_group_permission_ids:
1344 for permission in self.user_group_user_group_permission_ids:
1338 UserGroupModel().revoke_user_group_permission(*permission)
1345 UserGroupModel().revoke_user_group_permission(*permission)
1339
1346
1340 def _cleanup_repo_groups(self):
1347 def _cleanup_repo_groups(self):
1341 def _repo_group_compare(first_group_id, second_group_id):
1348 def _repo_group_compare(first_group_id, second_group_id):
1342 """
1349 """
1343 Gives higher priority to the groups with the most complex paths
1350 Gives higher priority to the groups with the most complex paths
1344 """
1351 """
1345 first_group = RepoGroup.get(first_group_id)
1352 first_group = RepoGroup.get(first_group_id)
1346 second_group = RepoGroup.get(second_group_id)
1353 second_group = RepoGroup.get(second_group_id)
1347 first_group_parts = (
1354 first_group_parts = (
1348 len(first_group.group_name.split('/')) if first_group else 0)
1355 len(first_group.group_name.split('/')) if first_group else 0)
1349 second_group_parts = (
1356 second_group_parts = (
1350 len(second_group.group_name.split('/')) if second_group else 0)
1357 len(second_group.group_name.split('/')) if second_group else 0)
1351 return cmp(second_group_parts, first_group_parts)
1358 return cmp(second_group_parts, first_group_parts)
1352
1359
1353 sorted_repo_group_ids = sorted(
1360 sorted_repo_group_ids = sorted(
1354 self.repo_group_ids, cmp=_repo_group_compare)
1361 self.repo_group_ids, cmp=_repo_group_compare)
1355 for repo_group_id in sorted_repo_group_ids:
1362 for repo_group_id in sorted_repo_group_ids:
1356 self.fixture.destroy_repo_group(repo_group_id)
1363 self.fixture.destroy_repo_group(repo_group_id)
1357
1364
1358 def _cleanup_repos(self):
1365 def _cleanup_repos(self):
1359 sorted_repos_ids = sorted(self.repos_ids)
1366 sorted_repos_ids = sorted(self.repos_ids)
1360 for repo_id in sorted_repos_ids:
1367 for repo_id in sorted_repos_ids:
1361 self.fixture.destroy_repo(repo_id)
1368 self.fixture.destroy_repo(repo_id)
1362
1369
1363 def _cleanup_user_groups(self):
1370 def _cleanup_user_groups(self):
1364 def _user_group_compare(first_group_id, second_group_id):
1371 def _user_group_compare(first_group_id, second_group_id):
1365 """
1372 """
1366 Gives higher priority to the groups with the most complex paths
1373 Gives higher priority to the groups with the most complex paths
1367 """
1374 """
1368 first_group = UserGroup.get(first_group_id)
1375 first_group = UserGroup.get(first_group_id)
1369 second_group = UserGroup.get(second_group_id)
1376 second_group = UserGroup.get(second_group_id)
1370 first_group_parts = (
1377 first_group_parts = (
1371 len(first_group.users_group_name.split('/'))
1378 len(first_group.users_group_name.split('/'))
1372 if first_group else 0)
1379 if first_group else 0)
1373 second_group_parts = (
1380 second_group_parts = (
1374 len(second_group.users_group_name.split('/'))
1381 len(second_group.users_group_name.split('/'))
1375 if second_group else 0)
1382 if second_group else 0)
1376 return cmp(second_group_parts, first_group_parts)
1383 return cmp(second_group_parts, first_group_parts)
1377
1384
1378 sorted_user_group_ids = sorted(
1385 sorted_user_group_ids = sorted(
1379 self.user_group_ids, cmp=_user_group_compare)
1386 self.user_group_ids, cmp=_user_group_compare)
1380 for user_group_id in sorted_user_group_ids:
1387 for user_group_id in sorted_user_group_ids:
1381 self.fixture.destroy_user_group(user_group_id)
1388 self.fixture.destroy_user_group(user_group_id)
1382
1389
1383 def _cleanup_users(self):
1390 def _cleanup_users(self):
1384 for user_id in self.user_ids:
1391 for user_id in self.user_ids:
1385 self.fixture.destroy_user(user_id)
1392 self.fixture.destroy_user(user_id)
1386
1393
1387
1394
1388 # TODO: Think about moving this into a pytest-pyro package and make it a
1395 # TODO: Think about moving this into a pytest-pyro package and make it a
1389 # pytest plugin
1396 # pytest plugin
1390 @pytest.hookimpl(tryfirst=True, hookwrapper=True)
1397 @pytest.hookimpl(tryfirst=True, hookwrapper=True)
1391 def pytest_runtest_makereport(item, call):
1398 def pytest_runtest_makereport(item, call):
1392 """
1399 """
1393 Adds the remote traceback to the report if the exception carries this information.
1400 Adds the remote traceback to the report if the exception carries this information.
1394
1401
1395 VCSServer attaches this information as the attribute `_vcs_server_traceback`
1402 VCSServer attaches this information as the attribute `_vcs_server_traceback`
1396 to the exception instance.
1403 to the exception instance.
1397 """
1404 """
1398 outcome = yield
1405 outcome = yield
1399 report = outcome.get_result()
1406 report = outcome.get_result()
1400 if call.excinfo:
1407 if call.excinfo:
1401 _add_vcsserver_remote_traceback(report, call.excinfo.value)
1408 _add_vcsserver_remote_traceback(report, call.excinfo.value)
1402
1409
1403
1410
1404 def _add_vcsserver_remote_traceback(report, exc):
1411 def _add_vcsserver_remote_traceback(report, exc):
1405 vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)
1412 vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)
1406
1413
1407 if vcsserver_traceback:
1414 if vcsserver_traceback:
1408 section = 'VCSServer remote traceback ' + report.when
1415 section = 'VCSServer remote traceback ' + report.when
1409 report.sections.append((section, vcsserver_traceback))
1416 report.sections.append((section, vcsserver_traceback))
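
The contract here is just an attribute name; a minimal sketch of an exception the hook would pick up (the traceback string is a placeholder):

# Sketch: any exception reaching the report phase with this attribute set gets
# an extra "VCSServer remote traceback <when>" section in the test report.
exc = RuntimeError('remote call failed')
exc._vcs_server_traceback = 'Traceback (most recent call last): ...'   # placeholder
# raising `exc` inside a test would make pytest_runtest_makereport attach it.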
1410
1417
1411
1418
1412 @pytest.fixture(scope='session')
1419 @pytest.fixture(scope='session')
1413 def testrun():
1420 def testrun():
1414 return {
1421 return {
1415 'uuid': uuid.uuid4(),
1422 'uuid': uuid.uuid4(),
1416 'start': datetime.datetime.utcnow().isoformat(),
1423 'start': datetime.datetime.utcnow().isoformat(),
1417 'timestamp': int(time.time()),
1424 'timestamp': int(time.time()),
1418 }
1425 }
1419
1426
1420
1427
1421 class AppenlightClient(object):
1428 class AppenlightClient(object):
1422
1429
1423 url_template = '{url}?protocol_version=0.5'
1430 url_template = '{url}?protocol_version=0.5'
1424
1431
1425 def __init__(
1432 def __init__(
1426 self, url, api_key, add_server=True, add_timestamp=True,
1433 self, url, api_key, add_server=True, add_timestamp=True,
1427 namespace=None, request=None, testrun=None):
1434 namespace=None, request=None, testrun=None):
1428 self.url = self.url_template.format(url=url)
1435 self.url = self.url_template.format(url=url)
1429 self.api_key = api_key
1436 self.api_key = api_key
1430 self.add_server = add_server
1437 self.add_server = add_server
1431 self.add_timestamp = add_timestamp
1438 self.add_timestamp = add_timestamp
1432 self.namespace = namespace
1439 self.namespace = namespace
1433 self.request = request
1440 self.request = request
1434 self.server = socket.getfqdn(socket.gethostname())
1441 self.server = socket.getfqdn(socket.gethostname())
1435 self.tags_before = {}
1442 self.tags_before = {}
1436 self.tags_after = {}
1443 self.tags_after = {}
1437 self.stats = []
1444 self.stats = []
1438 self.testrun = testrun or {}
1445 self.testrun = testrun or {}
1439
1446
1440 def tag_before(self, tag, value):
1447 def tag_before(self, tag, value):
1441 self.tags_before[tag] = value
1448 self.tags_before[tag] = value
1442
1449
1443 def tag_after(self, tag, value):
1450 def tag_after(self, tag, value):
1444 self.tags_after[tag] = value
1451 self.tags_after[tag] = value
1445
1452
1446 def collect(self, data):
1453 def collect(self, data):
1447 if self.add_server:
1454 if self.add_server:
1448 data.setdefault('server', self.server)
1455 data.setdefault('server', self.server)
1449 if self.add_timestamp:
1456 if self.add_timestamp:
1450 data.setdefault('date', datetime.datetime.utcnow().isoformat())
1457 data.setdefault('date', datetime.datetime.utcnow().isoformat())
1451 if self.namespace:
1458 if self.namespace:
1452 data.setdefault('namespace', self.namespace)
1459 data.setdefault('namespace', self.namespace)
1453 if self.request:
1460 if self.request:
1454 data.setdefault('request', self.request)
1461 data.setdefault('request', self.request)
1455 self.stats.append(data)
1462 self.stats.append(data)
1456
1463
1457 def send_stats(self):
1464 def send_stats(self):
1458 tags = [
1465 tags = [
1459 ('testrun', self.request),
1466 ('testrun', self.request),
1460 ('testrun.start', self.testrun['start']),
1467 ('testrun.start', self.testrun['start']),
1461 ('testrun.timestamp', self.testrun['timestamp']),
1468 ('testrun.timestamp', self.testrun['timestamp']),
1462 ('test', self.namespace),
1469 ('test', self.namespace),
1463 ]
1470 ]
1464 for key, value in self.tags_before.items():
1471 for key, value in self.tags_before.items():
1465 tags.append((key + '.before', value))
1472 tags.append((key + '.before', value))
1466 try:
1473 try:
1467 delta = self.tags_after[key] - value
1474 delta = self.tags_after[key] - value
1468 tags.append((key + '.delta', delta))
1475 tags.append((key + '.delta', delta))
1469 except Exception:
1476 except Exception:
1470 pass
1477 pass
1471 for key, value in self.tags_after.items():
1478 for key, value in self.tags_after.items():
1472 tags.append((key + '.after', value))
1479 tags.append((key + '.after', value))
1473 self.collect({
1480 self.collect({
1474 'message': "Collected tags",
1481 'message': "Collected tags",
1475 'tags': tags,
1482 'tags': tags,
1476 })
1483 })
1477
1484
1478 response = requests.post(
1485 response = requests.post(
1479 self.url,
1486 self.url,
1480 headers={
1487 headers={
1481 'X-appenlight-api-key': self.api_key},
1488 'X-appenlight-api-key': self.api_key},
1482 json=self.stats,
1489 json=self.stats,
1483 )
1490 )
1484
1491
1485 if not response.status_code == 200:
1492 if not response.status_code == 200:
1486 pprint.pprint(self.stats)
1493 pprint.pprint(self.stats)
1487 print(response.headers)
1494 print(response.headers)
1488 print(response.text)
1495 print(response.text)
1489 raise Exception('Sending to appenlight failed')
1496 raise Exception('Sending to appenlight failed')
1490
1497
1491
1498
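
A sketch of driving the client around a measured block; the URL, API key and tag values are placeholders:

# Hypothetical stats collection for one test run.
client = AppenlightClient(
    url='https://appenlight.example.com/api/logs',   # placeholder endpoint
    api_key='dummy-key',                             # placeholder key
    namespace='test_module.test_case',
    request='testrun-1234',
    testrun={'start': '2020-01-01T00:00:00', 'timestamp': 1577836800})

client.tag_before('memory_mb', 120)
# ... run the code being measured ...
client.tag_after('memory_mb', 155)                   # a .delta tag is derived
client.send_stats()                                  # raises unless HTTP 200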
1492 @pytest.fixture()
1499 @pytest.fixture()
1493 def gist_util(request, db_connection):
1500 def gist_util(request, db_connection):
1494 """
1501 """
1495 Provides a wired instance of `GistUtility` with integrated cleanup.
1502 Provides a wired instance of `GistUtility` with integrated cleanup.
1496 """
1503 """
1497 utility = GistUtility()
1504 utility = GistUtility()
1498 request.addfinalizer(utility.cleanup)
1505 request.addfinalizer(utility.cleanup)
1499 return utility
1506 return utility
1500
1507
1501
1508
1502 class GistUtility(object):
1509 class GistUtility(object):
1503 def __init__(self):
1510 def __init__(self):
1504 self.fixture = Fixture()
1511 self.fixture = Fixture()
1505 self.gist_ids = []
1512 self.gist_ids = []
1506
1513
1507 def create_gist(self, **kwargs):
1514 def create_gist(self, **kwargs):
1508 gist = self.fixture.create_gist(**kwargs)
1515 gist = self.fixture.create_gist(**kwargs)
1509 self.gist_ids.append(gist.gist_id)
1516 self.gist_ids.append(gist.gist_id)
1510 return gist
1517 return gist
1511
1518
1512 def cleanup(self):
1519 def cleanup(self):
1513 for id_ in self.gist_ids:
1520 for id_ in self.gist_ids:
1514 self.fixture.destroy_gists(str(id_))
1521 self.fixture.destroy_gists(str(id_))
1515
1522
1516
1523
1517 @pytest.fixture()
1524 @pytest.fixture()
1518 def enabled_backends(request):
1525 def enabled_backends(request):
1519 backends = request.config.option.backends
1526 backends = request.config.option.backends
1520 return backends[:]
1527 return backends[:]
1521
1528
1522
1529
1523 @pytest.fixture()
1530 @pytest.fixture()
1524 def settings_util(request, db_connection):
1531 def settings_util(request, db_connection):
1525 """
1532 """
1526 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1533 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1527 """
1534 """
1528 utility = SettingsUtility()
1535 utility = SettingsUtility()
1529 request.addfinalizer(utility.cleanup)
1536 request.addfinalizer(utility.cleanup)
1530 return utility
1537 return utility
1531
1538
1532
1539
1533 class SettingsUtility(object):
1540 class SettingsUtility(object):
1534 def __init__(self):
1541 def __init__(self):
1535 self.rhodecode_ui_ids = []
1542 self.rhodecode_ui_ids = []
1536 self.rhodecode_setting_ids = []
1543 self.rhodecode_setting_ids = []
1537 self.repo_rhodecode_ui_ids = []
1544 self.repo_rhodecode_ui_ids = []
1538 self.repo_rhodecode_setting_ids = []
1545 self.repo_rhodecode_setting_ids = []
1539
1546
1540 def create_repo_rhodecode_ui(
1547 def create_repo_rhodecode_ui(
1541 self, repo, section, value, key=None, active=True, cleanup=True):
1548 self, repo, section, value, key=None, active=True, cleanup=True):
1542 key = key or hashlib.sha1(
1549 key = key or hashlib.sha1(
1543 '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()
1550 '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()
1544
1551
1545 setting = RepoRhodeCodeUi()
1552 setting = RepoRhodeCodeUi()
1546 setting.repository_id = repo.repo_id
1553 setting.repository_id = repo.repo_id
1547 setting.ui_section = section
1554 setting.ui_section = section
1548 setting.ui_value = value
1555 setting.ui_value = value
1549 setting.ui_key = key
1556 setting.ui_key = key
1550 setting.ui_active = active
1557 setting.ui_active = active
1551 Session().add(setting)
1558 Session().add(setting)
1552 Session().commit()
1559 Session().commit()
1553
1560
1554 if cleanup:
1561 if cleanup:
1555 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1562 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1556 return setting
1563 return setting
1557
1564
1558 def create_rhodecode_ui(
1565 def create_rhodecode_ui(
1559 self, section, value, key=None, active=True, cleanup=True):
1566 self, section, value, key=None, active=True, cleanup=True):
1560 key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()
1567 key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()
1561
1568
1562 setting = RhodeCodeUi()
1569 setting = RhodeCodeUi()
1563 setting.ui_section = section
1570 setting.ui_section = section
1564 setting.ui_value = value
1571 setting.ui_value = value
1565 setting.ui_key = key
1572 setting.ui_key = key
1566 setting.ui_active = active
1573 setting.ui_active = active
1567 Session().add(setting)
1574 Session().add(setting)
1568 Session().commit()
1575 Session().commit()
1569
1576
1570 if cleanup:
1577 if cleanup:
1571 self.rhodecode_ui_ids.append(setting.ui_id)
1578 self.rhodecode_ui_ids.append(setting.ui_id)
1572 return setting
1579 return setting
1573
1580
1574 def create_repo_rhodecode_setting(
1581 def create_repo_rhodecode_setting(
1575 self, repo, name, value, type_, cleanup=True):
1582 self, repo, name, value, type_, cleanup=True):
1576 setting = RepoRhodeCodeSetting(
1583 setting = RepoRhodeCodeSetting(
1577 repo.repo_id, key=name, val=value, type=type_)
1584 repo.repo_id, key=name, val=value, type=type_)
1578 Session().add(setting)
1585 Session().add(setting)
1579 Session().commit()
1586 Session().commit()
1580
1587
1581 if cleanup:
1588 if cleanup:
1582 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1589 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1583 return setting
1590 return setting
1584
1591
1585 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1592 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1586 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1593 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1587 Session().add(setting)
1594 Session().add(setting)
1588 Session().commit()
1595 Session().commit()
1589
1596
1590 if cleanup:
1597 if cleanup:
1591 self.rhodecode_setting_ids.append(setting.app_settings_id)
1598 self.rhodecode_setting_ids.append(setting.app_settings_id)
1592
1599
1593 return setting
1600 return setting
1594
1601
1595 def cleanup(self):
1602 def cleanup(self):
1596 for id_ in self.rhodecode_ui_ids:
1603 for id_ in self.rhodecode_ui_ids:
1597 setting = RhodeCodeUi.get(id_)
1604 setting = RhodeCodeUi.get(id_)
1598 Session().delete(setting)
1605 Session().delete(setting)
1599
1606
1600 for id_ in self.rhodecode_setting_ids:
1607 for id_ in self.rhodecode_setting_ids:
1601 setting = RhodeCodeSetting.get(id_)
1608 setting = RhodeCodeSetting.get(id_)
1602 Session().delete(setting)
1609 Session().delete(setting)
1603
1610
1604 for id_ in self.repo_rhodecode_ui_ids:
1611 for id_ in self.repo_rhodecode_ui_ids:
1605 setting = RepoRhodeCodeUi.get(id_)
1612 setting = RepoRhodeCodeUi.get(id_)
1606 Session().delete(setting)
1613 Session().delete(setting)
1607
1614
1608 for id_ in self.repo_rhodecode_setting_ids:
1615 for id_ in self.repo_rhodecode_setting_ids:
1609 setting = RepoRhodeCodeSetting.get(id_)
1616 setting = RepoRhodeCodeSetting.get(id_)
1610 Session().delete(setting)
1617 Session().delete(setting)
1611
1618
1612 Session().commit()
1619 Session().commit()
1613
1620
1614
1621
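
A sketch of the utility in a test; the section, key and type values are illustrative only, and ``repo_stub`` is the repository fixture used by the integration stubs further down:

# Hypothetical test creating throw-away settings; cleanup() removes them again.
def test_settings_util_sketch(settings_util, repo_stub):
    ui_entry = settings_util.create_rhodecode_ui(
        'hooks', 'python:example.hook', active=True)
    repo_setting = settings_util.create_repo_rhodecode_setting(
        repo_stub, 'example_key', 'example_value', 'unicode')

    assert ui_entry.ui_section == 'hooks'
    assert repo_setting.app_settings_id is not None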
1615 @pytest.fixture()
1622 @pytest.fixture()
1616 def no_notifications(request):
1623 def no_notifications(request):
1617 notification_patcher = mock.patch(
1624 notification_patcher = mock.patch(
1618 'rhodecode.model.notification.NotificationModel.create')
1625 'rhodecode.model.notification.NotificationModel.create')
1619 notification_patcher.start()
1626 notification_patcher.start()
1620 request.addfinalizer(notification_patcher.stop)
1627 request.addfinalizer(notification_patcher.stop)
1621
1628
1622
1629
1623 @pytest.fixture(scope='session')
1630 @pytest.fixture(scope='session')
1624 def repeat(request):
1631 def repeat(request):
1625 """
1632 """
1626 The number of test repetitions is provided by this fixture.
1633 The number of test repetitions is provided by this fixture.
1627
1634
1628 Slower calls may divide it by 10 or 100. It is chosen so that the
1635 Slower calls may divide it by 10 or 100. It is chosen so that the
1629 tests are not too slow in our default test suite.
1636 tests are not too slow in our default test suite.
1630 """
1637 """
1631 return request.config.getoption('--repeat')
1638 return request.config.getoption('--repeat')
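
Typical consumption looks like this; how a test scales the value is up to the test itself:

# Sketch: a repetition-based test scaling its loop with --repeat.
def test_repeated_operation_sketch(repeat):
    iterations = max(int(repeat / 100), 1)   # slower call: divide as noted above
    for _ in range(iterations):
        pass   # exercise the code under test here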
1632
1639
1633
1640
1634 @pytest.fixture()
1641 @pytest.fixture()
1635 def rhodecode_fixtures():
1642 def rhodecode_fixtures():
1636 return Fixture()
1643 return Fixture()
1637
1644
1638
1645
1639 @pytest.fixture()
1646 @pytest.fixture()
1640 def context_stub():
1647 def context_stub():
1641 """
1648 """
1642 Stub context object.
1649 Stub context object.
1643 """
1650 """
1644 context = pyramid.testing.DummyResource()
1651 context = pyramid.testing.DummyResource()
1645 return context
1652 return context
1646
1653
1647
1654
1648 @pytest.fixture()
1655 @pytest.fixture()
1649 def request_stub():
1656 def request_stub():
1650 """
1657 """
1651 Stub request object.
1658 Stub request object.
1652 """
1659 """
1653 from rhodecode.lib.base import bootstrap_request
1660 from rhodecode.lib.base import bootstrap_request
1654 request = bootstrap_request(scheme='https')
1661 request = bootstrap_request(scheme='https')
1655 return request
1662 return request
1656
1663
1657
1664
1658 @pytest.fixture()
1665 @pytest.fixture()
1659 def config_stub(request, request_stub):
1666 def config_stub(request, request_stub):
1660 """
1667 """
1661 Set up pyramid.testing and return the Configurator.
1668 Set up pyramid.testing and return the Configurator.
1662 """
1669 """
1663 from rhodecode.lib.base import bootstrap_config
1670 from rhodecode.lib.base import bootstrap_config
1664 config = bootstrap_config(request=request_stub)
1671 config = bootstrap_config(request=request_stub)
1665
1672
1666 @request.addfinalizer
1673 @request.addfinalizer
1667 def cleanup():
1674 def cleanup():
1668 pyramid.testing.tearDown()
1675 pyramid.testing.tearDown()
1669
1676
1670 return config
1677 return config
1671
1678
1672
1679
1673 @pytest.fixture()
1680 @pytest.fixture()
1674 def StubIntegrationType():
1681 def StubIntegrationType():
1675 class _StubIntegrationType(IntegrationTypeBase):
1682 class _StubIntegrationType(IntegrationTypeBase):
1676 """ Test integration type class """
1683 """ Test integration type class """
1677
1684
1678 key = 'test'
1685 key = 'test'
1679 display_name = 'Test integration type'
1686 display_name = 'Test integration type'
1680 description = 'A test integration type for testing'
1687 description = 'A test integration type for testing'
1681
1688
1682 @classmethod
1689 @classmethod
1683 def icon(cls):
1690 def icon(cls):
1684 return 'test_icon_html_image'
1691 return 'test_icon_html_image'
1685
1692
1686 def __init__(self, settings):
1693 def __init__(self, settings):
1687 super(_StubIntegrationType, self).__init__(settings)
1694 super(_StubIntegrationType, self).__init__(settings)
1688 self.sent_events = [] # for testing
1695 self.sent_events = [] # for testing
1689
1696
1690 def send_event(self, event):
1697 def send_event(self, event):
1691 self.sent_events.append(event)
1698 self.sent_events.append(event)
1692
1699
1693 def settings_schema(self):
1700 def settings_schema(self):
1694 class SettingsSchema(colander.Schema):
1701 class SettingsSchema(colander.Schema):
1695 test_string_field = colander.SchemaNode(
1702 test_string_field = colander.SchemaNode(
1696 colander.String(),
1703 colander.String(),
1697 missing=colander.required,
1704 missing=colander.required,
1698 title='test string field',
1705 title='test string field',
1699 )
1706 )
1700 test_int_field = colander.SchemaNode(
1707 test_int_field = colander.SchemaNode(
1701 colander.Int(),
1708 colander.Int(),
1702 title='some integer setting',
1709 title='some integer setting',
1703 )
1710 )
1704 return SettingsSchema()
1711 return SettingsSchema()
1705
1712
1706
1713
1707 integration_type_registry.register_integration_type(_StubIntegrationType)
1714 integration_type_registry.register_integration_type(_StubIntegrationType)
1708 return _StubIntegrationType
1715 return _StubIntegrationType
1709
1716
1710 @pytest.fixture()
1717 @pytest.fixture()
1711 def stub_integration_settings():
1718 def stub_integration_settings():
1712 return {
1719 return {
1713 'test_string_field': 'some data',
1720 'test_string_field': 'some data',
1714 'test_int_field': 100,
1721 'test_int_field': 100,
1715 }
1722 }
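
The stub settings are meant to satisfy the stub's ``settings_schema``; a quick sketch of checking that with colander:

# Hypothetical test: deserialize the stub settings through the stub schema.
def test_stub_settings_match_schema_sketch(
        StubIntegrationType, stub_integration_settings):
    integration = StubIntegrationType(settings={})
    schema = integration.settings_schema()
    cleaned = schema.deserialize(stub_integration_settings)
    assert cleaned['test_int_field'] == 100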
1716
1723
1717
1724
1718 @pytest.fixture()
1725 @pytest.fixture()
1719 def repo_integration_stub(request, repo_stub, StubIntegrationType,
1726 def repo_integration_stub(request, repo_stub, StubIntegrationType,
1720 stub_integration_settings):
1727 stub_integration_settings):
1721 integration = IntegrationModel().create(
1728 integration = IntegrationModel().create(
1722 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1729 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1723 name='test repo integration',
1730 name='test repo integration',
1724 repo=repo_stub, repo_group=None, child_repos_only=None)
1731 repo=repo_stub, repo_group=None, child_repos_only=None)
1725
1732
1726 @request.addfinalizer
1733 @request.addfinalizer
1727 def cleanup():
1734 def cleanup():
1728 IntegrationModel().delete(integration)
1735 IntegrationModel().delete(integration)
1729
1736
1730 return integration
1737 return integration
1731
1738
1732
1739
1733 @pytest.fixture()
1740 @pytest.fixture()
1734 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
1741 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
1735 stub_integration_settings):
1742 stub_integration_settings):
1736 integration = IntegrationModel().create(
1743 integration = IntegrationModel().create(
1737 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1744 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1738 name='test repogroup integration',
1745 name='test repogroup integration',
1739 repo=None, repo_group=test_repo_group, child_repos_only=True)
1746 repo=None, repo_group=test_repo_group, child_repos_only=True)
1740
1747
1741 @request.addfinalizer
1748 @request.addfinalizer
1742 def cleanup():
1749 def cleanup():
1743 IntegrationModel().delete(integration)
1750 IntegrationModel().delete(integration)
1744
1751
1745 return integration
1752 return integration
1746
1753
1747
1754
1748 @pytest.fixture()
1755 @pytest.fixture()
1749 def repogroup_recursive_integration_stub(request, test_repo_group,
1756 def repogroup_recursive_integration_stub(request, test_repo_group,
1750 StubIntegrationType, stub_integration_settings):
1757 StubIntegrationType, stub_integration_settings):
1751 integration = IntegrationModel().create(
1758 integration = IntegrationModel().create(
1752 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1759 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1753 name='test recursive repogroup integration',
1760 name='test recursive repogroup integration',
1754 repo=None, repo_group=test_repo_group, child_repos_only=False)
1761 repo=None, repo_group=test_repo_group, child_repos_only=False)
1755
1762
1756 @request.addfinalizer
1763 @request.addfinalizer
1757 def cleanup():
1764 def cleanup():
1758 IntegrationModel().delete(integration)
1765 IntegrationModel().delete(integration)
1759
1766
1760 return integration
1767 return integration
1761
1768
1762
1769
1763 @pytest.fixture()
1770 @pytest.fixture()
1764 def global_integration_stub(request, StubIntegrationType,
1771 def global_integration_stub(request, StubIntegrationType,
1765 stub_integration_settings):
1772 stub_integration_settings):
1766 integration = IntegrationModel().create(
1773 integration = IntegrationModel().create(
1767 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1774 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1768 name='test global integration',
1775 name='test global integration',
1769 repo=None, repo_group=None, child_repos_only=None)
1776 repo=None, repo_group=None, child_repos_only=None)
1770
1777
1771 @request.addfinalizer
1778 @request.addfinalizer
1772 def cleanup():
1779 def cleanup():
1773 IntegrationModel().delete(integration)
1780 IntegrationModel().delete(integration)
1774
1781
1775 return integration
1782 return integration
1776
1783
1777
1784
1778 @pytest.fixture()
1785 @pytest.fixture()
1779 def root_repos_integration_stub(request, StubIntegrationType,
1786 def root_repos_integration_stub(request, StubIntegrationType,
1780 stub_integration_settings):
1787 stub_integration_settings):
1781 integration = IntegrationModel().create(
1788 integration = IntegrationModel().create(
1782 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1789 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1783 name='test global integration',
1790 name='test global integration',
1784 repo=None, repo_group=None, child_repos_only=True)
1791 repo=None, repo_group=None, child_repos_only=True)
1785
1792
1786 @request.addfinalizer
1793 @request.addfinalizer
1787 def cleanup():
1794 def cleanup():
1788 IntegrationModel().delete(integration)
1795 IntegrationModel().delete(integration)
1789
1796
1790 return integration
1797 return integration
1791
1798
1792
1799
1793 @pytest.fixture()
1800 @pytest.fixture()
1794 def local_dt_to_utc():
1801 def local_dt_to_utc():
1795 def _factory(dt):
1802 def _factory(dt):
1796 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
1803 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
1797 dateutil.tz.tzutc()).replace(tzinfo=None)
1804 dateutil.tz.tzutc()).replace(tzinfo=None)
1798 return _factory
1805 return _factory
1799
1806
1800
1807
1801 @pytest.fixture()
1808 @pytest.fixture()
1802 def disable_anonymous_user(request, baseapp):
1809 def disable_anonymous_user(request, baseapp):
1803 set_anonymous_access(False)
1810 set_anonymous_access(False)
1804
1811
1805 @request.addfinalizer
1812 @request.addfinalizer
1806 def cleanup():
1813 def cleanup():
1807 set_anonymous_access(True)
1814 set_anonymous_access(True)
1808
1815
1809
1816
1810 @pytest.fixture(scope='module')
1817 @pytest.fixture(scope='module')
1811 def rc_fixture(request):
1818 def rc_fixture(request):
1812 return Fixture()
1819 return Fixture()
1813
1820
1814
1821
1815 @pytest.fixture()
1822 @pytest.fixture()
1816 def repo_groups(request):
1823 def repo_groups(request):
1817 fixture = Fixture()
1824 fixture = Fixture()
1818
1825
1819 session = Session()
1826 session = Session()
1820 zombie_group = fixture.create_repo_group('zombie')
1827 zombie_group = fixture.create_repo_group('zombie')
1821 parent_group = fixture.create_repo_group('parent')
1828 parent_group = fixture.create_repo_group('parent')
1822 child_group = fixture.create_repo_group('parent/child')
1829 child_group = fixture.create_repo_group('parent/child')
1823 groups_in_db = session.query(RepoGroup).all()
1830 groups_in_db = session.query(RepoGroup).all()
1824 assert len(groups_in_db) == 3
1831 assert len(groups_in_db) == 3
1825 assert child_group.group_parent_id == parent_group.group_id
1832 assert child_group.group_parent_id == parent_group.group_id
1826
1833
1827 @request.addfinalizer
1834 @request.addfinalizer
1828 def cleanup():
1835 def cleanup():
1829 fixture.destroy_repo_group(zombie_group)
1836 fixture.destroy_repo_group(zombie_group)
1830 fixture.destroy_repo_group(child_group)
1837 fixture.destroy_repo_group(child_group)
1831 fixture.destroy_repo_group(parent_group)
1838 fixture.destroy_repo_group(parent_group)
1832
1839
1833 return zombie_group, parent_group, child_group
1840 return zombie_group, parent_group, child_group
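
For context, the sketch below shows how a test module might consume two of these fixtures. It is illustrative only and not part of the diff: the test names are hypothetical, and the second test assumes the Integration model exposes `name` and `enabled` attributes matching the arguments passed to IntegrationModel().create().

# Illustrative usage sketch, assuming the fixtures above live in a conftest.py
# visible to the test module.

def test_repo_group_hierarchy(repo_groups):
    # repo_groups returns the three groups created by the fixture
    zombie_group, parent_group, child_group = repo_groups
    assert child_group.group_parent_id == parent_group.group_id


def test_repo_integration_stub_is_enabled(repo_integration_stub):
    # assumed attribute names on the Integration object
    assert repo_integration_stub.name == 'test repo integration'
    assert repo_integration_stub.enabled is True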