python3: removed compat modules
super-admin
r4928:daf8135e default
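The change itself is mechanical and repeats once per touched file below: the OrderedDict indirection through rhodecode.lib.compat is replaced with a direct standard-library import. A minimal sketch of the before/after, assuming the compat module was a py2-era fallback shim (its body is not part of this diff):

    # Before: indirection through a compat shim (assumed py2-era fallback).
    # from rhodecode.lib.compat import OrderedDict
    # After: Python 3 always ships collections.OrderedDict.
    from collections import OrderedDict

    data = OrderedDict()
    data['instance'] = 'instance-id'  # keys iterate in insertion order
    print(list(data))                 # ['instance']
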
@@ -1,96 +1,96 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import time
22 22 import logging
23 23
24 24
25 25 from pyramid.httpexceptions import HTTPFound
26 26
27 27 from rhodecode.apps._base import BaseAppView
28 28 from rhodecode.lib import helpers as h
29 29 from rhodecode.lib.auth import LoginRequired
30 from rhodecode.lib.compat import OrderedDict
30 from collections import OrderedDict
31 31 from rhodecode.model.db import UserApiKeys
32 32
33 33 log = logging.getLogger(__name__)
34 34
35 35
36 36 class OpsView(BaseAppView):
37 37
38 38 def load_default_context(self):
39 39 c = self._get_local_tmpl_context()
40 40 c.user = c.auth_user.get_instance()
41 41
42 42 return c
43 43
44 44 def ops_ping(self):
45 45 data = OrderedDict()
46 46 data['instance'] = self.request.registry.settings.get('instance_id')
47 47
48 48 if getattr(self.request, 'user', None):
49 49 caller_name = 'anonymous'
50 50 if self.request.user.user_id:
51 51 caller_name = self.request.user.username
52 52
53 53 data['caller_ip'] = self.request.user.ip_addr
54 54 data['caller_name'] = caller_name
55 55
56 56 return {'ok': data}
57 57
58 58 def ops_error_test(self):
59 59 """
60 60 Test exception handling and emails on errors
61 61 """
62 62
63 63 class TestException(Exception):
64 64 pass
65 65 # sleep briefly to act as a crude rate limiter
66 66 time.sleep(2)
67 67 msg = ('RhodeCode Enterprise test exception. '
68 68 'Client:{}. Generation time: {}.'.format(self.request.user, time.time()))
69 69 raise TestException(msg)
70 70
71 71 def ops_redirect_test(self):
72 72 """
73 73 Test redirect handling
74 74 """
75 75 redirect_to = self.request.GET.get('to') or h.route_path('home')
76 76 raise HTTPFound(redirect_to)
77 77
78 78 @LoginRequired(auth_token_access=[UserApiKeys.ROLE_HTTP])
79 79 def ops_healthcheck(self):
80 80 from rhodecode.lib.system_info import load_system_info
81 81
82 82 vcsserver_info = load_system_info('vcs_server')
83 83 if vcsserver_info:
84 84 vcsserver_info = vcsserver_info['human_value']
85 85
86 86 db_info = load_system_info('database_info')
87 87 if db_info:
88 88 db_info = db_info['human_value']
89 89
90 90 health_spec = {
91 91 'caller_ip': self.request.user.ip_addr,
92 92 'vcsserver': vcsserver_info,
93 93 'db': db_info,
94 94 }
95 95
96 96 return {'healthcheck': health_spec}
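A small point about ops_ping above: the truthiness guard over self.request.user only works because getattr is given a None default; the two-argument form simply performs the attribute lookup and raises AttributeError when the attribute is missing. A standalone illustration (plain Python, not RhodeCode code):

    # getattr with a default returns a falsy sentinel; without one it raises.
    class Request(object):
        pass

    req = Request()
    print(getattr(req, 'user', None))  # None -- safe inside an if-guard
    try:
        getattr(req, 'user')           # equivalent to req.user
    except AttributeError as exc:
        print('raises:', exc)
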
@@ -1,1092 +1,1092 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22
23 23 import mock
24 24 import pytest
25 25
26 26 from rhodecode.apps.repository.tests.test_repo_compare import ComparePage
27 27 from rhodecode.apps.repository.views.repo_files import RepoFilesView
28 28 from rhodecode.lib import helpers as h
29 from rhodecode.lib.compat import OrderedDict
29 from collections import OrderedDict
30 30 from rhodecode.lib.ext_json import json
31 31 from rhodecode.lib.vcs import nodes
32 32
33 33 from rhodecode.lib.vcs.conf import settings
34 34 from rhodecode.tests import assert_session_flash
35 35 from rhodecode.tests.fixture import Fixture
36 36 from rhodecode.model.db import Session
37 37
38 38 fixture = Fixture()
39 39
40 40
41 41 def get_node_history(backend_type):
42 42 return {
43 43 'hg': json.loads(fixture.load_resource('hg_node_history_response.json')),
44 44 'git': json.loads(fixture.load_resource('git_node_history_response.json')),
45 45 'svn': json.loads(fixture.load_resource('svn_node_history_response.json')),
46 46 }[backend_type]
47 47
48 48
49 49 def route_path(name, params=None, **kwargs):
50 50 import urllib.parse
51 51
52 52 base_url = {
53 53 'repo_summary': '/{repo_name}',
54 54 'repo_archivefile': '/{repo_name}/archive/{fname}',
55 55 'repo_files_diff': '/{repo_name}/diff/{f_path}',
56 56 'repo_files_diff_2way_redirect': '/{repo_name}/diff-2way/{f_path}',
57 57 'repo_files': '/{repo_name}/files/{commit_id}/{f_path}',
58 58 'repo_files:default_path': '/{repo_name}/files/{commit_id}/',
59 59 'repo_files:default_commit': '/{repo_name}/files',
60 60 'repo_files:rendered': '/{repo_name}/render/{commit_id}/{f_path}',
61 61 'repo_files:annotated': '/{repo_name}/annotate/{commit_id}/{f_path}',
62 62 'repo_files:annotated_previous': '/{repo_name}/annotate-previous/{commit_id}/{f_path}',
63 63 'repo_files_nodelist': '/{repo_name}/nodelist/{commit_id}/{f_path}',
64 64 'repo_file_raw': '/{repo_name}/raw/{commit_id}/{f_path}',
65 65 'repo_file_download': '/{repo_name}/download/{commit_id}/{f_path}',
66 66 'repo_file_history': '/{repo_name}/history/{commit_id}/{f_path}',
67 67 'repo_file_authors': '/{repo_name}/authors/{commit_id}/{f_path}',
68 68 'repo_files_remove_file': '/{repo_name}/remove_file/{commit_id}/{f_path}',
69 69 'repo_files_delete_file': '/{repo_name}/delete_file/{commit_id}/{f_path}',
70 70 'repo_files_edit_file': '/{repo_name}/edit_file/{commit_id}/{f_path}',
71 71 'repo_files_update_file': '/{repo_name}/update_file/{commit_id}/{f_path}',
72 72 'repo_files_add_file': '/{repo_name}/add_file/{commit_id}/{f_path}',
73 73 'repo_files_create_file': '/{repo_name}/create_file/{commit_id}/{f_path}',
74 74 'repo_nodetree_full': '/{repo_name}/nodetree_full/{commit_id}/{f_path}',
75 75 'repo_nodetree_full:default_path': '/{repo_name}/nodetree_full/{commit_id}/',
76 76 }[name].format(**kwargs)
77 77
78 78 if params:
79 79 base_url = '{}?{}'.format(base_url, urllib.parse.urlencode(params))
80 80 return base_url
81 81
82 82
83 83 def assert_files_in_response(response, files, params):
84 84 template = (
85 85 'href="/%(repo_name)s/files/%(commit_id)s/%(name)s"')
86 86 _assert_items_in_response(response, files, template, params)
87 87
88 88
89 89 def assert_dirs_in_response(response, dirs, params):
90 90 template = (
91 91 'href="/%(repo_name)s/files/%(commit_id)s/%(name)s"')
92 92 _assert_items_in_response(response, dirs, template, params)
93 93
94 94
95 95 def _assert_items_in_response(response, items, template, params):
96 96 for item in items:
97 97 item_params = {'name': item}
98 98 item_params.update(params)
99 99 response.mustcontain(template % item_params)
100 100
101 101
102 102 def assert_timeago_in_response(response, items, params):
103 103 for item in items:
104 104 response.mustcontain(h.age_component(params['date']))
105 105
106 106
107 107 @pytest.mark.usefixtures("app")
108 108 class TestFilesViews(object):
109 109
110 110 def test_show_files(self, backend):
111 111 response = self.app.get(
112 112 route_path('repo_files',
113 113 repo_name=backend.repo_name,
114 114 commit_id='tip', f_path='/'))
115 115 commit = backend.repo.get_commit()
116 116
117 117 params = {
118 118 'repo_name': backend.repo_name,
119 119 'commit_id': commit.raw_id,
120 120 'date': commit.date
121 121 }
122 122 assert_dirs_in_response(response, ['docs', 'vcs'], params)
123 123 files = [
124 124 '.gitignore',
125 125 '.hgignore',
126 126 '.hgtags',
127 127 # TODO: missing in Git
128 128 # '.travis.yml',
129 129 'MANIFEST.in',
130 130 'README.rst',
131 131 # TODO: File is missing in svn repository
132 132 # 'run_test_and_report.sh',
133 133 'setup.cfg',
134 134 'setup.py',
135 135 'test_and_report.sh',
136 136 'tox.ini',
137 137 ]
138 138 assert_files_in_response(response, files, params)
139 139 assert_timeago_in_response(response, files, params)
140 140
141 141 def test_show_files_links_submodules_with_absolute_url(self, backend_hg):
142 142 repo = backend_hg['subrepos']
143 143 response = self.app.get(
144 144 route_path('repo_files',
145 145 repo_name=repo.repo_name,
146 146 commit_id='tip', f_path='/'))
147 147 assert_response = response.assert_response()
148 148 assert_response.contains_one_link(
149 149 'absolute-path @ 000000000000', 'http://example.com/absolute-path')
150 150
151 151 def test_show_files_links_submodules_with_absolute_url_subpaths(
152 152 self, backend_hg):
153 153 repo = backend_hg['subrepos']
154 154 response = self.app.get(
155 155 route_path('repo_files',
156 156 repo_name=repo.repo_name,
157 157 commit_id='tip', f_path='/'))
158 158 assert_response = response.assert_response()
159 159 assert_response.contains_one_link(
160 160 'subpaths-path @ 000000000000',
161 161 'http://sub-base.example.com/subpaths-path')
162 162
163 163 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
164 164 def test_files_menu(self, backend):
165 165 new_branch = "temp_branch_name"
166 166 commits = [
167 167 {'message': 'a'},
168 168 {'message': 'b', 'branch': new_branch}
169 169 ]
170 170 backend.create_repo(commits)
171 171 backend.repo.landing_rev = "branch:%s" % new_branch
172 172 Session().commit()
173 173
174 174 # get response based on tip and not new commit
175 175 response = self.app.get(
176 176 route_path('repo_files',
177 177 repo_name=backend.repo_name,
178 178 commit_id='tip', f_path='/'))
179 179
180 180 # make sure Files menu url is not tip but new commit
181 181 landing_rev = backend.repo.landing_ref_name
182 182 files_url = route_path('repo_files:default_path',
183 183 repo_name=backend.repo_name,
184 184 commit_id=landing_rev, params={'at': landing_rev})
185 185
186 186 assert landing_rev != 'tip'
187 187 response.mustcontain(
188 188 '<li class="active"><a class="menulink" href="%s">' % files_url)
189 189
190 190 def test_show_files_commit(self, backend):
191 191 commit = backend.repo.get_commit(commit_idx=32)
192 192
193 193 response = self.app.get(
194 194 route_path('repo_files',
195 195 repo_name=backend.repo_name,
196 196 commit_id=commit.raw_id, f_path='/'))
197 197
198 198 dirs = ['docs', 'tests']
199 199 files = ['README.rst']
200 200 params = {
201 201 'repo_name': backend.repo_name,
202 202 'commit_id': commit.raw_id,
203 203 }
204 204 assert_dirs_in_response(response, dirs, params)
205 205 assert_files_in_response(response, files, params)
206 206
207 207 def test_show_files_different_branch(self, backend):
208 208 branches = dict(
209 209 hg=(150, ['git']),
210 210 # TODO: Git test repository does not contain other branches
211 211 git=(633, ['master']),
212 212 # TODO: Branch support in Subversion
213 213 svn=(150, [])
214 214 )
215 215 idx, branches = branches[backend.alias]
216 216 commit = backend.repo.get_commit(commit_idx=idx)
217 217 response = self.app.get(
218 218 route_path('repo_files',
219 219 repo_name=backend.repo_name,
220 220 commit_id=commit.raw_id, f_path='/'))
221 221
222 222 assert_response = response.assert_response()
223 223 for branch in branches:
224 224 assert_response.element_contains('.tags .branchtag', branch)
225 225
226 226 def test_show_files_paging(self, backend):
227 227 repo = backend.repo
228 228 indexes = [73, 92, 109, 1, 0]
229 229 idx_map = [(rev, repo.get_commit(commit_idx=rev).raw_id)
230 230 for rev in indexes]
231 231
232 232 for idx in idx_map:
233 233 response = self.app.get(
234 234 route_path('repo_files',
235 235 repo_name=backend.repo_name,
236 236 commit_id=idx[1], f_path='/'))
237 237
238 238 response.mustcontain("""r%s:%s""" % (idx[0], idx[1][:8]))
239 239
240 240 def test_file_source(self, backend):
241 241 commit = backend.repo.get_commit(commit_idx=167)
242 242 response = self.app.get(
243 243 route_path('repo_files',
244 244 repo_name=backend.repo_name,
245 245 commit_id=commit.raw_id, f_path='vcs/nodes.py'))
246 246
247 247 msgbox = """<div class="commit">%s</div>"""
248 248 response.mustcontain(msgbox % (commit.message, ))
249 249
250 250 assert_response = response.assert_response()
251 251 if commit.branch:
252 252 assert_response.element_contains(
253 253 '.tags.tags-main .branchtag', commit.branch)
254 254 if commit.tags:
255 255 for tag in commit.tags:
256 256 assert_response.element_contains('.tags.tags-main .tagtag', tag)
257 257
258 258 def test_file_source_annotated(self, backend):
259 259 response = self.app.get(
260 260 route_path('repo_files:annotated',
261 261 repo_name=backend.repo_name,
262 262 commit_id='tip', f_path='vcs/nodes.py'))
263 263 expected_commits = {
264 264 'hg': 'r356',
265 265 'git': 'r345',
266 266 'svn': 'r208',
267 267 }
268 268 response.mustcontain(expected_commits[backend.alias])
269 269
270 270 def test_file_source_authors(self, backend):
271 271 response = self.app.get(
272 272 route_path('repo_file_authors',
273 273 repo_name=backend.repo_name,
274 274 commit_id='tip', f_path='vcs/nodes.py'))
275 275 expected_authors = {
276 276 'hg': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
277 277 'git': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
278 278 'svn': ('marcin', 'lukasz'),
279 279 }
280 280
281 281 for author in expected_authors[backend.alias]:
282 282 response.mustcontain(author)
283 283
284 284 def test_file_source_authors_with_annotation(self, backend):
285 285 response = self.app.get(
286 286 route_path('repo_file_authors',
287 287 repo_name=backend.repo_name,
288 288 commit_id='tip', f_path='vcs/nodes.py',
289 289 params=dict(annotate=1)))
290 290 expected_authors = {
291 291 'hg': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
292 292 'git': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
293 293 'svn': ('marcin', 'lukasz'),
294 294 }
295 295
296 296 for author in expected_authors[backend.alias]:
297 297 response.mustcontain(author)
298 298
299 299 def test_file_source_history(self, backend, xhr_header):
300 300 response = self.app.get(
301 301 route_path('repo_file_history',
302 302 repo_name=backend.repo_name,
303 303 commit_id='tip', f_path='vcs/nodes.py'),
304 304 extra_environ=xhr_header)
305 305 assert get_node_history(backend.alias) == json.loads(response.body)
306 306
307 307 def test_file_source_history_svn(self, backend_svn, xhr_header):
308 308 simple_repo = backend_svn['svn-simple-layout']
309 309 response = self.app.get(
310 310 route_path('repo_file_history',
311 311 repo_name=simple_repo.repo_name,
312 312 commit_id='tip', f_path='trunk/example.py'),
313 313 extra_environ=xhr_header)
314 314
315 315 expected_data = json.loads(
316 316 fixture.load_resource('svn_node_history_branches.json'))
317 317
318 318 assert expected_data == response.json
319 319
320 320 def test_file_source_history_with_annotation(self, backend, xhr_header):
321 321 response = self.app.get(
322 322 route_path('repo_file_history',
323 323 repo_name=backend.repo_name,
324 324 commit_id='tip', f_path='vcs/nodes.py',
325 325 params=dict(annotate=1)),
326 326
327 327 extra_environ=xhr_header)
328 328 assert get_node_history(backend.alias) == json.loads(response.body)
329 329
330 330 def test_tree_search_top_level(self, backend, xhr_header):
331 331 commit = backend.repo.get_commit(commit_idx=173)
332 332 response = self.app.get(
333 333 route_path('repo_files_nodelist',
334 334 repo_name=backend.repo_name,
335 335 commit_id=commit.raw_id, f_path='/'),
336 336 extra_environ=xhr_header)
337 337 assert 'nodes' in response.json
338 338 assert {'name': 'docs', 'type': 'dir'} in response.json['nodes']
339 339
340 340 def test_tree_search_missing_xhr(self, backend):
341 341 self.app.get(
342 342 route_path('repo_files_nodelist',
343 343 repo_name=backend.repo_name,
344 344 commit_id='tip', f_path='/'),
345 345 status=404)
346 346
347 347 def test_tree_search_at_path(self, backend, xhr_header):
348 348 commit = backend.repo.get_commit(commit_idx=173)
349 349 response = self.app.get(
350 350 route_path('repo_files_nodelist',
351 351 repo_name=backend.repo_name,
352 352 commit_id=commit.raw_id, f_path='/docs'),
353 353 extra_environ=xhr_header)
354 354 assert 'nodes' in response.json
355 355 nodes = response.json['nodes']
356 356 assert {'name': 'docs/api', 'type': 'dir'} in nodes
357 357 assert {'name': 'docs/index.rst', 'type': 'file'} in nodes
358 358
359 359 def test_tree_search_at_path_2nd_level(self, backend, xhr_header):
360 360 commit = backend.repo.get_commit(commit_idx=173)
361 361 response = self.app.get(
362 362 route_path('repo_files_nodelist',
363 363 repo_name=backend.repo_name,
364 364 commit_id=commit.raw_id, f_path='/docs/api'),
365 365 extra_environ=xhr_header)
366 366 assert 'nodes' in response.json
367 367 nodes = response.json['nodes']
368 368 assert {'name': 'docs/api/index.rst', 'type': 'file'} in nodes
369 369
370 370 def test_tree_search_at_path_missing_xhr(self, backend):
371 371 self.app.get(
372 372 route_path('repo_files_nodelist',
373 373 repo_name=backend.repo_name,
374 374 commit_id='tip', f_path='/docs'),
375 375 status=404)
376 376
377 377 def test_nodetree(self, backend, xhr_header):
378 378 commit = backend.repo.get_commit(commit_idx=173)
379 379 response = self.app.get(
380 380 route_path('repo_nodetree_full',
381 381 repo_name=backend.repo_name,
382 382 commit_id=commit.raw_id, f_path='/'),
383 383 extra_environ=xhr_header)
384 384
385 385 assert_response = response.assert_response()
386 386
387 387 for attr in ['data-commit-id', 'data-date', 'data-author']:
388 388 elements = assert_response.get_elements('[{}]'.format(attr))
389 389 assert len(elements) > 1
390 390
391 391 for element in elements:
392 392 assert element.get(attr)
393 393
394 394 def test_nodetree_if_file(self, backend, xhr_header):
395 395 commit = backend.repo.get_commit(commit_idx=173)
396 396 response = self.app.get(
397 397 route_path('repo_nodetree_full',
398 398 repo_name=backend.repo_name,
399 399 commit_id=commit.raw_id, f_path='README.rst'),
400 400 extra_environ=xhr_header)
401 401 assert response.body == b''
402 402
403 403 def test_nodetree_wrong_path(self, backend, xhr_header):
404 404 commit = backend.repo.get_commit(commit_idx=173)
405 405 response = self.app.get(
406 406 route_path('repo_nodetree_full',
407 407 repo_name=backend.repo_name,
408 408 commit_id=commit.raw_id, f_path='/dont-exist'),
409 409 extra_environ=xhr_header)
410 410
411 411 err = 'error: There is no file nor ' \
412 412 'directory at the given path'
413 413 assert err in response.text
414 414
415 415 def test_nodetree_missing_xhr(self, backend):
416 416 self.app.get(
417 417 route_path('repo_nodetree_full',
418 418 repo_name=backend.repo_name,
419 419 commit_id='tip', f_path='/'),
420 420 status=404)
421 421
422 422
423 423 @pytest.mark.usefixtures("app", "autologin_user")
424 424 class TestRawFileHandling(object):
425 425
426 426 def test_download_file(self, backend):
427 427 commit = backend.repo.get_commit(commit_idx=173)
428 428 response = self.app.get(
429 429 route_path('repo_file_download',
430 430 repo_name=backend.repo_name,
431 431 commit_id=commit.raw_id, f_path='vcs/nodes.py'),)
432 432
433 433 assert response.content_disposition == 'attachment; filename="nodes.py"; filename*=UTF-8\'\'nodes.py'
434 434 assert response.content_type == "text/x-python"
435 435
436 436 def test_download_file_wrong_cs(self, backend):
437 437 raw_id = u'ERRORce30c96924232dffcd24178a07ffeb5dfc'
438 438
439 439 response = self.app.get(
440 440 route_path('repo_file_download',
441 441 repo_name=backend.repo_name,
442 442 commit_id=raw_id, f_path='vcs/nodes.svg'),
443 443 status=404)
444 444
445 445 msg = """No such commit exists for this repository"""
446 446 response.mustcontain(msg)
447 447
448 448 def test_download_file_wrong_f_path(self, backend):
449 449 commit = backend.repo.get_commit(commit_idx=173)
450 450 f_path = 'vcs/ERRORnodes.py'
451 451
452 452 response = self.app.get(
453 453 route_path('repo_file_download',
454 454 repo_name=backend.repo_name,
455 455 commit_id=commit.raw_id, f_path=f_path),
456 456 status=404)
457 457
458 458 msg = (
459 459 "There is no file nor directory at the given path: "
460 460 "`%s` at commit %s" % (f_path, commit.short_id))
461 461 response.mustcontain(msg)
462 462
463 463 def test_file_raw(self, backend):
464 464 commit = backend.repo.get_commit(commit_idx=173)
465 465 response = self.app.get(
466 466 route_path('repo_file_raw',
467 467 repo_name=backend.repo_name,
468 468 commit_id=commit.raw_id, f_path='vcs/nodes.py'),)
469 469
470 470 assert response.content_type == "text/plain"
471 471
472 472 def test_file_raw_binary(self, backend):
473 473 commit = backend.repo.get_commit()
474 474 response = self.app.get(
475 475 route_path('repo_file_raw',
476 476 repo_name=backend.repo_name,
477 477 commit_id=commit.raw_id,
478 478 f_path='docs/theme/ADC/static/breadcrumb_background.png'),)
479 479
480 480 assert response.content_disposition == 'inline'
481 481
482 482 def test_raw_file_wrong_cs(self, backend):
483 483 raw_id = u'ERRORcce30c96924232dffcd24178a07ffeb5dfc'
484 484
485 485 response = self.app.get(
486 486 route_path('repo_file_raw',
487 487 repo_name=backend.repo_name,
488 488 commit_id=raw_id, f_path='vcs/nodes.svg'),
489 489 status=404)
490 490
491 491 msg = """No such commit exists for this repository"""
492 492 response.mustcontain(msg)
493 493
494 494 def test_raw_wrong_f_path(self, backend):
495 495 commit = backend.repo.get_commit(commit_idx=173)
496 496 f_path = 'vcs/ERRORnodes.py'
497 497 response = self.app.get(
498 498 route_path('repo_file_raw',
499 499 repo_name=backend.repo_name,
500 500 commit_id=commit.raw_id, f_path=f_path),
501 501 status=404)
502 502
503 503 msg = (
504 504 "There is no file nor directory at the given path: "
505 505 "`%s` at commit %s" % (f_path, commit.short_id))
506 506 response.mustcontain(msg)
507 507
508 508 def test_raw_svg_should_not_be_rendered(self, backend):
509 509 backend.create_repo()
510 510 backend.ensure_file("xss.svg")
511 511 response = self.app.get(
512 512 route_path('repo_file_raw',
513 513 repo_name=backend.repo_name,
514 514 commit_id='tip', f_path='xss.svg'),)
515 515 # If the content type were image/svg+xml, the browser could render
516 516 # HTML and malicious SVG inline.
517 517 assert response.content_type == "text/plain"
518 518
519 519
520 520 @pytest.mark.usefixtures("app")
521 521 class TestRepositoryArchival(object):
522 522
523 523 def test_archival(self, backend):
524 524 backend.enable_downloads()
525 525 commit = backend.repo.get_commit(commit_idx=173)
526 526 for a_type, content_type, extension in settings.ARCHIVE_SPECS:
527 527
528 528 short = commit.short_id + extension
529 529 fname = commit.raw_id + extension
530 530 filename = '%s-%s' % (backend.repo_name, short)
531 531 response = self.app.get(
532 532 route_path('repo_archivefile',
533 533 repo_name=backend.repo_name,
534 534 fname=fname))
535 535
536 536 assert response.status == '200 OK'
537 537 headers = [
538 538 ('Content-Disposition', 'attachment; filename=%s' % filename),
539 539 ('Content-Type', '%s' % content_type),
540 540 ]
541 541
542 542 for header in headers:
543 543 assert header in response.headers.items()
544 544
545 545 def test_archival_no_hash(self, backend):
546 546 backend.enable_downloads()
547 547 commit = backend.repo.get_commit(commit_idx=173)
548 548 for a_type, content_type, extension in settings.ARCHIVE_SPECS:
549 549
550 550 short = 'plain' + extension
551 551 fname = commit.raw_id + extension
552 552 filename = '%s-%s' % (backend.repo_name, short)
553 553 response = self.app.get(
554 554 route_path('repo_archivefile',
555 555 repo_name=backend.repo_name,
556 556 fname=fname, params={'with_hash': 0}))
557 557
558 558 assert response.status == '200 OK'
559 559 headers = [
560 560 ('Content-Disposition', 'attachment; filename=%s' % filename),
561 561 ('Content-Type', '%s' % content_type),
562 562 ]
563 563
564 564 for header in headers:
565 565 assert header in response.headers.items()
566 566
567 567 @pytest.mark.parametrize('arch_ext', [
568 568 'tar', 'rar', 'x', '..ax', '.zipz', 'tar.gz.tar'])
569 569 def test_archival_wrong_ext(self, backend, arch_ext):
570 570 backend.enable_downloads()
571 571 commit = backend.repo.get_commit(commit_idx=173)
572 572
573 573 fname = commit.raw_id + '.' + arch_ext
574 574
575 575 response = self.app.get(
576 576 route_path('repo_archivefile',
577 577 repo_name=backend.repo_name,
578 578 fname=fname))
579 579 response.mustcontain(
580 580 'Unknown archive type for: `{}`'.format(fname))
581 581
582 582 @pytest.mark.parametrize('commit_id', [
583 583 '00x000000', 'tar', 'wrong', '@$@$42413232', '232dffcd'])
584 584 def test_archival_wrong_commit_id(self, backend, commit_id):
585 585 backend.enable_downloads()
586 586 fname = '%s.zip' % commit_id
587 587
588 588 response = self.app.get(
589 589 route_path('repo_archivefile',
590 590 repo_name=backend.repo_name,
591 591 fname=fname))
592 592 response.mustcontain('Unknown commit_id')
593 593
594 594
595 595 @pytest.mark.usefixtures("app")
596 596 class TestFilesDiff(object):
597 597
598 598 @pytest.mark.parametrize("diff", ['diff', 'download', 'raw'])
599 599 def test_file_full_diff(self, backend, diff):
600 600 commit1 = backend.repo.get_commit(commit_idx=-1)
601 601 commit2 = backend.repo.get_commit(commit_idx=-2)
602 602
603 603 response = self.app.get(
604 604 route_path('repo_files_diff',
605 605 repo_name=backend.repo_name,
606 606 f_path='README'),
607 607 params={
608 608 'diff1': commit2.raw_id,
609 609 'diff2': commit1.raw_id,
610 610 'fulldiff': '1',
611 611 'diff': diff,
612 612 })
613 613
614 614 if diff == 'diff':
615 615 # use redirect since this is OLD view redirecting to compare page
616 616 response = response.follow()
617 617
618 618 # It's a symlink to README.rst
619 619 response.mustcontain('README.rst')
620 620 response.mustcontain('No newline at end of file')
621 621
622 622 def test_file_binary_diff(self, backend):
623 623 commits = [
624 624 {'message': 'First commit'},
625 625 {'message': 'Commit with binary',
626 626 'added': [nodes.FileNode('file.bin', content='\0BINARY\0')]},
627 627 ]
628 628 repo = backend.create_repo(commits=commits)
629 629
630 630 response = self.app.get(
631 631 route_path('repo_files_diff',
632 632 repo_name=backend.repo_name,
633 633 f_path='file.bin'),
634 634 params={
635 635 'diff1': repo.get_commit(commit_idx=0).raw_id,
636 636 'diff2': repo.get_commit(commit_idx=1).raw_id,
637 637 'fulldiff': '1',
638 638 'diff': 'diff',
639 639 })
640 640 # use redirect since this is OLD view redirecting to compare page
641 641 response = response.follow()
642 642 response.mustcontain('Collapse 1 commit')
643 643 file_changes = (1, 0, 0)
644 644
645 645 compare_page = ComparePage(response)
646 646 compare_page.contains_change_summary(*file_changes)
647 647
648 648 if backend.alias == 'svn':
649 649 response.mustcontain('new file 10644')
650 650 # TODO(marcink): SVN doesn't yet detect binary changes
651 651 else:
652 652 response.mustcontain('new file 100644')
653 653 response.mustcontain('binary diff hidden')
654 654
655 655 def test_diff_2way(self, backend):
656 656 commit1 = backend.repo.get_commit(commit_idx=-1)
657 657 commit2 = backend.repo.get_commit(commit_idx=-2)
658 658 response = self.app.get(
659 659 route_path('repo_files_diff_2way_redirect',
660 660 repo_name=backend.repo_name,
661 661 f_path='README'),
662 662 params={
663 663 'diff1': commit2.raw_id,
664 664 'diff2': commit1.raw_id,
665 665 })
666 666 # use redirect since this is OLD view redirecting to compare page
667 667 response = response.follow()
668 668
669 669 # It's a symlink to README.rst
670 670 response.mustcontain('README.rst')
671 671 response.mustcontain('No newline at end of file')
672 672
673 673 def test_requires_one_commit_id(self, backend, autologin_user):
674 674 response = self.app.get(
675 675 route_path('repo_files_diff',
676 676 repo_name=backend.repo_name,
677 677 f_path='README.rst'),
678 678 status=400)
679 679 response.mustcontain(
680 680 'Need query parameter', 'diff1', 'diff2', 'to generate a diff.')
681 681
682 682 def test_returns_no_files_if_file_does_not_exist(self, vcsbackend):
683 683 repo = vcsbackend.repo
684 684 response = self.app.get(
685 685 route_path('repo_files_diff',
686 686 repo_name=repo.name,
687 687 f_path='does-not-exist-in-any-commit'),
688 688 params={
689 689 'diff1': repo[0].raw_id,
690 690 'diff2': repo[1].raw_id
691 691 })
692 692
693 693 response = response.follow()
694 694 response.mustcontain('No files')
695 695
696 696 def test_returns_redirect_if_file_not_changed(self, backend):
697 697 commit = backend.repo.get_commit(commit_idx=-1)
698 698 response = self.app.get(
699 699 route_path('repo_files_diff_2way_redirect',
700 700 repo_name=backend.repo_name,
701 701 f_path='README'),
702 702 params={
703 703 'diff1': commit.raw_id,
704 704 'diff2': commit.raw_id,
705 705 })
706 706
707 707 response = response.follow()
708 708 response.mustcontain('No files')
709 709 response.mustcontain('No commits in this compare')
710 710
711 711 def test_supports_diff_to_different_path_svn(self, backend_svn):
712 712 #TODO: check this case
713 713 return
714 714
715 715 repo = backend_svn['svn-simple-layout'].scm_instance()
716 716 commit_id_1 = '24'
717 717 commit_id_2 = '26'
718 718
719 719 response = self.app.get(
720 720 route_path('repo_files_diff',
721 721 repo_name=backend_svn.repo_name,
722 722 f_path='trunk/example.py'),
723 723 params={
724 724 'diff1': 'tags/v0.2/example.py@' + commit_id_1,
725 725 'diff2': commit_id_2,
726 726 })
727 727
728 728 response = response.follow()
729 729 response.mustcontain(
730 730 # diff contains this
731 731 "Will print out a useful message on invocation.")
732 732
733 733 # Note: Expecting that we indicate the user what's being compared
734 734 response.mustcontain("trunk/example.py")
735 735 response.mustcontain("tags/v0.2/example.py")
736 736
737 737 def test_show_rev_redirects_to_svn_path(self, backend_svn):
738 738 #TODO: check this case
739 739 return
740 740
741 741 repo = backend_svn['svn-simple-layout'].scm_instance()
742 742 commit_id = repo[-1].raw_id
743 743
744 744 response = self.app.get(
745 745 route_path('repo_files_diff',
746 746 repo_name=backend_svn.repo_name,
747 747 f_path='trunk/example.py'),
748 748 params={
749 749 'diff1': 'branches/argparse/example.py@' + commit_id,
750 750 'diff2': commit_id,
751 751 },
752 752 status=302)
753 753 response = response.follow()
754 754 assert response.headers['Location'].endswith(
755 755 'svn-svn-simple-layout/files/26/branches/argparse/example.py')
756 756
757 757 def test_show_rev_and_annotate_redirects_to_svn_path(self, backend_svn):
758 758 #TODO: check this case
759 759 return
760 760
761 761 repo = backend_svn['svn-simple-layout'].scm_instance()
762 762 commit_id = repo[-1].raw_id
763 763 response = self.app.get(
764 764 route_path('repo_files_diff',
765 765 repo_name=backend_svn.repo_name,
766 766 f_path='trunk/example.py'),
767 767 params={
768 768 'diff1': 'branches/argparse/example.py@' + commit_id,
769 769 'diff2': commit_id,
770 770 'show_rev': 'Show at Revision',
771 771 'annotate': 'true',
772 772 },
773 773 status=302)
774 774 response = response.follow()
775 775 assert response.headers['Location'].endswith(
776 776 'svn-svn-simple-layout/annotate/26/branches/argparse/example.py')
777 777
778 778
779 779 @pytest.mark.usefixtures("app", "autologin_user")
780 780 class TestModifyFilesWithWebInterface(object):
781 781
782 782 def test_add_file_view(self, backend):
783 783 self.app.get(
784 784 route_path('repo_files_add_file',
785 785 repo_name=backend.repo_name,
786 786 commit_id='tip', f_path='/')
787 787 )
788 788
789 789 @pytest.mark.xfail_backends("svn", reason="Depends on online editing")
790 790 def test_add_file_into_repo_missing_content(self, backend, csrf_token):
791 791 backend.create_repo()
792 792 filename = 'init.py'
793 793 response = self.app.post(
794 794 route_path('repo_files_create_file',
795 795 repo_name=backend.repo_name,
796 796 commit_id='tip', f_path='/'),
797 797 params={
798 798 'content': "",
799 799 'filename': filename,
800 800 'csrf_token': csrf_token,
801 801 },
802 802 status=302)
803 803 expected_msg = 'Successfully committed new file `{}`'.format(filename)
804 804 assert_session_flash(response, expected_msg)
805 805
806 806 def test_add_file_into_repo_missing_filename(self, backend, csrf_token):
807 807 commit_id = backend.repo.get_commit().raw_id
808 808 response = self.app.post(
809 809 route_path('repo_files_create_file',
810 810 repo_name=backend.repo_name,
811 811 commit_id=commit_id, f_path='/'),
812 812 params={
813 813 'content': "foo",
814 814 'csrf_token': csrf_token,
815 815 },
816 816 status=302)
817 817
818 818 assert_session_flash(response, 'No filename specified')
819 819
820 820 def test_add_file_into_repo_errors_and_no_commits(
821 821 self, backend, csrf_token):
822 822 repo = backend.create_repo()
823 823 # Create a file with no filename, it will display an error but
824 824 # the repo has no commits yet
825 825 response = self.app.post(
826 826 route_path('repo_files_create_file',
827 827 repo_name=repo.repo_name,
828 828 commit_id='tip', f_path='/'),
829 829 params={
830 830 'content': "foo",
831 831 'csrf_token': csrf_token,
832 832 },
833 833 status=302)
834 834
835 835 assert_session_flash(response, 'No filename specified')
836 836
837 837 # Not allowed, redirect to the summary
838 838 redirected = response.follow()
839 839 summary_url = h.route_path('repo_summary', repo_name=repo.repo_name)
840 840
841 841 # As there are no commits, displays the summary page with the error of
842 842 # creating a file with no filename
843 843
844 844 assert redirected.request.path == summary_url
845 845
846 846 @pytest.mark.parametrize("filename, clean_filename", [
847 847 ('/abs/foo', 'abs/foo'),
848 848 ('../rel/foo', 'rel/foo'),
849 849 ('file/../foo/foo', 'file/foo/foo'),
850 850 ])
851 851 def test_add_file_into_repo_bad_filenames(self, filename, clean_filename, backend, csrf_token):
852 852 repo = backend.create_repo()
853 853 commit_id = repo.get_commit().raw_id
854 854
855 855 response = self.app.post(
856 856 route_path('repo_files_create_file',
857 857 repo_name=repo.repo_name,
858 858 commit_id=commit_id, f_path='/'),
859 859 params={
860 860 'content': "foo",
861 861 'filename': filename,
862 862 'csrf_token': csrf_token,
863 863 },
864 864 status=302)
865 865
866 866 expected_msg = 'Successfully committed new file `{}`'.format(clean_filename)
867 867 assert_session_flash(response, expected_msg)
868 868
869 869 @pytest.mark.parametrize("cnt, filename, content", [
870 870 (1, 'foo.txt', "Content"),
871 871 (2, 'dir/foo.rst', "Content"),
872 872 (3, 'dir/foo-second.rst', "Content"),
873 873 (4, 'rel/dir/foo.bar', "Content"),
874 874 ])
875 875 def test_add_file_into_empty_repo(self, cnt, filename, content, backend, csrf_token):
876 876 repo = backend.create_repo()
877 877 commit_id = repo.get_commit().raw_id
878 878 response = self.app.post(
879 879 route_path('repo_files_create_file',
880 880 repo_name=repo.repo_name,
881 881 commit_id=commit_id, f_path='/'),
882 882 params={
883 883 'content': content,
884 884 'filename': filename,
885 885 'csrf_token': csrf_token,
886 886 },
887 887 status=302)
888 888
889 889 expected_msg = 'Successfully committed new file `{}`'.format(filename)
890 890 assert_session_flash(response, expected_msg)
891 891
892 892 def test_edit_file_view(self, backend):
893 893 response = self.app.get(
894 894 route_path('repo_files_edit_file',
895 895 repo_name=backend.repo_name,
896 896 commit_id=backend.default_head_id,
897 897 f_path='vcs/nodes.py'),
898 898 status=200)
899 899 response.mustcontain("Module holding everything related to vcs nodes.")
900 900
901 901 def test_edit_file_view_not_on_branch(self, backend):
902 902 repo = backend.create_repo()
903 903 backend.ensure_file("vcs/nodes.py")
904 904
905 905 response = self.app.get(
906 906 route_path('repo_files_edit_file',
907 907 repo_name=repo.repo_name,
908 908 commit_id='tip',
909 909 f_path='vcs/nodes.py'),
910 910 status=302)
911 911 assert_session_flash(
912 912 response, 'Cannot modify file. Given commit `tip` is not head of a branch.')
913 913
914 914 def test_edit_file_view_commit_changes(self, backend, csrf_token):
915 915 repo = backend.create_repo()
916 916 backend.ensure_file("vcs/nodes.py", content="print 'hello'")
917 917
918 918 response = self.app.post(
919 919 route_path('repo_files_update_file',
920 920 repo_name=repo.repo_name,
921 921 commit_id=backend.default_head_id,
922 922 f_path='vcs/nodes.py'),
923 923 params={
924 924 'content': "print 'hello world'",
925 925 'message': 'I committed',
926 926 'filename': "vcs/nodes.py",
927 927 'csrf_token': csrf_token,
928 928 },
929 929 status=302)
930 930 assert_session_flash(
931 931 response, 'Successfully committed changes to file `vcs/nodes.py`')
932 932 tip = repo.get_commit(commit_idx=-1)
933 933 assert tip.message == 'I committed'
934 934
935 935 def test_edit_file_view_commit_changes_default_message(self, backend,
936 936 csrf_token):
937 937 repo = backend.create_repo()
938 938 backend.ensure_file("vcs/nodes.py", content="print 'hello'")
939 939
940 940 commit_id = (
941 941 backend.default_branch_name or
942 942 backend.repo.scm_instance().commit_ids[-1])
943 943
944 944 response = self.app.post(
945 945 route_path('repo_files_update_file',
946 946 repo_name=repo.repo_name,
947 947 commit_id=commit_id,
948 948 f_path='vcs/nodes.py'),
949 949 params={
950 950 'content': "print 'hello world'",
951 951 'message': '',
952 952 'filename': "vcs/nodes.py",
953 953 'csrf_token': csrf_token,
954 954 },
955 955 status=302)
956 956 assert_session_flash(
957 957 response, 'Successfully committed changes to file `vcs/nodes.py`')
958 958 tip = repo.get_commit(commit_idx=-1)
959 959 assert tip.message == 'Edited file vcs/nodes.py via RhodeCode Enterprise'
960 960
961 961 def test_delete_file_view(self, backend):
962 962 self.app.get(
963 963 route_path('repo_files_remove_file',
964 964 repo_name=backend.repo_name,
965 965 commit_id=backend.default_head_id,
966 966 f_path='vcs/nodes.py'),
967 967 status=200)
968 968
969 969 def test_delete_file_view_not_on_branch(self, backend):
970 970 repo = backend.create_repo()
971 971 backend.ensure_file('vcs/nodes.py')
972 972
973 973 response = self.app.get(
974 974 route_path('repo_files_remove_file',
975 975 repo_name=repo.repo_name,
976 976 commit_id='tip',
977 977 f_path='vcs/nodes.py'),
978 978 status=302)
979 979 assert_session_flash(
980 980 response, 'Cannot modify file. Given commit `tip` is not head of a branch.')
981 981
982 982 def test_delete_file_view_commit_changes(self, backend, csrf_token):
983 983 repo = backend.create_repo()
984 984 backend.ensure_file("vcs/nodes.py")
985 985
986 986 response = self.app.post(
987 987 route_path('repo_files_delete_file',
988 988 repo_name=repo.repo_name,
989 989 commit_id=backend.default_head_id,
990 990 f_path='vcs/nodes.py'),
991 991 params={
992 992 'message': 'i committed',
993 993 'csrf_token': csrf_token,
994 994 },
995 995 status=302)
996 996 assert_session_flash(
997 997 response, 'Successfully deleted file `vcs/nodes.py`')
998 998
999 999
1000 1000 @pytest.mark.usefixtures("app")
1001 1001 class TestFilesViewOtherCases(object):
1002 1002
1003 1003 def test_access_empty_repo_redirect_to_summary_with_alert_write_perms(
1004 1004 self, backend_stub, autologin_regular_user, user_regular,
1005 1005 user_util):
1006 1006
1007 1007 repo = backend_stub.create_repo()
1008 1008 user_util.grant_user_permission_to_repo(
1009 1009 repo, user_regular, 'repository.write')
1010 1010 response = self.app.get(
1011 1011 route_path('repo_files',
1012 1012 repo_name=repo.repo_name,
1013 1013 commit_id='tip', f_path='/'))
1014 1014
1015 1015 repo_file_add_url = route_path(
1016 1016 'repo_files_add_file',
1017 1017 repo_name=repo.repo_name,
1018 1018 commit_id=0, f_path='')
1019 1019
1020 1020 assert_session_flash(
1021 1021 response,
1022 1022 'There are no files yet. <a class="alert-link" '
1023 1023 'href="{}">Click here to add a new file.</a>'
1024 1024 .format(repo_file_add_url))
1025 1025
1026 1026 def test_access_empty_repo_redirect_to_summary_with_alert_no_write_perms(
1027 1027 self, backend_stub, autologin_regular_user):
1028 1028 repo = backend_stub.create_repo()
1029 1029 # init session for anon user
1030 1030 route_path('repo_summary', repo_name=repo.repo_name)
1031 1031
1032 1032 repo_file_add_url = route_path(
1033 1033 'repo_files_add_file',
1034 1034 repo_name=repo.repo_name,
1035 1035 commit_id=0, f_path='')
1036 1036
1037 1037 response = self.app.get(
1038 1038 route_path('repo_files',
1039 1039 repo_name=repo.repo_name,
1040 1040 commit_id='tip', f_path='/'))
1041 1041
1042 1042 assert_session_flash(response, no_=repo_file_add_url)
1043 1043
1044 1044 @pytest.mark.parametrize('file_node', [
1045 1045 'archive/file.zip',
1046 1046 'diff/my-file.txt',
1047 1047 'render.py',
1048 1048 'render',
1049 1049 'remove_file',
1050 1050 'remove_file/to-delete.txt',
1051 1051 ])
1052 1052 def test_file_names_equal_to_routes_parts(self, backend, file_node):
1053 1053 backend.create_repo()
1054 1054 backend.ensure_file(file_node)
1055 1055
1056 1056 self.app.get(
1057 1057 route_path('repo_files',
1058 1058 repo_name=backend.repo_name,
1059 1059 commit_id='tip', f_path=file_node),
1060 1060 status=200)
1061 1061
1062 1062
1063 1063 class TestAdjustFilePathForSvn(object):
1064 1064 """
1065 1065 SVN specific adjustments of node history in RepoFilesView.
1066 1066 """
1067 1067
1068 1068 def test_returns_path_relative_to_matched_reference(self):
1069 1069 repo = self._repo(branches=['trunk'])
1070 1070 self.assert_file_adjustment('trunk/file', 'file', repo)
1071 1071
1072 1072 def test_does_not_modify_file_if_no_reference_matches(self):
1073 1073 repo = self._repo(branches=['trunk'])
1074 1074 self.assert_file_adjustment('notes/file', 'notes/file', repo)
1075 1075
1076 1076 def test_does_not_adjust_partial_directory_names(self):
1077 1077 repo = self._repo(branches=['trun'])
1078 1078 self.assert_file_adjustment('trunk/file', 'trunk/file', repo)
1079 1079
1080 1080 def test_is_robust_to_patterns_which_prefix_other_patterns(self):
1081 1081 repo = self._repo(branches=['trunk', 'trunk/new', 'trunk/old'])
1082 1082 self.assert_file_adjustment('trunk/new/file', 'file', repo)
1083 1083
1084 1084 def assert_file_adjustment(self, f_path, expected, repo):
1085 1085 result = RepoFilesView.adjust_file_path_for_svn(f_path, repo)
1086 1086 assert result == expected
1087 1087
1088 1088 def _repo(self, branches=None):
1089 1089 repo = mock.Mock()
1090 1090 repo.branches = OrderedDict((name, '0') for name in branches or [])
1091 1091 repo.tags = {}
1092 1092 return repo
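The route_path helper in the tests above leans on the Python 3 urllib layout, where the py2 urllib.urlencode now lives at urllib.parse.urlencode. A minimal self-contained sketch of the same pattern, with made-up repository values:

    # Python 3: query-string helpers live in urllib.parse.
    import urllib.parse

    base_url = '/{repo_name}/files/{commit_id}/{f_path}'.format(
        repo_name='my-repo', commit_id='deadbeef', f_path='docs')
    params = {'at': 'branch:default', 'annotate': 1}
    url = '{}?{}'.format(base_url, urllib.parse.urlencode(params))
    print(url)  # /my-repo/files/deadbeef/docs?at=branch%3Adefault&annotate=1
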
@@ -1,524 +1,524 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import re
22 22
23 23 import mock
24 24 import pytest
25 25
26 26 from rhodecode.apps.repository.views.repo_summary import RepoSummaryView
27 27 from rhodecode.lib import helpers as h
28 from rhodecode.lib.compat import OrderedDict
28 from collections import OrderedDict
29 29 from rhodecode.lib.utils2 import AttributeDict, safe_str
30 30 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
31 31 from rhodecode.model.db import Repository
32 32 from rhodecode.model.meta import Session
33 33 from rhodecode.model.repo import RepoModel
34 34 from rhodecode.model.scm import ScmModel
35 35 from rhodecode.tests import assert_session_flash
36 36 from rhodecode.tests.fixture import Fixture
37 37 from rhodecode.tests.utils import AssertResponse, repo_on_filesystem
38 38
39 39
40 40 fixture = Fixture()
41 41
42 42
43 43 def route_path(name, params=None, **kwargs):
44 44 import urllib.parse
45 45
46 46 base_url = {
47 47 'repo_summary': '/{repo_name}',
48 48 'repo_stats': '/{repo_name}/repo_stats/{commit_id}',
49 49 'repo_refs_data': '/{repo_name}/refs-data',
50 50 'repo_refs_changelog_data': '/{repo_name}/refs-data-changelog',
51 51 'repo_creating_check': '/{repo_name}/repo_creating_check',
52 52 }[name].format(**kwargs)
53 53
54 54 if params:
55 55 base_url = '{}?{}'.format(base_url, urllib.parse.urlencode(params))
56 56 return base_url
57 57
58 58
59 59 def assert_clone_url(response, server, repo, disabled=False):
60 60
61 61 response.mustcontain(
62 62 '<input type="text" class="input-monospace clone_url_input" '
63 63 '{disabled}readonly="readonly" '
64 64 'value="http://test_admin@{server}/{repo}"/>'.format(
65 65 server=server, repo=repo, disabled='disabled ' if disabled else ' ')
66 66 )
67 67
68 68
69 69 @pytest.mark.usefixtures('app')
70 70 class TestSummaryView(object):
71 71 def test_index(self, autologin_user, backend, http_host_only_stub):
72 72 repo_id = backend.repo.repo_id
73 73 repo_name = backend.repo_name
74 74 with mock.patch('rhodecode.lib.helpers.is_svn_without_proxy',
75 75 return_value=False):
76 76 response = self.app.get(
77 77 route_path('repo_summary', repo_name=repo_name))
78 78
79 79 # repo type
80 80 response.mustcontain(
81 81 '<i class="icon-%s">' % (backend.alias, )
82 82 )
83 83 # public/private
84 84 response.mustcontain(
85 85 """<i class="icon-unlock-alt">"""
86 86 )
87 87
88 88 # clone url...
89 89 assert_clone_url(response, http_host_only_stub, repo_name)
90 90 assert_clone_url(response, http_host_only_stub, '_{}'.format(repo_id))
91 91
92 92 def test_index_svn_without_proxy(
93 93 self, autologin_user, backend_svn, http_host_only_stub):
94 94 repo_id = backend_svn.repo.repo_id
95 95 repo_name = backend_svn.repo_name
96 96 response = self.app.get(route_path('repo_summary', repo_name=repo_name))
97 97 # clone url...
98 98
99 99 assert_clone_url(response, http_host_only_stub, repo_name, disabled=True)
100 100 assert_clone_url(response, http_host_only_stub, '_{}'.format(repo_id), disabled=True)
101 101
102 102 def test_index_with_trailing_slash(
103 103 self, autologin_user, backend, http_host_only_stub):
104 104
105 105 repo_id = backend.repo.repo_id
106 106 repo_name = backend.repo_name
107 107 with mock.patch('rhodecode.lib.helpers.is_svn_without_proxy',
108 108 return_value=False):
109 109 response = self.app.get(
110 110 route_path('repo_summary', repo_name=repo_name) + '/',
111 111 status=200)
112 112
113 113 # clone url...
114 114 assert_clone_url(response, http_host_only_stub, repo_name)
115 115 assert_clone_url(response, http_host_only_stub, '_{}'.format(repo_id))
116 116
117 117 def test_index_by_id(self, autologin_user, backend):
118 118 repo_id = backend.repo.repo_id
119 119 response = self.app.get(
120 120 route_path('repo_summary', repo_name='_%s' % (repo_id,)))
121 121
122 122 # repo type
123 123 response.mustcontain(
124 124 '<i class="icon-%s">' % (backend.alias, )
125 125 )
126 126 # public/private
127 127 response.mustcontain(
128 128 """<i class="icon-unlock-alt">"""
129 129 )
130 130
131 131 def test_index_by_repo_having_id_path_in_name_hg(self, autologin_user):
132 132 fixture.create_repo(name='repo_1')
133 133 response = self.app.get(route_path('repo_summary', repo_name='repo_1'))
134 134
135 135 try:
136 136 response.mustcontain("repo_1")
137 137 finally:
138 138 RepoModel().delete(Repository.get_by_repo_name('repo_1'))
139 139 Session().commit()
140 140
141 141 def test_index_with_anonymous_access_disabled(
142 142 self, backend, disable_anonymous_user):
143 143 response = self.app.get(
144 144 route_path('repo_summary', repo_name=backend.repo_name), status=302)
145 145 assert 'login' in response.location
146 146
147 147 def _enable_stats(self, repo):
148 148 r = Repository.get_by_repo_name(repo)
149 149 r.enable_statistics = True
150 150 Session().add(r)
151 151 Session().commit()
152 152
153 153 expected_trending = {
154 154 'hg': {
155 155 "py": {"count": 68, "desc": ["Python"]},
156 156 "rst": {"count": 16, "desc": ["Rst"]},
157 157 "css": {"count": 2, "desc": ["Css"]},
158 158 "sh": {"count": 2, "desc": ["Bash"]},
159 159 "bat": {"count": 1, "desc": ["Batch"]},
160 160 "cfg": {"count": 1, "desc": ["Ini"]},
161 161 "html": {"count": 1, "desc": ["EvoqueHtml", "Html"]},
162 162 "ini": {"count": 1, "desc": ["Ini"]},
163 163 "js": {"count": 1, "desc": ["Javascript"]},
164 164 "makefile": {"count": 1, "desc": ["Makefile", "Makefile"]}
165 165 },
166 166 'git': {
167 167 "py": {"count": 68, "desc": ["Python"]},
168 168 "rst": {"count": 16, "desc": ["Rst"]},
169 169 "css": {"count": 2, "desc": ["Css"]},
170 170 "sh": {"count": 2, "desc": ["Bash"]},
171 171 "bat": {"count": 1, "desc": ["Batch"]},
172 172 "cfg": {"count": 1, "desc": ["Ini"]},
173 173 "html": {"count": 1, "desc": ["EvoqueHtml", "Html"]},
174 174 "ini": {"count": 1, "desc": ["Ini"]},
175 175 "js": {"count": 1, "desc": ["Javascript"]},
176 176 "makefile": {"count": 1, "desc": ["Makefile", "Makefile"]}
177 177 },
178 178 'svn': {
179 179 "py": {"count": 75, "desc": ["Python"]},
180 180 "rst": {"count": 16, "desc": ["Rst"]},
181 181 "html": {"count": 11, "desc": ["EvoqueHtml", "Html"]},
182 182 "css": {"count": 2, "desc": ["Css"]},
183 183 "bat": {"count": 1, "desc": ["Batch"]},
184 184 "cfg": {"count": 1, "desc": ["Ini"]},
185 185 "ini": {"count": 1, "desc": ["Ini"]},
186 186 "js": {"count": 1, "desc": ["Javascript"]},
187 187 "makefile": {"count": 1, "desc": ["Makefile", "Makefile"]},
188 188 "sh": {"count": 1, "desc": ["Bash"]}
189 189 },
190 190 }
191 191
192 192 def test_repo_stats(self, autologin_user, backend, xhr_header):
193 193 response = self.app.get(
194 194 route_path(
195 195 'repo_stats', repo_name=backend.repo_name, commit_id='tip'),
196 196 extra_environ=xhr_header,
197 197 status=200)
198 198 assert re.match(r'6[\d\.]+ KiB', response.json['size'])
199 199
200 200 def test_repo_stats_code_stats_enabled(self, autologin_user, backend, xhr_header):
201 201 repo_name = backend.repo_name
202 202
203 203 # codes stats
204 204 self._enable_stats(repo_name)
205 205 ScmModel().mark_for_invalidation(repo_name)
206 206
207 207 response = self.app.get(
208 208 route_path(
209 209 'repo_stats', repo_name=backend.repo_name, commit_id='tip'),
210 210 extra_environ=xhr_header,
211 211 status=200)
212 212
213 213 expected_data = self.expected_trending[backend.alias]
214 214 returned_stats = response.json['code_stats']
215 215 for k, v in expected_data.items():
216 216 assert v == returned_stats[k]
217 217
218 218 def test_repo_refs_data(self, backend):
219 219 response = self.app.get(
220 220 route_path('repo_refs_data', repo_name=backend.repo_name),
221 221 status=200)
222 222
223 223 # Ensure that there is the correct amount of items in the result
224 224 repo = backend.repo.scm_instance()
225 225 data = response.json['results']
226 226 items = sum(len(section['children']) for section in data)
227 227 repo_refs = len(repo.branches) + len(repo.tags) + len(repo.bookmarks)
228 228 assert items == repo_refs
229 229
230 230 def test_index_shows_missing_requirements_message(
231 231 self, backend, autologin_user):
232 232 repo_name = backend.repo_name
233 233 scm_patcher = mock.patch.object(
234 234 Repository, 'scm_instance', side_effect=RepositoryRequirementError)
235 235
236 236 with scm_patcher:
237 237 response = self.app.get(
238 238 route_path('repo_summary', repo_name=repo_name))
239 239 assert_response = response.assert_response()
240 240 assert_response.element_contains(
241 241 '.main .alert-warning strong', 'Missing requirements')
242 242 assert_response.element_contains(
243 243 '.main .alert-warning',
244 244 'Commits cannot be displayed, because this repository '
245 245 'uses one or more extensions, which was not enabled.')
246 246
247 247 def test_missing_requirements_page_does_not_contains_switch_to(
248 248 self, autologin_user, backend):
249 249 repo_name = backend.repo_name
250 250 scm_patcher = mock.patch.object(
251 251 Repository, 'scm_instance', side_effect=RepositoryRequirementError)
252 252
253 253 with scm_patcher:
254 254 response = self.app.get(route_path('repo_summary', repo_name=repo_name))
255 255 response.mustcontain(no='Switch To')
256 256
257 257
258 258 @pytest.mark.usefixtures('app')
259 259 class TestRepoLocation(object):
260 260
261 261 @pytest.mark.parametrize("suffix", [u'', u'Δ…Δ™Ε‚'], ids=['', 'non-ascii'])
262 262 def test_missing_filesystem_repo(
263 263 self, autologin_user, backend, suffix, csrf_token):
264 264 repo = backend.create_repo(name_suffix=suffix)
265 265 repo_name = repo.repo_name
266 266
267 267 # delete from file system
268 268 RepoModel()._delete_filesystem_repo(repo)
269 269
270 270 # test if the repo is still in the database
271 271 new_repo = RepoModel().get_by_repo_name(repo_name)
272 272 assert new_repo.repo_name == repo_name
273 273
274 274 # check if repo is not in the filesystem
275 275 assert not repo_on_filesystem(repo_name)
276 276
277 277 response = self.app.get(
278 278 route_path('repo_summary', repo_name=safe_str(repo_name)), status=302)
279 279
280 280 msg = 'The repository `%s` cannot be loaded in filesystem. ' \
281 281 'Please check if it exist, or is not damaged.' % repo_name
282 282 assert_session_flash(response, msg)
283 283
284 284 @pytest.mark.parametrize("suffix", [u'', u'Δ…Δ™Ε‚'], ids=['', 'non-ascii'])
285 285 def test_missing_filesystem_repo_on_repo_check(
286 286 self, autologin_user, backend, suffix, csrf_token):
287 287 repo = backend.create_repo(name_suffix=suffix)
288 288 repo_name = repo.repo_name
289 289
290 290 # delete from file system
291 291 RepoModel()._delete_filesystem_repo(repo)
292 292
293 293 # test if the repo is still in the database
294 294 new_repo = RepoModel().get_by_repo_name(repo_name)
295 295 assert new_repo.repo_name == repo_name
296 296
297 297 # check if repo is not in the filesystem
298 298 assert not repo_on_filesystem(repo_name)
299 299
300 300 # flush the session
301 301 self.app.get(
302 302 route_path('repo_summary', repo_name=safe_str(repo_name)),
303 303 status=302)
304 304
305 305 response = self.app.get(
306 306 route_path('repo_creating_check', repo_name=safe_str(repo_name)),
307 307 status=200)
308 308 msg = 'The repository `%s` cannot be loaded in filesystem. ' \
309 309 'Please check if it exist, or is not damaged.' % repo_name
310 310 assert_session_flash(response, msg)
311 311
312 312
313 313 @pytest.fixture()
314 314 def summary_view(context_stub, request_stub, user_util):
315 315 """
316 316 Bootstrap view to test the view functions
317 317 """
318 318 request_stub.matched_route = AttributeDict(name='test_view')
319 319
320 320 request_stub.user = user_util.create_user().AuthUser()
321 321 request_stub.db_repo = user_util.create_repo()
322 322
323 323 view = RepoSummaryView(context=context_stub, request=request_stub)
324 324 return view
325 325
326 326
327 327 @pytest.mark.usefixtures('app')
328 328 class TestCreateReferenceData(object):
329 329
330 330 @pytest.fixture()
331 331 def example_refs(self):
332 332 section_1_refs = OrderedDict((('a', 'a_id'), ('b', 'b_id')))
333 333 example_refs = [
334 334 ('section_1', section_1_refs, 't1'),
335 335 ('section_2', {'c': 'c_id'}, 't2'),
336 336 ]
337 337 return example_refs
338 338
339 339 def test_generates_refs_based_on_commit_ids(self, example_refs, summary_view):
340 340 repo = mock.Mock()
341 341 repo.name = 'test-repo'
342 342 repo.alias = 'git'
343 343 full_repo_name = 'pytest-repo-group/' + repo.name
344 344
345 345 result = summary_view._create_reference_data(
346 346 repo, full_repo_name, example_refs)
347 347
348 348 expected_files_url = '/{}/files/'.format(full_repo_name)
349 349 expected_result = [
350 350 {
351 351 'children': [
352 352 {
353 353 'id': 'a', 'idx': 0, 'raw_id': 'a_id', 'text': 'a', 'type': 't1',
354 354 'files_url': expected_files_url + 'a/?at=a',
355 355 },
356 356 {
357 357 'id': 'b', 'idx': 0, 'raw_id': 'b_id', 'text': 'b', 'type': 't1',
358 358 'files_url': expected_files_url + 'b/?at=b',
359 359 }
360 360 ],
361 361 'text': 'section_1'
362 362 },
363 363 {
364 364 'children': [
365 365 {
366 366 'id': 'c', 'idx': 0, 'raw_id': 'c_id', 'text': 'c', 'type': 't2',
367 367 'files_url': expected_files_url + 'c/?at=c',
368 368 }
369 369 ],
370 370 'text': 'section_2'
371 371 }]
372 372 assert result == expected_result
373 373
374 374 def test_generates_refs_with_path_for_svn(self, example_refs, summary_view):
375 375 repo = mock.Mock()
376 376 repo.name = 'test-repo'
377 377 repo.alias = 'svn'
378 378 full_repo_name = 'pytest-repo-group/' + repo.name
379 379
380 380 result = summary_view._create_reference_data(
381 381 repo, full_repo_name, example_refs)
382 382
383 383 expected_files_url = '/{}/files/'.format(full_repo_name)
384 384 expected_result = [
385 385 {
386 386 'children': [
387 387 {
388 388 'id': 'a@a_id', 'idx': 0, 'raw_id': 'a_id',
389 389 'text': 'a', 'type': 't1',
390 390 'files_url': expected_files_url + 'a_id/a?at=a',
391 391 },
392 392 {
393 393 'id': 'b@b_id', 'idx': 0, 'raw_id': 'b_id',
394 394 'text': 'b', 'type': 't1',
395 395 'files_url': expected_files_url + 'b_id/b?at=b',
396 396 }
397 397 ],
398 398 'text': 'section_1'
399 399 },
400 400 {
401 401 'children': [
402 402 {
403 403 'id': 'c@c_id', 'idx': 0, 'raw_id': 'c_id',
404 404 'text': 'c', 'type': 't2',
405 405 'files_url': expected_files_url + 'c_id/c?at=c',
406 406 }
407 407 ],
408 408 'text': 'section_2'
409 409 }
410 410 ]
411 411 assert result == expected_result
412 412
413 413
414 414 class TestCreateFilesUrl(object):
415 415
416 416 def test_creates_non_svn_url(self, app, summary_view):
417 417 repo = mock.Mock()
418 418 repo.name = 'abcde'
419 419 full_repo_name = 'test-repo-group/' + repo.name
420 420 ref_name = 'branch1'
421 421 raw_id = 'deadbeef0123456789'
422 422 is_svn = False
423 423
424 424 with mock.patch('rhodecode.lib.helpers.route_path') as url_mock:
425 425 result = summary_view._create_files_url(
426 426 repo, full_repo_name, ref_name, raw_id, is_svn)
427 427 url_mock.assert_called_once_with(
428 428 'repo_files', repo_name=full_repo_name, commit_id=ref_name,
429 429 f_path='', _query=dict(at=ref_name))
430 430 assert result == url_mock.return_value
431 431
432 432 def test_creates_svn_url(self, app, summary_view):
433 433 repo = mock.Mock()
434 434 repo.name = 'abcde'
435 435 full_repo_name = 'test-repo-group/' + repo.name
436 436 ref_name = 'branch1'
437 437 raw_id = 'deadbeef0123456789'
438 438 is_svn = True
439 439
440 440 with mock.patch('rhodecode.lib.helpers.route_path') as url_mock:
441 441 result = summary_view._create_files_url(
442 442 repo, full_repo_name, ref_name, raw_id, is_svn)
443 443 url_mock.assert_called_once_with(
444 444 'repo_files', repo_name=full_repo_name, f_path=ref_name,
445 445 commit_id=raw_id, _query=dict(at=ref_name))
446 446 assert result == url_mock.return_value
447 447
448 448 def test_name_has_slashes(self, app, summary_view):
449 449 repo = mock.Mock()
450 450 repo.name = 'abcde'
451 451 full_repo_name = 'test-repo-group/' + repo.name
452 452 ref_name = 'branch1/branch2'
453 453 raw_id = 'deadbeef0123456789'
454 454 is_svn = False
455 455
456 456 with mock.patch('rhodecode.lib.helpers.route_path') as url_mock:
457 457 result = summary_view._create_files_url(
458 458 repo, full_repo_name, ref_name, raw_id, is_svn)
459 459 url_mock.assert_called_once_with(
460 460 'repo_files', repo_name=full_repo_name, commit_id=raw_id,
461 461 f_path='', _query=dict(at=ref_name))
462 462 assert result == url_mock.return_value
463 463
464 464
465 465 class TestReferenceItems(object):
466 466 repo = mock.Mock()
467 467 repo.name = 'pytest-repo'
468 468 repo_full_name = 'pytest-repo-group/' + repo.name
469 469 ref_type = 'branch'
470 470 fake_url = '/abcde/'
471 471
472 472 @staticmethod
473 473 def _format_function(name, id_):
474 474 return 'format_function_{}_{}'.format(name, id_)
475 475
476 476 def test_creates_required_amount_of_items(self, summary_view):
477 477 amount = 100
478 478 refs = {
479 479 'ref{}'.format(i): '{0:040d}'.format(i)
480 480 for i in range(amount)
481 481 }
482 482
483 483 url_patcher = mock.patch.object(summary_view, '_create_files_url')
484 484 svn_patcher = mock.patch('rhodecode.lib.helpers.is_svn',
485 485 return_value=False)
486 486
487 487 with url_patcher as url_mock, svn_patcher:
488 488 result = summary_view._create_reference_items(
489 489 self.repo, self.repo_full_name, refs, self.ref_type,
490 490 self._format_function)
491 491 assert len(result) == amount
492 492 assert url_mock.call_count == amount
493 493
494 494 def test_single_item_details(self, summary_view):
495 495 ref_name = 'ref1'
496 496 ref_id = 'deadbeef'
497 497 refs = {
498 498 ref_name: ref_id
499 499 }
500 500
501 501 svn_patcher = mock.patch('rhodecode.lib.helpers.is_svn',
502 502 return_value=False)
503 503
504 504 url_patcher = mock.patch.object(
505 505 summary_view, '_create_files_url', return_value=self.fake_url)
506 506
507 507 with url_patcher as url_mock, svn_patcher:
508 508 result = summary_view._create_reference_items(
509 509 self.repo, self.repo_full_name, refs, self.ref_type,
510 510 self._format_function)
511 511
512 512 url_mock.assert_called_once_with(
513 513 self.repo, self.repo_full_name, ref_name, ref_id, False)
514 514 expected_result = [
515 515 {
516 516 'text': ref_name,
517 517 'id': self._format_function(ref_name, ref_id),
518 518 'raw_id': ref_id,
519 519 'idx': 0,
520 520 'type': self.ref_type,
521 521 'files_url': self.fake_url
522 522 }
523 523 ]
524 524 assert result == expected_result
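# A minimal, self-contained sketch of the patching pattern the tests above
# rely on: mock.patch.object() swaps a method on the class for one that
# raises, and the caller is expected to surface the failure gracefully.
# `Repo` and `load_summary` are hypothetical stand-ins, not RhodeCode APIs;
# only the standard library is assumed.
from unittest import mock


class Repo(object):
    def scm_instance(self):
        return 'real scm instance'


def load_summary(repo):
    try:
        return repo.scm_instance()
    except RuntimeError:
        return 'Missing requirements'


def test_missing_requirements_is_reported():
    scm_patcher = mock.patch.object(
        Repo, 'scm_instance', side_effect=RuntimeError)
    with scm_patcher:
        assert load_summary(Repo()) == 'Missing requirements'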
@@ -1,819 +1,819 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22 import collections
23 23
24 24 from pyramid.httpexceptions import (
25 25 HTTPNotFound, HTTPBadRequest, HTTPFound, HTTPForbidden, HTTPConflict)
26 26 from pyramid.renderers import render
27 27 from pyramid.response import Response
28 28
29 29 from rhodecode.apps._base import RepoAppView
30 30 from rhodecode.apps.file_store import utils as store_utils
31 31 from rhodecode.apps.file_store.exceptions import FileNotAllowedException, FileOverSizeException
32 32
33 33 from rhodecode.lib import diffs, codeblocks, channelstream
34 34 from rhodecode.lib.auth import (
35 35 LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous, CSRFRequired)
36 36 from rhodecode.lib.ext_json import json
37 from rhodecode.lib.compat import OrderedDict
37 from collections import OrderedDict
38 38 from rhodecode.lib.diffs import (
39 39 cache_diff, load_cached_diff, diff_cache_exist, get_diff_context,
40 40 get_diff_whitespace_flag)
41 41 from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError, CommentVersionMismatch
42 42 import rhodecode.lib.helpers as h
43 43 from rhodecode.lib.utils2 import safe_unicode, str2bool, StrictAttributeDict, safe_str
44 44 from rhodecode.lib.vcs.backends.base import EmptyCommit
45 45 from rhodecode.lib.vcs.exceptions import (
46 46 RepositoryError, CommitDoesNotExistError)
47 47 from rhodecode.model.db import ChangesetComment, ChangesetStatus, FileStore, \
48 48 ChangesetCommentHistory
49 49 from rhodecode.model.changeset_status import ChangesetStatusModel
50 50 from rhodecode.model.comment import CommentsModel
51 51 from rhodecode.model.meta import Session
52 52 from rhodecode.model.settings import VcsSettingsModel
53 53
54 54 log = logging.getLogger(__name__)
55 55
56 56
57 57 def _update_with_GET(params, request):
58 58 for k in ['diff1', 'diff2', 'diff']:
59 59 params[k] += request.GET.getall(k)
60 60
61 61
62 62 class RepoCommitsView(RepoAppView):
63 63 def load_default_context(self):
64 64 c = self._get_local_tmpl_context(include_app_defaults=True)
65 65 c.rhodecode_repo = self.rhodecode_vcs_repo
66 66
67 67 return c
68 68
69 69 def _is_diff_cache_enabled(self, target_repo):
70 70 caching_enabled = self._get_general_setting(
71 71 target_repo, 'rhodecode_diff_cache')
72 72 log.debug('Diff caching enabled: %s', caching_enabled)
73 73 return caching_enabled
74 74
75 75 def _commit(self, commit_id_range, method):
76 76 _ = self.request.translate
77 77 c = self.load_default_context()
78 78 c.fulldiff = self.request.GET.get('fulldiff')
79 79 redirect_to_combined = str2bool(self.request.GET.get('redirect_combined'))
80 80
81 81 # fetch global flags of ignore ws or context lines
82 82 diff_context = get_diff_context(self.request)
83 83 hide_whitespace_changes = get_diff_whitespace_flag(self.request)
84 84
85 85 # diff_limit will cut off the whole diff if the limit is applied
86 86 # otherwise it will just hide the big files from the front-end
87 87 diff_limit = c.visual.cut_off_limit_diff
88 88 file_limit = c.visual.cut_off_limit_file
89 89
90 90 # get ranges of commit ids if present
91 91 commit_range = commit_id_range.split('...')[:2]
92 92
93 93 try:
94 94 pre_load = ['affected_files', 'author', 'branch', 'date',
95 95 'message', 'parents']
96 96 if self.rhodecode_vcs_repo.alias == 'hg':
97 97 pre_load += ['hidden', 'obsolete', 'phase']
98 98
99 99 if len(commit_range) == 2:
100 100 commits = self.rhodecode_vcs_repo.get_commits(
101 101 start_id=commit_range[0], end_id=commit_range[1],
102 102 pre_load=pre_load, translate_tags=False)
103 103 commits = list(commits)
104 104 else:
105 105 commits = [self.rhodecode_vcs_repo.get_commit(
106 106 commit_id=commit_id_range, pre_load=pre_load)]
107 107
108 108 c.commit_ranges = commits
109 109 if not c.commit_ranges:
110 110 raise RepositoryError('The commit range returned an empty result')
111 111 except CommitDoesNotExistError as e:
112 112 msg = _('No such commit exists. Org exception: `{}`').format(safe_str(e))
113 113 h.flash(msg, category='error')
114 114 raise HTTPNotFound()
115 115 except Exception:
116 116 log.exception("General failure")
117 117 raise HTTPNotFound()
118 118 single_commit = len(c.commit_ranges) == 1
119 119
120 120 if redirect_to_combined and not single_commit:
121 121 source_ref = getattr(c.commit_ranges[0].parents[0]
122 122 if c.commit_ranges[0].parents else h.EmptyCommit(), 'raw_id')
123 123 target_ref = c.commit_ranges[-1].raw_id
124 124 next_url = h.route_path(
125 125 'repo_compare',
126 126 repo_name=c.repo_name,
127 127 source_ref_type='rev',
128 128 source_ref=source_ref,
129 129 target_ref_type='rev',
130 130 target_ref=target_ref)
131 131 raise HTTPFound(next_url)
132 132
133 133 c.changes = OrderedDict()
134 134 c.lines_added = 0
135 135 c.lines_deleted = 0
136 136
137 137 # auto collapse if we have more than the limit
138 138 collapse_limit = diffs.DiffProcessor._collapse_commits_over
139 139 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
140 140
141 141 c.commit_statuses = ChangesetStatus.STATUSES
142 142 c.inline_comments = []
143 143 c.files = []
144 144
145 145 c.comments = []
146 146 c.unresolved_comments = []
147 147 c.resolved_comments = []
148 148
149 149 # Single commit
150 150 if single_commit:
151 151 commit = c.commit_ranges[0]
152 152 c.comments = CommentsModel().get_comments(
153 153 self.db_repo.repo_id,
154 154 revision=commit.raw_id)
155 155
156 156 # comments from PR
157 157 statuses = ChangesetStatusModel().get_statuses(
158 158 self.db_repo.repo_id, commit.raw_id,
159 159 with_revisions=True)
160 160
161 161 prs = set()
162 162 reviewers = list()
163 163 reviewers_duplicates = set() # to not have duplicates from multiple votes
164 164 for c_status in statuses:
165 165
166 166 # extract associated pull-requests from votes
167 167 if c_status.pull_request:
168 168 prs.add(c_status.pull_request)
169 169
170 170 # extract reviewers
171 171 _user_id = c_status.author.user_id
172 172 if _user_id not in reviewers_duplicates:
173 173 reviewers.append(
174 174 StrictAttributeDict({
175 175 'user': c_status.author,
176 176
177 177 # fake attributes for the commit page that we don't have,
178 178 # but we share the display with the PR page
179 179 'mandatory': False,
180 180 'reasons': [],
181 181 'rule_user_group_data': lambda: None
182 182 })
183 183 )
184 184 reviewers_duplicates.add(_user_id)
185 185
186 186 c.reviewers_count = len(reviewers)
187 187 c.observers_count = 0
188 188
189 189 # from associated statuses, check the pull requests, and
190 190 # show comments from them
191 191 for pr in prs:
192 192 c.comments.extend(pr.comments)
193 193
194 194 c.unresolved_comments = CommentsModel()\
195 195 .get_commit_unresolved_todos(commit.raw_id)
196 196 c.resolved_comments = CommentsModel()\
197 197 .get_commit_resolved_todos(commit.raw_id)
198 198
199 199 c.inline_comments_flat = CommentsModel()\
200 200 .get_commit_inline_comments(commit.raw_id)
201 201
202 202 review_statuses = ChangesetStatusModel().aggregate_votes_by_user(
203 203 statuses, reviewers)
204 204
205 205 c.commit_review_status = ChangesetStatus.STATUS_NOT_REVIEWED
206 206
207 207 c.commit_set_reviewers_data_json = collections.OrderedDict({'reviewers': []})
208 208
209 209 for review_obj, member, reasons, mandatory, status in review_statuses:
210 210 member_reviewer = h.reviewer_as_json(
211 211 member, reasons=reasons, mandatory=mandatory, role=None,
212 212 user_group=None
213 213 )
214 214
215 215 current_review_status = status[0][1].status if status else ChangesetStatus.STATUS_NOT_REVIEWED
216 216 member_reviewer['review_status'] = current_review_status
217 217 member_reviewer['review_status_label'] = h.commit_status_lbl(current_review_status)
218 218 member_reviewer['allowed_to_update'] = False
219 219 c.commit_set_reviewers_data_json['reviewers'].append(member_reviewer)
220 220
221 221 c.commit_set_reviewers_data_json = json.dumps(c.commit_set_reviewers_data_json)
222 222
223 223 # NOTE(marcink): this uses the same voting logic as in pull-requests
224 224 c.commit_review_status = ChangesetStatusModel().calculate_status(review_statuses)
225 225 c.commit_broadcast_channel = channelstream.comment_channel(c.repo_name, commit_obj=commit)
226 226
227 227 diff = None
228 228 # Iterate over ranges (default commit view is always one commit)
229 229 for commit in c.commit_ranges:
230 230 c.changes[commit.raw_id] = []
231 231
232 232 commit2 = commit
233 233 commit1 = commit.first_parent
234 234
235 235 if method == 'show':
236 236 inline_comments = CommentsModel().get_inline_comments(
237 237 self.db_repo.repo_id, revision=commit.raw_id)
238 238 c.inline_cnt = len(CommentsModel().get_inline_comments_as_list(
239 239 inline_comments))
240 240 c.inline_comments = inline_comments
241 241
242 242 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(
243 243 self.db_repo)
244 244 cache_file_path = diff_cache_exist(
245 245 cache_path, 'diff', commit.raw_id,
246 246 hide_whitespace_changes, diff_context, c.fulldiff)
247 247
248 248 caching_enabled = self._is_diff_cache_enabled(self.db_repo)
249 249 force_recache = str2bool(self.request.GET.get('force_recache'))
250 250
251 251 cached_diff = None
252 252 if caching_enabled:
253 253 cached_diff = load_cached_diff(cache_file_path)
254 254
255 255 has_proper_diff_cache = cached_diff and cached_diff.get('diff')
256 256 if not force_recache and has_proper_diff_cache:
257 257 diffset = cached_diff['diff']
258 258 else:
259 259 vcs_diff = self.rhodecode_vcs_repo.get_diff(
260 260 commit1, commit2,
261 261 ignore_whitespace=hide_whitespace_changes,
262 262 context=diff_context)
263 263
264 264 diff_processor = diffs.DiffProcessor(
265 265 vcs_diff, format='newdiff', diff_limit=diff_limit,
266 266 file_limit=file_limit, show_full_diff=c.fulldiff)
267 267
268 268 _parsed = diff_processor.prepare()
269 269
270 270 diffset = codeblocks.DiffSet(
271 271 repo_name=self.db_repo_name,
272 272 source_node_getter=codeblocks.diffset_node_getter(commit1),
273 273 target_node_getter=codeblocks.diffset_node_getter(commit2))
274 274
275 275 diffset = self.path_filter.render_patchset_filtered(
276 276 diffset, _parsed, commit1.raw_id, commit2.raw_id)
277 277
278 278 # save cached diff
279 279 if caching_enabled:
280 280 cache_diff(cache_file_path, diffset, None)
281 281
282 282 c.limited_diff = diffset.limited_diff
283 283 c.changes[commit.raw_id] = diffset
284 284 else:
285 285 # TODO(marcink): no cache usage here...
286 286 _diff = self.rhodecode_vcs_repo.get_diff(
287 287 commit1, commit2,
288 288 ignore_whitespace=hide_whitespace_changes, context=diff_context)
289 289 diff_processor = diffs.DiffProcessor(
290 290 _diff, format='newdiff', diff_limit=diff_limit,
291 291 file_limit=file_limit, show_full_diff=c.fulldiff)
292 292 # for downloads/raw we only need the RAW diff, nothing else
293 293 diff = self.path_filter.get_raw_patch(diff_processor)
294 294 c.changes[commit.raw_id] = [None, None, None, None, diff, None, None]
295 295
296 296 # sort comments by how they were generated
297 297 c.comments = sorted(c.comments, key=lambda x: x.comment_id)
298 298 c.at_version_num = None
299 299
300 300 if len(c.commit_ranges) == 1:
301 301 c.commit = c.commit_ranges[0]
302 302 c.parent_tmpl = ''.join(
303 303 '# Parent %s\n' % x.raw_id for x in c.commit.parents)
304 304
305 305 if method == 'download':
306 306 response = Response(diff)
307 307 response.content_type = 'text/plain'
308 308 response.content_disposition = (
309 309 'attachment; filename=%s.diff' % commit_id_range[:12])
310 310 return response
311 311 elif method == 'patch':
312 312 c.diff = safe_unicode(diff)
313 313 patch = render(
314 314 'rhodecode:templates/changeset/patch_changeset.mako',
315 315 self._get_template_context(c), self.request)
316 316 response = Response(patch)
317 317 response.content_type = 'text/plain'
318 318 return response
319 319 elif method == 'raw':
320 320 response = Response(diff)
321 321 response.content_type = 'text/plain'
322 322 return response
323 323 elif method == 'show':
324 324 if len(c.commit_ranges) == 1:
325 325 html = render(
326 326 'rhodecode:templates/changeset/changeset.mako',
327 327 self._get_template_context(c), self.request)
328 328 return Response(html)
329 329 else:
330 330 c.ancestor = None
331 331 c.target_repo = self.db_repo
332 332 html = render(
333 333 'rhodecode:templates/changeset/changeset_range.mako',
334 334 self._get_template_context(c), self.request)
335 335 return Response(html)
336 336
337 337 raise HTTPBadRequest()
338 338
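# Sketch of the range handling `_commit()` performs above: an id of the
# form `a...b` is treated as a two-commit range, anything else as a single
# commit. Plain python, no RhodeCode imports assumed; the dict keys are
# illustrative only.
def parse_commit_range(commit_id_range):
    commit_range = commit_id_range.split('...')[:2]
    if len(commit_range) == 2:
        return {'start_id': commit_range[0], 'end_id': commit_range[1]}
    return {'commit_id': commit_id_range}


assert parse_commit_range('abc123...def456') == {
    'start_id': 'abc123', 'end_id': 'def456'}
assert parse_commit_range('abc123') == {'commit_id': 'abc123'}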
339 339 @LoginRequired()
340 340 @HasRepoPermissionAnyDecorator(
341 341 'repository.read', 'repository.write', 'repository.admin')
342 342 def repo_commit_show(self):
343 343 commit_id = self.request.matchdict['commit_id']
344 344 return self._commit(commit_id, method='show')
345 345
346 346 @LoginRequired()
347 347 @HasRepoPermissionAnyDecorator(
348 348 'repository.read', 'repository.write', 'repository.admin')
349 349 def repo_commit_raw(self):
350 350 commit_id = self.request.matchdict['commit_id']
351 351 return self._commit(commit_id, method='raw')
352 352
353 353 @LoginRequired()
354 354 @HasRepoPermissionAnyDecorator(
355 355 'repository.read', 'repository.write', 'repository.admin')
356 356 def repo_commit_patch(self):
357 357 commit_id = self.request.matchdict['commit_id']
358 358 return self._commit(commit_id, method='patch')
359 359
360 360 @LoginRequired()
361 361 @HasRepoPermissionAnyDecorator(
362 362 'repository.read', 'repository.write', 'repository.admin')
363 363 def repo_commit_download(self):
364 364 commit_id = self.request.matchdict['commit_id']
365 365 return self._commit(commit_id, method='download')
366 366
367 367 def _commit_comments_create(self, commit_id, comments):
368 368 _ = self.request.translate
369 369 data = {}
370 370 if not comments:
371 371 return
372 372
373 373 commit = self.db_repo.get_commit(commit_id)
374 374
375 375 all_drafts = len([x for x in comments if str2bool(x['is_draft'])]) == len(comments)
376 376 for entry in comments:
377 377 c = self.load_default_context()
378 378 comment_type = entry['comment_type']
379 379 text = entry['text']
380 380 status = entry['status']
381 381 is_draft = str2bool(entry['is_draft'])
382 382 resolves_comment_id = entry['resolves_comment_id']
383 383 f_path = entry['f_path']
384 384 line_no = entry['line']
385 385 target_elem_id = 'file-{}'.format(h.safeid(h.safe_unicode(f_path)))
386 386
387 387 if status:
388 388 text = text or (_('Status change %(transition_icon)s %(status)s')
389 389 % {'transition_icon': '>',
390 390 'status': ChangesetStatus.get_status_lbl(status)})
391 391
392 392 comment = CommentsModel().create(
393 393 text=text,
394 394 repo=self.db_repo.repo_id,
395 395 user=self._rhodecode_db_user.user_id,
396 396 commit_id=commit_id,
397 397 f_path=f_path,
398 398 line_no=line_no,
399 399 status_change=(ChangesetStatus.get_status_lbl(status)
400 400 if status else None),
401 401 status_change_type=status,
402 402 comment_type=comment_type,
403 403 is_draft=is_draft,
404 404 resolves_comment_id=resolves_comment_id,
405 405 auth_user=self._rhodecode_user,
406 406 send_email=not is_draft, # skip notification for draft comments
407 407 )
408 408 is_inline = comment.is_inline
409 409
410 410 # get status if set !
411 411 if status:
412 412 # `dont_allow_on_closed_pull_request = True` means that
413 413 # if the latest status came from a pull request that is closed,
414 414 # changing the status is disallowed!
415 415
416 416 try:
417 417 ChangesetStatusModel().set_status(
418 418 self.db_repo.repo_id,
419 419 status,
420 420 self._rhodecode_db_user.user_id,
421 421 comment,
422 422 revision=commit_id,
423 423 dont_allow_on_closed_pull_request=True
424 424 )
425 425 except StatusChangeOnClosedPullRequestError:
426 426 msg = _('Changing the status of a commit associated with '
427 427 'a closed pull request is not allowed')
428 428 log.exception(msg)
429 429 h.flash(msg, category='warning')
430 430 raise HTTPFound(h.route_path(
431 431 'repo_commit', repo_name=self.db_repo_name,
432 432 commit_id=commit_id))
433 433
434 434 Session().flush()
435 435 # this is somehow required to get access to some relationship
436 436 # loaded on comment
437 437 Session().refresh(comment)
438 438
439 439 # skip notifications for drafts
440 440 if not is_draft:
441 441 CommentsModel().trigger_commit_comment_hook(
442 442 self.db_repo, self._rhodecode_user, 'create',
443 443 data={'comment': comment, 'commit': commit})
444 444
445 445 comment_id = comment.comment_id
446 446 data[comment_id] = {
447 447 'target_id': target_elem_id
448 448 }
449 449 Session().flush()
450 450
451 451 c.co = comment
452 452 c.at_version_num = 0
453 453 c.is_new = True
454 454 rendered_comment = render(
455 455 'rhodecode:templates/changeset/changeset_comment_block.mako',
456 456 self._get_template_context(c), self.request)
457 457
458 458 data[comment_id].update(comment.get_dict())
459 459 data[comment_id].update({'rendered_text': rendered_comment})
460 460
461 461 # finalize, commit and redirect
462 462 Session().commit()
463 463
464 464 # skip channelstream for draft comments
465 465 if not all_drafts:
466 466 comment_broadcast_channel = channelstream.comment_channel(
467 467 self.db_repo_name, commit_obj=commit)
468 468
469 469 comment_data = data
470 470 posted_comment_type = 'inline' if is_inline else 'general'
471 471 if len(data) == 1:
472 472 msg = _('posted {} new {} comment').format(len(data), posted_comment_type)
473 473 else:
474 474 msg = _('posted {} new {} comments').format(len(data), posted_comment_type)
475 475
476 476 channelstream.comment_channelstream_push(
477 477 self.request, comment_broadcast_channel, self._rhodecode_user, msg,
478 478 comment_data=comment_data)
479 479
480 480 return data
481 481
482 482 @LoginRequired()
483 483 @NotAnonymous()
484 484 @HasRepoPermissionAnyDecorator(
485 485 'repository.read', 'repository.write', 'repository.admin')
486 486 @CSRFRequired()
487 487 def repo_commit_comment_create(self):
488 488 _ = self.request.translate
489 489 commit_id = self.request.matchdict['commit_id']
490 490
491 491 multi_commit_ids = []
492 492 for _commit_id in self.request.POST.get('commit_ids', '').split(','):
493 493 if _commit_id not in ['', None, EmptyCommit.raw_id]:
494 494 if _commit_id not in multi_commit_ids:
495 495 multi_commit_ids.append(_commit_id)
496 496
497 497 commit_ids = multi_commit_ids or [commit_id]
498 498
499 499 data = []
500 500 # Multiple comments for each passed commit id
501 501 for current_id in filter(None, commit_ids):
502 502 comment_data = {
503 503 'comment_type': self.request.POST.get('comment_type'),
504 504 'text': self.request.POST.get('text'),
505 505 'status': self.request.POST.get('changeset_status', None),
506 506 'is_draft': self.request.POST.get('draft'),
507 507 'resolves_comment_id': self.request.POST.get('resolves_comment_id', None),
508 508 'close_pull_request': self.request.POST.get('close_pull_request'),
509 509 'f_path': self.request.POST.get('f_path'),
510 510 'line': self.request.POST.get('line'),
511 511 }
512 512 comment = self._commit_comments_create(commit_id=current_id, comments=[comment_data])
513 513 data.append(comment)
514 514
515 515 return data if len(data) > 1 else data[0]
516 516
517 517 @LoginRequired()
518 518 @NotAnonymous()
519 519 @HasRepoPermissionAnyDecorator(
520 520 'repository.read', 'repository.write', 'repository.admin')
521 521 @CSRFRequired()
522 522 def repo_commit_comment_preview(self):
523 523 # Technically a CSRF token is not needed as no state changes with this
524 524 # call. However, as this is a POST it is better to have it, so automated
525 525 # tools don't flag it as potential CSRF.
526 526 # Post is required because the payload could be bigger than the maximum
527 527 # allowed by GET.
528 528
529 529 text = self.request.POST.get('text')
530 530 renderer = self.request.POST.get('renderer') or 'rst'
531 531 if text:
532 532 return h.render(text, renderer=renderer, mentions=True,
533 533 repo_name=self.db_repo_name)
534 534 return ''
535 535
536 536 @LoginRequired()
537 537 @HasRepoPermissionAnyDecorator(
538 538 'repository.read', 'repository.write', 'repository.admin')
539 539 @CSRFRequired()
540 540 def repo_commit_comment_history_view(self):
541 541 c = self.load_default_context()
542 542 comment_id = self.request.matchdict['comment_id']
543 543 comment_history_id = self.request.matchdict['comment_history_id']
544 544
545 545 comment = ChangesetComment.get_or_404(comment_id)
546 546 comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id)
547 547 if comment.draft and not comment_owner:
548 548 # if we see draft comments history, we only allow this for owner
549 549 raise HTTPNotFound()
550 550
551 551 comment_history = ChangesetCommentHistory.get_or_404(comment_history_id)
552 552 is_repo_comment = comment_history.comment.repo.repo_id == self.db_repo.repo_id
553 553
554 554 if is_repo_comment:
555 555 c.comment_history = comment_history
556 556
557 557 rendered_comment = render(
558 558 'rhodecode:templates/changeset/comment_history.mako',
559 559 self._get_template_context(c), self.request)
560 560 return rendered_comment
561 561 else:
562 562 log.warning('No permissions for user %s to show comment_history_id: %s',
563 563 self._rhodecode_db_user, comment_history_id)
564 564 raise HTTPNotFound()
565 565
566 566 @LoginRequired()
567 567 @NotAnonymous()
568 568 @HasRepoPermissionAnyDecorator(
569 569 'repository.read', 'repository.write', 'repository.admin')
570 570 @CSRFRequired()
571 571 def repo_commit_comment_attachment_upload(self):
572 572 c = self.load_default_context()
573 573 upload_key = 'attachment'
574 574
575 575 file_obj = self.request.POST.get(upload_key)
576 576
577 577 if file_obj is None:
578 578 self.request.response.status = 400
579 579 return {'store_fid': None,
580 580 'access_path': None,
581 581 'error': '{} data field is missing'.format(upload_key)}
582 582
583 583 if not hasattr(file_obj, 'filename'):
584 584 self.request.response.status = 400
585 585 return {'store_fid': None,
586 586 'access_path': None,
587 587 'error': 'filename cannot be read from the data field'}
588 588
589 589 filename = file_obj.filename
590 590 file_display_name = filename
591 591
592 592 metadata = {
593 593 'user_uploaded': {'username': self._rhodecode_user.username,
594 594 'user_id': self._rhodecode_user.user_id,
595 595 'ip': self._rhodecode_user.ip_addr}}
596 596
597 597 # TODO(marcink): allow .ini configuration for allowed_extensions, and file-size
598 598 allowed_extensions = [
599 599 '.gif', '.jpeg', '.jpg', '.png', '.docx', '.gz', '.log', '.pdf',
600 600 '.pptx', '.txt', '.xlsx', '.zip']
601 601 max_file_size = 10 * 1024 * 1024 # 10MB, also validated via dropzone.js
602 602
603 603 try:
604 604 storage = store_utils.get_file_storage(self.request.registry.settings)
605 605 store_uid, metadata = storage.save_file(
606 606 file_obj.file, filename, extra_metadata=metadata,
607 607 extensions=allowed_extensions, max_filesize=max_file_size)
608 608 except FileNotAllowedException:
609 609 self.request.response.status = 400
610 610 permitted_extensions = ', '.join(allowed_extensions)
611 611 error_msg = 'File `{}` is not allowed. ' \
612 612 'Only the following extensions are permitted: {}'.format(
613 613 filename, permitted_extensions)
614 614 return {'store_fid': None,
615 615 'access_path': None,
616 616 'error': error_msg}
617 617 except FileOverSizeException:
618 618 self.request.response.status = 400
619 619 limit_mb = h.format_byte_size_binary(max_file_size)
620 620 return {'store_fid': None,
621 621 'access_path': None,
622 622 'error': 'File {} exceeds the allowed limit of {}.'.format(
623 623 filename, limit_mb)}
624 624
625 625 try:
626 626 entry = FileStore.create(
627 627 file_uid=store_uid, filename=metadata["filename"],
628 628 file_hash=metadata["sha256"], file_size=metadata["size"],
629 629 file_display_name=file_display_name,
630 630 file_description=u'comment attachment `{}`'.format(safe_unicode(filename)),
631 631 hidden=True, check_acl=True, user_id=self._rhodecode_user.user_id,
632 632 scope_repo_id=self.db_repo.repo_id
633 633 )
634 634 Session().add(entry)
635 635 Session().commit()
636 636 log.debug('Stored upload in DB as %s', entry)
637 637 except Exception:
638 638 log.exception('Failed to store file %s', filename)
639 639 self.request.response.status = 400
640 640 return {'store_fid': None,
641 641 'access_path': None,
642 642 'error': 'File {} failed to store in DB.'.format(filename)}
643 643
644 644 Session().commit()
645 645
646 646 return {
647 647 'store_fid': store_uid,
648 648 'access_path': h.route_path(
649 649 'download_file', fid=store_uid),
650 650 'fqn_access_path': h.route_url(
651 651 'download_file', fid=store_uid),
652 652 'repo_access_path': h.route_path(
653 653 'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid),
654 654 'repo_fqn_access_path': h.route_url(
655 655 'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid),
656 656 }
657 657
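# Standalone mirror of the checks the upload view above delegates to
# storage.save_file(): an extension allow-list plus a size cap. The helper
# name and return convention are illustrative only, not RhodeCode APIs.
import os

ALLOWED_EXTENSIONS = ['.gif', '.jpeg', '.jpg', '.png', '.docx', '.gz',
                      '.log', '.pdf', '.pptx', '.txt', '.xlsx', '.zip']
MAX_FILE_SIZE = 10 * 1024 * 1024  # 10MB, mirrors the dropzone.js limit


def validate_attachment(filename, size):
    ext = os.path.splitext(filename)[1].lower()
    if ext not in ALLOWED_EXTENSIONS:
        return 'File `{}` is not allowed.'.format(filename)
    if size > MAX_FILE_SIZE:
        return 'File {} exceeds the allowed limit.'.format(filename)
    return None  # file is acceptable


assert validate_attachment('notes.txt', 1024) is None
assert validate_attachment('tool.exe', 1024) is not None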
658 658 @LoginRequired()
659 659 @NotAnonymous()
660 660 @HasRepoPermissionAnyDecorator(
661 661 'repository.read', 'repository.write', 'repository.admin')
662 662 @CSRFRequired()
663 663 def repo_commit_comment_delete(self):
664 664 commit_id = self.request.matchdict['commit_id']
665 665 comment_id = self.request.matchdict['comment_id']
666 666
667 667 comment = ChangesetComment.get_or_404(comment_id)
668 668 if not comment:
669 669 log.debug('Comment with id:%s not found, skipping', comment_id)
670 670 # comment already deleted in another call probably
671 671 return True
672 672
673 673 if comment.immutable:
674 674 # don't allow deleting comments that are immutable
675 675 raise HTTPForbidden()
676 676
677 677 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
678 678 super_admin = h.HasPermissionAny('hg.admin')()
679 679 comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id)
680 680 is_repo_comment = comment.repo.repo_id == self.db_repo.repo_id
681 681 comment_repo_admin = is_repo_admin and is_repo_comment
682 682
683 683 if comment.draft and not comment_owner:
684 684 # We never allow to delete draft comments for other than owners
685 685 raise HTTPNotFound()
686 686
687 687 if super_admin or comment_owner or comment_repo_admin:
688 688 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
689 689 Session().commit()
690 690 return True
691 691 else:
692 692 log.warning('No permissions for user %s to delete comment_id: %s',
693 693 self._rhodecode_db_user, comment_id)
694 694 raise HTTPNotFound()
695 695
696 696 @LoginRequired()
697 697 @NotAnonymous()
698 698 @HasRepoPermissionAnyDecorator(
699 699 'repository.read', 'repository.write', 'repository.admin')
700 700 @CSRFRequired()
701 701 def repo_commit_comment_edit(self):
702 702 self.load_default_context()
703 703
704 704 commit_id = self.request.matchdict['commit_id']
705 705 comment_id = self.request.matchdict['comment_id']
706 706 comment = ChangesetComment.get_or_404(comment_id)
707 707
708 708 if comment.immutable:
709 709 # don't allow editing comments that are immutable
710 710 raise HTTPForbidden()
711 711
712 712 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
713 713 super_admin = h.HasPermissionAny('hg.admin')()
714 714 comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id)
715 715 is_repo_comment = comment.repo.repo_id == self.db_repo.repo_id
716 716 comment_repo_admin = is_repo_admin and is_repo_comment
717 717
718 718 if super_admin or comment_owner or comment_repo_admin:
719 719 text = self.request.POST.get('text')
720 720 version = self.request.POST.get('version')
721 721 if text == comment.text:
722 722 log.warning(
723 723 'Comment(repo): '
724 724 'Trying to create a new version of comment {} '
725 725 'with the same body'.format(
726 726 comment_id,
727 727 )
728 728 )
729 729 raise HTTPNotFound()
730 730
731 731 if version.isdigit():
732 732 version = int(version)
733 733 else:
734 734 log.warning(
735 735 'Comment(repo): Wrong version type {} {} '
736 736 'for comment {}'.format(
737 737 version,
738 738 type(version),
739 739 comment_id,
740 740 )
741 741 )
742 742 raise HTTPNotFound()
743 743
744 744 try:
745 745 comment_history = CommentsModel().edit(
746 746 comment_id=comment_id,
747 747 text=text,
748 748 auth_user=self._rhodecode_user,
749 749 version=version,
750 750 )
751 751 except CommentVersionMismatch:
752 752 raise HTTPConflict()
753 753
754 754 if not comment_history:
755 755 raise HTTPNotFound()
756 756
757 757 if not comment.draft:
758 758 commit = self.db_repo.get_commit(commit_id)
759 759 CommentsModel().trigger_commit_comment_hook(
760 760 self.db_repo, self._rhodecode_user, 'edit',
761 761 data={'comment': comment, 'commit': commit})
762 762
763 763 Session().commit()
764 764 return {
765 765 'comment_history_id': comment_history.comment_history_id,
766 766 'comment_id': comment.comment_id,
767 767 'comment_version': comment_history.version,
768 768 'comment_author_username': comment_history.author.username,
769 769 'comment_author_gravatar': h.gravatar_url(comment_history.author.email, 16),
770 770 'comment_created_on': h.age_component(comment_history.created_on,
771 771 time_is_local=True),
772 772 }
773 773 else:
774 774 log.warning('No permissions for user %s to edit comment_id: %s',
775 775 self._rhodecode_db_user, comment_id)
776 776 raise HTTPNotFound()
777 777
778 778 @LoginRequired()
779 779 @HasRepoPermissionAnyDecorator(
780 780 'repository.read', 'repository.write', 'repository.admin')
781 781 def repo_commit_data(self):
782 782 commit_id = self.request.matchdict['commit_id']
783 783 self.load_default_context()
784 784
785 785 try:
786 786 return self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
787 787 except CommitDoesNotExistError as e:
788 788 return EmptyCommit(message=str(e))
789 789
790 790 @LoginRequired()
791 791 @HasRepoPermissionAnyDecorator(
792 792 'repository.read', 'repository.write', 'repository.admin')
793 793 def repo_commit_children(self):
794 794 commit_id = self.request.matchdict['commit_id']
795 795 self.load_default_context()
796 796
797 797 try:
798 798 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
799 799 children = commit.children
800 800 except CommitDoesNotExistError:
801 801 children = []
802 802
803 803 result = {"results": children}
804 804 return result
805 805
806 806 @LoginRequired()
807 807 @HasRepoPermissionAnyDecorator(
808 808 'repository.read', 'repository.write', 'repository.admin')
809 809 def repo_commit_parents(self):
810 810 commit_id = self.request.matchdict['commit_id']
811 811 self.load_default_context()
812 812
813 813 try:
814 814 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
815 815 parents = commit.parents
816 816 except CommitDoesNotExistError:
817 817 parents = []
818 818 result = {"results": parents}
819 819 return result
@@ -1,149 +1,148 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import errno
23 23
24 24 from multiprocessing.util import Finalize
25 25
26 from rhodecode.lib.compat import kill
27 26
28 27
29 28 class LockHeld(Exception):
30 29 pass
31 30
32 31
33 32 class DaemonLock(object):
34 33 """daemon locking
35 34 USAGE:
36 35 try:
37 36 l = DaemonLock(file_='/path/to/lockfile', desc='test lock')
38 37 main()
39 38 l.release()
40 39 except LockHeld:
41 40 sys.exit(1)
42 41 """
43 42
44 43 def __init__(self, file_=None, callbackfn=None,
45 44 desc='daemon lock', debug=False):
46 45
47 46 lock_name = os.path.join(os.path.dirname(__file__), 'running.lock')
48 47 self.pidfile = file_ if file_ else lock_name
49 48 self.callbackfn = callbackfn
50 49 self.desc = desc
51 50 self.debug = debug
52 51 self.held = False
53 52 # run the lock automatically!
54 53 self.lock()
55 54 self._finalize = Finalize(self, DaemonLock._on_finalize,
56 55 args=(self, debug), exitpriority=10)
57 56
58 57 @staticmethod
59 58 def _on_finalize(lock, debug):
60 59 if lock.held:
61 60 if debug:
62 61 print('lock held, finalizing and running lock.release()')
63 62 lock.release()
64 63
65 64 def lock(self):
66 65 """
67 66 locking function; if a lock is already present it
68 67 will raise a LockHeld exception
69 68 """
70 69 lockname = '%s' % (os.getpid())
71 70 if self.debug:
72 71 print('running lock')
73 72 self.trylock()
74 73 self.makelock(lockname, self.pidfile)
75 74 return True
76 75
77 76 def trylock(self):
78 77 running_pid = False
79 78 if self.debug:
80 79 print('checking for already running process')
81 80 try:
82 81 with open(self.pidfile, 'r') as f:
83 82 try:
84 83 running_pid = int(f.readline())
85 84 except ValueError:
86 85 running_pid = -1
87 86
88 87 if self.debug:
89 88 print('lock file present running_pid: %s, '
90 89 'checking for execution' % (running_pid,))
91 90 # Now we check the PID from lock file matches to the current
92 91 # process PID
93 92 if running_pid:
94 93 try:
95 kill(running_pid, 0)
94 os.kill(running_pid, 0)
96 95 except OSError as exc:
97 96 if exc.errno in (errno.ESRCH, errno.EPERM):
98 97 print("Lock File is there but"
99 98 " the program is not running")
100 99 print("Removing lock file for the: %s" % running_pid)
101 100 self.release()
102 101 else:
103 102 raise
104 103 else:
105 104 print("You already have an instance of the program running")
106 105 print("It is running as process %s" % running_pid)
107 106 raise LockHeld()
108 107
109 108 except IOError as e:
110 109 if e.errno != errno.ENOENT:
111 110 raise
112 111
113 112 def release(self):
114 113 """releases the pid by removing the pidfile
115 114 """
116 115 if self.debug:
117 116 print('trying to release the pidlock')
118 117
119 118 if self.callbackfn:
120 119 # execute callback function on release
121 120 if self.debug:
122 121 print('executing callback function %s' % self.callbackfn)
123 122 self.callbackfn()
124 123 try:
125 124 if self.debug:
126 125 print('removing pidfile %s' % self.pidfile)
127 126 os.remove(self.pidfile)
128 127 self.held = False
129 128 except OSError as e:
130 129 if self.debug:
131 130 print('removing pidfile failed %s' % e)
132 131 pass
133 132
134 133 def makelock(self, lockname, pidfile):
135 134 """
136 135 this function will make an actual lock
137 136
138 137 :param lockname: actual pid to write into the file
139 138 :param pidfile: the file to write the pid in
140 139 """
141 140 if self.debug:
142 141 print('creating a file %s and pid: %s' % (pidfile, lockname))
143 142
144 143 dir_, file_ = os.path.split(pidfile)
145 144 if not os.path.isdir(dir_):
146 145 os.makedirs(dir_)
147 146 with open(self.pidfile, 'wb') as f:
148 147 f.write(lockname.encode('utf-8'))  # file is binary-mode, pid must be bytes
149 148 self.held = True
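# Usage sketch taken from the DaemonLock docstring above: the lock is
# acquired in __init__, held for the duration of the work, and released
# explicitly; a second instance raises LockHeld. `do_exclusive_work` is a
# placeholder for the caller's own code, and this assumes DaemonLock and
# LockHeld are importable from the module above.
import sys


def do_exclusive_work():
    print('only one process runs this at a time')


try:
    lock = DaemonLock(file_='/tmp/example.lock', desc='example lock')
    do_exclusive_work()
    lock.release()
except LockHeld:
    sys.exit(1)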
@@ -1,1052 +1,1052 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 GIT repository module
23 23 """
24 24
25 25 import logging
26 26 import os
27 27 import re
28 28
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 from rhodecode.lib.compat import OrderedDict
31 from collections import OrderedDict
32 32 from rhodecode.lib.datelib import (
33 33 utcdate_fromtimestamp, makedate, date_astimestamp)
34 34 from rhodecode.lib.utils import safe_unicode, safe_str
35 35 from rhodecode.lib.utils2 import CachedProperty
36 36 from rhodecode.lib.vcs import connection, path as vcspath
37 37 from rhodecode.lib.vcs.backends.base import (
38 38 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 39 MergeFailureReason, Reference)
40 40 from rhodecode.lib.vcs.backends.git.commit import GitCommit
41 41 from rhodecode.lib.vcs.backends.git.diff import GitDiff
42 42 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
43 43 from rhodecode.lib.vcs.exceptions import (
44 44 CommitDoesNotExistError, EmptyRepositoryError,
45 45 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError, UnresolvedFilesInRepo)
46 46
47 47
48 48 SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')
49 49
50 50 log = logging.getLogger(__name__)
51 51
52 52
53 53 class GitRepository(BaseRepository):
54 54 """
55 55 Git repository backend.
56 56 """
57 57 DEFAULT_BRANCH_NAME = os.environ.get('GIT_DEFAULT_BRANCH_NAME') or 'master'
58 58 DEFAULT_REF = 'branch:{}'.format(DEFAULT_BRANCH_NAME)
59 59
60 60 contact = BaseRepository.DEFAULT_CONTACT
61 61
62 62 def __init__(self, repo_path, config=None, create=False, src_url=None,
63 63 do_workspace_checkout=False, with_wire=None, bare=False):
64 64
65 65 self.path = safe_str(os.path.abspath(repo_path))
66 66 self.config = config if config else self.get_default_config()
67 67 self.with_wire = with_wire or {"cache": False} # default should not use cache
68 68
69 69 self._init_repo(create, src_url, do_workspace_checkout, bare)
70 70
71 71 # caches
72 72 self._commit_ids = {}
73 73
74 74 @LazyProperty
75 75 def _remote(self):
76 76 repo_id = self.path
77 77 return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)
78 78
79 79 @LazyProperty
80 80 def bare(self):
81 81 return self._remote.bare()
82 82
83 83 @LazyProperty
84 84 def head(self):
85 85 return self._remote.head()
86 86
87 87 @CachedProperty
88 88 def commit_ids(self):
89 89 """
90 90 Returns a list of commit ids, in ascending order. Being a lazy
91 91 attribute, it allows external tools to inject commit ids from cache.
92 92 """
93 93 commit_ids = self._get_all_commit_ids()
94 94 self._rebuild_cache(commit_ids)
95 95 return commit_ids
96 96
97 97 def _rebuild_cache(self, commit_ids):
98 98 self._commit_ids = dict((commit_id, index)
99 99 for index, commit_id in enumerate(commit_ids))
100 100
101 101 def run_git_command(self, cmd, **opts):
102 102 """
103 103 Runs given ``cmd`` as git command and returns tuple
104 104 (stdout, stderr).
105 105
106 106 :param cmd: git command to be executed
107 107 :param opts: env options to pass into Subprocess command
108 108 """
109 109 if not isinstance(cmd, list):
110 110 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
111 111
112 112 skip_stderr_log = opts.pop('skip_stderr_log', False)
113 113 out, err = self._remote.run_git_command(cmd, **opts)
114 114 if err and not skip_stderr_log:
115 115 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
116 116 return out, err
117 117
118 118 @staticmethod
119 119 def check_url(url, config):
120 120 """
121 121 Function will check the given url and try to verify if it's a valid
122 122 link. Sometimes it may happen that git issues a basic
123 123 auth request, which can cause the whole API to hang when used from python
124 124 or other external calls.
125 125
126 126 On failure it raises urllib2.HTTPError; the exception is also thrown
127 127 when the return code is not 200
128 128 """
129 129 # check first if it's not an url
130 130 if os.path.isdir(url) or url.startswith('file:'):
131 131 return True
132 132
133 133 if '+' in url.split('://', 1)[0]:
134 134 url = url.split('+', 1)[1]
135 135
136 136 # Request the _remote to verify the url
137 137 return connection.Git.check_url(url, config.serialize())
138 138
139 139 @staticmethod
140 140 def is_valid_repository(path):
141 141 if os.path.isdir(os.path.join(path, '.git')):
142 142 return True
143 143 # check case of bare repository
144 144 try:
145 145 GitRepository(path)
146 146 return True
147 147 except VCSError:
148 148 pass
149 149 return False
150 150
151 151 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
152 152 bare=False):
153 153 if create and os.path.exists(self.path):
154 154 raise RepositoryError(
155 155 "Cannot create repository at %s, location already exist"
156 156 % self.path)
157 157
158 158 if bare and do_workspace_checkout:
159 159 raise RepositoryError("Cannot update a bare repository")
160 160 try:
161 161
162 162 if src_url:
163 163 # check URL before any actions
164 164 GitRepository.check_url(src_url, self.config)
165 165
166 166 if create:
167 167 os.makedirs(self.path, mode=0o755)
168 168
169 169 if bare:
170 170 self._remote.init_bare()
171 171 else:
172 172 self._remote.init()
173 173
174 174 if src_url and bare:
175 175 # a bare repository only allows a fetch; checkout is not allowed
176 176 self.fetch(src_url, commit_ids=None)
177 177 elif src_url:
178 178 self.pull(src_url, commit_ids=None,
179 179 update_after=do_workspace_checkout)
180 180
181 181 else:
182 182 if not self._remote.assert_correct_path():
183 183 raise RepositoryError(
184 184 'Path "%s" does not contain a Git repository' %
185 185 (self.path,))
186 186
187 187 # TODO: johbo: check if we have to translate the OSError here
188 188 except OSError as err:
189 189 raise RepositoryError(err)
190 190
191 191 def _get_all_commit_ids(self):
192 192 return self._remote.get_all_commit_ids()
193 193
194 194 def _get_commit_ids(self, filters=None):
194 194 # we must check if this repo is not empty, since the later command
195 195 # fails if it is. And it's cheaper to ask than to handle the subprocess
196 196 # errors
198 198
199 199 head = self._remote.head(show_exc=False)
200 200
201 201 if not head:
202 202 return []
203 203
204 204 rev_filter = ['--branches', '--tags']
205 205 extra_filter = []
206 206
207 207 if filters:
208 208 if filters.get('since'):
209 209 extra_filter.append('--since=%s' % (filters['since']))
210 210 if filters.get('until'):
211 211 extra_filter.append('--until=%s' % (filters['until']))
212 212 if filters.get('branch_name'):
213 213 rev_filter = []
214 214 extra_filter.append(filters['branch_name'])
215 215 rev_filter.extend(extra_filter)
216 216
217 217 # if filters.get('start') or filters.get('end'):
218 218 # # skip is offset, max-count is limit
219 219 # if filters.get('start'):
220 220 # extra_filter += ' --skip=%s' % filters['start']
221 221 # if filters.get('end'):
222 222 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
223 223
224 224 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
225 225 try:
226 226 output, __ = self.run_git_command(cmd)
227 227 except RepositoryError:
228 228 # Can be raised for empty repositories
229 229 return []
230 230 return output.splitlines()
231 231
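# Standalone illustration of how the filters dict above is turned into
# `git rev-list` arguments; no repository access involved, just argument
# building, so the behavior can be checked in isolation.
def build_rev_list_cmd(filters=None):
    rev_filter = ['--branches', '--tags']
    extra_filter = []
    if filters:
        if filters.get('since'):
            extra_filter.append('--since=%s' % filters['since'])
        if filters.get('until'):
            extra_filter.append('--until=%s' % filters['until'])
        if filters.get('branch_name'):
            rev_filter = []  # a branch name replaces the default refs
            extra_filter.append(filters['branch_name'])
    rev_filter.extend(extra_filter)
    return ['rev-list', '--reverse', '--date-order'] + rev_filter


assert build_rev_list_cmd() == [
    'rev-list', '--reverse', '--date-order', '--branches', '--tags']
assert build_rev_list_cmd({'branch_name': 'master'}) == [
    'rev-list', '--reverse', '--date-order', 'master']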
232 232 def _lookup_commit(self, commit_id_or_idx, translate_tag=True, maybe_unreachable=False, reference_obj=None):
233 233
234 234 def is_null(value):
235 235 return len(value) == commit_id_or_idx.count('0')
236 236
237 237 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
238 238 return self.commit_ids[-1]
239 239
240 240 commit_missing_err = "Commit {} does not exist for `{}`".format(
241 241 *map(safe_str, [commit_id_or_idx, self.name]))
242 242
243 243 is_bstr = isinstance(commit_id_or_idx, str)
244 244 is_branch = reference_obj and reference_obj.branch
245 245
246 246 lookup_ok = False
247 247 if is_bstr:
248 248 # Need to call remote to translate id for tagging scenarios,
249 249 # or branches that are numeric
250 250 try:
251 251 remote_data = self._remote.get_object(commit_id_or_idx,
252 252 maybe_unreachable=maybe_unreachable)
253 253 commit_id_or_idx = remote_data["commit_id"]
254 254 lookup_ok = True
255 255 except (CommitDoesNotExistError,):
256 256 lookup_ok = False
257 257
258 258 if lookup_ok is False:
259 259 is_numeric_idx = \
260 260 (is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12) \
261 261 or isinstance(commit_id_or_idx, int)
262 262 if not is_branch and (is_numeric_idx or is_null(commit_id_or_idx)):
263 263 try:
264 264 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
265 265 lookup_ok = True
266 266 except Exception:
267 267 raise CommitDoesNotExistError(commit_missing_err)
268 268
269 269 # we failed regular lookup, and by integer number lookup
270 270 if lookup_ok is False:
271 271 raise CommitDoesNotExistError(commit_missing_err)
272 272
273 273 # Ensure we return full id
274 274 if not SHA_PATTERN.match(str(commit_id_or_idx)):
275 275 raise CommitDoesNotExistError(
276 276 "Given commit id %s not recognized" % commit_id_or_idx)
277 277 return commit_id_or_idx
278 278
279 279 def get_hook_location(self):
280 280 """
281 281 returns absolute path to location where hooks are stored
282 282 """
283 283 loc = os.path.join(self.path, 'hooks')
284 284 if not self.bare:
285 285 loc = os.path.join(self.path, '.git', 'hooks')
286 286 return loc
287 287
288 288 @LazyProperty
289 289 def last_change(self):
290 290 """
291 291 Returns last change made on this repository as
292 292 `datetime.datetime` object.
293 293 """
294 294 try:
295 295 return self.get_commit().date
296 296 except RepositoryError:
297 297 tzoffset = makedate()[1]
298 298 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
299 299
300 300 def _get_fs_mtime(self):
301 301 idx_loc = '' if self.bare else '.git'
302 302 # fallback to filesystem
303 303 in_path = os.path.join(self.path, idx_loc, "index")
304 304 he_path = os.path.join(self.path, idx_loc, "HEAD")
305 305 if os.path.exists(in_path):
306 306 return os.stat(in_path).st_mtime
307 307 else:
308 308 return os.stat(he_path).st_mtime
309 309
310 310 @LazyProperty
311 311 def description(self):
312 312 description = self._remote.get_description()
313 313 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
314 314
315 315 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
316 316 if self.is_empty():
317 317 return OrderedDict()
318 318
319 319 result = []
320 320 for ref, sha in self._refs.items():
321 321 if ref.startswith(prefix):
322 322 ref_name = ref
323 323 if strip_prefix:
324 324 ref_name = ref[len(prefix):]
325 325 result.append((safe_unicode(ref_name), sha))
326 326
327 327 def get_name(entry):
328 328 return entry[0]
329 329
330 330 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
331 331
332 332 def _get_branches(self):
333 333 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
334 334
335 335 @CachedProperty
336 336 def branches(self):
337 337 return self._get_branches()
338 338
339 339 @CachedProperty
340 340 def branches_closed(self):
341 341 return {}
342 342
343 343 @CachedProperty
344 344 def bookmarks(self):
345 345 return {}
346 346
347 347 @CachedProperty
348 348 def branches_all(self):
349 349 all_branches = {}
350 350 all_branches.update(self.branches)
351 351 all_branches.update(self.branches_closed)
352 352 return all_branches
353 353
354 354 @CachedProperty
355 355 def tags(self):
356 356 return self._get_tags()
357 357
358 358 def _get_tags(self):
359 359 return self._get_refs_entries(prefix='refs/tags/', strip_prefix=True, reverse=True)
360 360
361 361 def tag(self, name, user, commit_id=None, message=None, date=None,
362 362 **kwargs):
363 363 # TODO: fix this method to apply annotated tags correctly with message
364 364 """
365 365 Creates and returns a tag for the given ``commit_id``.
366 366
367 367 :param name: name for new tag
368 368 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
369 369 :param commit_id: commit id for which new tag would be created
370 370 :param message: message of the tag's commit
371 371 :param date: date of tag's commit
372 372
373 373 :raises TagAlreadyExistError: if tag with same name already exists
374 374 """
375 375 if name in self.tags:
376 376 raise TagAlreadyExistError("Tag %s already exists" % name)
377 377 commit = self.get_commit(commit_id=commit_id)
378 378 message = message or "Added tag %s for commit %s" % (name, commit.raw_id)
379 379
380 380 self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)
381 381
382 382 self._invalidate_prop_cache('tags')
383 383 self._invalidate_prop_cache('_refs')
384 384
385 385 return commit
386 386
387 387 def remove_tag(self, name, user, message=None, date=None):
388 388 """
389 389 Removes tag with the given ``name``.
390 390
391 391 :param name: name of the tag to be removed
392 392 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
393 393 :param message: message of the tag's removal commit
394 394 :param date: date of tag's removal commit
395 395
396 396 :raises TagDoesNotExistError: if tag with given name does not exist
397 397 """
398 398 if name not in self.tags:
399 399 raise TagDoesNotExistError("Tag %s does not exist" % name)
400 400
401 401 self._remote.tag_remove(name)
402 402 self._invalidate_prop_cache('tags')
403 403 self._invalidate_prop_cache('_refs')
404 404
405 405 def _get_refs(self):
406 406 return self._remote.get_refs()
407 407
408 408 @CachedProperty
409 409 def _refs(self):
410 410 return self._get_refs()
411 411
412 412 @property
413 413 def _ref_tree(self):
414 414 node = tree = {}
415 415 for ref, sha in self._refs.items():
416 416 path = ref.split('/')
417 417 for bit in path[:-1]:
418 418 node = node.setdefault(bit, {})
419 419 node[path[-1]] = sha
420 420 node = tree
421 421 return tree
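# A minimal, self-contained sketch of the nesting performed by _ref_tree
# above; `build_ref_tree` and its sample input are illustrative only.
def build_ref_tree(refs):
    tree = {}
    for ref, sha in refs.items():
        node = tree
        path = ref.split('/')
        for bit in path[:-1]:
            # descend, creating intermediate dict nodes as needed
            node = node.setdefault(bit, {})
        node[path[-1]] = sha
    return tree

# build_ref_tree({'refs/heads/master': 'abc1', 'refs/tags/v1.0': 'def2'})
# -> {'refs': {'heads': {'master': 'abc1'}, 'tags': {'v1.0': 'def2'}}}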
422 422
423 423 def get_remote_ref(self, ref_name):
424 424 ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
425 425 try:
426 426 return self._refs[ref_key]
427 427 except Exception:
428 428 return
429 429
430 430 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
431 431 translate_tag=True, maybe_unreachable=False, reference_obj=None):
432 432 """
433 433 Returns `GitCommit` object representing commit from git repository
434 434 at the given `commit_id` or head (most recent commit) if None given.
435 435 """
436 436
437 437 if self.is_empty():
438 438 raise EmptyRepositoryError("There are no commits yet")
439 439
440 440 if commit_id is not None:
441 441 self._validate_commit_id(commit_id)
442 442 try:
443 443 # we have cached idx, use it without contacting the remote
444 444 idx = self._commit_ids[commit_id]
445 445 return GitCommit(self, commit_id, idx, pre_load=pre_load)
446 446 except KeyError:
447 447 pass
448 448
449 449 elif commit_idx is not None:
450 450 self._validate_commit_idx(commit_idx)
451 451 try:
452 452 _commit_id = self.commit_ids[commit_idx]
453 453 if commit_idx < 0:
454 454 commit_idx = self.commit_ids.index(_commit_id)
455 455 return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
456 456 except IndexError:
457 457 commit_id = commit_idx
458 458 else:
459 459 commit_id = "tip"
460 460
461 461 if translate_tag:
462 462 commit_id = self._lookup_commit(
463 463 commit_id, maybe_unreachable=maybe_unreachable,
464 464 reference_obj=reference_obj)
465 465
466 466 try:
467 467 idx = self._commit_ids[commit_id]
468 468 except KeyError:
469 469 idx = -1
470 470
471 471 return GitCommit(self, commit_id, idx, pre_load=pre_load)
472 472
473 473 def get_commits(
474 474 self, start_id=None, end_id=None, start_date=None, end_date=None,
475 475 branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
476 476 """
477 477 Returns generator of `GitCommit` objects from start to end (both
478 478 are inclusive), in ascending date order.
479 479
480 480 :param start_id: None, str(commit_id)
481 481 :param end_id: None, str(commit_id)
482 482 :param start_date: if specified, commits with commit date less than
483 483 ``start_date`` would be filtered out from returned set
484 484 :param end_date: if specified, commits with commit date greater than
485 485 ``end_date`` would be filtered out from returned set
486 486 :param branch_name: if specified, commits not reachable from given
487 487 branch would be filtered out from returned set
488 488 :param show_hidden: Show hidden commits such as obsolete or hidden from
489 489 Mercurial evolve
490 490 :raise BranchDoesNotExistError: If given `branch_name` does not
491 491 exist.
492 492 :raise CommitDoesNotExistError: If commits for given `start` or
493 493 `end` could not be found.
494 494
495 495 """
496 496 if self.is_empty():
497 497 raise EmptyRepositoryError("There are no commits yet")
498 498
499 499 self._validate_branch_name(branch_name)
500 500
501 501 if start_id is not None:
502 502 self._validate_commit_id(start_id)
503 503 if end_id is not None:
504 504 self._validate_commit_id(end_id)
505 505
506 506 start_raw_id = self._lookup_commit(start_id)
507 507 start_pos = self._commit_ids[start_raw_id] if start_id else None
508 508 end_raw_id = self._lookup_commit(end_id)
509 509 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
510 510
511 511 if None not in [start_id, end_id] and start_pos > end_pos:
512 512 raise RepositoryError(
513 513 "Start commit '%s' cannot be after end commit '%s'" %
514 514 (start_id, end_id))
515 515
516 516 if end_pos is not None:
517 517 end_pos += 1
518 518
519 519 filter_ = []
520 520 if branch_name:
521 521 filter_.append({'branch_name': branch_name})
522 522 if start_date and not end_date:
523 523 filter_.append({'since': start_date})
524 524 if end_date and not start_date:
525 525 filter_.append({'until': end_date})
526 526 if start_date and end_date:
527 527 filter_.append({'since': start_date})
528 528 filter_.append({'until': end_date})
529 529
530 530 # if start_pos or end_pos:
531 531 # filter_.append({'start': start_pos})
532 532 # filter_.append({'end': end_pos})
533 533
534 534 if filter_:
535 535 revfilters = {
536 536 'branch_name': branch_name,
537 537 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
538 538 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
539 539 'start': start_pos,
540 540 'end': end_pos,
541 541 }
542 542 commit_ids = self._get_commit_ids(filters=revfilters)
543 543
544 544 else:
545 545 commit_ids = self.commit_ids
546 546
547 547 if start_pos or end_pos:
548 548 commit_ids = commit_ids[start_pos: end_pos]
549 549
550 550 return CollectionGenerator(self, commit_ids, pre_load=pre_load,
551 551 translate_tag=translate_tags)
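# Hypothetical usage of the filtered iteration above; `repo` stands in
# for an initialized GitRepository instance:
#
#   from datetime import datetime
#   for commit in repo.get_commits(branch_name='master',
#                                  start_date=datetime(2020, 1, 1),
#                                  end_date=datetime(2020, 6, 30)):
#       print(commit.raw_id, commit.date)  # ascending date order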
552 552
553 553 def get_diff(
554 554 self, commit1, commit2, path='', ignore_whitespace=False,
555 555 context=3, path1=None):
556 556 """
557 557 Returns (git like) *diff*, as plain text. Shows changes introduced by
558 558 ``commit2`` since ``commit1``.
559 559
560 560 :param commit1: Entry point from which diff is shown. Can be
561 561 ``self.EMPTY_COMMIT`` - in this case, patch showing all
562 562 the changes since empty state of the repository until ``commit2``
563 563 :param commit2: Until which commit changes should be shown.
564 564 :param ignore_whitespace: If set to ``True``, would not show whitespace
565 565 changes. Defaults to ``False``.
566 566 :param context: How many lines before/after changed lines should be
567 567 shown. Defaults to ``3``.
568 568 """
569 569 self._validate_diff_commits(commit1, commit2)
570 570 if path1 is not None and path1 != path:
571 571 raise ValueError("Diff of two different paths not supported.")
572 572
573 573 if path:
574 574 file_filter = path
575 575 else:
576 576 file_filter = None
577 577
578 578 diff = self._remote.diff(
579 579 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
580 580 opt_ignorews=ignore_whitespace,
581 581 context=context)
582 582 return GitDiff(diff)
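# Hypothetical usage of get_diff(); EMPTY_COMMIT diffs from the empty
# state of the repository up to the given commit:
#
#   tip = repo.get_commit()
#   diff = repo.get_diff(repo.EMPTY_COMMIT, tip, path='setup.py',
#                        ignore_whitespace=True, context=5)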
583 583
584 584 def strip(self, commit_id, branch_name):
585 585 commit = self.get_commit(commit_id=commit_id)
586 586 if commit.merge:
587 587 raise Exception('Cannot reset to merge commit')
588 588
589 589 # parent is going to be the new head now
590 590 commit = commit.parents[0]
591 591 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
592 592
593 593 # clear cached properties
594 594 self._invalidate_prop_cache('commit_ids')
595 595 self._invalidate_prop_cache('_refs')
596 596 self._invalidate_prop_cache('branches')
597 597
598 598 return len(self.commit_ids)
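# Hypothetical effect of strip(): with history A <- B and
# refs/heads/master pointing at B, strip(B, 'master') moves master back
# to its parent A and returns the new commit count.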
599 599
600 600 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
601 601 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
602 602 self, commit_id1, repo2, commit_id2)
603 603
604 604 if commit_id1 == commit_id2:
605 605 return commit_id1
606 606
607 607 if self != repo2:
608 608 commits = self._remote.get_missing_revs(
609 609 commit_id1, commit_id2, repo2.path)
610 610 if commits:
611 611 commit = repo2.get_commit(commits[-1])
612 612 if commit.parents:
613 613 ancestor_id = commit.parents[0].raw_id
614 614 else:
615 615 ancestor_id = None
616 616 else:
617 617 # no commits from other repo, ancestor_id is the commit_id2
618 618 ancestor_id = commit_id2
619 619 else:
620 620 output, __ = self.run_git_command(
621 621 ['merge-base', commit_id1, commit_id2])
622 622 ancestor_id = self.COMMIT_ID_PAT.findall(output)[0]
623 623
624 624 log.debug('Found common ancestor with sha: %s', ancestor_id)
625 625
626 626 return ancestor_id
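# For the same-repo branch above, this is equivalent to asking git
# directly (hypothetical ids):
#
#   $ git merge-base <commit_id1> <commit_id2>
#   1f2e3d...   # sha of the best common ancestor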
627 627
628 628 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
629 629 repo1 = self
630 630 ancestor_id = None
631 631
632 632 if commit_id1 == commit_id2:
633 633 commits = []
634 634 elif repo1 != repo2:
635 635 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
636 636 repo2.path)
637 637 commits = [
638 638 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
639 639 for commit_id in reversed(missing_ids)]
640 640 else:
641 641 output, __ = repo1.run_git_command(
642 642 ['log', '--reverse', '--pretty=format: %H', '-s',
643 643 '%s..%s' % (commit_id1, commit_id2)])
644 644 commits = [
645 645 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
646 646 for commit_id in self.COMMIT_ID_PAT.findall(output)]
647 647
648 648 return commits
649 649
650 650 @LazyProperty
651 651 def in_memory_commit(self):
652 652 """
653 653 Returns ``GitInMemoryCommit`` object for this repository.
654 654 """
655 655 return GitInMemoryCommit(self)
656 656
657 657 def pull(self, url, commit_ids=None, update_after=False):
658 658 """
659 659 Pull changes from external location. Pull is different in GIT
660 660 from fetch, since it also does a checkout
661 661
662 662 :param commit_ids: Optional. Can be set to a list of commit ids
663 663 which shall be pulled from the other repository.
664 664 """
665 665 refs = None
666 666 if commit_ids is not None:
667 667 remote_refs = self._remote.get_remote_refs(url)
668 668 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
669 669 self._remote.pull(url, refs=refs, update_after=update_after)
670 670 self._remote.invalidate_vcs_cache()
671 671
672 672 def fetch(self, url, commit_ids=None):
673 673 """
674 674 Fetch all git objects from external location.
675 675 """
676 676 self._remote.sync_fetch(url, refs=commit_ids)
677 677 self._remote.invalidate_vcs_cache()
678 678
679 679 def push(self, url):
680 680 refs = None
681 681 self._remote.sync_push(url, refs=refs)
682 682
683 683 def set_refs(self, ref_name, commit_id):
684 684 self._remote.set_refs(ref_name, commit_id)
685 685 self._invalidate_prop_cache('_refs')
686 686
687 687 def remove_ref(self, ref_name):
688 688 self._remote.remove_ref(ref_name)
689 689 self._invalidate_prop_cache('_refs')
690 690
691 691 def run_gc(self, prune=True):
692 692 cmd = ['gc', '--aggressive']
693 693 if prune:
694 694 cmd += ['--prune=now']
695 695 _stdout, stderr = self.run_git_command(cmd, fail_on_stderr=False)
696 696 return stderr
697 697
698 698 def _update_server_info(self):
699 699 """
700 700 runs gits update-server-info command in this repo instance
701 701 """
702 702 self._remote.update_server_info()
703 703
704 704 def _current_branch(self):
705 705 """
706 706 Return the name of the current branch.
707 707
708 708 It only works for non-bare repositories (i.e. repositories with a
709 709 working copy)
710 710 """
711 711 if self.bare:
712 712 raise RepositoryError('Bare git repos do not have active branches')
713 713
714 714 if self.is_empty():
715 715 return None
716 716
717 717 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
718 718 return stdout.strip()
719 719
720 720 def _checkout(self, branch_name, create=False, force=False):
721 721 """
722 722 Checkout a branch in the working directory.
723 723
724 724 It tries to create the branch if create is True, failing if the branch
725 725 already exists.
726 726
727 727 It only works for non-bare repositories (i.e. repositories with a
728 728 working copy)
729 729 """
730 730 if self.bare:
731 731 raise RepositoryError('Cannot checkout branches in a bare git repo')
732 732
733 733 cmd = ['checkout']
734 734 if force:
735 735 cmd.append('-f')
736 736 if create:
737 737 cmd.append('-b')
738 738 cmd.append(branch_name)
739 739 self.run_git_command(cmd, fail_on_stderr=False)
740 740
741 741 def _create_branch(self, branch_name, commit_id):
742 742 """
743 743 creates a branch in a GIT repo
744 744 """
745 745 self._remote.create_branch(branch_name, commit_id)
746 746
747 747 def _identify(self):
748 748 """
749 749 Return the current state of the working directory.
750 750 """
751 751 if self.bare:
752 752 raise RepositoryError('Bare git repos do not have active branches')
753 753
754 754 if self.is_empty():
755 755 return None
756 756
757 757 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
758 758 return stdout.strip()
759 759
760 760 def _local_clone(self, clone_path, branch_name, source_branch=None):
761 761 """
762 762 Create a local clone of the current repo.
763 763 """
764 764 # N.B.(skreft): the --branch option is required as otherwise the shallow
765 765 # clone will only fetch the active branch.
766 766 cmd = ['clone', '--branch', branch_name,
767 767 self.path, os.path.abspath(clone_path)]
768 768
769 769 self.run_git_command(cmd, fail_on_stderr=False)
770 770
771 771 # if we get the different source branch, make sure we also fetch it for
772 772 # merge conditions
773 773 if source_branch and source_branch != branch_name:
774 774 # check if the ref exists.
775 775 shadow_repo = GitRepository(os.path.abspath(clone_path))
776 776 if shadow_repo.get_remote_ref(source_branch):
777 777 cmd = ['fetch', self.path, source_branch]
778 778 self.run_git_command(cmd, fail_on_stderr=False)
779 779
780 780 def _local_fetch(self, repository_path, branch_name, use_origin=False):
781 781 """
782 782 Fetch a branch from a local repository.
783 783 """
784 784 repository_path = os.path.abspath(repository_path)
785 785 if repository_path == self.path:
786 786 raise ValueError('Cannot fetch from the same repository')
787 787
788 788 if use_origin:
789 789 branch_name = '+{branch}:refs/heads/{branch}'.format(
790 790 branch=branch_name)
791 791
792 792 cmd = ['fetch', '--no-tags', '--update-head-ok',
793 793 repository_path, branch_name]
794 794 self.run_git_command(cmd, fail_on_stderr=False)
795 795
796 796 def _local_reset(self, branch_name):
797 797 branch_name = '{}'.format(branch_name)
798 798 cmd = ['reset', '--hard', branch_name, '--']
799 799 self.run_git_command(cmd, fail_on_stderr=False)
800 800
801 801 def _last_fetch_heads(self):
802 802 """
803 803 Return the last fetched heads that need merging.
804 804
805 805 The algorithm is defined at
806 806 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
807 807 """
808 808 if not self.bare:
809 809 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
810 810 else:
811 811 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
812 812
813 813 heads = []
814 814 with open(fetch_heads_path) as f:
815 815 for line in f:
816 816 if ' not-for-merge ' in line:
817 817 continue
818 818 line = re.sub('\t.*', '', line, flags=re.DOTALL)
819 819 heads.append(line)
820 820
821 821 return heads
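# A standalone sketch of the FETCH_HEAD parsing above; the input is a
# hypothetical iterable of FETCH_HEAD lines, not RhodeCode API.
import re

def parse_fetch_heads(fetch_head_lines):
    heads = []
    for line in fetch_head_lines:
        if ' not-for-merge ' in line:
            continue  # skip refs git marked as not eligible for merging
        # keep only the sha before the first tab
        heads.append(re.sub('\t.*', '', line, flags=re.DOTALL))
    return heads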
822 822
823 823 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
824 824 return GitRepository(shadow_repository_path, with_wire={"cache": cache})
825 825
826 826 def _local_pull(self, repository_path, branch_name, ff_only=True):
827 827 """
828 828 Pull a branch from a local repository.
829 829 """
830 830 if self.bare:
831 831 raise RepositoryError('Cannot pull into a bare git repository')
832 832 # N.B.(skreft): The --ff-only option is to make sure this is a
833 833 # fast-forward (i.e., we are only pulling new changes and there are no
834 834 # conflicts with our current branch)
835 835 # Additionally, that option needs to go before --no-tags, otherwise git
836 836 # pull complains about it being an unknown flag.
837 837 cmd = ['pull']
838 838 if ff_only:
839 839 cmd.append('--ff-only')
840 840 cmd.extend(['--no-tags', repository_path, branch_name])
841 841 self.run_git_command(cmd, fail_on_stderr=False)
842 842
843 843 def _local_merge(self, merge_message, user_name, user_email, heads):
844 844 """
845 845 Merge the given head into the checked out branch.
846 846
847 847 It will force a merge commit.
848 848
849 849 Currently it raises an error if the repo is empty, as it is not possible
850 850 to create a merge commit in an empty repo.
851 851
852 852 :param merge_message: The message to use for the merge commit.
853 853 :param heads: the heads to merge.
854 854 """
855 855 if self.bare:
856 856 raise RepositoryError('Cannot merge into a bare git repository')
857 857
858 858 if not heads:
859 859 return
860 860
861 861 if self.is_empty():
862 862 # TODO(skreft): do something more robust in this case.
863 863 raise RepositoryError('Do not know how to merge into empty repositories yet')
864 864 unresolved = None
865 865
866 866 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
867 867 # commit message. We also specify the user who is doing the merge.
868 868 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
869 869 '-c', 'user.email=%s' % safe_str(user_email),
870 870 'merge', '--no-ff', '-m', safe_str(merge_message)]
871 871
872 872 merge_cmd = cmd + heads
873 873
874 874 try:
875 875 self.run_git_command(merge_cmd, fail_on_stderr=False)
876 876 except RepositoryError:
877 877 files = self.run_git_command(['diff', '--name-only', '--diff-filter', 'U'],
878 878 fail_on_stderr=False)[0].splitlines()
879 879 # NOTE(marcink): we add U notation for consistency with HG backend output
880 880 unresolved = ['U {}'.format(f) for f in files]
881 881
882 882 # Cleanup any merge leftovers
883 883 self._remote.invalidate_vcs_cache()
884 884 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
885 885
886 886 if unresolved:
887 887 raise UnresolvedFilesInRepo(unresolved)
888 888 else:
889 889 raise
890 890
891 891 def _local_push(
892 892 self, source_branch, repository_path, target_branch,
893 893 enable_hooks=False, rc_scm_data=None):
894 894 """
895 895 Push the source_branch to the given repository and target_branch.
896 896
897 897 Currently, if the target_branch is not master and the target repo is
898 898 empty, the push will work, but GitRepository won't be able to find
899 899 the pushed branch or the commits, as HEAD will be corrupted (i.e.,
900 900 pointing to master, which does not exist).
901 901
902 902 It does not run the hooks in the target repo.
903 903 """
904 904 # TODO(skreft): deal with the case in which the target repo is empty,
905 905 # and the target_branch is not master.
906 906 target_repo = GitRepository(repository_path)
907 907 if (not target_repo.bare and
908 908 target_repo._current_branch() == target_branch):
909 909 # Git prevents pushing to the checked out branch, so simulate it by
910 910 # pulling into the target repository.
911 911 target_repo._local_pull(self.path, source_branch)
912 912 else:
913 913 cmd = ['push', os.path.abspath(repository_path),
914 914 '%s:%s' % (source_branch, target_branch)]
915 915 gitenv = {}
916 916 if rc_scm_data:
917 917 gitenv.update({'RC_SCM_DATA': rc_scm_data})
918 918
919 919 if not enable_hooks:
920 920 gitenv['RC_SKIP_HOOKS'] = '1'
921 921 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
922 922
923 923 def _get_new_pr_branch(self, source_branch, target_branch):
924 924 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
925 925 pr_branches = []
926 926 for branch in self.branches:
927 927 if branch.startswith(prefix):
928 928 pr_branches.append(int(branch[len(prefix):]))
929 929
930 930 if not pr_branches:
931 931 branch_id = 0
932 932 else:
933 933 branch_id = max(pr_branches) + 1
934 934
935 935 return '%s%d' % (prefix, branch_id)
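# An illustrative, self-contained version of the pr-branch naming scheme
# above; `next_pr_branch` and its sample data are hypothetical.
def next_pr_branch(source_branch, target_branch, existing_branches):
    prefix = 'pr_%s-%s_' % (source_branch, target_branch)
    taken = [int(b[len(prefix):])
             for b in existing_branches if b.startswith(prefix)]
    return '%s%d' % (prefix, max(taken) + 1 if taken else 0)

# next_pr_branch('feature', 'master', ['pr_feature-master_0'])
# -> 'pr_feature-master_1'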
936 936
937 937 def _maybe_prepare_merge_workspace(
938 938 self, repo_id, workspace_id, target_ref, source_ref):
939 939 shadow_repository_path = self._get_shadow_repository_path(
940 940 self.path, repo_id, workspace_id)
941 941 if not os.path.exists(shadow_repository_path):
942 942 self._local_clone(
943 943 shadow_repository_path, target_ref.name, source_ref.name)
944 944 log.debug('Prepared %s shadow repository in %s',
945 945 self.alias, shadow_repository_path)
946 946
947 947 return shadow_repository_path
948 948
949 949 def _merge_repo(self, repo_id, workspace_id, target_ref,
950 950 source_repo, source_ref, merge_message,
951 951 merger_name, merger_email, dry_run=False,
952 952 use_rebase=False, close_branch=False):
953 953
954 954 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
955 955 'rebase' if use_rebase else 'merge', dry_run)
956 956 if target_ref.commit_id != self.branches[target_ref.name]:
957 957 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
958 958 target_ref.commit_id, self.branches[target_ref.name])
959 959 return MergeResponse(
960 960 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
961 961 metadata={'target_ref': target_ref})
962 962
963 963 shadow_repository_path = self._maybe_prepare_merge_workspace(
964 964 repo_id, workspace_id, target_ref, source_ref)
965 965 shadow_repo = self.get_shadow_instance(shadow_repository_path)
966 966
967 967 # checkout source, if it's different. Otherwise we could not
968 968 # fetch proper commits for merge testing
969 969 if source_ref.name != target_ref.name:
970 970 if shadow_repo.get_remote_ref(source_ref.name):
971 971 shadow_repo._checkout(source_ref.name, force=True)
972 972
973 973 # checkout target, and fetch changes
974 974 shadow_repo._checkout(target_ref.name, force=True)
975 975
976 976 # fetch/reset the target, in case it has changed;
977 977 # this even handles forced updates
978 978 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
979 979 shadow_repo._local_reset(target_ref.name)
980 980
981 981 # Need to reload repo to invalidate the cache, or otherwise we cannot
982 982 # retrieve the last target commit.
983 983 shadow_repo = self.get_shadow_instance(shadow_repository_path)
984 984 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
985 985 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
986 986 target_ref, target_ref.commit_id,
987 987 shadow_repo.branches[target_ref.name])
988 988 return MergeResponse(
989 989 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
990 990 metadata={'target_ref': target_ref})
991 991
992 992 # calculate new branch
993 993 pr_branch = shadow_repo._get_new_pr_branch(
994 994 source_ref.name, target_ref.name)
995 995 log.debug('using pull-request merge branch: `%s`', pr_branch)
996 996 # checkout to temp branch, and fetch changes
997 997 shadow_repo._checkout(pr_branch, create=True)
998 998 try:
999 999 shadow_repo._local_fetch(source_repo.path, source_ref.name)
1000 1000 except RepositoryError:
1001 1001 log.exception('Failure when doing local fetch on '
1002 1002 'shadow repo: %s', shadow_repo)
1003 1003 return MergeResponse(
1004 1004 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
1005 1005 metadata={'source_ref': source_ref})
1006 1006
1007 1007 merge_ref = None
1008 1008 merge_failure_reason = MergeFailureReason.NONE
1009 1009 metadata = {}
1010 1010 try:
1011 1011 shadow_repo._local_merge(merge_message, merger_name, merger_email,
1012 1012 [source_ref.commit_id])
1013 1013 merge_possible = True
1014 1014
1015 1015 # Need to invalidate the cache, or otherwise we
1016 1016 # cannot retrieve the merge commit.
1017 1017 shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path)
1018 1018 merge_commit_id = shadow_repo.branches[pr_branch]
1019 1019
1020 1020 # Set a reference pointing to the merge commit. This reference may
1021 1021 # be used to easily identify the last successful merge commit in
1022 1022 # the shadow repository.
1023 1023 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
1024 1024 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
1025 1025 except RepositoryError as e:
1026 1026 log.exception('Failure when doing local merge on git shadow repo')
1027 1027 if isinstance(e, UnresolvedFilesInRepo):
1028 1028 metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))
1029 1029
1030 1030 merge_possible = False
1031 1031 merge_failure_reason = MergeFailureReason.MERGE_FAILED
1032 1032
1033 1033 if merge_possible and not dry_run:
1034 1034 try:
1035 1035 shadow_repo._local_push(
1036 1036 pr_branch, self.path, target_ref.name, enable_hooks=True,
1037 1037 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
1038 1038 merge_succeeded = True
1039 1039 except RepositoryError:
1040 1040 log.exception(
1041 1041 'Failure when doing local push from the shadow '
1042 1042 'repository to the target repository at %s.', self.path)
1043 1043 merge_succeeded = False
1044 1044 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1045 1045 metadata['target'] = 'git shadow repo'
1046 1046 metadata['merge_commit'] = pr_branch
1047 1047 else:
1048 1048 merge_succeeded = False
1049 1049
1050 1050 return MergeResponse(
1051 1051 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
1052 1052 metadata=metadata)
@@ -1,1012 +1,1012 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 HG repository module
23 23 """
24 24 import os
25 25 import logging
26 26 import binascii
27 27 import configparser
28 28 import urllib.request, urllib.parse, urllib.error
29 29
30 30 from zope.cachedescriptors.property import Lazy as LazyProperty
31 31
32 from rhodecode.lib.compat import OrderedDict
32 from collections import OrderedDict
33 33 from rhodecode.lib.datelib import (
34 34 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
35 35 from rhodecode.lib.utils import safe_unicode, safe_str
36 36 from rhodecode.lib.utils2 import CachedProperty
37 37 from rhodecode.lib.vcs import connection, exceptions
38 38 from rhodecode.lib.vcs.backends.base import (
39 39 BaseRepository, CollectionGenerator, Config, MergeResponse,
40 40 MergeFailureReason, Reference, BasePathPermissionChecker)
41 41 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
42 42 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
43 43 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
44 44 from rhodecode.lib.vcs.exceptions import (
45 45 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
46 46 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
47 47
48 48 hexlify = binascii.hexlify
49 49 nullid = "\0" * 20
50 50
51 51 log = logging.getLogger(__name__)
52 52
53 53
54 54 class MercurialRepository(BaseRepository):
55 55 """
56 56 Mercurial repository backend
57 57 """
58 58 DEFAULT_BRANCH_NAME = 'default'
59 59
60 60 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 61 do_workspace_checkout=False, with_wire=None, bare=False):
62 62 """
63 63 Raises RepositoryError if repository could not be found at the given
64 64 ``repo_path``.
65 65
66 66 :param repo_path: local path of the repository
67 67 :param config: config object containing the repo configuration
68 68 :param create=False: if set to True, would try to create repository if
69 69 it does not exist rather than raising exception
70 70 :param src_url=None: would try to clone repository from given location
71 71 :param do_workspace_checkout=False: sets update of working copy after
72 72 making a clone
73 73 :param bare: not used, compatible with other VCS
74 74 """
75 75
76 76 self.path = safe_str(os.path.abspath(repo_path))
77 77 # mercurial since 4.4.X requires certain configuration to be present
78 78 # because sometimes we init the repos with config we need to meet
79 79 # special requirements
80 80 self.config = config if config else self.get_default_config(
81 81 default=[('extensions', 'largefiles', '1')])
82 82 self.with_wire = with_wire or {"cache": False} # default should not use cache
83 83
84 84 self._init_repo(create, src_url, do_workspace_checkout)
85 85
86 86 # caches
87 87 self._commit_ids = {}
88 88
89 89 @LazyProperty
90 90 def _remote(self):
91 91 repo_id = self.path
92 92 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
93 93
94 94 @CachedProperty
95 95 def commit_ids(self):
96 96 """
97 97 Returns list of commit ids, in ascending order. Being a lazy
98 98 attribute allows external tools to inject shas from cache.
99 99 """
100 100 commit_ids = self._get_all_commit_ids()
101 101 self._rebuild_cache(commit_ids)
102 102 return commit_ids
103 103
104 104 def _rebuild_cache(self, commit_ids):
105 105 self._commit_ids = dict((commit_id, index)
106 106 for index, commit_id in enumerate(commit_ids))
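# e.g. _rebuild_cache(['a1b2...', 'c3d4...']) leaves self._commit_ids as
# {'a1b2...': 0, 'c3d4...': 1} (hypothetical shas)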
107 107
108 108 @CachedProperty
109 109 def branches(self):
110 110 return self._get_branches()
111 111
112 112 @CachedProperty
113 113 def branches_closed(self):
114 114 return self._get_branches(active=False, closed=True)
115 115
116 116 @CachedProperty
117 117 def branches_all(self):
118 118 all_branches = {}
119 119 all_branches.update(self.branches)
120 120 all_branches.update(self.branches_closed)
121 121 return all_branches
122 122
123 123 def _get_branches(self, active=True, closed=False):
124 124 """
125 125 Gets branches for this repository
126 126 Returns only active (not closed) branches by default.
127 127
128 128 :param active: return active branches
129 129 :param closed: also return closed branches
130 130
131 131 """
132 132 if self.is_empty():
133 133 return {}
134 134
135 135 def get_name(ctx):
136 136 return ctx[0]
137 137
138 138 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
139 139 self._remote.branches(active, closed).items()]
140 140
141 141 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
142 142
143 143 @CachedProperty
144 144 def tags(self):
145 145 """
146 146 Gets tags for this repository
147 147 """
148 148 return self._get_tags()
149 149
150 150 def _get_tags(self):
151 151 if self.is_empty():
152 152 return {}
153 153
154 154 def get_name(ctx):
155 155 return ctx[0]
156 156
157 157 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
158 158 self._remote.tags().items()]
159 159
160 160 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
161 161
162 162 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
163 163 """
164 164 Creates and returns a tag for the given ``commit_id``.
165 165
166 166 :param name: name for new tag
167 167 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
168 168 :param commit_id: commit id for which new tag would be created
169 169 :param message: message of the tag's commit
170 170 :param date: date of tag's commit
171 171
172 172 :raises TagAlreadyExistError: if tag with same name already exists
173 173 """
174 174 if name in self.tags:
175 175 raise TagAlreadyExistError("Tag %s already exists" % name)
176 176
177 177 commit = self.get_commit(commit_id=commit_id)
178 178 local = kwargs.setdefault('local', False)
179 179
180 180 if message is None:
181 181 message = "Added tag %s for commit %s" % (name, commit.short_id)
182 182
183 183 date, tz = date_to_timestamp_plus_offset(date)
184 184
185 185 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
186 186 self._remote.invalidate_vcs_cache()
187 187
188 188 # Reinitialize tags
189 189 self._invalidate_prop_cache('tags')
190 190 tag_id = self.tags[name]
191 191
192 192 return self.get_commit(commit_id=tag_id)
193 193
194 194 def remove_tag(self, name, user, message=None, date=None):
195 195 """
196 196 Removes tag with the given `name`.
197 197
198 198 :param name: name of the tag to be removed
199 199 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
200 200 :param message: message of the tag's removal commit
201 201 :param date: date of tag's removal commit
202 202
203 203 :raises TagDoesNotExistError: if tag with given name does not exist
204 204 """
205 205 if name not in self.tags:
206 206 raise TagDoesNotExistError("Tag %s does not exist" % name)
207 207
208 208 if message is None:
209 209 message = "Removed tag %s" % name
210 210 local = False
211 211
212 212 date, tz = date_to_timestamp_plus_offset(date)
213 213
214 214 self._remote.tag(name, nullid, message, local, user, date, tz)
215 215 self._remote.invalidate_vcs_cache()
216 216 self._invalidate_prop_cache('tags')
217 217
218 218 @LazyProperty
219 219 def bookmarks(self):
220 220 """
221 221 Gets bookmarks for this repository
222 222 """
223 223 return self._get_bookmarks()
224 224
225 225 def _get_bookmarks(self):
226 226 if self.is_empty():
227 227 return {}
228 228
229 229 def get_name(ctx):
230 230 return ctx[0]
231 231
232 232 _bookmarks = [
233 233 (safe_unicode(n), hexlify(h)) for n, h in
234 234 self._remote.bookmarks().items()]
235 235
236 236 return OrderedDict(sorted(_bookmarks, key=get_name))
237 237
238 238 def _get_all_commit_ids(self):
239 239 return self._remote.get_all_commit_ids('visible')
240 240
241 241 def get_diff(
242 242 self, commit1, commit2, path='', ignore_whitespace=False,
243 243 context=3, path1=None):
244 244 """
245 245 Returns (git like) *diff*, as plain text. Shows changes introduced by
246 246 `commit2` since `commit1`.
247 247
248 248 :param commit1: Entry point from which diff is shown. Can be
249 249 ``self.EMPTY_COMMIT`` - in this case, patch showing all
250 250 the changes since empty state of the repository until `commit2`
251 251 :param commit2: Until which commit changes should be shown.
252 252 :param ignore_whitespace: If set to ``True``, would not show whitespace
253 253 changes. Defaults to ``False``.
254 254 :param context: How many lines before/after changed lines should be
255 255 shown. Defaults to ``3``.
256 256 """
257 257 self._validate_diff_commits(commit1, commit2)
258 258 if path1 is not None and path1 != path:
259 259 raise ValueError("Diff of two different paths not supported.")
260 260
261 261 if path:
262 262 file_filter = [self.path, path]
263 263 else:
264 264 file_filter = None
265 265
266 266 diff = self._remote.diff(
267 267 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
268 268 opt_git=True, opt_ignorews=ignore_whitespace,
269 269 context=context)
270 270 return MercurialDiff(diff)
271 271
272 272 def strip(self, commit_id, branch=None):
273 273 self._remote.strip(commit_id, update=False, backup="none")
274 274
275 275 self._remote.invalidate_vcs_cache()
276 276 # clear cache
277 277 self._invalidate_prop_cache('commit_ids')
278 278
279 279 return len(self.commit_ids)
280 280
281 281 def verify(self):
282 282 verify = self._remote.verify()
283 283
284 284 self._remote.invalidate_vcs_cache()
285 285 return verify
286 286
287 287 def hg_update_cache(self):
288 288 update_cache = self._remote.hg_update_cache()
289 289
290 290 self._remote.invalidate_vcs_cache()
291 291 return update_cache
292 292
293 293 def hg_rebuild_fn_cache(self):
294 294 update_cache = self._remote.hg_rebuild_fn_cache()
295 295
296 296 self._remote.invalidate_vcs_cache()
297 297 return update_cache
298 298
299 299 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
300 300 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
301 301 self, commit_id1, repo2, commit_id2)
302 302
303 303 if commit_id1 == commit_id2:
304 304 return commit_id1
305 305
306 306 ancestors = self._remote.revs_from_revspec(
307 307 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
308 308 other_path=repo2.path)
309 309
310 310 ancestor_id = repo2[ancestors[0]].raw_id if ancestors else None
311 311
312 312 log.debug('Found common ancestor with sha: %s', ancestor_id)
313 313 return ancestor_id
314 314
315 315 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
316 316 if commit_id1 == commit_id2:
317 317 commits = []
318 318 else:
319 319 if merge:
320 320 indexes = self._remote.revs_from_revspec(
321 321 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
322 322 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
323 323 else:
324 324 indexes = self._remote.revs_from_revspec(
325 325 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
326 326 commit_id1, other_path=repo2.path)
327 327
328 328 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
329 329 for idx in indexes]
330 330
331 331 return commits
332 332
333 333 @staticmethod
334 334 def check_url(url, config):
335 335 """
336 336 Function will check given url and try to verify if it's a valid
337 337 link. Sometimes it may happen that mercurial issues a basic
338 338 auth request that can cause the whole API to hang when used from python
339 339 or other external calls.
340 340
341 341 On failures it'll raise urllib.error.HTTPError; the exception is also
342 342 raised when the return code is not 200
343 343 """
344 344 # check first if it's not a local url
345 345 if os.path.isdir(url) or url.startswith('file:'):
346 346 return True
347 347
348 348 # Request the _remote to verify the url
349 349 return connection.Hg.check_url(url, config.serialize())
350 350
351 351 @staticmethod
352 352 def is_valid_repository(path):
353 353 return os.path.isdir(os.path.join(path, '.hg'))
354 354
355 355 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
356 356 """
357 357 Function will check for a mercurial repository in the given path. If
358 358 there is no repository in that path it will raise an exception unless
359 359 the `create` parameter is set to True - in that case the repository
360 360 would be created.
361 361
362 362 If `src_url` is given, would try to clone the repository from the
363 363 location at given clone_point. Additionally it'll update the
364 364 working copy according to the `do_workspace_checkout` flag.
365 365 """
366 366 if create and os.path.exists(self.path):
367 367 raise RepositoryError(
368 368 "Cannot create repository at %s, location already exist"
369 369 % self.path)
370 370
371 371 if src_url:
372 372 url = str(self._get_url(src_url))
373 373 MercurialRepository.check_url(url, self.config)
374 374
375 375 self._remote.clone(url, self.path, do_workspace_checkout)
376 376
377 377 # Don't try to create if we've already cloned repo
378 378 create = False
379 379
380 380 if create:
381 381 os.makedirs(self.path, mode=0o755)
382 382 self._remote.localrepository(create)
383 383
384 384 @LazyProperty
385 385 def in_memory_commit(self):
386 386 return MercurialInMemoryCommit(self)
387 387
388 388 @LazyProperty
389 389 def description(self):
390 390 description = self._remote.get_config_value(
391 391 'web', 'description', untrusted=True)
392 392 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
393 393
394 394 @LazyProperty
395 395 def contact(self):
396 396 contact = (
397 397 self._remote.get_config_value("web", "contact") or
398 398 self._remote.get_config_value("ui", "username"))
399 399 return safe_unicode(contact or self.DEFAULT_CONTACT)
400 400
401 401 @LazyProperty
402 402 def last_change(self):
403 403 """
404 404 Returns last change made on this repository as
405 405 `datetime.datetime` object.
406 406 """
407 407 try:
408 408 return self.get_commit().date
409 409 except RepositoryError:
410 410 tzoffset = makedate()[1]
411 411 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
412 412
413 413 def _get_fs_mtime(self):
414 414 # fallback to filesystem
415 415 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
416 416 st_path = os.path.join(self.path, '.hg', "store")
417 417 if os.path.exists(cl_path):
418 418 return os.stat(cl_path).st_mtime
419 419 else:
420 420 return os.stat(st_path).st_mtime
421 421
422 422 def _get_url(self, url):
423 423 """
424 424 Returns normalized url. If no scheme is given, would fall
425 425 back to the filesystem
426 426 (``file:///``) scheme.
427 427 """
428 428 url = safe_str(url)
429 429 if url != 'default' and '://' not in url:
430 430 url = "file:" + urllib.request.pathname2url(url)
431 431 return url
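# e.g. a local path like '/srv/repos/foo' (hypothetical) becomes
# 'file:' + pathname2url('/srv/repos/foo'); 'default' and urls that
# already contain '://' pass through unchanged.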
432 432
433 433 def get_hook_location(self):
434 434 """
435 435 returns absolute path to location where hooks are stored
436 436 """
437 437 return os.path.join(self.path, '.hg', '.hgrc')
438 438
439 439 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
440 440 translate_tag=None, maybe_unreachable=False, reference_obj=None):
441 441 """
442 442 Returns ``MercurialCommit`` object representing repository's
443 443 commit at the given `commit_id` or `commit_idx`.
444 444 """
445 445 if self.is_empty():
446 446 raise EmptyRepositoryError("There are no commits yet")
447 447
448 448 if commit_id is not None:
449 449 self._validate_commit_id(commit_id)
450 450 try:
451 451 # we have cached idx, use it without contacting the remote
452 452 idx = self._commit_ids[commit_id]
453 453 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
454 454 except KeyError:
455 455 pass
456 456
457 457 elif commit_idx is not None:
458 458 self._validate_commit_idx(commit_idx)
459 459 try:
460 460 _commit_id = self.commit_ids[commit_idx]
461 461 if commit_idx < 0:
462 462 commit_idx = self.commit_ids.index(_commit_id)
463 463
464 464 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
465 465 except IndexError:
466 466 commit_id = commit_idx
467 467 else:
468 468 commit_id = "tip"
469 469
470 470 if isinstance(commit_id, str):
471 471 commit_id = safe_str(commit_id)
472 472
473 473 try:
474 474 raw_id, idx = self._remote.lookup(commit_id, both=True)
475 475 except CommitDoesNotExistError:
476 476 msg = "Commit {} does not exist for `{}`".format(
477 477 *map(safe_str, [commit_id, self.name]))
478 478 raise CommitDoesNotExistError(msg)
479 479
480 480 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
481 481
482 482 def get_commits(
483 483 self, start_id=None, end_id=None, start_date=None, end_date=None,
484 484 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
485 485 """
486 486 Returns generator of ``MercurialCommit`` objects from start to end
487 487 (both are inclusive)
488 488
489 489 :param start_id: None, str(commit_id)
490 490 :param end_id: None, str(commit_id)
491 491 :param start_date: if specified, commits with commit date less than
492 492 ``start_date`` would be filtered out from returned set
493 493 :param end_date: if specified, commits with commit date greater than
494 494 ``end_date`` would be filtered out from returned set
495 495 :param branch_name: if specified, commits not reachable from given
496 496 branch would be filtered out from returned set
497 497 :param show_hidden: Show hidden commits such as obsolete or hidden from
498 498 Mercurial evolve
499 499 :raise BranchDoesNotExistError: If given ``branch_name`` does not
500 500 exist.
501 501 :raise CommitDoesNotExistError: If commit for given ``start`` or
502 502 ``end`` could not be found.
503 503 """
504 504 # actually we should check now if it's not an empty repo
505 505 if self.is_empty():
506 506 raise EmptyRepositoryError("There are no commits yet")
507 507 self._validate_branch_name(branch_name)
508 508
509 509 branch_ancestors = False
510 510 if start_id is not None:
511 511 self._validate_commit_id(start_id)
512 512 c_start = self.get_commit(commit_id=start_id)
513 513 start_pos = self._commit_ids[c_start.raw_id]
514 514 else:
515 515 start_pos = None
516 516
517 517 if end_id is not None:
518 518 self._validate_commit_id(end_id)
519 519 c_end = self.get_commit(commit_id=end_id)
520 520 end_pos = max(0, self._commit_ids[c_end.raw_id])
521 521 else:
522 522 end_pos = None
523 523
524 524 if None not in [start_id, end_id] and start_pos > end_pos:
525 525 raise RepositoryError(
526 526 "Start commit '%s' cannot be after end commit '%s'" %
527 527 (start_id, end_id))
528 528
529 529 if end_pos is not None:
530 530 end_pos += 1
531 531
532 532 commit_filter = []
533 533
534 534 if branch_name and not branch_ancestors:
535 535 commit_filter.append('branch("%s")' % (branch_name,))
536 536 elif branch_name and branch_ancestors:
537 537 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
538 538
539 539 if start_date and not end_date:
540 540 commit_filter.append('date(">%s")' % (start_date,))
541 541 if end_date and not start_date:
542 542 commit_filter.append('date("<%s")' % (end_date,))
543 543 if start_date and end_date:
544 544 commit_filter.append(
545 545 'date(">%s") and date("<%s")' % (start_date, end_date))
546 546
547 547 if not show_hidden:
548 548 commit_filter.append('not obsolete()')
549 549 commit_filter.append('not hidden()')
550 550
551 551 # TODO: johbo: Figure out a simpler way for this solution
552 552 collection_generator = CollectionGenerator
553 553 if commit_filter:
554 554 commit_filter = ' and '.join(map(safe_str, commit_filter))
555 555 revisions = self._remote.rev_range([commit_filter])
556 556 collection_generator = MercurialIndexBasedCollectionGenerator
557 557 else:
558 558 revisions = self.commit_ids
559 559
560 560 if start_pos or end_pos:
561 561 revisions = revisions[start_pos:end_pos]
562 562
563 563 return collection_generator(self, revisions, pre_load=pre_load)
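# A hypothetical composite revset produced by the filters above, with a
# branch, a date range, and hidden/obsolete commits excluded:
#
#   branch("default") and date(">2020-01-01") and date("<2020-06-30")
#   and not obsolete() and not hidden()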
564 564
565 565 def pull(self, url, commit_ids=None):
566 566 """
567 567 Pull changes from external location.
568 568
569 569 :param commit_ids: Optional. Can be set to a list of commit ids
570 570 which shall be pulled from the other repository.
571 571 """
572 572 url = self._get_url(url)
573 573 self._remote.pull(url, commit_ids=commit_ids)
574 574 self._remote.invalidate_vcs_cache()
575 575
576 576 def fetch(self, url, commit_ids=None):
577 577 """
578 578 Backward compatibility with GIT fetch==pull
579 579 """
580 580 return self.pull(url, commit_ids=commit_ids)
581 581
582 582 def push(self, url):
583 583 url = self._get_url(url)
584 584 self._remote.sync_push(url)
585 585
586 586 def _local_clone(self, clone_path):
587 587 """
588 588 Create a local clone of the current repo.
589 589 """
590 590 self._remote.clone(self.path, clone_path, update_after_clone=True,
591 591 hooks=False)
592 592
593 593 def _update(self, revision, clean=False):
594 594 """
595 595 Update the working copy to the specified revision.
596 596 """
597 597 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
598 598 self._remote.update(revision, clean=clean)
599 599
600 600 def _identify(self):
601 601 """
602 602 Return the current state of the working directory.
603 603 """
604 604 return self._remote.identify().strip().rstrip('+')
605 605
606 606 def _heads(self, branch=None):
607 607 """
608 608 Return the commit ids of the repository heads.
609 609 """
610 610 return self._remote.heads(branch=branch).strip().split(' ')
611 611
612 612 def _ancestor(self, revision1, revision2):
613 613 """
614 614 Return the common ancestor of the two revisions.
615 615 """
616 616 return self._remote.ancestor(revision1, revision2)
617 617
618 618 def _local_push(
619 619 self, revision, repository_path, push_branches=False,
620 620 enable_hooks=False):
621 621 """
622 622 Push the given revision to the specified repository.
623 623
624 624 :param push_branches: allow to create branches in the target repo.
625 625 """
626 626 self._remote.push(
627 627 [revision], repository_path, hooks=enable_hooks,
628 628 push_branches=push_branches)
629 629
630 630 def _local_merge(self, target_ref, merge_message, user_name, user_email,
631 631 source_ref, use_rebase=False, close_commit_id=None, dry_run=False):
632 632 """
633 633 Merge the given source_revision into the checked out revision.
634 634
635 635 Returns the commit id of the merge and a boolean indicating if the
636 636 commit needs to be pushed.
637 637 """
638 638 source_ref_commit_id = source_ref.commit_id
639 639 target_ref_commit_id = target_ref.commit_id
640 640
641 641 # update our workdir to target ref, for proper merge
642 642 self._update(target_ref_commit_id, clean=True)
643 643
644 644 ancestor = self._ancestor(target_ref_commit_id, source_ref_commit_id)
645 645 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
646 646
647 647 if close_commit_id:
648 648 # NOTE(marcink): if we get the close commit, this is our new source
649 649 # which will include the close commit itself.
650 650 source_ref_commit_id = close_commit_id
651 651
652 652 if ancestor == source_ref_commit_id:
653 653 # Nothing to do, the changes were already integrated
654 654 return target_ref_commit_id, False
655 655
656 656 elif ancestor == target_ref_commit_id and is_the_same_branch:
657 657 # In this case we should force a commit message
658 658 return source_ref_commit_id, True
659 659
660 660 unresolved = None
661 661 if use_rebase:
662 662 try:
663 663 bookmark_name = 'rcbook%s%s' % (source_ref_commit_id, target_ref_commit_id)
664 664 self.bookmark(bookmark_name, revision=source_ref.commit_id)
665 665 self._remote.rebase(
666 666 source=source_ref_commit_id, dest=target_ref_commit_id)
667 667 self._remote.invalidate_vcs_cache()
668 668 self._update(bookmark_name, clean=True)
669 669 return self._identify(), True
670 670 except RepositoryError as e:
671 671 # The rebase-abort may raise another exception which 'hides'
672 672 # the original one, therefore we log it here.
673 673 log.exception('Error while rebasing shadow repo during merge.')
674 674 if 'unresolved conflicts' in safe_str(e):
675 675 unresolved = self._remote.get_unresolved_files()
676 676 log.debug('unresolved files: %s', unresolved)
677 677
678 678 # Cleanup any rebase leftovers
679 679 self._remote.invalidate_vcs_cache()
680 680 self._remote.rebase(abort=True)
681 681 self._remote.invalidate_vcs_cache()
682 682 self._remote.update(clean=True)
683 683 if unresolved:
684 684 raise UnresolvedFilesInRepo(unresolved)
685 685 else:
686 686 raise
687 687 else:
688 688 try:
689 689 self._remote.merge(source_ref_commit_id)
690 690 self._remote.invalidate_vcs_cache()
691 691 self._remote.commit(
692 692 message=safe_str(merge_message),
693 693 username=safe_str('%s <%s>' % (user_name, user_email)))
694 694 self._remote.invalidate_vcs_cache()
695 695 return self._identify(), True
696 696 except RepositoryError as e:
697 697 # The merge-abort may raise another exception which 'hides'
698 698 # the original one, therefore we log it here.
699 699 log.exception('Error while merging shadow repo during merge.')
700 700 if 'unresolved merge conflicts' in safe_str(e):
701 701 unresolved = self._remote.get_unresolved_files()
702 702 log.debug('unresolved files: %s', unresolved)
703 703
704 704 # Cleanup any merge leftovers
705 705 self._remote.update(clean=True)
706 706 if unresolved:
707 707 raise UnresolvedFilesInRepo(unresolved)
708 708 else:
709 709 raise
710 710
711 711 def _local_close(self, target_ref, user_name, user_email,
712 712 source_ref, close_message=''):
713 713 """
714 714 Close the branch of the given source_revision
715 715
716 716 Returns the commit id of the close and a boolean indicating if the
717 717 commit needs to be pushed.
718 718 """
719 719 self._update(source_ref.commit_id)
720 720 message = close_message or "Closing branch: `{}`".format(source_ref.name)
721 721 try:
722 722 self._remote.commit(
723 723 message=safe_str(message),
724 724 username=safe_str('%s <%s>' % (user_name, user_email)),
725 725 close_branch=True)
726 726 self._remote.invalidate_vcs_cache()
727 727 return self._identify(), True
728 728 except RepositoryError:
729 729 # Cleanup any commit leftovers
730 730 self._remote.update(clean=True)
731 731 raise
732 732
733 733 def _is_the_same_branch(self, target_ref, source_ref):
734 734 return (
735 735 self._get_branch_name(target_ref) ==
736 736 self._get_branch_name(source_ref))
737 737
738 738 def _get_branch_name(self, ref):
739 739 if ref.type == 'branch':
740 740 return ref.name
741 741 return self._remote.ctx_branch(ref.commit_id)
742 742
743 743 def _maybe_prepare_merge_workspace(
744 744 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
745 745 shadow_repository_path = self._get_shadow_repository_path(
746 746 self.path, repo_id, workspace_id)
747 747 if not os.path.exists(shadow_repository_path):
748 748 self._local_clone(shadow_repository_path)
749 749 log.debug(
750 750 'Prepared shadow repository in %s', shadow_repository_path)
751 751
752 752 return shadow_repository_path
753 753
754 754 def _merge_repo(self, repo_id, workspace_id, target_ref,
755 755 source_repo, source_ref, merge_message,
756 756 merger_name, merger_email, dry_run=False,
757 757 use_rebase=False, close_branch=False):
758 758
759 759 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
760 760 'rebase' if use_rebase else 'merge', dry_run)
761 761 if target_ref.commit_id not in self._heads():
762 762 return MergeResponse(
763 763 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
764 764 metadata={'target_ref': target_ref})
765 765
766 766 try:
767 767 if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
768 768 heads_all = self._heads(target_ref.name)
769 769 max_heads = 10
770 770 if len(heads_all) > max_heads:
771 771 heads = '\n,'.join(
772 772 heads_all[:max_heads] +
773 773 ['and {} more.'.format(len(heads_all)-max_heads)])
774 774 else:
775 775 heads = '\n,'.join(heads_all)
776 776 metadata = {
777 777 'target_ref': target_ref,
778 778 'source_ref': source_ref,
779 779 'heads': heads
780 780 }
781 781 return MergeResponse(
782 782 False, False, None,
783 783 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
784 784 metadata=metadata)
785 785 except CommitDoesNotExistError:
786 786 log.exception('Failure when looking up branch heads on hg target')
787 787 return MergeResponse(
788 788 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
789 789 metadata={'target_ref': target_ref})
790 790
791 791 shadow_repository_path = self._maybe_prepare_merge_workspace(
792 792 repo_id, workspace_id, target_ref, source_ref)
793 793 shadow_repo = self.get_shadow_instance(shadow_repository_path)
794 794
795 795 log.debug('Pulling in target reference %s', target_ref)
796 796 self._validate_pull_reference(target_ref)
797 797 shadow_repo._local_pull(self.path, target_ref)
798 798
799 799 try:
800 800 log.debug('Pulling in source reference %s', source_ref)
801 801 source_repo._validate_pull_reference(source_ref)
802 802 shadow_repo._local_pull(source_repo.path, source_ref)
803 803 except CommitDoesNotExistError:
804 804 log.exception('Failure when doing local pull on hg shadow repo')
805 805 return MergeResponse(
806 806 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
807 807 metadata={'source_ref': source_ref})
808 808
809 809 merge_ref = None
810 810 merge_commit_id = None
811 811 close_commit_id = None
812 812 merge_failure_reason = MergeFailureReason.NONE
813 813 metadata = {}
814 814
815 815 # enforce that close branch should be used only in case we source from
816 816 # an actual Branch
817 817 close_branch = close_branch and source_ref.type == 'branch'
818 818
819 819 # don't allow to close branch if source and target are the same
820 820 close_branch = close_branch and source_ref.name != target_ref.name
821 821
822 822 needs_push_on_close = False
823 823 if close_branch and not use_rebase and not dry_run:
824 824 try:
825 825 close_commit_id, needs_push_on_close = shadow_repo._local_close(
826 826 target_ref, merger_name, merger_email, source_ref)
827 827 merge_possible = True
828 828 except RepositoryError:
829 829 log.exception('Failure when doing close branch on '
830 830 'shadow repo: %s', shadow_repo)
831 831 merge_possible = False
832 832 merge_failure_reason = MergeFailureReason.MERGE_FAILED
833 833 else:
834 834 merge_possible = True
835 835
836 836 needs_push = False
837 837 if merge_possible:
838 838
839 839 try:
840 840 merge_commit_id, needs_push = shadow_repo._local_merge(
841 841 target_ref, merge_message, merger_name, merger_email,
842 842 source_ref, use_rebase=use_rebase,
843 843 close_commit_id=close_commit_id, dry_run=dry_run)
844 844 merge_possible = True
845 845
846 846                 # fold in the state of the close action, since it
847 847                 # may have required a push
848 848 needs_push = needs_push or needs_push_on_close
849 849
850 850 # Set a bookmark pointing to the merge commit. This bookmark
851 851 # may be used to easily identify the last successful merge
852 852 # commit in the shadow repository.
853 853 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
854 854 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
855 855 except SubrepoMergeError:
856 856 log.exception(
857 857 'Subrepo merge error during local merge on hg shadow repo.')
858 858 merge_possible = False
859 859 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
860 860 needs_push = False
861 861 except RepositoryError as e:
862 862 log.exception('Failure when doing local merge on hg shadow repo')
863 863 if isinstance(e, UnresolvedFilesInRepo):
864 864 all_conflicts = list(e.args[0])
865 865 max_conflicts = 20
866 866 if len(all_conflicts) > max_conflicts:
867 867 conflicts = all_conflicts[:max_conflicts] \
868 868 + ['and {} more.'.format(len(all_conflicts)-max_conflicts)]
869 869 else:
870 870 conflicts = all_conflicts
871 871                     metadata['unresolved_files'] = \
872 872                         '\n * conflict: ' + \
873 873                         ('\n * conflict: '.join(conflicts))
874 874
875 875 merge_possible = False
876 876 merge_failure_reason = MergeFailureReason.MERGE_FAILED
877 877 needs_push = False
878 878
879 879 if merge_possible and not dry_run:
880 880 if needs_push:
881 881 # In case the target is a bookmark, update it, so after pushing
882 882                 # the bookmark is also updated in the target.
883 883 if target_ref.type == 'book':
884 884 shadow_repo.bookmark(
885 885 target_ref.name, revision=merge_commit_id)
886 886 try:
887 887 shadow_repo_with_hooks = self.get_shadow_instance(
888 888 shadow_repository_path,
889 889 enable_hooks=True)
890 890 # This is the actual merge action, we push from shadow
891 891 # into origin.
892 892 # Note: the push_branches option will push any new branch
893 893 # defined in the source repository to the target. This may
894 894 # be dangerous as branches are permanent in Mercurial.
895 895 # This feature was requested in issue #441.
896 896 shadow_repo_with_hooks._local_push(
897 897 merge_commit_id, self.path, push_branches=True,
898 898 enable_hooks=True)
899 899
900 900 # maybe we also need to push the close_commit_id
901 901 if close_commit_id:
902 902 shadow_repo_with_hooks._local_push(
903 903 close_commit_id, self.path, push_branches=True,
904 904 enable_hooks=True)
905 905 merge_succeeded = True
906 906 except RepositoryError:
907 907 log.exception(
908 908 'Failure when doing local push from the shadow '
909 909 'repository to the target repository at %s.', self.path)
910 910 merge_succeeded = False
911 911 merge_failure_reason = MergeFailureReason.PUSH_FAILED
912 912 metadata['target'] = 'hg shadow repo'
913 913 metadata['merge_commit'] = merge_commit_id
914 914 else:
915 915 merge_succeeded = True
916 916 else:
917 917 merge_succeeded = False
918 918
919 919 return MergeResponse(
920 920 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
921 921 metadata=metadata)
922 922
923 923 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
924 924 config = self.config.copy()
925 925 if not enable_hooks:
926 926 config.clear_section('hooks')
927 927 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
928 928
929 929 def _validate_pull_reference(self, reference):
930 930 if not (reference.name in self.bookmarks or
931 931 reference.name in self.branches or
932 932 self.get_commit(reference.commit_id)):
933 933 raise CommitDoesNotExistError(
934 934 'Unknown branch, bookmark or commit id')
935 935
936 936 def _local_pull(self, repository_path, reference):
937 937 """
938 938 Fetch a branch, bookmark or commit from a local repository.
939 939 """
940 940 repository_path = os.path.abspath(repository_path)
941 941 if repository_path == self.path:
942 942 raise ValueError('Cannot pull from the same repository')
943 943
944 944 reference_type_to_option_name = {
945 945 'book': 'bookmark',
946 946 'branch': 'branch',
947 947 }
948 948 option_name = reference_type_to_option_name.get(
949 949 reference.type, 'revision')
950 950
951 951 if option_name == 'revision':
952 952 ref = reference.commit_id
953 953 else:
954 954 ref = reference.name
955 955
956 956 options = {option_name: [ref]}
957 957 self._remote.pull_cmd(repository_path, hooks=False, **options)
958 958 self._remote.invalidate_vcs_cache()
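        # A minimal sketch (assumed Reference values, not from the original
        # source) of how the option mapping above translates a reference into
        # pull_cmd arguments:
        #
        #   _local_pull('/path/to/other/repo', Reference('book', 'feature-x', 'abc1'))
        #   # -> pull_cmd('/path/to/other/repo', hooks=False, bookmark=['feature-x'])
        #
        #   _local_pull('/path/to/other/repo', Reference('tag', 'v1.0', 'abc2'))
        #   # -> pull_cmd('/path/to/other/repo', hooks=False, revision=['abc2'])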
959 959
960 960 def bookmark(self, bookmark, revision=None):
961 961         if isinstance(bookmark, str):
962 962 bookmark = safe_str(bookmark)
963 963 self._remote.bookmark(bookmark, revision=revision)
964 964 self._remote.invalidate_vcs_cache()
965 965
966 966 def get_path_permissions(self, username):
967 967 hgacl_file = os.path.join(self.path, '.hg/hgacl')
968 968
969 969 def read_patterns(suffix):
970 970 svalue = None
971 971 for section, option in [
972 972 ('narrowacl', username + suffix),
973 973 ('narrowacl', 'default' + suffix),
974 974 ('narrowhgacl', username + suffix),
975 975 ('narrowhgacl', 'default' + suffix)
976 976 ]:
977 977 try:
978 978 svalue = hgacl.get(section, option)
979 979 break # stop at the first value we find
980 980 except configparser.NoOptionError:
981 981 pass
982 982 if not svalue:
983 983 return None
984 984 result = ['/']
985 985 for pattern in svalue.split():
986 986 result.append(pattern)
987 987 if '*' not in pattern and '?' not in pattern:
988 988 result.append(pattern + '/*')
989 989 return result
990 990
991 991 if os.path.exists(hgacl_file):
992 992 try:
993 993 hgacl = configparser.RawConfigParser()
994 994 hgacl.read(hgacl_file)
995 995
996 996 includes = read_patterns('.includes')
997 997 excludes = read_patterns('.excludes')
998 998 return BasePathPermissionChecker.create_from_patterns(
999 999 includes, excludes)
1000 1000 except BaseException as e:
1001 1001 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
1002 1002 hgacl_file, self.name, e)
1003 1003 raise exceptions.RepositoryRequirementError(msg)
1004 1004 else:
1005 1005 return None
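        # An illustrative sketch (assumed file content, not from the original
        # source): a minimal `.hg/hgacl` file the reader above would accept
        # could look like:
        #
        #   [narrowacl]
        #   default.includes = docs/* src/module1
        #   someuser.excludes = secrets/*
        #
        # With that, read_patterns('.includes') for "someuser" falls through to
        # the 'default.includes' entry and returns
        # ['/', 'docs/*', 'src/module1', 'src/module1/*'].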
1006 1006
1007 1007
1008 1008 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
1009 1009
1010 1010 def _commit_factory(self, commit_id):
1011 1011 return self.repo.get_commit(
1012 1012 commit_idx=commit_id, pre_load=self.pre_load)
@@ -1,370 +1,370 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 SVN repository module
23 23 """
24 24
25 25 import logging
26 26 import os
27 27 import urllib.request, urllib.parse, urllib.error
28 28
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 from rhodecode.lib.compat import OrderedDict
31 from collections import OrderedDict
32 32 from rhodecode.lib.datelib import date_astimestamp
33 33 from rhodecode.lib.utils import safe_str, safe_unicode
34 34 from rhodecode.lib.utils2 import CachedProperty
35 35 from rhodecode.lib.vcs import connection, path as vcspath
36 36 from rhodecode.lib.vcs.backends import base
37 37 from rhodecode.lib.vcs.backends.svn.commit import (
38 38 SubversionCommit, _date_from_svn_properties)
39 39 from rhodecode.lib.vcs.backends.svn.diff import SubversionDiff
40 40 from rhodecode.lib.vcs.backends.svn.inmemory import SubversionInMemoryCommit
41 41 from rhodecode.lib.vcs.conf import settings
42 42 from rhodecode.lib.vcs.exceptions import (
43 43 CommitDoesNotExistError, EmptyRepositoryError, RepositoryError,
44 44 VCSError, NodeDoesNotExistError)
45 45
46 46
47 47 log = logging.getLogger(__name__)
48 48
49 49
50 50 class SubversionRepository(base.BaseRepository):
51 51 """
52 52 Subversion backend implementation
53 53
54 54 .. important::
55 55
56 56 It is very important to distinguish the commit index and the commit id
57 57 which is assigned by Subversion. The first one is always handled as an
58 58 `int` by this implementation. The commit id assigned by Subversion on
59 59 the other side will always be a `str`.
60 60
61 61 There is a specific trap since the first commit will have the index
62 62 ``0`` but the svn id will be ``"1"``.
63 63
64 64 """
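    # An illustrative sketch of the index/id trap described above
    # (hypothetical `repo` instance; not part of the original source):
    #
    #   first = repo.get_commit(commit_idx=0)   # index is an `int`, starts at 0
    #   same = repo.get_commit(commit_id='1')   # svn-assigned id is a `str`, starts at "1"
    #   assert first.raw_id == same.raw_id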
65 65
66 66 # Note: Subversion does not really have a default branch name.
67 67 DEFAULT_BRANCH_NAME = None
68 68
69 69 contact = base.BaseRepository.DEFAULT_CONTACT
70 70 description = base.BaseRepository.DEFAULT_DESCRIPTION
71 71
72 72 def __init__(self, repo_path, config=None, create=False, src_url=None, with_wire=None,
73 73 bare=False, **kwargs):
74 74 self.path = safe_str(os.path.abspath(repo_path))
75 75 self.config = config if config else self.get_default_config()
76 76 self.with_wire = with_wire or {"cache": False} # default should not use cache
77 77
78 78 self._init_repo(create, src_url)
79 79
80 80 # caches
81 81 self._commit_ids = {}
82 82
83 83 @LazyProperty
84 84 def _remote(self):
85 85 repo_id = self.path
86 86 return connection.Svn(self.path, repo_id, self.config, with_wire=self.with_wire)
87 87
88 88 def _init_repo(self, create, src_url):
89 89 if create and os.path.exists(self.path):
90 90 raise RepositoryError(
91 91                 "Cannot create repository at %s, location already exists"
92 92 % self.path)
93 93
94 94 if create:
95 95 self._remote.create_repository(settings.SVN_COMPATIBLE_VERSION)
96 96 if src_url:
97 97 src_url = _sanitize_url(src_url)
98 98 self._remote.import_remote_repository(src_url)
99 99 else:
100 100 self._check_path()
101 101
102 102 @CachedProperty
103 103 def commit_ids(self):
104 104 head = self._remote.lookup(None)
105 105 return [str(r) for r in range(1, head + 1)]
106 106
107 107 def _rebuild_cache(self, commit_ids):
108 108 pass
109 109
110 110 def run_svn_command(self, cmd, **opts):
111 111 """
112 112 Runs given ``cmd`` as svn command and returns tuple
113 113 (stdout, stderr).
114 114
115 115 :param cmd: full svn command to be executed
116 116 :param opts: env options to pass into Subprocess command
117 117 """
118 118 if not isinstance(cmd, list):
119 119 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
120 120
121 121 skip_stderr_log = opts.pop('skip_stderr_log', False)
122 122 out, err = self._remote.run_svn_command(cmd, **opts)
123 123 if err and not skip_stderr_log:
124 124 log.debug('Stderr output of svn command "%s":\n%s', cmd, err)
125 125 return out, err
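        # A usage sketch (assumed command and repo instance, not from the
        # original source): the command is passed as a list, and stderr
        # logging can be silenced via the `skip_stderr_log` option:
        #
        #   out, err = repo.run_svn_command(
        #       ['svn', 'info', repo.path], skip_stderr_log=True)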
126 126
127 127 @LazyProperty
128 128 def branches(self):
129 129 return self._tags_or_branches('vcs_svn_branch')
130 130
131 131 @LazyProperty
132 132 def branches_closed(self):
133 133 return {}
134 134
135 135 @LazyProperty
136 136 def bookmarks(self):
137 137 return {}
138 138
139 139 @LazyProperty
140 140 def branches_all(self):
141 141 # TODO: johbo: Implement proper branch support
142 142 all_branches = {}
143 143 all_branches.update(self.branches)
144 144 all_branches.update(self.branches_closed)
145 145 return all_branches
146 146
147 147 @LazyProperty
148 148 def tags(self):
149 149 return self._tags_or_branches('vcs_svn_tag')
150 150
151 151 def _tags_or_branches(self, config_section):
152 152 found_items = {}
153 153
154 154 if self.is_empty():
155 155 return {}
156 156
157 157 for pattern in self._patterns_from_section(config_section):
158 158 pattern = vcspath.sanitize(pattern)
159 159 tip = self.get_commit()
160 160 try:
161 161 if pattern.endswith('*'):
162 162 basedir = tip.get_node(vcspath.dirname(pattern))
163 163 directories = basedir.dirs
164 164 else:
165 165 directories = (tip.get_node(pattern), )
166 166 except NodeDoesNotExistError:
167 167 continue
168 168 found_items.update(
169 169 (safe_unicode(n.path),
170 170 self.commit_ids[-1])
171 171 for n in directories)
172 172
173 173 def get_name(item):
174 174 return item[0]
175 175
176 176 return OrderedDict(sorted(found_items.items(), key=get_name))
177 177
178 178 def _patterns_from_section(self, section):
179 179 return (pattern for key, pattern in self.config.items(section))
180 180
181 181 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
182 182 if self != repo2:
183 183 raise ValueError(
184 184 "Subversion does not support getting common ancestor of"
185 185 " different repositories.")
186 186
187 187 if int(commit_id1) < int(commit_id2):
188 188 return commit_id1
189 189 return commit_id2
190 190
191 191 def verify(self):
192 192 verify = self._remote.verify()
193 193
194 194 self._remote.invalidate_vcs_cache()
195 195 return verify
196 196
197 197 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
198 198 # TODO: johbo: Implement better comparison, this is a very naive
199 199         # version which does not allow comparing branches, tags or folders
200 200 # at all.
201 201 if repo2 != self:
202 202 raise ValueError(
203 203                 "Subversion does not support comparison of different "
204 204 "repositories.")
205 205
206 206 if commit_id1 == commit_id2:
207 207 return []
208 208
209 209 commit_idx1 = self._get_commit_idx(commit_id1)
210 210 commit_idx2 = self._get_commit_idx(commit_id2)
211 211
212 212 commits = [
213 213 self.get_commit(commit_idx=idx)
214 214 for idx in range(commit_idx1 + 1, commit_idx2 + 1)]
215 215
216 216 return commits
217 217
218 218 def _get_commit_idx(self, commit_id):
219 219 try:
220 220 svn_rev = int(commit_id)
221 221         except (TypeError, ValueError):
222 222 # TODO: johbo: this might be only one case, HEAD, check this
223 223 svn_rev = self._remote.lookup(commit_id)
224 224 commit_idx = svn_rev - 1
225 225 if commit_idx >= len(self.commit_ids):
226 226 raise CommitDoesNotExistError(
227 227 "Commit at index %s does not exist." % (commit_idx, ))
228 228 return commit_idx
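        # A worked example of the off-by-one mapping above (assumed repository
        # state, not from the original source): in a repository with svn
        # revisions 1..42, _get_commit_idx('1') returns 0 and
        # _get_commit_idx('42') returns 41, while _get_commit_idx('43')
        # raises CommitDoesNotExistError.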
229 229
230 230 @staticmethod
231 231 def check_url(url, config):
232 232 """
233 233 Check if `url` is a valid source to import a Subversion repository.
234 234 """
235 235 # convert to URL if it's a local directory
236 236 if os.path.isdir(url):
237 237 url = 'file://' + urllib.request.pathname2url(url)
238 238 return connection.Svn.check_url(url, config.serialize())
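        # A small sketch of the local-directory conversion above (assumed
        # path, not from the original source): on a POSIX system a directory
        # such as '/srv/svn/myrepo' becomes 'file:///srv/svn/myrepo' via
        # urllib.request.pathname2url before being handed to the check.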
239 239
240 240 @staticmethod
241 241 def is_valid_repository(path):
242 242 try:
243 243 SubversionRepository(path)
244 244 return True
245 245 except VCSError:
246 246 pass
247 247 return False
248 248
249 249 def _check_path(self):
250 250 if not os.path.exists(self.path):
251 251 raise VCSError('Path "%s" does not exist!' % (self.path, ))
252 252 if not self._remote.is_path_valid_repository(self.path):
253 253 raise VCSError(
254 254 'Path "%s" does not contain a Subversion repository' %
255 255 (self.path, ))
256 256
257 257 @LazyProperty
258 258 def last_change(self):
259 259 """
260 260 Returns last change made on this repository as
261 261 `datetime.datetime` object.
262 262 """
263 263         # Subversion always has an initial revision "0" holding the repository
264 264         # creation date, so this lookup also works for an empty repository.
265 265 last_id = len(self.commit_ids)
266 266 properties = self._remote.revision_properties(last_id)
267 267 return _date_from_svn_properties(properties)
268 268
269 269 @LazyProperty
270 270 def in_memory_commit(self):
271 271 return SubversionInMemoryCommit(self)
272 272
273 273 def get_hook_location(self):
274 274 """
275 275 returns absolute path to location where hooks are stored
276 276 """
277 277 return os.path.join(self.path, 'hooks')
278 278
279 279 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
280 280 translate_tag=None, maybe_unreachable=False, reference_obj=None):
281 281 if self.is_empty():
282 282 raise EmptyRepositoryError("There are no commits yet")
283 283 if commit_id is not None:
284 284 self._validate_commit_id(commit_id)
285 285 elif commit_idx is not None:
286 286 self._validate_commit_idx(commit_idx)
287 287 try:
288 288 commit_id = self.commit_ids[commit_idx]
289 289 except IndexError:
290 290 raise CommitDoesNotExistError('No commit with idx: {}'.format(commit_idx))
291 291
292 292 commit_id = self._sanitize_commit_id(commit_id)
293 293 commit = SubversionCommit(repository=self, commit_id=commit_id)
294 294 return commit
295 295
296 296 def get_commits(
297 297 self, start_id=None, end_id=None, start_date=None, end_date=None,
298 298 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
299 299 if self.is_empty():
300 300 raise EmptyRepositoryError("There are no commit_ids yet")
301 301 self._validate_branch_name(branch_name)
302 302
303 303 if start_id is not None:
304 304 self._validate_commit_id(start_id)
305 305 if end_id is not None:
306 306 self._validate_commit_id(end_id)
307 307
308 308 start_raw_id = self._sanitize_commit_id(start_id)
309 309 start_pos = self.commit_ids.index(start_raw_id) if start_id else None
310 310 end_raw_id = self._sanitize_commit_id(end_id)
311 311 end_pos = max(0, self.commit_ids.index(end_raw_id)) if end_id else None
312 312
313 313 if None not in [start_id, end_id] and start_pos > end_pos:
314 314 raise RepositoryError(
315 315 "Start commit '%s' cannot be after end commit '%s'" %
316 316 (start_id, end_id))
317 317 if end_pos is not None:
318 318 end_pos += 1
319 319
320 320 # Date based filtering
321 321 if start_date or end_date:
322 322 start_raw_id, end_raw_id = self._remote.lookup_interval(
323 323 date_astimestamp(start_date) if start_date else None,
324 324 date_astimestamp(end_date) if end_date else None)
325 325 start_pos = start_raw_id - 1
326 326 end_pos = end_raw_id
327 327
328 328 commit_ids = self.commit_ids
329 329
330 330 # TODO: johbo: Reconsider impact of DEFAULT_BRANCH_NAME here
331 331 if branch_name not in [None, self.DEFAULT_BRANCH_NAME]:
332 332             svn_rev = int(self.commit_ids[-1])
333 333 commit_ids = self._remote.node_history(
334 334 path=branch_name, revision=svn_rev, limit=None)
335 335 commit_ids = [str(i) for i in reversed(commit_ids)]
336 336
337 337 if start_pos or end_pos:
338 338 commit_ids = commit_ids[start_pos:end_pos]
339 339 return base.CollectionGenerator(self, commit_ids, pre_load=pre_load)
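        # A usage sketch (assumed repository with svn revisions 1..100, not
        # from the original source): with both ends given, the range is
        # inclusive,
        #
        #   repo.get_commits(start_id='5', end_id='10')
        #   # -> commits with raw ids '5', '6', '7', '8', '9', '10'
        #
        # since start_pos/end_pos come from commit_ids.index() and end_pos is
        # bumped by one so the slice includes the end commit.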
340 340
341 341 def _sanitize_commit_id(self, commit_id):
342 342 if commit_id and commit_id.isdigit():
343 343 if int(commit_id) <= len(self.commit_ids):
344 344 return commit_id
345 345 else:
346 346 raise CommitDoesNotExistError(
347 347 "Commit %s does not exist." % (commit_id, ))
348 348 if commit_id not in [
349 349 None, 'HEAD', 'tip', self.DEFAULT_BRANCH_NAME]:
350 350 raise CommitDoesNotExistError(
351 351 "Commit id %s not understood." % (commit_id, ))
352 352 svn_rev = self._remote.lookup('HEAD')
353 353 return str(svn_rev)
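        # Examples of the sanitizing rules above (assumed repository with 42
        # revisions, not from the original source): '7' passes through
        # unchanged, '43' raises CommitDoesNotExistError, and None / 'HEAD' /
        # 'tip' all resolve to the head revision '42' via the remote lookup.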
354 354
355 355 def get_diff(
356 356 self, commit1, commit2, path=None, ignore_whitespace=False,
357 357 context=3, path1=None):
358 358 self._validate_diff_commits(commit1, commit2)
359 359         svn_rev1 = int(commit1.raw_id)
360 360         svn_rev2 = int(commit2.raw_id)
361 361 diff = self._remote.diff(
362 362 svn_rev1, svn_rev2, path1=path1, path2=path,
363 363 ignore_whitespace=ignore_whitespace, context=context)
364 364 return SubversionDiff(diff)
365 365
366 366
367 367 def _sanitize_url(url):
368 368 if '://' not in url:
369 369 url = 'file://' + urllib.request.pathname2url(url)
370 370 return url
@@ -1,2380 +1,2380 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26
27 27 import json
28 28 import logging
29 29 import os
30 30
31 31 import datetime
32 32 import urllib.request, urllib.parse, urllib.error
33 33 import collections
34 34
35 35 from pyramid.threadlocal import get_current_request
36 36
37 37 from rhodecode.lib.vcs.nodes import FileNode
38 38 from rhodecode.translation import lazy_ugettext
39 39 from rhodecode.lib import helpers as h, hooks_utils, diffs
40 40 from rhodecode.lib import audit_logger
41 from rhodecode.lib.compat import OrderedDict
41 from collections import OrderedDict
42 42 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
43 43 from rhodecode.lib.markup_renderer import (
44 44 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
45 45 from rhodecode.lib.utils2 import (
46 46 safe_unicode, safe_str, md5_safe, AttributeDict, safe_int,
47 47 get_current_rhodecode_user)
48 48 from rhodecode.lib.vcs.backends.base import (
49 49 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason,
50 50 TargetRefMissing, SourceRefMissing)
51 51 from rhodecode.lib.vcs.conf import settings as vcs_settings
52 52 from rhodecode.lib.vcs.exceptions import (
53 53 CommitDoesNotExistError, EmptyRepositoryError)
54 54 from rhodecode.model import BaseModel
55 55 from rhodecode.model.changeset_status import ChangesetStatusModel
56 56 from rhodecode.model.comment import CommentsModel
57 57 from rhodecode.model.db import (
58 58 aliased, null, lazyload, and_, or_, func, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
59 59 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User)
60 60 from rhodecode.model.meta import Session
61 61 from rhodecode.model.notification import NotificationModel, \
62 62 EmailNotificationModel
63 63 from rhodecode.model.scm import ScmModel
64 64 from rhodecode.model.settings import VcsSettingsModel
65 65
66 66
67 67 log = logging.getLogger(__name__)
68 68
69 69
70 70 # Data structure to hold the response data when updating commits during a pull
71 71 # request update.
72 72 class UpdateResponse(object):
73 73
74 74 def __init__(self, executed, reason, new, old, common_ancestor_id,
75 75 commit_changes, source_changed, target_changed):
76 76
77 77 self.executed = executed
78 78 self.reason = reason
79 79 self.new = new
80 80 self.old = old
81 81 self.common_ancestor_id = common_ancestor_id
82 82 self.changes = commit_changes
83 83 self.source_changed = source_changed
84 84 self.target_changed = target_changed
85 85
86 86
87 87 def get_diff_info(
88 88 source_repo, source_ref, target_repo, target_ref, get_authors=False,
89 89 get_commit_authors=True):
90 90 """
91 91     Calculates detailed diff information for use when previewing the creation of a pull-request.
92 92     This is also used by the default-reviewers logic.
93 93 """
94 94
95 95 source_scm = source_repo.scm_instance()
96 96 target_scm = target_repo.scm_instance()
97 97
98 98 ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm)
99 99 if not ancestor_id:
100 100 raise ValueError(
101 101 'cannot calculate diff info without a common ancestor. '
102 102 'Make sure both repositories are related, and have a common forking commit.')
103 103
104 104     # the case here is that we want a simple diff without incoming commits,
105 105 # previewing what will be merged based only on commits in the source.
106 106 log.debug('Using ancestor %s as source_ref instead of %s',
107 107 ancestor_id, source_ref)
108 108
109 109 # source of changes now is the common ancestor
110 110 source_commit = source_scm.get_commit(commit_id=ancestor_id)
111 111     # the target commit becomes the source ref, as it is the last commit;
112 112     # for diff generation this logic gives the proper diff
113 113 target_commit = source_scm.get_commit(commit_id=source_ref)
114 114
115 115 vcs_diff = \
116 116 source_scm.get_diff(commit1=source_commit, commit2=target_commit,
117 117 ignore_whitespace=False, context=3)
118 118
119 119 diff_processor = diffs.DiffProcessor(
120 120 vcs_diff, format='newdiff', diff_limit=None,
121 121 file_limit=None, show_full_diff=True)
122 122
123 123 _parsed = diff_processor.prepare()
124 124
125 125 all_files = []
126 126 all_files_changes = []
127 127 changed_lines = {}
128 128 stats = [0, 0]
129 129 for f in _parsed:
130 130 all_files.append(f['filename'])
131 131 all_files_changes.append({
132 132 'filename': f['filename'],
133 133 'stats': f['stats']
134 134 })
135 135 stats[0] += f['stats']['added']
136 136 stats[1] += f['stats']['deleted']
137 137
138 138 changed_lines[f['filename']] = []
139 139 if len(f['chunks']) < 2:
140 140 continue
141 141 # first line is "context" information
142 142 for chunks in f['chunks'][1:]:
143 143 for chunk in chunks['lines']:
144 144 if chunk['action'] not in ('del', 'mod'):
145 145 continue
146 146 changed_lines[f['filename']].append(chunk['old_lineno'])
147 147
148 148 commit_authors = []
149 149 user_counts = {}
150 150 email_counts = {}
151 151 author_counts = {}
152 152 _commit_cache = {}
153 153
154 154 commits = []
155 155 if get_commit_authors:
156 156 log.debug('Obtaining commit authors from set of commits')
157 157 _compare_data = target_scm.compare(
158 158 target_ref, source_ref, source_scm, merge=True,
159 159 pre_load=["author", "date", "message"]
160 160 )
161 161
162 162 for commit in _compare_data:
163 163             # NOTE(marcink): we serialize here, so we don't produce more vcsserver calls on
164 164             # the data returned from this function, which is later serialized via JSON
165 165 serialized_commit = dict(
166 166 author=commit.author,
167 167 date=commit.date,
168 168 message=commit.message,
169 169 commit_id=commit.raw_id,
170 170 raw_id=commit.raw_id
171 171 )
172 172 commits.append(serialized_commit)
173 173 user = User.get_from_cs_author(serialized_commit['author'])
174 174 if user and user not in commit_authors:
175 175 commit_authors.append(user)
176 176
177 177 # lines
178 178 if get_authors:
179 179 log.debug('Calculating authors of changed files')
180 180 target_commit = source_repo.get_commit(ancestor_id)
181 181
182 182 for fname, lines in changed_lines.items():
183 183
184 184 try:
185 185 node = target_commit.get_node(fname, pre_load=["is_binary"])
186 186 except Exception:
187 187 log.exception("Failed to load node with path %s", fname)
188 188 continue
189 189
190 190 if not isinstance(node, FileNode):
191 191 continue
192 192
193 193 # NOTE(marcink): for binary node we don't do annotation, just use last author
194 194 if node.is_binary:
195 195 author = node.last_commit.author
196 196 email = node.last_commit.author_email
197 197
198 198 user = User.get_from_cs_author(author)
199 199 if user:
200 200 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
201 201 author_counts[author] = author_counts.get(author, 0) + 1
202 202 email_counts[email] = email_counts.get(email, 0) + 1
203 203
204 204 continue
205 205
206 206 for annotation in node.annotate:
207 207 line_no, commit_id, get_commit_func, line_text = annotation
208 208 if line_no in lines:
209 209 if commit_id not in _commit_cache:
210 210 _commit_cache[commit_id] = get_commit_func()
211 211 commit = _commit_cache[commit_id]
212 212 author = commit.author
213 213 email = commit.author_email
214 214 user = User.get_from_cs_author(author)
215 215 if user:
216 216 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
217 217 author_counts[author] = author_counts.get(author, 0) + 1
218 218 email_counts[email] = email_counts.get(email, 0) + 1
219 219
220 220 log.debug('Default reviewers processing finished')
221 221
222 222 return {
223 223 'commits': commits,
224 224 'files': all_files_changes,
225 225 'stats': stats,
226 226 'ancestor': ancestor_id,
227 227 # original authors of modified files
228 228 'original_authors': {
229 229 'users': user_counts,
230 230 'authors': author_counts,
231 231 'emails': email_counts,
232 232 },
233 233 'commit_authors': commit_authors
234 234 }
235 235
236 236
237 237 class PullRequestModel(BaseModel):
238 238
239 239 cls = PullRequest
240 240
241 241 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
242 242
243 243 UPDATE_STATUS_MESSAGES = {
244 244 UpdateFailureReason.NONE: lazy_ugettext(
245 245 'Pull request update successful.'),
246 246 UpdateFailureReason.UNKNOWN: lazy_ugettext(
247 247 'Pull request update failed because of an unknown error.'),
248 248 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
249 249 'No update needed because the source and target have not changed.'),
250 250 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
251 251 'Pull request cannot be updated because the reference type is '
252 252 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
253 253 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
254 254 'This pull request cannot be updated because the target '
255 255 'reference is missing.'),
256 256 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
257 257 'This pull request cannot be updated because the source '
258 258 'reference is missing.'),
259 259 }
260 260 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
261 261 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
262 262
263 263 def __get_pull_request(self, pull_request):
264 264 return self._get_instance((
265 265 PullRequest, PullRequestVersion), pull_request)
266 266
267 267 def _check_perms(self, perms, pull_request, user, api=False):
268 268 if not api:
269 269 return h.HasRepoPermissionAny(*perms)(
270 270 user=user, repo_name=pull_request.target_repo.repo_name)
271 271 else:
272 272 return h.HasRepoPermissionAnyApi(*perms)(
273 273 user=user, repo_name=pull_request.target_repo.repo_name)
274 274
275 275 def check_user_read(self, pull_request, user, api=False):
276 276 _perms = ('repository.admin', 'repository.write', 'repository.read',)
277 277 return self._check_perms(_perms, pull_request, user, api)
278 278
279 279 def check_user_merge(self, pull_request, user, api=False):
280 280 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
281 281 return self._check_perms(_perms, pull_request, user, api)
282 282
283 283 def check_user_update(self, pull_request, user, api=False):
284 284 owner = user.user_id == pull_request.user_id
285 285 return self.check_user_merge(pull_request, user, api) or owner
286 286
287 287 def check_user_delete(self, pull_request, user):
288 288 owner = user.user_id == pull_request.user_id
289 289 _perms = ('repository.admin',)
290 290 return self._check_perms(_perms, pull_request, user) or owner
291 291
292 292 def is_user_reviewer(self, pull_request, user):
293 293 return user.user_id in [
294 294 x.user_id for x in
295 295 pull_request.get_pull_request_reviewers(PullRequestReviewers.ROLE_REVIEWER)
296 296 if x.user
297 297 ]
298 298
299 299 def check_user_change_status(self, pull_request, user, api=False):
300 300 return self.check_user_update(pull_request, user, api) \
301 301 or self.is_user_reviewer(pull_request, user)
302 302
303 303 def check_user_comment(self, pull_request, user):
304 304 owner = user.user_id == pull_request.user_id
305 305 return self.check_user_read(pull_request, user) or owner
306 306
307 307 def get(self, pull_request):
308 308 return self.__get_pull_request(pull_request)
309 309
310 310 def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
311 311 statuses=None, opened_by=None, order_by=None,
312 312 order_dir='desc', only_created=False):
313 313 repo = None
314 314 if repo_name:
315 315 repo = self._get_repo(repo_name)
316 316
317 317 q = PullRequest.query()
318 318
319 319 if search_q:
320 320 like_expression = u'%{}%'.format(safe_unicode(search_q))
321 321 q = q.join(User, User.user_id == PullRequest.user_id)
322 322 q = q.filter(or_(
323 323 cast(PullRequest.pull_request_id, String).ilike(like_expression),
324 324 User.username.ilike(like_expression),
325 325 PullRequest.title.ilike(like_expression),
326 326 PullRequest.description.ilike(like_expression),
327 327 ))
328 328
329 329 # source or target
330 330 if repo and source:
331 331 q = q.filter(PullRequest.source_repo == repo)
332 332 elif repo:
333 333 q = q.filter(PullRequest.target_repo == repo)
334 334
335 335 # closed,opened
336 336 if statuses:
337 337 q = q.filter(PullRequest.status.in_(statuses))
338 338
339 339 # opened by filter
340 340 if opened_by:
341 341 q = q.filter(PullRequest.user_id.in_(opened_by))
342 342
343 343 # only get those that are in "created" state
344 344 if only_created:
345 345 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
346 346
347 347 order_map = {
348 348 'name_raw': PullRequest.pull_request_id,
349 349 'id': PullRequest.pull_request_id,
350 350 'title': PullRequest.title,
351 351 'updated_on_raw': PullRequest.updated_on,
352 352 'target_repo': PullRequest.target_repo_id
353 353 }
354 354 if order_by and order_by in order_map:
355 355 if order_dir == 'asc':
356 356 q = q.order_by(order_map[order_by].asc())
357 357 else:
358 358 q = q.order_by(order_map[order_by].desc())
359 359
360 360 return q
361 361
362 362 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
363 363 opened_by=None):
364 364 """
365 365 Count the number of pull requests for a specific repository.
366 366
367 367 :param repo_name: target or source repo
368 368 :param search_q: filter by text
369 369 :param source: boolean flag to specify if repo_name refers to source
370 370 :param statuses: list of pull request statuses
371 371 :param opened_by: author user of the pull request
372 372 :returns: int number of pull requests
373 373 """
374 374 q = self._prepare_get_all_query(
375 375 repo_name, search_q=search_q, source=source, statuses=statuses,
376 376 opened_by=opened_by)
377 377
378 378 return q.count()
379 379
380 380 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
381 381 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
382 382 """
383 383 Get all pull requests for a specific repository.
384 384
385 385 :param repo_name: target or source repo
386 386 :param search_q: filter by text
387 387 :param source: boolean flag to specify if repo_name refers to source
388 388 :param statuses: list of pull request statuses
389 389 :param opened_by: author user of the pull request
390 390 :param offset: pagination offset
391 391 :param length: length of returned list
392 392 :param order_by: order of the returned list
393 393 :param order_dir: 'asc' or 'desc' ordering direction
394 394 :returns: list of pull requests
395 395 """
396 396 q = self._prepare_get_all_query(
397 397 repo_name, search_q=search_q, source=source, statuses=statuses,
398 398 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
399 399
400 400 if length:
401 401 pull_requests = q.limit(length).offset(offset).all()
402 402 else:
403 403 pull_requests = q.all()
404 404
405 405 return pull_requests
406 406
407 407 def count_awaiting_review(self, repo_name, search_q=None, statuses=None):
408 408 """
409 409 Count the number of pull requests for a specific repository that are
410 410 awaiting review.
411 411
412 412 :param repo_name: target or source repo
413 413 :param search_q: filter by text
414 414 :param statuses: list of pull request statuses
415 415 :returns: int number of pull requests
416 416 """
417 417 pull_requests = self.get_awaiting_review(
418 418 repo_name, search_q=search_q, statuses=statuses)
419 419
420 420 return len(pull_requests)
421 421
422 422 def get_awaiting_review(self, repo_name, search_q=None, statuses=None,
423 423 offset=0, length=None, order_by=None, order_dir='desc'):
424 424 """
425 425 Get all pull requests for a specific repository that are awaiting
426 426 review.
427 427
428 428 :param repo_name: target or source repo
429 429 :param search_q: filter by text
430 430 :param statuses: list of pull request statuses
431 431 :param offset: pagination offset
432 432 :param length: length of returned list
433 433 :param order_by: order of the returned list
434 434 :param order_dir: 'asc' or 'desc' ordering direction
435 435 :returns: list of pull requests
436 436 """
437 437 pull_requests = self.get_all(
438 438 repo_name, search_q=search_q, statuses=statuses,
439 439 order_by=order_by, order_dir=order_dir)
440 440
441 441 _filtered_pull_requests = []
442 442 for pr in pull_requests:
443 443 status = pr.calculated_review_status()
444 444 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
445 445 ChangesetStatus.STATUS_UNDER_REVIEW]:
446 446 _filtered_pull_requests.append(pr)
447 447 if length:
448 448 return _filtered_pull_requests[offset:offset+length]
449 449 else:
450 450 return _filtered_pull_requests
451 451
452 452 def _prepare_awaiting_my_review_review_query(
453 453 self, repo_name, user_id, search_q=None, statuses=None,
454 454 order_by=None, order_dir='desc'):
455 455
456 456 for_review_statuses = [
457 457 ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED
458 458 ]
459 459
460 460 pull_request_alias = aliased(PullRequest)
461 461 status_alias = aliased(ChangesetStatus)
462 462 reviewers_alias = aliased(PullRequestReviewers)
463 463 repo_alias = aliased(Repository)
464 464
465 465 last_ver_subq = Session()\
466 466 .query(func.min(ChangesetStatus.version)) \
467 467 .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\
468 468 .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \
469 469 .subquery()
470 470
471 471 q = Session().query(pull_request_alias) \
472 472 .options(lazyload(pull_request_alias.author)) \
473 473 .join(reviewers_alias,
474 474 reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \
475 475 .join(repo_alias,
476 476 repo_alias.repo_id == pull_request_alias.target_repo_id) \
477 477 .outerjoin(status_alias,
478 478 and_(status_alias.user_id == reviewers_alias.user_id,
479 479 status_alias.pull_request_id == reviewers_alias.pull_request_id)) \
480 480 .filter(or_(status_alias.version == null(),
481 481 status_alias.version == last_ver_subq)) \
482 482 .filter(reviewers_alias.user_id == user_id) \
483 483 .filter(repo_alias.repo_name == repo_name) \
484 484 .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \
485 485 .group_by(pull_request_alias)
486 486
487 487 # closed,opened
488 488 if statuses:
489 489 q = q.filter(pull_request_alias.status.in_(statuses))
490 490
491 491 if search_q:
492 492 like_expression = u'%{}%'.format(safe_unicode(search_q))
493 493 q = q.join(User, User.user_id == pull_request_alias.user_id)
494 494 q = q.filter(or_(
495 495 cast(pull_request_alias.pull_request_id, String).ilike(like_expression),
496 496 User.username.ilike(like_expression),
497 497 pull_request_alias.title.ilike(like_expression),
498 498 pull_request_alias.description.ilike(like_expression),
499 499 ))
500 500
501 501 order_map = {
502 502 'name_raw': pull_request_alias.pull_request_id,
503 503 'title': pull_request_alias.title,
504 504 'updated_on_raw': pull_request_alias.updated_on,
505 505 'target_repo': pull_request_alias.target_repo_id
506 506 }
507 507 if order_by and order_by in order_map:
508 508 if order_dir == 'asc':
509 509 q = q.order_by(order_map[order_by].asc())
510 510 else:
511 511 q = q.order_by(order_map[order_by].desc())
512 512
513 513 return q
514 514
515 515 def count_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None):
516 516 """
517 517 Count the number of pull requests for a specific repository that are
518 518 awaiting review from a specific user.
519 519
520 520 :param repo_name: target or source repo
521 521 :param user_id: reviewer user of the pull request
522 522 :param search_q: filter by text
523 523 :param statuses: list of pull request statuses
524 524 :returns: int number of pull requests
525 525 """
526 526 q = self._prepare_awaiting_my_review_review_query(
527 527 repo_name, user_id, search_q=search_q, statuses=statuses)
528 528 return q.count()
529 529
530 530 def get_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None,
531 531 offset=0, length=None, order_by=None, order_dir='desc'):
532 532 """
533 533 Get all pull requests for a specific repository that are awaiting
534 534 review from a specific user.
535 535
536 536 :param repo_name: target or source repo
537 537 :param user_id: reviewer user of the pull request
538 538 :param search_q: filter by text
539 539 :param statuses: list of pull request statuses
540 540 :param offset: pagination offset
541 541 :param length: length of returned list
542 542 :param order_by: order of the returned list
543 543 :param order_dir: 'asc' or 'desc' ordering direction
544 544 :returns: list of pull requests
545 545 """
546 546
547 547 q = self._prepare_awaiting_my_review_review_query(
548 548 repo_name, user_id, search_q=search_q, statuses=statuses,
549 549 order_by=order_by, order_dir=order_dir)
550 550
551 551 if length:
552 552 pull_requests = q.limit(length).offset(offset).all()
553 553 else:
554 554 pull_requests = q.all()
555 555
556 556 return pull_requests
557 557
558 558 def _prepare_im_participating_query(self, user_id=None, statuses=None, query='',
559 559 order_by=None, order_dir='desc'):
560 560 """
561 561         return a query of pull requests where the user is the creator or is added as a reviewer
562 562 """
563 563 q = PullRequest.query()
564 564 if user_id:
565 565 reviewers_subquery = Session().query(
566 566 PullRequestReviewers.pull_request_id).filter(
567 567 PullRequestReviewers.user_id == user_id).subquery()
568 568 user_filter = or_(
569 569 PullRequest.user_id == user_id,
570 570 PullRequest.pull_request_id.in_(reviewers_subquery)
571 571 )
572 572 q = PullRequest.query().filter(user_filter)
573 573
574 574 # closed,opened
575 575 if statuses:
576 576 q = q.filter(PullRequest.status.in_(statuses))
577 577
578 578 if query:
579 579 like_expression = u'%{}%'.format(safe_unicode(query))
580 580 q = q.join(User, User.user_id == PullRequest.user_id)
581 581 q = q.filter(or_(
582 582 cast(PullRequest.pull_request_id, String).ilike(like_expression),
583 583 User.username.ilike(like_expression),
584 584 PullRequest.title.ilike(like_expression),
585 585 PullRequest.description.ilike(like_expression),
586 586 ))
587 587
588 588 order_map = {
589 589 'name_raw': PullRequest.pull_request_id,
590 590 'title': PullRequest.title,
591 591 'updated_on_raw': PullRequest.updated_on,
592 592 'target_repo': PullRequest.target_repo_id
593 593 }
594 594 if order_by and order_by in order_map:
595 595 if order_dir == 'asc':
596 596 q = q.order_by(order_map[order_by].asc())
597 597 else:
598 598 q = q.order_by(order_map[order_by].desc())
599 599
600 600 return q
601 601
602 602 def count_im_participating_in(self, user_id=None, statuses=None, query=''):
603 603 q = self._prepare_im_participating_query(user_id, statuses=statuses, query=query)
604 604 return q.count()
605 605
606 606 def get_im_participating_in(
607 607 self, user_id=None, statuses=None, query='', offset=0,
608 608 length=None, order_by=None, order_dir='desc'):
609 609 """
610 610         Get all pull requests that I'm participating in as a reviewer, or that I have opened
611 611 """
612 612
613 613 q = self._prepare_im_participating_query(
614 614 user_id, statuses=statuses, query=query, order_by=order_by,
615 615 order_dir=order_dir)
616 616
617 617 if length:
618 618 pull_requests = q.limit(length).offset(offset).all()
619 619 else:
620 620 pull_requests = q.all()
621 621
622 622 return pull_requests
623 623
624 624 def _prepare_participating_in_for_review_query(
625 625 self, user_id, statuses=None, query='', order_by=None, order_dir='desc'):
626 626
627 627 for_review_statuses = [
628 628 ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED
629 629 ]
630 630
631 631 pull_request_alias = aliased(PullRequest)
632 632 status_alias = aliased(ChangesetStatus)
633 633 reviewers_alias = aliased(PullRequestReviewers)
634 634
635 635 last_ver_subq = Session()\
636 636 .query(func.min(ChangesetStatus.version)) \
637 637 .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\
638 638 .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \
639 639 .subquery()
640 640
641 641 q = Session().query(pull_request_alias) \
642 642 .options(lazyload(pull_request_alias.author)) \
643 643 .join(reviewers_alias,
644 644 reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \
645 645 .outerjoin(status_alias,
646 646 and_(status_alias.user_id == reviewers_alias.user_id,
647 647 status_alias.pull_request_id == reviewers_alias.pull_request_id)) \
648 648 .filter(or_(status_alias.version == null(),
649 649 status_alias.version == last_ver_subq)) \
650 650 .filter(reviewers_alias.user_id == user_id) \
651 651 .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \
652 652 .group_by(pull_request_alias)
653 653
654 654 # closed,opened
655 655 if statuses:
656 656 q = q.filter(pull_request_alias.status.in_(statuses))
657 657
658 658 if query:
659 659 like_expression = u'%{}%'.format(safe_unicode(query))
660 660 q = q.join(User, User.user_id == pull_request_alias.user_id)
661 661 q = q.filter(or_(
662 662 cast(pull_request_alias.pull_request_id, String).ilike(like_expression),
663 663 User.username.ilike(like_expression),
664 664 pull_request_alias.title.ilike(like_expression),
665 665 pull_request_alias.description.ilike(like_expression),
666 666 ))
667 667
668 668 order_map = {
669 669 'name_raw': pull_request_alias.pull_request_id,
670 670 'title': pull_request_alias.title,
671 671 'updated_on_raw': pull_request_alias.updated_on,
672 672 'target_repo': pull_request_alias.target_repo_id
673 673 }
674 674 if order_by and order_by in order_map:
675 675 if order_dir == 'asc':
676 676 q = q.order_by(order_map[order_by].asc())
677 677 else:
678 678 q = q.order_by(order_map[order_by].desc())
679 679
680 680 return q
681 681
682 682 def count_im_participating_in_for_review(self, user_id, statuses=None, query=''):
683 683 q = self._prepare_participating_in_for_review_query(user_id, statuses=statuses, query=query)
684 684 return q.count()
685 685
686 686 def get_im_participating_in_for_review(
687 687 self, user_id, statuses=None, query='', offset=0,
688 688 length=None, order_by=None, order_dir='desc'):
689 689 """
690 690         Get all pull requests that need user approval or rejection
691 691 """
692 692
693 693 q = self._prepare_participating_in_for_review_query(
694 694 user_id, statuses=statuses, query=query, order_by=order_by,
695 695 order_dir=order_dir)
696 696
697 697 if length:
698 698 pull_requests = q.limit(length).offset(offset).all()
699 699 else:
700 700 pull_requests = q.all()
701 701
702 702 return pull_requests
703 703
704 704 def get_versions(self, pull_request):
705 705 """
706 706         returns versions of the pull request sorted by version ID ascending
707 707 """
708 708 return PullRequestVersion.query()\
709 709 .filter(PullRequestVersion.pull_request == pull_request)\
710 710 .order_by(PullRequestVersion.pull_request_version_id.asc())\
711 711 .all()
712 712
713 713 def get_pr_version(self, pull_request_id, version=None):
714 714 at_version = None
715 715
716 716 if version and version == 'latest':
717 717 pull_request_ver = PullRequest.get(pull_request_id)
718 718 pull_request_obj = pull_request_ver
719 719 _org_pull_request_obj = pull_request_obj
720 720 at_version = 'latest'
721 721 elif version:
722 722 pull_request_ver = PullRequestVersion.get_or_404(version)
723 723 pull_request_obj = pull_request_ver
724 724 _org_pull_request_obj = pull_request_ver.pull_request
725 725 at_version = pull_request_ver.pull_request_version_id
726 726 else:
727 727 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
728 728 pull_request_id)
729 729
730 730 pull_request_display_obj = PullRequest.get_pr_display_object(
731 731 pull_request_obj, _org_pull_request_obj)
732 732
733 733 return _org_pull_request_obj, pull_request_obj, \
734 734 pull_request_display_obj, at_version
735 735
736 736 def pr_commits_versions(self, versions):
737 737 """
738 738         Maps the pull-request commits onto all known PR versions, so we can tell
739 739         in which PR versions each commit was introduced.
740 740 """
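        # An illustrative sketch (assumed versions, not from the original
        # source): given version v1 with revisions ['a', 'b'] and v2 with
        # revisions ['b', 'c'], the result maps each commit id to the display
        # indices of the versions containing it, roughly:
        #
        #   {'a': [idx(v1)], 'b': [idx(v1), idx(v2)], 'c': [idx(v2)]}
        #
        # where idx() stands for ChangesetComment.get_index_from_version().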
741 741 commit_versions = collections.defaultdict(list)
742 742 num_versions = [x.pull_request_version_id for x in versions]
743 743 for ver in versions:
744 744 for commit_id in ver.revisions:
745 745 ver_idx = ChangesetComment.get_index_from_version(
746 746 ver.pull_request_version_id, num_versions=num_versions)
747 747 commit_versions[commit_id].append(ver_idx)
748 748 return commit_versions
749 749
750 750 def create(self, created_by, source_repo, source_ref, target_repo,
751 751 target_ref, revisions, reviewers, observers, title, description=None,
752 752 common_ancestor_id=None,
753 753 description_renderer=None,
754 754 reviewer_data=None, translator=None, auth_user=None):
755 755 translator = translator or get_current_request().translate
756 756
757 757 created_by_user = self._get_user(created_by)
758 758 auth_user = auth_user or created_by_user.AuthUser()
759 759 source_repo = self._get_repo(source_repo)
760 760 target_repo = self._get_repo(target_repo)
761 761
762 762 pull_request = PullRequest()
763 763 pull_request.source_repo = source_repo
764 764 pull_request.source_ref = source_ref
765 765 pull_request.target_repo = target_repo
766 766 pull_request.target_ref = target_ref
767 767 pull_request.revisions = revisions
768 768 pull_request.title = title
769 769 pull_request.description = description
770 770 pull_request.description_renderer = description_renderer
771 771 pull_request.author = created_by_user
772 772 pull_request.reviewer_data = reviewer_data
773 773 pull_request.pull_request_state = pull_request.STATE_CREATING
774 774 pull_request.common_ancestor_id = common_ancestor_id
775 775
776 776 Session().add(pull_request)
777 777 Session().flush()
778 778
779 779 reviewer_ids = set()
780 780 # members / reviewers
781 781 for reviewer_object in reviewers:
782 782 user_id, reasons, mandatory, role, rules = reviewer_object
783 783 user = self._get_user(user_id)
784 784
785 785 # skip duplicates
786 786 if user.user_id in reviewer_ids:
787 787 continue
788 788
789 789 reviewer_ids.add(user.user_id)
790 790
791 791 reviewer = PullRequestReviewers()
792 792 reviewer.user = user
793 793 reviewer.pull_request = pull_request
794 794 reviewer.reasons = reasons
795 795 reviewer.mandatory = mandatory
796 796 reviewer.role = role
797 797
798 798 # NOTE(marcink): pick only first rule for now
799 799 rule_id = list(rules)[0] if rules else None
800 800 rule = RepoReviewRule.get(rule_id) if rule_id else None
801 801 if rule:
802 802 review_group = rule.user_group_vote_rule(user_id)
803 803 # we check if this particular reviewer is member of a voting group
804 804 if review_group:
805 805 # NOTE(marcink):
806 806                     # the user can be a member of more groups, but we pick the first one,
807 807                     # same as the default-reviewers algorithm
808 808 review_group = review_group[0]
809 809
810 810 rule_data = {
811 811 'rule_name':
812 812 rule.review_rule_name,
813 813 'rule_user_group_entry_id':
814 814 review_group.repo_review_rule_users_group_id,
815 815 'rule_user_group_name':
816 816 review_group.users_group.users_group_name,
817 817 'rule_user_group_members':
818 818 [x.user.username for x in review_group.users_group.members],
819 819 'rule_user_group_members_id':
820 820 [x.user.user_id for x in review_group.users_group.members],
821 821 }
822 822 # e.g {'vote_rule': -1, 'mandatory': True}
823 823 rule_data.update(review_group.rule_data())
824 824
825 825 reviewer.rule_data = rule_data
826 826
827 827 Session().add(reviewer)
828 828 Session().flush()
829 829
830 830 for observer_object in observers:
831 831 user_id, reasons, mandatory, role, rules = observer_object
832 832 user = self._get_user(user_id)
833 833
834 834 # skip duplicates from reviewers
835 835 if user.user_id in reviewer_ids:
836 836 continue
837 837
838 838 #reviewer_ids.add(user.user_id)
839 839
840 840 observer = PullRequestReviewers()
841 841 observer.user = user
842 842 observer.pull_request = pull_request
843 843 observer.reasons = reasons
844 844 observer.mandatory = mandatory
845 845 observer.role = role
846 846
847 847 # NOTE(marcink): pick only first rule for now
848 848 rule_id = list(rules)[0] if rules else None
849 849 rule = RepoReviewRule.get(rule_id) if rule_id else None
850 850 if rule:
851 851 # TODO(marcink): do we need this for observers ??
852 852 pass
853 853
854 854 Session().add(observer)
855 855 Session().flush()
856 856
857 857 # Set approval status to "Under Review" for all commits which are
858 858 # part of this pull request.
859 859 ChangesetStatusModel().set_status(
860 860 repo=target_repo,
861 861 status=ChangesetStatus.STATUS_UNDER_REVIEW,
862 862 user=created_by_user,
863 863 pull_request=pull_request
864 864 )
865 865         # we commit early at this point because the queries above take row locks.
866 866         # Because of that we need to commit and finish the transaction before the
867 867         # validate call below, which for large repos can take long and would
868 868         # otherwise hold those row locks for its whole duration
869 869 Session().commit()
870 870
871 871 # prepare workspace, and run initial merge simulation. Set state during that
872 872 # operation
873 873 pull_request = PullRequest.get(pull_request.pull_request_id)
874 874
875 875 # set as merging, for merge simulation, and if finished to created so we mark
876 876 # simulation is working fine
877 877 with pull_request.set_state(PullRequest.STATE_MERGING,
878 878 final_state=PullRequest.STATE_CREATED) as state_obj:
879 879 MergeCheck.validate(
880 880 pull_request, auth_user=auth_user, translator=translator)
881 881
882 882 self.notify_reviewers(pull_request, reviewer_ids, created_by_user)
883 883 self.trigger_pull_request_hook(pull_request, created_by_user, 'create')
884 884
885 885 creation_data = pull_request.get_api_data(with_merge_state=False)
886 886 self._log_audit_action(
887 887 'repo.pull_request.create', {'data': creation_data},
888 888 auth_user, pull_request)
889 889
890 890 return pull_request
891 891
892 892 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
893 893 pull_request = self.__get_pull_request(pull_request)
894 894 target_scm = pull_request.target_repo.scm_instance()
895 895 if action == 'create':
896 896 trigger_hook = hooks_utils.trigger_create_pull_request_hook
897 897 elif action == 'merge':
898 898 trigger_hook = hooks_utils.trigger_merge_pull_request_hook
899 899 elif action == 'close':
900 900 trigger_hook = hooks_utils.trigger_close_pull_request_hook
901 901 elif action == 'review_status_change':
902 902 trigger_hook = hooks_utils.trigger_review_pull_request_hook
903 903 elif action == 'update':
904 904 trigger_hook = hooks_utils.trigger_update_pull_request_hook
905 905 elif action == 'comment':
906 906 trigger_hook = hooks_utils.trigger_comment_pull_request_hook
907 907 elif action == 'comment_edit':
908 908 trigger_hook = hooks_utils.trigger_comment_pull_request_edit_hook
909 909 else:
910 910 return
911 911
912 912 log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s',
913 913 pull_request, action, trigger_hook)
914 914 trigger_hook(
915 915 username=user.username,
916 916 repo_name=pull_request.target_repo.repo_name,
917 917 repo_type=target_scm.alias,
918 918 pull_request=pull_request,
919 919 data=data)
920 920
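    # The if/elif chain above is equivalent to a small dispatch table; a
    # minimal sketch (using the same hook functions named above):
    #
    #     _PR_HOOKS = {
    #         'create': hooks_utils.trigger_create_pull_request_hook,
    #         'merge': hooks_utils.trigger_merge_pull_request_hook,
    #         'close': hooks_utils.trigger_close_pull_request_hook,
    #         'review_status_change': hooks_utils.trigger_review_pull_request_hook,
    #         'update': hooks_utils.trigger_update_pull_request_hook,
    #         'comment': hooks_utils.trigger_comment_pull_request_hook,
    #         'comment_edit': hooks_utils.trigger_comment_pull_request_edit_hook,
    #     }
    #     trigger_hook = _PR_HOOKS.get(action)
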
921 921 def _get_commit_ids(self, pull_request):
922 922 """
923 923 Return the commit ids of the merged pull request.
924 924
925 925 This method does not yet deal correctly with the lack of autoupdates
926 926 or with implicit target updates.
927 927 For example: if a commit in the source repo is already in the target, it
928 928 will be reported anyway.
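
        Example (hypothetical ids)::

            pull_request.revisions == ['bbb', 'aaa']
            pull_request.merge_rev == 'mmm'
            # -> returns ['bbb', 'aaa', 'mmm'] (merge_rev appended if missing)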
929 929 """
930 930 merge_rev = pull_request.merge_rev
931 931 if merge_rev is None:
932 932 raise ValueError('This pull request was not merged yet')
933 933
934 934 commit_ids = list(pull_request.revisions)
935 935 if merge_rev not in commit_ids:
936 936 commit_ids.append(merge_rev)
937 937
938 938 return commit_ids
939 939
940 940 def merge_repo(self, pull_request, user, extras):
941 941 repo_type = pull_request.source_repo.repo_type
942 942 log.debug("Merging pull request %s", pull_request.pull_request_id)
943 943 extras['user_agent'] = '{}/internal-merge'.format(repo_type)
944 944 merge_state = self._merge_pull_request(pull_request, user, extras)
945 945 if merge_state.executed:
946 946 log.debug("Merge was successful, updating the pull request comments.")
947 947 self._comment_and_close_pr(pull_request, user, merge_state)
948 948
949 949 self._log_audit_action(
950 950 'repo.pull_request.merge',
951 951 {'merge_state': merge_state.__dict__},
952 952 user, pull_request)
953 953
954 954 else:
955 955 log.warning("Merge failed, not updating the pull request.")
956 956 return merge_state
957 957
958 958 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
959 959 target_vcs = pull_request.target_repo.scm_instance()
960 960 source_vcs = pull_request.source_repo.scm_instance()
961 961
962 962 message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
963 963 pr_id=pull_request.pull_request_id,
964 964 pr_title=pull_request.title,
965 965 pr_desc=pull_request.description,
966 966 source_repo=source_vcs.name,
967 967 source_ref_name=pull_request.source_ref_parts.name,
968 968 target_repo=target_vcs.name,
969 969 target_ref_name=pull_request.target_ref_parts.name,
970 970 )
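        # Illustrative only - with a template such as
        #   'Merge pull request !{pr_id} "{pr_title}" from {source_repo}:{source_ref_name}'
        # and pr_id=42, the rendered message would read
        #   'Merge pull request !42 "Fix login" from repo-a:feature-x'
        # (the real default lives in vcs_settings.MERGE_MESSAGE_TMPL)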
971 971
972 972 workspace_id = self._workspace_id(pull_request)
973 973 repo_id = pull_request.target_repo.repo_id
974 974 use_rebase = self._use_rebase_for_merging(pull_request)
975 975 close_branch = self._close_branch_before_merging(pull_request)
976 976 user_name = self._user_name_for_merging(pull_request, user)
977 977
978 978 target_ref = self._refresh_reference(
979 979 pull_request.target_ref_parts, target_vcs)
980 980
981 981 callback_daemon, extras = prepare_callback_daemon(
982 982 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
983 983 host=vcs_settings.HOOKS_HOST,
984 984 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
985 985
986 986 with callback_daemon:
987 987 # TODO: johbo: Implement a clean way to run a config_override
988 988 # for a single call.
989 989 target_vcs.config.set(
990 990 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
991 991
992 992 merge_state = target_vcs.merge(
993 993 repo_id, workspace_id, target_ref, source_vcs,
994 994 pull_request.source_ref_parts,
995 995 user_name=user_name, user_email=user.email,
996 996 message=message, use_rebase=use_rebase,
997 997 close_branch=close_branch)
998 998 return merge_state
999 999
1000 1000 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
1001 1001 pull_request.merge_rev = merge_state.merge_ref.commit_id
1002 1002 pull_request.updated_on = datetime.datetime.now()
1003 1003 close_msg = close_msg or 'Pull request merged and closed'
1004 1004
1005 1005 CommentsModel().create(
1006 1006 text=safe_unicode(close_msg),
1007 1007 repo=pull_request.target_repo.repo_id,
1008 1008 user=user.user_id,
1009 1009 pull_request=pull_request.pull_request_id,
1010 1010 f_path=None,
1011 1011 line_no=None,
1012 1012 closing_pr=True
1013 1013 )
1014 1014
1015 1015 Session().add(pull_request)
1016 1016 Session().flush()
1017 1017 # TODO: paris: replace invalidation with less radical solution
1018 1018 ScmModel().mark_for_invalidation(
1019 1019 pull_request.target_repo.repo_name)
1020 1020 self.trigger_pull_request_hook(pull_request, user, 'merge')
1021 1021
1022 1022 def has_valid_update_type(self, pull_request):
1023 1023 source_ref_type = pull_request.source_ref_parts.type
1024 1024 return source_ref_type in self.REF_TYPES
1025 1025
1026 1026 def get_flow_commits(self, pull_request):
1027 1027
1028 1028 # source repo
1029 1029 source_ref_name = pull_request.source_ref_parts.name
1030 1030 source_ref_type = pull_request.source_ref_parts.type
1031 1031 source_ref_id = pull_request.source_ref_parts.commit_id
1032 1032 source_repo = pull_request.source_repo.scm_instance()
1033 1033
1034 1034 try:
1035 1035 if source_ref_type in self.REF_TYPES:
1036 1036 source_commit = source_repo.get_commit(
1037 1037 source_ref_name, reference_obj=pull_request.source_ref_parts)
1038 1038 else:
1039 1039 source_commit = source_repo.get_commit(source_ref_id)
1040 1040 except CommitDoesNotExistError:
1041 1041 raise SourceRefMissing()
1042 1042
1043 1043 # target repo
1044 1044 target_ref_name = pull_request.target_ref_parts.name
1045 1045 target_ref_type = pull_request.target_ref_parts.type
1046 1046 target_ref_id = pull_request.target_ref_parts.commit_id
1047 1047 target_repo = pull_request.target_repo.scm_instance()
1048 1048
1049 1049 try:
1050 1050 if target_ref_type in self.REF_TYPES:
1051 1051 target_commit = target_repo.get_commit(
1052 1052 target_ref_name, reference_obj=pull_request.target_ref_parts)
1053 1053 else:
1054 1054 target_commit = target_repo.get_commit(target_ref_id)
1055 1055 except CommitDoesNotExistError:
1056 1056 raise TargetRefMissing()
1057 1057
1058 1058 return source_commit, target_commit
1059 1059
1060 1060 def update_commits(self, pull_request, updating_user):
1061 1061 """
1062 1062 Get the updated list of commits for the pull request
1063 1063 and return the new pull request version and the list
1064 1064 of commits processed by this update action
1065 1065
1066 1066 updating_user is the user_object who triggered the update
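
        Example usage (a sketch; names are illustrative)::

            resp = PullRequestModel().update_commits(pull_request, updating_user)
            if not resp.executed:
                log.debug('update skipped, reason: %s', resp.reason)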
1067 1067 """
1068 1068 pull_request = self.__get_pull_request(pull_request)
1069 1069 source_ref_type = pull_request.source_ref_parts.type
1070 1070 source_ref_name = pull_request.source_ref_parts.name
1071 1071 source_ref_id = pull_request.source_ref_parts.commit_id
1072 1072
1073 1073 target_ref_type = pull_request.target_ref_parts.type
1074 1074 target_ref_name = pull_request.target_ref_parts.name
1075 1075 target_ref_id = pull_request.target_ref_parts.commit_id
1076 1076
1077 1077 if not self.has_valid_update_type(pull_request):
1078 1078 log.debug("Skipping update of pull request %s due to ref type: %s",
1079 1079 pull_request, source_ref_type)
1080 1080 return UpdateResponse(
1081 1081 executed=False,
1082 1082 reason=UpdateFailureReason.WRONG_REF_TYPE,
1083 1083 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1084 1084 source_changed=False, target_changed=False)
1085 1085
1086 1086 try:
1087 1087 source_commit, target_commit = self.get_flow_commits(pull_request)
1088 1088 except SourceRefMissing:
1089 1089 return UpdateResponse(
1090 1090 executed=False,
1091 1091 reason=UpdateFailureReason.MISSING_SOURCE_REF,
1092 1092 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1093 1093 source_changed=False, target_changed=False)
1094 1094 except TargetRefMissing:
1095 1095 return UpdateResponse(
1096 1096 executed=False,
1097 1097 reason=UpdateFailureReason.MISSING_TARGET_REF,
1098 1098 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1099 1099 source_changed=False, target_changed=False)
1100 1100
1101 1101 source_changed = source_ref_id != source_commit.raw_id
1102 1102 target_changed = target_ref_id != target_commit.raw_id
1103 1103
1104 1104 if not (source_changed or target_changed):
1105 1105 log.debug("Nothing changed in pull request %s", pull_request)
1106 1106 return UpdateResponse(
1107 1107 executed=False,
1108 1108 reason=UpdateFailureReason.NO_CHANGE,
1109 1109 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1110 1110 source_changed=source_changed, target_changed=target_changed)
1111 1111
1112 1112 change_in_found = 'target repo' if target_changed else 'source repo'
1113 1113 log.debug('Updating pull request because of change in %s detected',
1114 1114 change_in_found)
1115 1115
1116 1116 # Finally there is a need for an update, in case of source change
1117 1117 # we create a new version, else just an update
1118 1118 if source_changed:
1119 1119 pull_request_version = self._create_version_from_snapshot(pull_request)
1120 1120 self._link_comments_to_version(pull_request_version)
1121 1121 else:
1122 1122 try:
1123 1123 ver = pull_request.versions[-1]
1124 1124 except IndexError:
1125 1125 ver = None
1126 1126
1127 1127 pull_request.pull_request_version_id = \
1128 1128 ver.pull_request_version_id if ver else None
1129 1129 pull_request_version = pull_request
1130 1130
1131 1131 source_repo = pull_request.source_repo.scm_instance()
1132 1132 target_repo = pull_request.target_repo.scm_instance()
1133 1133
1134 1134 # re-compute commit ids
1135 1135 old_commit_ids = pull_request.revisions
1136 1136 pre_load = ["author", "date", "message", "branch"]
1137 1137 commit_ranges = target_repo.compare(
1138 1138 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
1139 1139 pre_load=pre_load)
1140 1140
1141 1141 target_ref = target_commit.raw_id
1142 1142 source_ref = source_commit.raw_id
1143 1143 ancestor_commit_id = target_repo.get_common_ancestor(
1144 1144 target_ref, source_ref, source_repo)
1145 1145
1146 1146 if not ancestor_commit_id:
1147 1147 raise ValueError(
1148 1148 'cannot calculate diff info without a common ancestor. '
1149 1149 'Make sure both repositories are related, and have a common forking commit.')
1150 1150
1151 1151 pull_request.common_ancestor_id = ancestor_commit_id
1152 1152
1153 1153 pull_request.source_ref = '%s:%s:%s' % (
1154 1154 source_ref_type, source_ref_name, source_commit.raw_id)
1155 1155 pull_request.target_ref = '%s:%s:%s' % (
1156 1156 target_ref_type, target_ref_name, ancestor_commit_id)
1157 1157
1158 1158 pull_request.revisions = [
1159 1159 commit.raw_id for commit in reversed(commit_ranges)]
1160 1160 pull_request.updated_on = datetime.datetime.now()
1161 1161 Session().add(pull_request)
1162 1162 new_commit_ids = pull_request.revisions
1163 1163
1164 1164 old_diff_data, new_diff_data = self._generate_update_diffs(
1165 1165 pull_request, pull_request_version)
1166 1166
1167 1167 # calculate commit and file changes
1168 1168 commit_changes = self._calculate_commit_id_changes(
1169 1169 old_commit_ids, new_commit_ids)
1170 1170 file_changes = self._calculate_file_changes(
1171 1171 old_diff_data, new_diff_data)
1172 1172
1173 1173 # set comments as outdated if DIFFS changed
1174 1174 CommentsModel().outdate_comments(
1175 1175 pull_request, old_diff_data=old_diff_data,
1176 1176 new_diff_data=new_diff_data)
1177 1177
1178 1178 valid_commit_changes = (commit_changes.added or commit_changes.removed)
1179 1179 file_node_changes = (
1180 1180 file_changes.added or file_changes.modified or file_changes.removed)
1181 1181 pr_has_changes = valid_commit_changes or file_node_changes
1182 1182
1183 1183 # Add an automatic comment to the pull request, in case
1184 1184 # anything has changed
1185 1185 if pr_has_changes:
1186 1186 update_comment = CommentsModel().create(
1187 1187 text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes),
1188 1188 repo=pull_request.target_repo,
1189 1189 user=pull_request.author,
1190 1190 pull_request=pull_request,
1191 1191 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
1192 1192
1193 1193 # Update status to "Under Review" for added commits
1194 1194 for commit_id in commit_changes.added:
1195 1195 ChangesetStatusModel().set_status(
1196 1196 repo=pull_request.source_repo,
1197 1197 status=ChangesetStatus.STATUS_UNDER_REVIEW,
1198 1198 comment=update_comment,
1199 1199 user=pull_request.author,
1200 1200 pull_request=pull_request,
1201 1201 revision=commit_id)
1202 1202
1203 1203 # initial DB commit of the update changes
1204 1204 Session().commit()
1205 1205
1206 1206 if pr_has_changes:
1207 1207 # send update email to users
1208 1208 try:
1209 1209 self.notify_users(pull_request=pull_request, updating_user=updating_user,
1210 1210 ancestor_commit_id=ancestor_commit_id,
1211 1211 commit_changes=commit_changes,
1212 1212 file_changes=file_changes)
1213 1213 Session().commit()
1214 1214 except Exception:
1215 1215 log.exception('Failed to send email notification to users')
1216 1216 Session().rollback()
1217 1217
1218 1218 log.debug(
1219 1219 'Updated pull request %s, added_ids: %s, common_ids: %s, '
1220 1220 'removed_ids: %s', pull_request.pull_request_id,
1221 1221 commit_changes.added, commit_changes.common, commit_changes.removed)
1222 1222 log.debug(
1223 1223 'Updated pull request with the following file changes: %s',
1224 1224 file_changes)
1225 1225
1226 1226 log.info(
1227 1227 "Updated pull request %s from commit %s to commit %s, "
1228 1228 "stored new version %s of this pull request.",
1229 1229 pull_request.pull_request_id, source_ref_id,
1230 1230 pull_request.source_ref_parts.commit_id,
1231 1231 pull_request_version.pull_request_version_id)
1232 1232
1233 1233 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
1234 1234
1235 1235 return UpdateResponse(
1236 1236 executed=True, reason=UpdateFailureReason.NONE,
1237 1237 old=pull_request, new=pull_request_version,
1238 1238 common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes,
1239 1239 source_changed=source_changed, target_changed=target_changed)
1240 1240
1241 1241 def _create_version_from_snapshot(self, pull_request):
1242 1242 version = PullRequestVersion()
1243 1243 version.title = pull_request.title
1244 1244 version.description = pull_request.description
1245 1245 version.status = pull_request.status
1246 1246 version.pull_request_state = pull_request.pull_request_state
1247 1247 version.created_on = datetime.datetime.now()
1248 1248 version.updated_on = pull_request.updated_on
1249 1249 version.user_id = pull_request.user_id
1250 1250 version.source_repo = pull_request.source_repo
1251 1251 version.source_ref = pull_request.source_ref
1252 1252 version.target_repo = pull_request.target_repo
1253 1253 version.target_ref = pull_request.target_ref
1254 1254
1255 1255 version._last_merge_source_rev = pull_request._last_merge_source_rev
1256 1256 version._last_merge_target_rev = pull_request._last_merge_target_rev
1257 1257 version.last_merge_status = pull_request.last_merge_status
1258 1258 version.last_merge_metadata = pull_request.last_merge_metadata
1259 1259 version.shadow_merge_ref = pull_request.shadow_merge_ref
1260 1260 version.merge_rev = pull_request.merge_rev
1261 1261 version.reviewer_data = pull_request.reviewer_data
1262 1262
1263 1263 version.revisions = pull_request.revisions
1264 1264 version.common_ancestor_id = pull_request.common_ancestor_id
1265 1265 version.pull_request = pull_request
1266 1266 Session().add(version)
1267 1267 Session().flush()
1268 1268
1269 1269 return version
1270 1270
1271 1271 def _generate_update_diffs(self, pull_request, pull_request_version):
1272 1272
1273 1273 diff_context = (
1274 1274 self.DIFF_CONTEXT +
1275 1275 CommentsModel.needed_extra_diff_context())
1276 1276 hide_whitespace_changes = False
1277 1277 source_repo = pull_request_version.source_repo
1278 1278 source_ref_id = pull_request_version.source_ref_parts.commit_id
1279 1279 target_ref_id = pull_request_version.target_ref_parts.commit_id
1280 1280 old_diff = self._get_diff_from_pr_or_version(
1281 1281 source_repo, source_ref_id, target_ref_id,
1282 1282 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1283 1283
1284 1284 source_repo = pull_request.source_repo
1285 1285 source_ref_id = pull_request.source_ref_parts.commit_id
1286 1286 target_ref_id = pull_request.target_ref_parts.commit_id
1287 1287
1288 1288 new_diff = self._get_diff_from_pr_or_version(
1289 1289 source_repo, source_ref_id, target_ref_id,
1290 1290 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1291 1291
1292 1292 old_diff_data = diffs.DiffProcessor(old_diff)
1293 1293 old_diff_data.prepare()
1294 1294 new_diff_data = diffs.DiffProcessor(new_diff)
1295 1295 new_diff_data.prepare()
1296 1296
1297 1297 return old_diff_data, new_diff_data
1298 1298
1299 1299 def _link_comments_to_version(self, pull_request_version):
1300 1300 """
1301 1301 Link all unlinked comments of this pull request to the given version.
1302 1302
1303 1303 :param pull_request_version: The `PullRequestVersion` to which
1304 1304 the comments shall be linked.
1305 1305
1306 1306 """
1307 1307 pull_request = pull_request_version.pull_request
1308 1308 comments = ChangesetComment.query()\
1309 1309 .filter(
1310 1310 # TODO: johbo: Should we query for the repo at all here?
1311 1311 # Pending decision on how comments of PRs are to be related
1312 1312 # to either the source repo, the target repo or no repo at all.
1313 1313 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
1314 1314 ChangesetComment.pull_request == pull_request,
1315 1315 ChangesetComment.pull_request_version == None)\
1316 1316 .order_by(ChangesetComment.comment_id.asc())
1317 1317
1318 1318 # TODO: johbo: Find out why this breaks if it is done in a bulk
1319 1319 # operation.
1320 1320 for comment in comments:
1321 1321 comment.pull_request_version_id = (
1322 1322 pull_request_version.pull_request_version_id)
1323 1323 Session().add(comment)
1324 1324
1325 1325 def _calculate_commit_id_changes(self, old_ids, new_ids):
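        """
        Partition commit ids into added/common/removed relative to old_ids.

        Example (illustrative ids; field names match the usage above)::

            _calculate_commit_id_changes(['a', 'b'], ['b', 'c'])
            # -> ChangeTuple(added=['c'], common=['b'], removed=['a'],
            #                total=['b', 'c'])
        """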
1326 1326 added = [x for x in new_ids if x not in old_ids]
1327 1327 common = [x for x in new_ids if x in old_ids]
1328 1328 removed = [x for x in old_ids if x not in new_ids]
1329 1329 total = new_ids
1330 1330 return ChangeTuple(added, common, removed, total)
1331 1331
1332 1332 def _calculate_file_changes(self, old_diff_data, new_diff_data):
1333 1333
1334 1334 old_files = OrderedDict()
1335 1335 for diff_data in old_diff_data.parsed_diff:
1336 1336 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
1337 1337
1338 1338 added_files = []
1339 1339 modified_files = []
1340 1340 removed_files = []
1341 1341 for diff_data in new_diff_data.parsed_diff:
1342 1342 new_filename = diff_data['filename']
1343 1343 new_hash = md5_safe(diff_data['raw_diff'])
1344 1344
1345 1345 old_hash = old_files.get(new_filename)
1346 1346 if not old_hash:
1347 1347 # file is not present in the old diff, we have to figure out the
1348 1348 # operation (ADD/REMOVE) from the parsed diff
1349 1349 operations_dict = diff_data['stats']['ops']
1350 1350 if diffs.DEL_FILENODE in operations_dict:
1351 1351 removed_files.append(new_filename)
1352 1352 else:
1353 1353 added_files.append(new_filename)
1354 1354 else:
1355 1355 if new_hash != old_hash:
1356 1356 modified_files.append(new_filename)
1357 1357 # now remove a file from old, since we have seen it already
1358 1358 del old_files[new_filename]
1359 1359
1360 1360 # removed files are those present in old, but not in NEW;
1361 1361 # since we remove old files that are present in the new diff, any
1362 1362 # left-overs should be the removed files
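            # e.g. old diff touched {a, b, c}; new diff has a (modified) and
            # d (added) -> after the loop old_files holds {b, c}, which become
            # the removed files (values illustrative)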
1363 1363 removed_files.extend(old_files.keys())
1364 1364
1365 1365 return FileChangeTuple(added_files, modified_files, removed_files)
1366 1366
1367 1367 def _render_update_message(self, ancestor_commit_id, changes, file_changes):
1368 1368 """
1369 1369 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
1370 1370 so it always looks the same regardless of which default renderer
1371 1371 the system is using.
1372 1372
1373 1373 :param ancestor_commit_id: ancestor raw_id
1374 1374 :param changes: changes named tuple
1375 1375 :param file_changes: file changes named tuple
1376 1376
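        Example usage (sketch, mirroring the call in update_commits)::

            msg = self._render_update_message(ancestor_commit_id,
                                              commit_changes, file_changes)
            CommentsModel().create(text=msg, repo=pull_request.target_repo,
                                   user=pull_request.author,
                                   pull_request=pull_request,
                                   send_email=False,
                                   renderer=DEFAULT_COMMENTS_RENDERER)
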
1377 1377 """
1378 1378 new_status = ChangesetStatus.get_status_lbl(
1379 1379 ChangesetStatus.STATUS_UNDER_REVIEW)
1380 1380
1381 1381 changed_files = (
1382 1382 file_changes.added + file_changes.modified + file_changes.removed)
1383 1383
1384 1384 params = {
1385 1385 'under_review_label': new_status,
1386 1386 'added_commits': changes.added,
1387 1387 'removed_commits': changes.removed,
1388 1388 'changed_files': changed_files,
1389 1389 'added_files': file_changes.added,
1390 1390 'modified_files': file_changes.modified,
1391 1391 'removed_files': file_changes.removed,
1392 1392 'ancestor_commit_id': ancestor_commit_id
1393 1393 }
1394 1394 renderer = RstTemplateRenderer()
1395 1395 return renderer.render('pull_request_update.mako', **params)
1396 1396
1397 1397 def edit(self, pull_request, title, description, description_renderer, user):
1398 1398 pull_request = self.__get_pull_request(pull_request)
1399 1399 old_data = pull_request.get_api_data(with_merge_state=False)
1400 1400 if pull_request.is_closed():
1401 1401 raise ValueError('This pull request is closed')
1402 1402 if title:
1403 1403 pull_request.title = title
1404 1404 pull_request.description = description
1405 1405 pull_request.updated_on = datetime.datetime.now()
1406 1406 pull_request.description_renderer = description_renderer
1407 1407 Session().add(pull_request)
1408 1408 self._log_audit_action(
1409 1409 'repo.pull_request.edit', {'old_data': old_data},
1410 1410 user, pull_request)
1411 1411
1412 1412 def update_reviewers(self, pull_request, reviewer_data, user):
1413 1413 """
1414 1414 Update the reviewers in the pull request
1415 1415
1416 1416 :param pull_request: the pr to update
1417 1417 :param reviewer_data: list of tuples
1418 1418 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1419 1419 :param user: current user who triggers this action
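
        Example ``reviewer_data`` entry (hypothetical values)::

            [(5, ['added manually'], False,
              PullRequestReviewers.ROLE_REVIEWER, [])]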
1420 1420 """
1421 1421
1422 1422 pull_request = self.__get_pull_request(pull_request)
1423 1423 if pull_request.is_closed():
1424 1424 raise ValueError('This pull request is closed')
1425 1425
1426 1426 reviewers = {}
1427 1427 for user_id, reasons, mandatory, role, rules in reviewer_data:
1428 1428 if isinstance(user_id, (int, str)):
1429 1429 user_id = self._get_user(user_id).user_id
1430 1430 reviewers[user_id] = {
1431 1431 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1432 1432
1433 1433 reviewers_ids = set(reviewers.keys())
1434 1434 current_reviewers = PullRequestReviewers.get_pull_request_reviewers(
1435 1435 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_REVIEWER)
1436 1436
1437 1437 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1438 1438
1439 1439 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1440 1440 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1441 1441
1442 1442 log.debug("Adding %s reviewers", ids_to_add)
1443 1443 log.debug("Removing %s reviewers", ids_to_remove)
1444 1444 changed = False
1445 1445 added_audit_reviewers = []
1446 1446 removed_audit_reviewers = []
1447 1447
1448 1448 for uid in ids_to_add:
1449 1449 changed = True
1450 1450 _usr = self._get_user(uid)
1451 1451 reviewer = PullRequestReviewers()
1452 1452 reviewer.user = _usr
1453 1453 reviewer.pull_request = pull_request
1454 1454 reviewer.reasons = reviewers[uid]['reasons']
1455 1455 # NOTE(marcink): mandatory shouldn't be changed now
1456 1456 # reviewer.mandatory = reviewers[uid]['reasons']
1457 1457 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1458 1458 reviewer.role = PullRequestReviewers.ROLE_REVIEWER
1459 1459 Session().add(reviewer)
1460 1460 added_audit_reviewers.append(reviewer.get_dict())
1461 1461
1462 1462 for uid in ids_to_remove:
1463 1463 changed = True
1464 1464 # NOTE(marcink): we fetch "ALL" reviewer objects using .all().
1465 1465 # This is an edge case that handles a previous state of having the same reviewer twice.
1466 1466 # this CAN happen due to the lack of DB checks
1467 1467 reviewers = PullRequestReviewers.query()\
1468 1468 .filter(PullRequestReviewers.user_id == uid,
1469 1469 PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER,
1470 1470 PullRequestReviewers.pull_request == pull_request)\
1471 1471 .all()
1472 1472
1473 1473 for obj in reviewers:
1474 1474 removed_audit_reviewers.append(obj.get_dict())
1475 1475 Session().delete(obj)
1476 1476
1477 1477 if changed:
1478 1478 Session().expire_all()
1479 1479 pull_request.updated_on = datetime.datetime.now()
1480 1480 Session().add(pull_request)
1481 1481
1482 1482 # finally store audit logs
1483 1483 for user_data in added_audit_reviewers:
1484 1484 self._log_audit_action(
1485 1485 'repo.pull_request.reviewer.add', {'data': user_data},
1486 1486 user, pull_request)
1487 1487 for user_data in removed_audit_reviewers:
1488 1488 self._log_audit_action(
1489 1489 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1490 1490 user, pull_request)
1491 1491
1492 1492 self.notify_reviewers(pull_request, ids_to_add, user)
1493 1493 return ids_to_add, ids_to_remove
1494 1494
1495 1495 def update_observers(self, pull_request, observer_data, user):
1496 1496 """
1497 1497 Update the observers in the pull request
1498 1498
1499 1499 :param pull_request: the pr to update
1500 1500 :param observer_data: list of tuples
1501 1501 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1502 1502 :param user: current user who triggers this action
1503 1503 """
1504 1504 pull_request = self.__get_pull_request(pull_request)
1505 1505 if pull_request.is_closed():
1506 1506 raise ValueError('This pull request is closed')
1507 1507
1508 1508 observers = {}
1509 1509 for user_id, reasons, mandatory, role, rules in observer_data:
1510 1510 if isinstance(user_id, (int, str)):
1511 1511 user_id = self._get_user(user_id).user_id
1512 1512 observers[user_id] = {
1513 1513 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1514 1514
1515 1515 observers_ids = set(observers.keys())
1516 1516 current_observers = PullRequestReviewers.get_pull_request_reviewers(
1517 1517 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_OBSERVER)
1518 1518
1519 1519 current_observers_ids = set([x.user.user_id for x in current_observers])
1520 1520
1521 1521 ids_to_add = observers_ids.difference(current_observers_ids)
1522 1522 ids_to_remove = current_observers_ids.difference(observers_ids)
1523 1523
1524 1524 log.debug("Adding %s observer", ids_to_add)
1525 1525 log.debug("Removing %s observer", ids_to_remove)
1526 1526 changed = False
1527 1527 added_audit_observers = []
1528 1528 removed_audit_observers = []
1529 1529
1530 1530 for uid in ids_to_add:
1531 1531 changed = True
1532 1532 _usr = self._get_user(uid)
1533 1533 observer = PullRequestReviewers()
1534 1534 observer.user = _usr
1535 1535 observer.pull_request = pull_request
1536 1536 observer.reasons = observers[uid]['reasons']
1537 1537 # NOTE(marcink): mandatory shouldn't be changed now
1538 1538 # observer.mandatory = observer[uid]['reasons']
1539 1539
1540 1540 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1541 1541 observer.role = PullRequestReviewers.ROLE_OBSERVER
1542 1542 Session().add(observer)
1543 1543 added_audit_observers.append(observer.get_dict())
1544 1544
1545 1545 for uid in ids_to_remove:
1546 1546 changed = True
1547 1547 # NOTE(marcink): we fetch "ALL" observer objects using .all().
1548 1548 # This is an edge case that handles a previous state of having the same observer twice.
1549 1549 # this CAN happen due to the lack of DB checks
1550 1550 observers = PullRequestReviewers.query()\
1551 1551 .filter(PullRequestReviewers.user_id == uid,
1552 1552 PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER,
1553 1553 PullRequestReviewers.pull_request == pull_request)\
1554 1554 .all()
1555 1555
1556 1556 for obj in observers:
1557 1557 removed_audit_observers.append(obj.get_dict())
1558 1558 Session().delete(obj)
1559 1559
1560 1560 if changed:
1561 1561 Session().expire_all()
1562 1562 pull_request.updated_on = datetime.datetime.now()
1563 1563 Session().add(pull_request)
1564 1564
1565 1565 # finally store audit logs
1566 1566 for user_data in added_audit_observers:
1567 1567 self._log_audit_action(
1568 1568 'repo.pull_request.observer.add', {'data': user_data},
1569 1569 user, pull_request)
1570 1570 for user_data in removed_audit_observers:
1571 1571 self._log_audit_action(
1572 1572 'repo.pull_request.observer.delete', {'old_data': user_data},
1573 1573 user, pull_request)
1574 1574
1575 1575 self.notify_observers(pull_request, ids_to_add, user)
1576 1576 return ids_to_add, ids_to_remove
1577 1577
1578 1578 def get_url(self, pull_request, request=None, permalink=False):
1579 1579 if not request:
1580 1580 request = get_current_request()
1581 1581
1582 1582 if permalink:
1583 1583 return request.route_url(
1584 1584 'pull_requests_global',
1585 1585 pull_request_id=pull_request.pull_request_id,)
1586 1586 else:
1587 1587 return request.route_url('pullrequest_show',
1588 1588 repo_name=safe_str(pull_request.target_repo.repo_name),
1589 1589 pull_request_id=pull_request.pull_request_id,)
1590 1590
1591 1591 def get_shadow_clone_url(self, pull_request, request=None):
1592 1592 """
1593 1593 Returns the qualified URL pointing to the shadow repository. If this
1594 1594 pull request is closed there is no shadow repository and ``None`` will
1595 1595 be returned.
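
        Example return value (illustrative host/repo)::

            'https://code.example.com/repo-a/pull-request/42/repository'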
1596 1596 """
1597 1597 if pull_request.is_closed():
1598 1598 return None
1599 1599 else:
1600 1600 pr_url = urllib.parse.unquote(self.get_url(pull_request, request=request))
1601 1601 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1602 1602
1603 1603 def _notify_reviewers(self, pull_request, user_ids, role, user):
1604 1604 # notification to reviewers/observers
1605 1605 if not user_ids:
1606 1606 return
1607 1607
1608 1608 log.debug('Notify following %s users about pull-request %s', role, user_ids)
1609 1609
1610 1610 pull_request_obj = pull_request
1611 1611 # get the current participants of this pull request
1612 1612 recipients = user_ids
1613 1613 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1614 1614
1615 1615 pr_source_repo = pull_request_obj.source_repo
1616 1616 pr_target_repo = pull_request_obj.target_repo
1617 1617
1618 1618 pr_url = h.route_url('pullrequest_show',
1619 1619 repo_name=pr_target_repo.repo_name,
1620 1620 pull_request_id=pull_request_obj.pull_request_id,)
1621 1621
1622 1622 # set some variables for email notification
1623 1623 pr_target_repo_url = h.route_url(
1624 1624 'repo_summary', repo_name=pr_target_repo.repo_name)
1625 1625
1626 1626 pr_source_repo_url = h.route_url(
1627 1627 'repo_summary', repo_name=pr_source_repo.repo_name)
1628 1628
1629 1629 # pull request specifics
1630 1630 pull_request_commits = [
1631 1631 (x.raw_id, x.message)
1632 1632 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1633 1633
1634 1634 current_rhodecode_user = user
1635 1635 kwargs = {
1636 1636 'user': current_rhodecode_user,
1637 1637 'pull_request_author': pull_request.author,
1638 1638 'pull_request': pull_request_obj,
1639 1639 'pull_request_commits': pull_request_commits,
1640 1640
1641 1641 'pull_request_target_repo': pr_target_repo,
1642 1642 'pull_request_target_repo_url': pr_target_repo_url,
1643 1643
1644 1644 'pull_request_source_repo': pr_source_repo,
1645 1645 'pull_request_source_repo_url': pr_source_repo_url,
1646 1646
1647 1647 'pull_request_url': pr_url,
1648 1648 'thread_ids': [pr_url],
1649 1649 'user_role': role
1650 1650 }
1651 1651
1652 1652 # create notification objects, and emails
1653 1653 NotificationModel().create(
1654 1654 created_by=current_rhodecode_user,
1655 1655 notification_subject='', # Filled in based on the notification_type
1656 1656 notification_body='', # Filled in based on the notification_type
1657 1657 notification_type=notification_type,
1658 1658 recipients=recipients,
1659 1659 email_kwargs=kwargs,
1660 1660 )
1661 1661
1662 1662 def notify_reviewers(self, pull_request, reviewers_ids, user):
1663 1663 return self._notify_reviewers(pull_request, reviewers_ids,
1664 1664 PullRequestReviewers.ROLE_REVIEWER, user)
1665 1665
1666 1666 def notify_observers(self, pull_request, observers_ids, user):
1667 1667 return self._notify_reviewers(pull_request, observers_ids,
1668 1668 PullRequestReviewers.ROLE_OBSERVER, user)
1669 1669
1670 1670 def notify_users(self, pull_request, updating_user, ancestor_commit_id,
1671 1671 commit_changes, file_changes):
1672 1672
1673 1673 updating_user_id = updating_user.user_id
1674 1674 reviewers = set([x.user.user_id for x in pull_request.get_pull_request_reviewers()])
1675 1675 # NOTE(marcink): send notification to all other users except to
1676 1676 # person who updated the PR
1677 1677 recipients = reviewers.difference(set([updating_user_id]))
1678 1678
1679 1679 log.debug('Notify following recipients about pull-request update %s', recipients)
1680 1680
1681 1681 pull_request_obj = pull_request
1682 1682
1683 1683 # send email about the update
1684 1684 changed_files = (
1685 1685 file_changes.added + file_changes.modified + file_changes.removed)
1686 1686
1687 1687 pr_source_repo = pull_request_obj.source_repo
1688 1688 pr_target_repo = pull_request_obj.target_repo
1689 1689
1690 1690 pr_url = h.route_url('pullrequest_show',
1691 1691 repo_name=pr_target_repo.repo_name,
1692 1692 pull_request_id=pull_request_obj.pull_request_id,)
1693 1693
1694 1694 # set some variables for email notification
1695 1695 pr_target_repo_url = h.route_url(
1696 1696 'repo_summary', repo_name=pr_target_repo.repo_name)
1697 1697
1698 1698 pr_source_repo_url = h.route_url(
1699 1699 'repo_summary', repo_name=pr_source_repo.repo_name)
1700 1700
1701 1701 email_kwargs = {
1702 1702 'date': datetime.datetime.now(),
1703 1703 'updating_user': updating_user,
1704 1704
1705 1705 'pull_request': pull_request_obj,
1706 1706
1707 1707 'pull_request_target_repo': pr_target_repo,
1708 1708 'pull_request_target_repo_url': pr_target_repo_url,
1709 1709
1710 1710 'pull_request_source_repo': pr_source_repo,
1711 1711 'pull_request_source_repo_url': pr_source_repo_url,
1712 1712
1713 1713 'pull_request_url': pr_url,
1714 1714
1715 1715 'ancestor_commit_id': ancestor_commit_id,
1716 1716 'added_commits': commit_changes.added,
1717 1717 'removed_commits': commit_changes.removed,
1718 1718 'changed_files': changed_files,
1719 1719 'added_files': file_changes.added,
1720 1720 'modified_files': file_changes.modified,
1721 1721 'removed_files': file_changes.removed,
1722 1722 'thread_ids': [pr_url],
1723 1723 }
1724 1724
1725 1725 # create notification objects, and emails
1726 1726 NotificationModel().create(
1727 1727 created_by=updating_user,
1728 1728 notification_subject='', # Filled in based on the notification_type
1729 1729 notification_body='', # Filled in based on the notification_type
1730 1730 notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE,
1731 1731 recipients=recipients,
1732 1732 email_kwargs=email_kwargs,
1733 1733 )
1734 1734
1735 1735 def delete(self, pull_request, user=None):
1736 1736 if not user:
1737 1737 user = getattr(get_current_rhodecode_user(), 'username', None)
1738 1738
1739 1739 pull_request = self.__get_pull_request(pull_request)
1740 1740 old_data = pull_request.get_api_data(with_merge_state=False)
1741 1741 self._cleanup_merge_workspace(pull_request)
1742 1742 self._log_audit_action(
1743 1743 'repo.pull_request.delete', {'old_data': old_data},
1744 1744 user, pull_request)
1745 1745 Session().delete(pull_request)
1746 1746
1747 1747 def close_pull_request(self, pull_request, user):
1748 1748 pull_request = self.__get_pull_request(pull_request)
1749 1749 self._cleanup_merge_workspace(pull_request)
1750 1750 pull_request.status = PullRequest.STATUS_CLOSED
1751 1751 pull_request.updated_on = datetime.datetime.now()
1752 1752 Session().add(pull_request)
1753 1753 self.trigger_pull_request_hook(pull_request, pull_request.author, 'close')
1754 1754
1755 1755 pr_data = pull_request.get_api_data(with_merge_state=False)
1756 1756 self._log_audit_action(
1757 1757 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1758 1758
1759 1759 def close_pull_request_with_comment(
1760 1760 self, pull_request, user, repo, message=None, auth_user=None):
1761 1761
1762 1762 pull_request_review_status = pull_request.calculated_review_status()
1763 1763
1764 1764 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1765 1765 # approved only if we have voting consent
1766 1766 status = ChangesetStatus.STATUS_APPROVED
1767 1767 else:
1768 1768 status = ChangesetStatus.STATUS_REJECTED
1769 1769 status_lbl = ChangesetStatus.get_status_lbl(status)
1770 1770
1771 1771 default_message = (
1772 1772 'Closing with status change {transition_icon} {status}.'
1773 1773 ).format(transition_icon='>', status=status_lbl)
1774 1774 text = message or default_message
1775 1775
1776 1776 # create a comment, and link it to new status
1777 1777 comment = CommentsModel().create(
1778 1778 text=text,
1779 1779 repo=repo.repo_id,
1780 1780 user=user.user_id,
1781 1781 pull_request=pull_request.pull_request_id,
1782 1782 status_change=status_lbl,
1783 1783 status_change_type=status,
1784 1784 closing_pr=True,
1785 1785 auth_user=auth_user,
1786 1786 )
1787 1787
1788 1788 # calculate old status before we change it
1789 1789 old_calculated_status = pull_request.calculated_review_status()
1790 1790 ChangesetStatusModel().set_status(
1791 1791 repo.repo_id,
1792 1792 status,
1793 1793 user.user_id,
1794 1794 comment=comment,
1795 1795 pull_request=pull_request.pull_request_id
1796 1796 )
1797 1797
1798 1798 Session().flush()
1799 1799
1800 1800 self.trigger_pull_request_hook(pull_request, user, 'comment',
1801 1801 data={'comment': comment})
1802 1802
1803 1803 # we now calculate the status of pull request again, and based on that
1804 1804 # calculation trigger a status change. This might happen when a
1805 1805 # non-reviewer admin closes a PR, which means their vote doesn't
1806 1806 # change the status, while if they're a reviewer it might change it.
1807 1807 calculated_status = pull_request.calculated_review_status()
1808 1808 if old_calculated_status != calculated_status:
1809 1809 self.trigger_pull_request_hook(pull_request, user, 'review_status_change',
1810 1810 data={'status': calculated_status})
1811 1811
1812 1812 # finally close the PR
1813 1813 PullRequestModel().close_pull_request(pull_request.pull_request_id, user)
1814 1814
1815 1815 return comment, status
1816 1816
1817 1817 def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False):
1818 1818 _ = translator or get_current_request().translate
1819 1819
1820 1820 if not self._is_merge_enabled(pull_request):
1821 1821 return None, False, _('Server-side pull request merging is disabled.')
1822 1822
1823 1823 if pull_request.is_closed():
1824 1824 return None, False, _('This pull request is closed.')
1825 1825
1826 1826 merge_possible, msg = self._check_repo_requirements(
1827 1827 target=pull_request.target_repo, source=pull_request.source_repo,
1828 1828 translator=_)
1829 1829 if not merge_possible:
1830 1830 return None, merge_possible, msg
1831 1831
1832 1832 try:
1833 1833 merge_response = self._try_merge(
1834 1834 pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh)
1835 1835 log.debug("Merge response: %s", merge_response)
1836 1836 return merge_response, merge_response.possible, merge_response.merge_status_message
1837 1837 except NotImplementedError:
1838 1838 return None, False, _('Pull request merging is not supported.')
1839 1839
1840 1840 def _check_repo_requirements(self, target, source, translator):
1841 1841 """
1842 1842 Check if `target` and `source` have compatible requirements.
1843 1843
1844 1844 Currently this is just checking for largefiles.
1845 1845 """
1846 1846 _ = translator
1847 1847 target_has_largefiles = self._has_largefiles(target)
1848 1848 source_has_largefiles = self._has_largefiles(source)
1849 1849 merge_possible = True
1850 1850 message = u''
1851 1851
1852 1852 if target_has_largefiles != source_has_largefiles:
1853 1853 merge_possible = False
1854 1854 if source_has_largefiles:
1855 1855 message = _(
1856 1856 'Target repository large files support is disabled.')
1857 1857 else:
1858 1858 message = _(
1859 1859 'Source repository large files support is disabled.')
1860 1860
1861 1861 return merge_possible, message
1862 1862
1863 1863 def _has_largefiles(self, repo):
1864 1864 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1865 1865 'extensions', 'largefiles')
1866 1866 return largefiles_ui and largefiles_ui[0].active
1867 1867
1868 1868 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1869 1869 """
1870 1870 Try to merge the pull request and return the merge status.
1871 1871 """
1872 1872 log.debug(
1873 1873 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1874 1874 pull_request.pull_request_id, force_shadow_repo_refresh)
1875 1875 target_vcs = pull_request.target_repo.scm_instance()
1876 1876 # Refresh the target reference.
1877 1877 try:
1878 1878 target_ref = self._refresh_reference(
1879 1879 pull_request.target_ref_parts, target_vcs)
1880 1880 except CommitDoesNotExistError:
1881 1881 merge_state = MergeResponse(
1882 1882 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1883 1883 metadata={'target_ref': pull_request.target_ref_parts})
1884 1884 return merge_state
1885 1885
1886 1886 target_locked = pull_request.target_repo.locked
1887 1887 if target_locked and target_locked[0]:
1888 1888 locked_by = 'user:{}'.format(target_locked[0])
1889 1889 log.debug("The target repository is locked by %s.", locked_by)
1890 1890 merge_state = MergeResponse(
1891 1891 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1892 1892 metadata={'locked_by': locked_by})
1893 1893 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1894 1894 pull_request, target_ref):
1895 1895 log.debug("Refreshing the merge status of the repository.")
1896 1896 merge_state = self._refresh_merge_state(
1897 1897 pull_request, target_vcs, target_ref)
1898 1898 else:
1899 1899 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1900 1900 metadata = {
1901 1901 'unresolved_files': '',
1902 1902 'target_ref': pull_request.target_ref_parts,
1903 1903 'source_ref': pull_request.source_ref_parts,
1904 1904 }
1905 1905 if pull_request.last_merge_metadata:
1906 1906 metadata.update(pull_request.last_merge_metadata_parsed)
1907 1907
1908 1908 if not possible and target_ref.type == 'branch':
1909 1909 # NOTE(marcink): case for mercurial multiple heads on branch
1910 1910 heads = target_vcs._heads(target_ref.name)
1911 1911 if len(heads) != 1:
1912 1912 heads = ',\n'.join(heads)
1913 1913 metadata.update({
1914 1914 'heads': heads
1915 1915 })
1916 1916
1917 1917 merge_state = MergeResponse(
1918 1918 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1919 1919
1920 1920 return merge_state
1921 1921
1922 1922 def _refresh_reference(self, reference, vcs_repository):
1923 1923 if reference.type in self.UPDATABLE_REF_TYPES:
1924 1924 name_or_id = reference.name
1925 1925 else:
1926 1926 name_or_id = reference.commit_id
1927 1927
1928 1928 refreshed_commit = vcs_repository.get_commit(name_or_id)
1929 1929 refreshed_reference = Reference(
1930 1930 reference.type, reference.name, refreshed_commit.raw_id)
1931 1931 return refreshed_reference
1932 1932
1933 1933 def _needs_merge_state_refresh(self, pull_request, target_reference):
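        # a refresh is needed unless the cached merge state still matches both
        # tips: the source tip (revisions[0]) and the target reference commit id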
1934 1934 return not (
1935 1935 pull_request.revisions and
1936 1936 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1937 1937 target_reference.commit_id == pull_request._last_merge_target_rev)
1938 1938
1939 1939 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1940 1940 workspace_id = self._workspace_id(pull_request)
1941 1941 source_vcs = pull_request.source_repo.scm_instance()
1942 1942 repo_id = pull_request.target_repo.repo_id
1943 1943 use_rebase = self._use_rebase_for_merging(pull_request)
1944 1944 close_branch = self._close_branch_before_merging(pull_request)
1945 1945 merge_state = target_vcs.merge(
1946 1946 repo_id, workspace_id,
1947 1947 target_reference, source_vcs, pull_request.source_ref_parts,
1948 1948 dry_run=True, use_rebase=use_rebase,
1949 1949 close_branch=close_branch)
1950 1950
1951 1951 # Do not store the response if there was an unknown error.
1952 1952 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1953 1953 pull_request._last_merge_source_rev = \
1954 1954 pull_request.source_ref_parts.commit_id
1955 1955 pull_request._last_merge_target_rev = target_reference.commit_id
1956 1956 pull_request.last_merge_status = merge_state.failure_reason
1957 1957 pull_request.last_merge_metadata = merge_state.metadata
1958 1958
1959 1959 pull_request.shadow_merge_ref = merge_state.merge_ref
1960 1960 Session().add(pull_request)
1961 1961 Session().commit()
1962 1962
1963 1963 return merge_state
1964 1964
1965 1965 def _workspace_id(self, pull_request):
1966 1966 workspace_id = 'pr-%s' % pull_request.pull_request_id
1967 1967 return workspace_id
1968 1968
1969 1969 def generate_repo_data(self, repo, commit_id=None, branch=None,
1970 1970 bookmark=None, translator=None):
1971 1971 from rhodecode.model.repo import RepoModel
1972 1972
1973 1973 all_refs, selected_ref = \
1974 1974 self._get_repo_pullrequest_sources(
1975 1975 repo.scm_instance(), commit_id=commit_id,
1976 1976 branch=branch, bookmark=bookmark, translator=translator)
1977 1977
1978 1978 refs_select2 = []
1979 1979 for element in all_refs:
1980 1980 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1981 1981 refs_select2.append({'text': element[1], 'children': children})
1982 1982
1983 1983 return {
1984 1984 'user': {
1985 1985 'user_id': repo.user.user_id,
1986 1986 'username': repo.user.username,
1987 1987 'firstname': repo.user.first_name,
1988 1988 'lastname': repo.user.last_name,
1989 1989 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1990 1990 },
1991 1991 'name': repo.repo_name,
1992 1992 'link': RepoModel().get_url(repo),
1993 1993 'description': h.chop_at_smart(repo.description_safe, '\n'),
1994 1994 'refs': {
1995 1995 'all_refs': all_refs,
1996 1996 'selected_ref': selected_ref,
1997 1997 'select2_refs': refs_select2
1998 1998 }
1999 1999 }
2000 2000
2001 2001 def generate_pullrequest_title(self, source, source_ref, target):
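        # e.g. generate_pullrequest_title('repo-a', 'feature-x', 'repo-b')
        # -> u'repo-a#feature-x to repo-b' (values illustrative)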
2002 2002 return u'{source}#{at_ref} to {target}'.format(
2003 2003 source=source,
2004 2004 at_ref=source_ref,
2005 2005 target=target,
2006 2006 )
2007 2007
2008 2008 def _cleanup_merge_workspace(self, pull_request):
2009 2009 # Merging related cleanup
2010 2010 repo_id = pull_request.target_repo.repo_id
2011 2011 target_scm = pull_request.target_repo.scm_instance()
2012 2012 workspace_id = self._workspace_id(pull_request)
2013 2013
2014 2014 try:
2015 2015 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
2016 2016 except NotImplementedError:
2017 2017 pass
2018 2018
2019 2019 def _get_repo_pullrequest_sources(
2020 2020 self, repo, commit_id=None, branch=None, bookmark=None,
2021 2021 translator=None):
2022 2022 """
2023 2023 Return a structure with repo's interesting commits, suitable for
2024 2024 the selectors in pullrequest controller
2025 2025
2026 2026 :param commit_id: a commit that must be in the list somehow
2027 2027 and selected by default
2028 2028 :param branch: a branch that must be in the list and selected
2029 2029 by default - even if closed
2030 2030 :param bookmark: a bookmark that must be in the list and selected
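
        Example return value (illustrative)::

            ([([('branch:default:abc123', 'default')], 'Branches')],
             'branch:default:abc123')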
2031 2031 """
2032 2032 _ = translator or get_current_request().translate
2033 2033
2034 2034 commit_id = safe_str(commit_id) if commit_id else None
2035 2035 branch = safe_unicode(branch) if branch else None
2036 2036 bookmark = safe_unicode(bookmark) if bookmark else None
2037 2037
2038 2038 selected = None
2039 2039
2040 2040 # order matters: first source that has commit_id in it will be selected
2041 2041 sources = []
2042 2042 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
2043 2043 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
2044 2044
2045 2045 if commit_id:
2046 2046 ref_commit = (h.short_id(commit_id), commit_id)
2047 2047 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
2048 2048
2049 2049 sources.append(
2050 2050 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
2051 2051 )
2052 2052
2053 2053 groups = []
2054 2054
2055 2055 for group_key, ref_list, group_name, match in sources:
2056 2056 group_refs = []
2057 2057 for ref_name, ref_id in ref_list:
2058 2058 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
2059 2059 group_refs.append((ref_key, ref_name))
2060 2060
2061 2061 if not selected:
2062 2062 if set([commit_id, match]) & set([ref_id, ref_name]):
2063 2063 selected = ref_key
2064 2064
2065 2065 if group_refs:
2066 2066 groups.append((group_refs, group_name))
2067 2067
2068 2068 if not selected:
2069 2069 ref = commit_id or branch or bookmark
2070 2070 if ref:
2071 2071 raise CommitDoesNotExistError(
2072 2072 u'No commit refs could be found matching: {}'.format(ref))
2073 2073 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
2074 2074 selected = u'branch:{}:{}'.format(
2075 2075 safe_unicode(repo.DEFAULT_BRANCH_NAME),
2076 2076 safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
2077 2077 )
2078 2078 elif repo.commit_ids:
2079 2079 # make the user select in this case
2080 2080 selected = None
2081 2081 else:
2082 2082 raise EmptyRepositoryError()
2083 2083 return groups, selected
2084 2084
2085 2085 def get_diff(self, source_repo, source_ref_id, target_ref_id,
2086 2086 hide_whitespace_changes, diff_context):
2087 2087
2088 2088 return self._get_diff_from_pr_or_version(
2089 2089 source_repo, source_ref_id, target_ref_id,
2090 2090 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
2091 2091
2092 2092 def _get_diff_from_pr_or_version(
2093 2093 self, source_repo, source_ref_id, target_ref_id,
2094 2094 hide_whitespace_changes, diff_context):
2095 2095
2096 2096 target_commit = source_repo.get_commit(
2097 2097 commit_id=safe_str(target_ref_id))
2098 2098 source_commit = source_repo.get_commit(
2099 2099 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
2100 2100 if isinstance(source_repo, Repository):
2101 2101 vcs_repo = source_repo.scm_instance()
2102 2102 else:
2103 2103 vcs_repo = source_repo
2104 2104
2105 2105 # TODO: johbo: In the context of an update, we cannot reach
2106 2106 # the old commit anymore with our normal mechanisms. It needs
2107 2107 # some sort of special support in the vcs layer to avoid this
2108 2108 # workaround.
2109 2109 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
2110 2110 vcs_repo.alias == 'git'):
2111 2111 source_commit.raw_id = safe_str(source_ref_id)
2112 2112
2113 2113 log.debug('calculating diff between '
2114 2114 'source_ref:%s and target_ref:%s for repo `%s`',
2115 2115 source_ref_id, target_ref_id,
2116 2116 safe_unicode(vcs_repo.path))
2117 2117
2118 2118 vcs_diff = vcs_repo.get_diff(
2119 2119 commit1=target_commit, commit2=source_commit,
2120 2120 ignore_whitespace=hide_whitespace_changes, context=diff_context)
2121 2121 return vcs_diff
2122 2122
2123 2123 def _is_merge_enabled(self, pull_request):
2124 2124 return self._get_general_setting(
2125 2125 pull_request, 'rhodecode_pr_merge_enabled')
2126 2126
2127 2127 def _use_rebase_for_merging(self, pull_request):
2128 2128 repo_type = pull_request.target_repo.repo_type
2129 2129 if repo_type == 'hg':
2130 2130 return self._get_general_setting(
2131 2131 pull_request, 'rhodecode_hg_use_rebase_for_merging')
2132 2132 elif repo_type == 'git':
2133 2133 return self._get_general_setting(
2134 2134 pull_request, 'rhodecode_git_use_rebase_for_merging')
2135 2135
2136 2136 return False
2137 2137
2138 2138 def _user_name_for_merging(self, pull_request, user):
2139 2139 env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
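        # e.g. RC_MERGE_USER_NAME_ATTR=username would pick user.username
        # (illustrative; otherwise 'short_contact' is used below)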
2140 2140 if env_user_name_attr and hasattr(user, env_user_name_attr):
2141 2141 user_name_attr = env_user_name_attr
2142 2142 else:
2143 2143 user_name_attr = 'short_contact'
2144 2144
2145 2145 user_name = getattr(user, user_name_attr)
2146 2146 return user_name
2147 2147
2148 2148 def _close_branch_before_merging(self, pull_request):
2149 2149 repo_type = pull_request.target_repo.repo_type
2150 2150 if repo_type == 'hg':
2151 2151 return self._get_general_setting(
2152 2152 pull_request, 'rhodecode_hg_close_branch_before_merging')
2153 2153 elif repo_type == 'git':
2154 2154 return self._get_general_setting(
2155 2155 pull_request, 'rhodecode_git_close_branch_before_merging')
2156 2156
2157 2157 return False
2158 2158
2159 2159 def _get_general_setting(self, pull_request, settings_key, default=False):
2160 2160 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
2161 2161 settings = settings_model.get_general_settings()
2162 2162 return settings.get(settings_key, default)
2163 2163
2164 2164 def _log_audit_action(self, action, action_data, user, pull_request):
2165 2165 audit_logger.store(
2166 2166 action=action,
2167 2167 action_data=action_data,
2168 2168 user=user,
2169 2169 repo=pull_request.target_repo)
2170 2170
2171 2171 def get_reviewer_functions(self):
2172 2172 """
2173 2173 Fetches functions for validation and fetching default reviewers.
2174 2174 If available we use the EE package, else we fall back to the CE
2175 2175 package functions.
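
        Example usage (sketch)::

            get_reviewers_data, validate_reviewers, validate_observers = \
                PullRequestModel().get_reviewer_functions()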
2176 2176 """
2177 2177 try:
2178 2178 from rc_reviewers.utils import get_default_reviewers_data
2179 2179 from rc_reviewers.utils import validate_default_reviewers
2180 2180 from rc_reviewers.utils import validate_observers
2181 2181 except ImportError:
2182 2182 from rhodecode.apps.repository.utils import get_default_reviewers_data
2183 2183 from rhodecode.apps.repository.utils import validate_default_reviewers
2184 2184 from rhodecode.apps.repository.utils import validate_observers
2185 2185
2186 2186 return get_default_reviewers_data, validate_default_reviewers, validate_observers
2187 2187
2188 2188
2189 2189 class MergeCheck(object):
2190 2190 """
2191 2191 Perform Merge Checks and returns a check object which stores information
2192 2192 about merge errors, and merge conditions
2193 2193 """
2194 2194 TODO_CHECK = 'todo'
2195 2195 PERM_CHECK = 'perm'
2196 2196 REVIEW_CHECK = 'review'
2197 2197 MERGE_CHECK = 'merge'
2198 2198 WIP_CHECK = 'wip'
2199 2199
2200 2200 def __init__(self):
2201 2201 self.review_status = None
2202 2202 self.merge_possible = None
2203 2203 self.merge_msg = ''
2204 2204 self.merge_response = None
2205 2205 self.failed = None
2206 2206 self.errors = []
2207 2207 self.error_details = OrderedDict()
2208 2208 self.source_commit = AttributeDict()
2209 2209 self.target_commit = AttributeDict()
2210 2210 self.reviewers_count = 0
2211 2211 self.observers_count = 0
2212 2212
2213 2213 def __repr__(self):
2214 2214 return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format(
2215 2215 self.merge_possible, self.failed, self.errors)
2216 2216
2217 2217 def push_error(self, error_type, message, error_key, details):
2218 2218 self.failed = True
2219 2219 self.errors.append([error_type, message])
2220 2220 self.error_details[error_key] = dict(
2221 2221 details=details,
2222 2222 error_type=error_type,
2223 2223 message=message
2224 2224 )
2225 2225
2226 2226 @classmethod
2227 2227 def validate(cls, pull_request, auth_user, translator, fail_early=False,
2228 2228 force_shadow_repo_refresh=False):
2229 2229 _ = translator
2230 2230 merge_check = cls()
2231 2231
2232 2232 # title has WIP:
2233 2233 if pull_request.work_in_progress:
2234 2234 log.debug("MergeCheck: cannot merge, title has wip: marker.")
2235 2235
2236 2236 msg = _('WIP marker in title prevents an accidental merge.')
2237 2237 merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
2238 2238 if fail_early:
2239 2239 return merge_check
2240 2240
2241 2241 # permissions to merge
2242 2242 user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user)
2243 2243 if not user_allowed_to_merge:
2244 2244 log.debug("MergeCheck: cannot merge, approval is pending.")
2245 2245
2246 2246 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
2247 2247 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2248 2248 if fail_early:
2249 2249 return merge_check
2250 2250
        # permission to merge into the target branch
        target_commit_id = pull_request.target_ref_parts.commit_id
        if pull_request.target_ref_parts.type == 'branch':
            branch_name = pull_request.target_ref_parts.name
        else:
            # for mercurial we can always figure out the branch from the
            # commit, e.g. in case of a bookmark
            target_commit = pull_request.target_repo.get_commit(target_commit_id)
            branch_name = target_commit.branch

        rule, branch_perm = auth_user.get_rule_and_branch_permission(
            pull_request.target_repo.repo_name, branch_name)
        if branch_perm == 'branch.none':
            msg = _('Target branch `{}` changes rejected by rule {}.').format(
                branch_name, rule)
            merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
            if fail_early:
                return merge_check

        # review status, must always be present
        review_status = pull_request.calculated_review_status()
        merge_check.review_status = review_status
        merge_check.reviewers_count = pull_request.reviewers_count
        merge_check.observers_count = pull_request.observers_count

        status_approved = review_status == ChangesetStatus.STATUS_APPROVED
        if not status_approved and merge_check.reviewers_count:
            log.debug("MergeCheck: cannot merge, approval is pending.")
            msg = _('Pull request reviewer approval is pending.')

            merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)

            if fail_early:
                return merge_check

        # left over TODOs
        todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
        if todos:
            log.debug("MergeCheck: cannot merge, %s unresolved TODOs left.",
                      len(todos))

            if len(todos) == 1:
                msg = _('Cannot merge, {} TODO still not resolved.').format(
                    len(todos))
            else:
                msg = _('Cannot merge, {} TODOs still not resolved.').format(
                    len(todos))

            merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)

            if fail_early:
                return merge_check

        # merge possibility: simulate the merge on the filesystem via the
        # shadow repository
        merge_response, merge_status, msg = PullRequestModel().merge_status(
            pull_request, translator=translator,
            force_shadow_repo_refresh=force_shadow_repo_refresh)

        merge_check.merge_possible = merge_status
        merge_check.merge_msg = msg
        merge_check.merge_response = merge_response

        source_ref_id = pull_request.source_ref_parts.commit_id
        target_ref_id = pull_request.target_ref_parts.commit_id

        try:
            source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request)
            merge_check.source_commit.changed = source_ref_id != source_commit.raw_id
            merge_check.source_commit.ref_spec = pull_request.source_ref_parts
            merge_check.source_commit.current_raw_id = source_commit.raw_id
            merge_check.source_commit.previous_raw_id = source_ref_id

            merge_check.target_commit.changed = target_ref_id != target_commit.raw_id
            merge_check.target_commit.ref_spec = pull_request.target_ref_parts
            merge_check.target_commit.current_raw_id = target_commit.raw_id
            merge_check.target_commit.previous_raw_id = target_ref_id
        except (SourceRefMissing, TargetRefMissing):
            pass

        if not merge_status:
            log.debug("MergeCheck: cannot merge, pull request merge not possible.")
            merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)

            if fail_early:
                return merge_check

        log.debug('MergeCheck: is failed: %s', merge_check.failed)
        return merge_check
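
    # Usage sketch (assumed caller, e.g. a pull-request view): run every
    # check and inspect the aggregate; `fail_early=True` instead returns on
    # the first failure. `request` here is hypothetical:
    #
    #   >>> check = MergeCheck.validate(
    #   ...     pull_request, auth_user=request.user,
    #   ...     translator=request.translate, force_shadow_repo_refresh=True)
    #   >>> if check.failed:
    #   ...     for error_type, message in check.errors:
    #   ...         print(error_type, message)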

    @classmethod
    def get_merge_conditions(cls, pull_request, translator):
        _ = translator
        merge_details = {}

        model = PullRequestModel()
        use_rebase = model._use_rebase_for_merging(pull_request)

        if use_rebase:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: rebase')
            )
        else:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: explicit merge commit')
            )

        close_branch = model._close_branch_before_merging(pull_request)
        if close_branch:
            repo_type = pull_request.target_repo.repo_type
            close_msg = ''
            if repo_type == 'hg':
                close_msg = _('Source branch will be closed before the merge.')
            elif repo_type == 'git':
                close_msg = _('Source branch will be deleted after the merge.')

            merge_details['close_branch'] = dict(
                details={},
                message=close_msg
            )

        return merge_details
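
    # Shape sketch (illustrative values): for an hg pull request with the
    # close-branch setting enabled, the returned mapping looks roughly like:
    #
    #   {'merge_strategy': {'details': {}, 'message': 'Merge strategy: rebase'},
    #    'close_branch': {'details': {}, 'message': 'Source branch will be closed before the merge.'}}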


ChangeTuple = collections.namedtuple(
    'ChangeTuple', ['added', 'common', 'removed', 'total'])

FileChangeTuple = collections.namedtuple(
    'FileChangeTuple', ['added', 'modified', 'removed'])
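
# Editor's sketch: both tuples are plain value containers; the field values
# below are assumptions for illustration (e.g. commit ids for ChangeTuple,
# file paths for FileChangeTuple):
#
#   >>> ChangeTuple(added=['c1'], common=['c0'], removed=[], total=['c0', 'c1'])
#   >>> FileChangeTuple(added=['a.py'], modified=['b.py'], removed=[])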