python3: removed compat modules
Author: super-admin
Revision: r4928:daf8135e (default branch)
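The only functional change in this commit is a one-line import swap, applied in both files below: the `OrderedDict` re-export from `rhodecode.lib.compat` is replaced with the standard-library `collections.OrderedDict`. As a minimal sketch of why the shim is now dead code (the `try`/`except` pattern and the `ordereddict` backport name are illustrative assumptions, not the actual contents of `rhodecode.lib.compat`):

# A hypothetical py2/py3 compat shim; on Python 3 the stdlib class is
# always importable, so the fallback branch can never run.
try:
    from collections import OrderedDict
except ImportError:  # only very old interpreters would ever hit this
    from ordereddict import OrderedDict  # hypothetical backport package

d = OrderedDict()
d['instance'] = 'rc-1'
d['caller_ip'] = '127.0.0.1'
assert list(d) == ['instance', 'caller_ip']  # insertion order preserved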
@@ -1,96 +1,96 @@
 # -*- coding: utf-8 -*-

 # Copyright (C) 2016-2020 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/

 import time
 import logging


 from pyramid.httpexceptions import HTTPFound

 from rhodecode.apps._base import BaseAppView
 from rhodecode.lib import helpers as h
 from rhodecode.lib.auth import LoginRequired
-from rhodecode.lib.compat import OrderedDict
+from collections import OrderedDict
 from rhodecode.model.db import UserApiKeys

 log = logging.getLogger(__name__)


 class OpsView(BaseAppView):

     def load_default_context(self):
         c = self._get_local_tmpl_context()
         c.user = c.auth_user.get_instance()

         return c

     def ops_ping(self):
         data = OrderedDict()
         data['instance'] = self.request.registry.settings.get('instance_id')

         if getattr(self.request, 'user'):
             caller_name = 'anonymous'
             if self.request.user.user_id:
                 caller_name = self.request.user.username

             data['caller_ip'] = self.request.user.ip_addr
             data['caller_name'] = caller_name

         return {'ok': data}

     def ops_error_test(self):
         """
         Test exception handling and emails on errors
         """

         class TestException(Exception):
             pass
         # add timeout so we add some sort of rate limiter
         time.sleep(2)
         msg = ('RhodeCode Enterprise test exception. '
                'Client:{}. Generation time: {}.'.format(self.request.user, time.time()))
         raise TestException(msg)

     def ops_redirect_test(self):
         """
         Test redirect handling
         """
         redirect_to = self.request.GET.get('to') or h.route_path('home')
         raise HTTPFound(redirect_to)

     @LoginRequired(auth_token_access=[UserApiKeys.ROLE_HTTP])
     def ops_healthcheck(self):
         from rhodecode.lib.system_info import load_system_info

         vcsserver_info = load_system_info('vcs_server')
         if vcsserver_info:
             vcsserver_info = vcsserver_info['human_value']

         db_info = load_system_info('database_info')
         if db_info:
             db_info = db_info['human_value']

         health_spec = {
             'caller_ip': self.request.user.ip_addr,
             'vcsserver': vcsserver_info,
             'db': db_info,
         }

         return {'healthcheck': health_spec}
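For context, `ops_ping` above returns `{'ok': {...}}` and `ops_healthcheck` is reachable with an auth token carrying the HTTP role. A minimal probe sketch, assuming the ping view is mounted at `/_admin/ops/ping` and accepts an `auth_token` query parameter (neither the route nor its auth policy is part of this diff):

import json
import urllib.parse
import urllib.request

def ping_instance(base_url, auth_token):
    # the token must belong to a user whose key has UserApiKeys.ROLE_HTTP
    query = urllib.parse.urlencode({'auth_token': auth_token})
    url = '{}/_admin/ops/ping?{}'.format(base_url.rstrip('/'), query)
    with urllib.request.urlopen(url) as resp:
        payload = json.load(resp)
    # shape built by ops_ping():
    # {'ok': {'instance': ..., 'caller_ip': ..., 'caller_name': ...}}
    return payload['ok']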
@@ -1,1092 +1,1092 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22
22
23 import mock
23 import mock
24 import pytest
24 import pytest
25
25
26 from rhodecode.apps.repository.tests.test_repo_compare import ComparePage
26 from rhodecode.apps.repository.tests.test_repo_compare import ComparePage
27 from rhodecode.apps.repository.views.repo_files import RepoFilesView
27 from rhodecode.apps.repository.views.repo_files import RepoFilesView
28 from rhodecode.lib import helpers as h
28 from rhodecode.lib import helpers as h
29 from rhodecode.lib.compat import OrderedDict
29 from collections import OrderedDict
30 from rhodecode.lib.ext_json import json
30 from rhodecode.lib.ext_json import json
31 from rhodecode.lib.vcs import nodes
31 from rhodecode.lib.vcs import nodes
32
32
33 from rhodecode.lib.vcs.conf import settings
33 from rhodecode.lib.vcs.conf import settings
34 from rhodecode.tests import assert_session_flash
34 from rhodecode.tests import assert_session_flash
35 from rhodecode.tests.fixture import Fixture
35 from rhodecode.tests.fixture import Fixture
36 from rhodecode.model.db import Session
36 from rhodecode.model.db import Session
37
37
38 fixture = Fixture()
38 fixture = Fixture()
39
39
40
40
41 def get_node_history(backend_type):
41 def get_node_history(backend_type):
42 return {
42 return {
43 'hg': json.loads(fixture.load_resource('hg_node_history_response.json')),
43 'hg': json.loads(fixture.load_resource('hg_node_history_response.json')),
44 'git': json.loads(fixture.load_resource('git_node_history_response.json')),
44 'git': json.loads(fixture.load_resource('git_node_history_response.json')),
45 'svn': json.loads(fixture.load_resource('svn_node_history_response.json')),
45 'svn': json.loads(fixture.load_resource('svn_node_history_response.json')),
46 }[backend_type]
46 }[backend_type]
47
47
48
48
49 def route_path(name, params=None, **kwargs):
49 def route_path(name, params=None, **kwargs):
50 import urllib.request, urllib.parse, urllib.error
50 import urllib.request, urllib.parse, urllib.error
51
51
52 base_url = {
52 base_url = {
53 'repo_summary': '/{repo_name}',
53 'repo_summary': '/{repo_name}',
54 'repo_archivefile': '/{repo_name}/archive/{fname}',
54 'repo_archivefile': '/{repo_name}/archive/{fname}',
55 'repo_files_diff': '/{repo_name}/diff/{f_path}',
55 'repo_files_diff': '/{repo_name}/diff/{f_path}',
56 'repo_files_diff_2way_redirect': '/{repo_name}/diff-2way/{f_path}',
56 'repo_files_diff_2way_redirect': '/{repo_name}/diff-2way/{f_path}',
57 'repo_files': '/{repo_name}/files/{commit_id}/{f_path}',
57 'repo_files': '/{repo_name}/files/{commit_id}/{f_path}',
58 'repo_files:default_path': '/{repo_name}/files/{commit_id}/',
58 'repo_files:default_path': '/{repo_name}/files/{commit_id}/',
59 'repo_files:default_commit': '/{repo_name}/files',
59 'repo_files:default_commit': '/{repo_name}/files',
60 'repo_files:rendered': '/{repo_name}/render/{commit_id}/{f_path}',
60 'repo_files:rendered': '/{repo_name}/render/{commit_id}/{f_path}',
61 'repo_files:annotated': '/{repo_name}/annotate/{commit_id}/{f_path}',
61 'repo_files:annotated': '/{repo_name}/annotate/{commit_id}/{f_path}',
62 'repo_files:annotated_previous': '/{repo_name}/annotate-previous/{commit_id}/{f_path}',
62 'repo_files:annotated_previous': '/{repo_name}/annotate-previous/{commit_id}/{f_path}',
63 'repo_files_nodelist': '/{repo_name}/nodelist/{commit_id}/{f_path}',
63 'repo_files_nodelist': '/{repo_name}/nodelist/{commit_id}/{f_path}',
64 'repo_file_raw': '/{repo_name}/raw/{commit_id}/{f_path}',
64 'repo_file_raw': '/{repo_name}/raw/{commit_id}/{f_path}',
65 'repo_file_download': '/{repo_name}/download/{commit_id}/{f_path}',
65 'repo_file_download': '/{repo_name}/download/{commit_id}/{f_path}',
66 'repo_file_history': '/{repo_name}/history/{commit_id}/{f_path}',
66 'repo_file_history': '/{repo_name}/history/{commit_id}/{f_path}',
67 'repo_file_authors': '/{repo_name}/authors/{commit_id}/{f_path}',
67 'repo_file_authors': '/{repo_name}/authors/{commit_id}/{f_path}',
68 'repo_files_remove_file': '/{repo_name}/remove_file/{commit_id}/{f_path}',
68 'repo_files_remove_file': '/{repo_name}/remove_file/{commit_id}/{f_path}',
69 'repo_files_delete_file': '/{repo_name}/delete_file/{commit_id}/{f_path}',
69 'repo_files_delete_file': '/{repo_name}/delete_file/{commit_id}/{f_path}',
70 'repo_files_edit_file': '/{repo_name}/edit_file/{commit_id}/{f_path}',
70 'repo_files_edit_file': '/{repo_name}/edit_file/{commit_id}/{f_path}',
71 'repo_files_update_file': '/{repo_name}/update_file/{commit_id}/{f_path}',
71 'repo_files_update_file': '/{repo_name}/update_file/{commit_id}/{f_path}',
72 'repo_files_add_file': '/{repo_name}/add_file/{commit_id}/{f_path}',
72 'repo_files_add_file': '/{repo_name}/add_file/{commit_id}/{f_path}',
73 'repo_files_create_file': '/{repo_name}/create_file/{commit_id}/{f_path}',
73 'repo_files_create_file': '/{repo_name}/create_file/{commit_id}/{f_path}',
74 'repo_nodetree_full': '/{repo_name}/nodetree_full/{commit_id}/{f_path}',
74 'repo_nodetree_full': '/{repo_name}/nodetree_full/{commit_id}/{f_path}',
75 'repo_nodetree_full:default_path': '/{repo_name}/nodetree_full/{commit_id}/',
75 'repo_nodetree_full:default_path': '/{repo_name}/nodetree_full/{commit_id}/',
76 }[name].format(**kwargs)
76 }[name].format(**kwargs)
77
77
78 if params:
78 if params:
79 base_url = '{}?{}'.format(base_url, urllib.parse.urlencode(params))
79 base_url = '{}?{}'.format(base_url, urllib.parse.urlencode(params))
80 return base_url
80 return base_url
81
81
82
82
83 def assert_files_in_response(response, files, params):
83 def assert_files_in_response(response, files, params):
84 template = (
84 template = (
85 'href="/%(repo_name)s/files/%(commit_id)s/%(name)s"')
85 'href="/%(repo_name)s/files/%(commit_id)s/%(name)s"')
86 _assert_items_in_response(response, files, template, params)
86 _assert_items_in_response(response, files, template, params)
87
87
88
88
89 def assert_dirs_in_response(response, dirs, params):
89 def assert_dirs_in_response(response, dirs, params):
90 template = (
90 template = (
91 'href="/%(repo_name)s/files/%(commit_id)s/%(name)s"')
91 'href="/%(repo_name)s/files/%(commit_id)s/%(name)s"')
92 _assert_items_in_response(response, dirs, template, params)
92 _assert_items_in_response(response, dirs, template, params)
93
93
94
94
95 def _assert_items_in_response(response, items, template, params):
95 def _assert_items_in_response(response, items, template, params):
96 for item in items:
96 for item in items:
97 item_params = {'name': item}
97 item_params = {'name': item}
98 item_params.update(params)
98 item_params.update(params)
99 response.mustcontain(template % item_params)
99 response.mustcontain(template % item_params)
100
100
101
101
102 def assert_timeago_in_response(response, items, params):
102 def assert_timeago_in_response(response, items, params):
103 for item in items:
103 for item in items:
104 response.mustcontain(h.age_component(params['date']))
104 response.mustcontain(h.age_component(params['date']))
105
105
106
106
107 @pytest.mark.usefixtures("app")
107 @pytest.mark.usefixtures("app")
108 class TestFilesViews(object):
108 class TestFilesViews(object):
109
109
110 def test_show_files(self, backend):
110 def test_show_files(self, backend):
111 response = self.app.get(
111 response = self.app.get(
112 route_path('repo_files',
112 route_path('repo_files',
113 repo_name=backend.repo_name,
113 repo_name=backend.repo_name,
114 commit_id='tip', f_path='/'))
114 commit_id='tip', f_path='/'))
115 commit = backend.repo.get_commit()
115 commit = backend.repo.get_commit()
116
116
117 params = {
117 params = {
118 'repo_name': backend.repo_name,
118 'repo_name': backend.repo_name,
119 'commit_id': commit.raw_id,
119 'commit_id': commit.raw_id,
120 'date': commit.date
120 'date': commit.date
121 }
121 }
122 assert_dirs_in_response(response, ['docs', 'vcs'], params)
122 assert_dirs_in_response(response, ['docs', 'vcs'], params)
123 files = [
123 files = [
124 '.gitignore',
124 '.gitignore',
125 '.hgignore',
125 '.hgignore',
126 '.hgtags',
126 '.hgtags',
127 # TODO: missing in Git
127 # TODO: missing in Git
128 # '.travis.yml',
128 # '.travis.yml',
129 'MANIFEST.in',
129 'MANIFEST.in',
130 'README.rst',
130 'README.rst',
131 # TODO: File is missing in svn repository
131 # TODO: File is missing in svn repository
132 # 'run_test_and_report.sh',
132 # 'run_test_and_report.sh',
133 'setup.cfg',
133 'setup.cfg',
134 'setup.py',
134 'setup.py',
135 'test_and_report.sh',
135 'test_and_report.sh',
136 'tox.ini',
136 'tox.ini',
137 ]
137 ]
138 assert_files_in_response(response, files, params)
138 assert_files_in_response(response, files, params)
139 assert_timeago_in_response(response, files, params)
139 assert_timeago_in_response(response, files, params)
140
140
141 def test_show_files_links_submodules_with_absolute_url(self, backend_hg):
141 def test_show_files_links_submodules_with_absolute_url(self, backend_hg):
142 repo = backend_hg['subrepos']
142 repo = backend_hg['subrepos']
143 response = self.app.get(
143 response = self.app.get(
144 route_path('repo_files',
144 route_path('repo_files',
145 repo_name=repo.repo_name,
145 repo_name=repo.repo_name,
146 commit_id='tip', f_path='/'))
146 commit_id='tip', f_path='/'))
147 assert_response = response.assert_response()
147 assert_response = response.assert_response()
148 assert_response.contains_one_link(
148 assert_response.contains_one_link(
149 'absolute-path @ 000000000000', 'http://example.com/absolute-path')
149 'absolute-path @ 000000000000', 'http://example.com/absolute-path')
150
150
151 def test_show_files_links_submodules_with_absolute_url_subpaths(
151 def test_show_files_links_submodules_with_absolute_url_subpaths(
152 self, backend_hg):
152 self, backend_hg):
153 repo = backend_hg['subrepos']
153 repo = backend_hg['subrepos']
154 response = self.app.get(
154 response = self.app.get(
155 route_path('repo_files',
155 route_path('repo_files',
156 repo_name=repo.repo_name,
156 repo_name=repo.repo_name,
157 commit_id='tip', f_path='/'))
157 commit_id='tip', f_path='/'))
158 assert_response = response.assert_response()
158 assert_response = response.assert_response()
159 assert_response.contains_one_link(
159 assert_response.contains_one_link(
160 'subpaths-path @ 000000000000',
160 'subpaths-path @ 000000000000',
161 'http://sub-base.example.com/subpaths-path')
161 'http://sub-base.example.com/subpaths-path')
162
162
163 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
163 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
164 def test_files_menu(self, backend):
164 def test_files_menu(self, backend):
165 new_branch = "temp_branch_name"
165 new_branch = "temp_branch_name"
166 commits = [
166 commits = [
167 {'message': 'a'},
167 {'message': 'a'},
168 {'message': 'b', 'branch': new_branch}
168 {'message': 'b', 'branch': new_branch}
169 ]
169 ]
170 backend.create_repo(commits)
170 backend.create_repo(commits)
171 backend.repo.landing_rev = "branch:%s" % new_branch
171 backend.repo.landing_rev = "branch:%s" % new_branch
172 Session().commit()
172 Session().commit()
173
173
174 # get response based on tip and not new commit
174 # get response based on tip and not new commit
175 response = self.app.get(
175 response = self.app.get(
176 route_path('repo_files',
176 route_path('repo_files',
177 repo_name=backend.repo_name,
177 repo_name=backend.repo_name,
178 commit_id='tip', f_path='/'))
178 commit_id='tip', f_path='/'))
179
179
180 # make sure Files menu url is not tip but new commit
180 # make sure Files menu url is not tip but new commit
181 landing_rev = backend.repo.landing_ref_name
181 landing_rev = backend.repo.landing_ref_name
182 files_url = route_path('repo_files:default_path',
182 files_url = route_path('repo_files:default_path',
183 repo_name=backend.repo_name,
183 repo_name=backend.repo_name,
184 commit_id=landing_rev, params={'at': landing_rev})
184 commit_id=landing_rev, params={'at': landing_rev})
185
185
186 assert landing_rev != 'tip'
186 assert landing_rev != 'tip'
187 response.mustcontain(
187 response.mustcontain(
188 '<li class="active"><a class="menulink" href="%s">' % files_url)
188 '<li class="active"><a class="menulink" href="%s">' % files_url)
189
189
190 def test_show_files_commit(self, backend):
190 def test_show_files_commit(self, backend):
191 commit = backend.repo.get_commit(commit_idx=32)
191 commit = backend.repo.get_commit(commit_idx=32)
192
192
193 response = self.app.get(
193 response = self.app.get(
194 route_path('repo_files',
194 route_path('repo_files',
195 repo_name=backend.repo_name,
195 repo_name=backend.repo_name,
196 commit_id=commit.raw_id, f_path='/'))
196 commit_id=commit.raw_id, f_path='/'))
197
197
198 dirs = ['docs', 'tests']
198 dirs = ['docs', 'tests']
199 files = ['README.rst']
199 files = ['README.rst']
200 params = {
200 params = {
201 'repo_name': backend.repo_name,
201 'repo_name': backend.repo_name,
202 'commit_id': commit.raw_id,
202 'commit_id': commit.raw_id,
203 }
203 }
204 assert_dirs_in_response(response, dirs, params)
204 assert_dirs_in_response(response, dirs, params)
205 assert_files_in_response(response, files, params)
205 assert_files_in_response(response, files, params)
206
206
207 def test_show_files_different_branch(self, backend):
207 def test_show_files_different_branch(self, backend):
208 branches = dict(
208 branches = dict(
209 hg=(150, ['git']),
209 hg=(150, ['git']),
210 # TODO: Git test repository does not contain other branches
210 # TODO: Git test repository does not contain other branches
211 git=(633, ['master']),
211 git=(633, ['master']),
212 # TODO: Branch support in Subversion
212 # TODO: Branch support in Subversion
213 svn=(150, [])
213 svn=(150, [])
214 )
214 )
215 idx, branches = branches[backend.alias]
215 idx, branches = branches[backend.alias]
216 commit = backend.repo.get_commit(commit_idx=idx)
216 commit = backend.repo.get_commit(commit_idx=idx)
217 response = self.app.get(
217 response = self.app.get(
218 route_path('repo_files',
218 route_path('repo_files',
219 repo_name=backend.repo_name,
219 repo_name=backend.repo_name,
220 commit_id=commit.raw_id, f_path='/'))
220 commit_id=commit.raw_id, f_path='/'))
221
221
222 assert_response = response.assert_response()
222 assert_response = response.assert_response()
223 for branch in branches:
223 for branch in branches:
224 assert_response.element_contains('.tags .branchtag', branch)
224 assert_response.element_contains('.tags .branchtag', branch)
225
225
226 def test_show_files_paging(self, backend):
226 def test_show_files_paging(self, backend):
227 repo = backend.repo
227 repo = backend.repo
228 indexes = [73, 92, 109, 1, 0]
228 indexes = [73, 92, 109, 1, 0]
229 idx_map = [(rev, repo.get_commit(commit_idx=rev).raw_id)
229 idx_map = [(rev, repo.get_commit(commit_idx=rev).raw_id)
230 for rev in indexes]
230 for rev in indexes]
231
231
232 for idx in idx_map:
232 for idx in idx_map:
233 response = self.app.get(
233 response = self.app.get(
234 route_path('repo_files',
234 route_path('repo_files',
235 repo_name=backend.repo_name,
235 repo_name=backend.repo_name,
236 commit_id=idx[1], f_path='/'))
236 commit_id=idx[1], f_path='/'))
237
237
238 response.mustcontain("""r%s:%s""" % (idx[0], idx[1][:8]))
238 response.mustcontain("""r%s:%s""" % (idx[0], idx[1][:8]))
239
239
240 def test_file_source(self, backend):
240 def test_file_source(self, backend):
241 commit = backend.repo.get_commit(commit_idx=167)
241 commit = backend.repo.get_commit(commit_idx=167)
242 response = self.app.get(
242 response = self.app.get(
243 route_path('repo_files',
243 route_path('repo_files',
244 repo_name=backend.repo_name,
244 repo_name=backend.repo_name,
245 commit_id=commit.raw_id, f_path='vcs/nodes.py'))
245 commit_id=commit.raw_id, f_path='vcs/nodes.py'))
246
246
247 msgbox = """<div class="commit">%s</div>"""
247 msgbox = """<div class="commit">%s</div>"""
248 response.mustcontain(msgbox % (commit.message, ))
248 response.mustcontain(msgbox % (commit.message, ))
249
249
250 assert_response = response.assert_response()
250 assert_response = response.assert_response()
251 if commit.branch:
251 if commit.branch:
252 assert_response.element_contains(
252 assert_response.element_contains(
253 '.tags.tags-main .branchtag', commit.branch)
253 '.tags.tags-main .branchtag', commit.branch)
254 if commit.tags:
254 if commit.tags:
255 for tag in commit.tags:
255 for tag in commit.tags:
256 assert_response.element_contains('.tags.tags-main .tagtag', tag)
256 assert_response.element_contains('.tags.tags-main .tagtag', tag)
257
257
258 def test_file_source_annotated(self, backend):
258 def test_file_source_annotated(self, backend):
259 response = self.app.get(
259 response = self.app.get(
260 route_path('repo_files:annotated',
260 route_path('repo_files:annotated',
261 repo_name=backend.repo_name,
261 repo_name=backend.repo_name,
262 commit_id='tip', f_path='vcs/nodes.py'))
262 commit_id='tip', f_path='vcs/nodes.py'))
263 expected_commits = {
263 expected_commits = {
264 'hg': 'r356',
264 'hg': 'r356',
265 'git': 'r345',
265 'git': 'r345',
266 'svn': 'r208',
266 'svn': 'r208',
267 }
267 }
268 response.mustcontain(expected_commits[backend.alias])
268 response.mustcontain(expected_commits[backend.alias])
269
269
270 def test_file_source_authors(self, backend):
270 def test_file_source_authors(self, backend):
271 response = self.app.get(
271 response = self.app.get(
272 route_path('repo_file_authors',
272 route_path('repo_file_authors',
273 repo_name=backend.repo_name,
273 repo_name=backend.repo_name,
274 commit_id='tip', f_path='vcs/nodes.py'))
274 commit_id='tip', f_path='vcs/nodes.py'))
275 expected_authors = {
275 expected_authors = {
276 'hg': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
276 'hg': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
277 'git': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
277 'git': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
278 'svn': ('marcin', 'lukasz'),
278 'svn': ('marcin', 'lukasz'),
279 }
279 }
280
280
281 for author in expected_authors[backend.alias]:
281 for author in expected_authors[backend.alias]:
282 response.mustcontain(author)
282 response.mustcontain(author)
283
283
284 def test_file_source_authors_with_annotation(self, backend):
284 def test_file_source_authors_with_annotation(self, backend):
285 response = self.app.get(
285 response = self.app.get(
286 route_path('repo_file_authors',
286 route_path('repo_file_authors',
287 repo_name=backend.repo_name,
287 repo_name=backend.repo_name,
288 commit_id='tip', f_path='vcs/nodes.py',
288 commit_id='tip', f_path='vcs/nodes.py',
289 params=dict(annotate=1)))
289 params=dict(annotate=1)))
290 expected_authors = {
290 expected_authors = {
291 'hg': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
291 'hg': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
292 'git': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
292 'git': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
293 'svn': ('marcin', 'lukasz'),
293 'svn': ('marcin', 'lukasz'),
294 }
294 }
295
295
296 for author in expected_authors[backend.alias]:
296 for author in expected_authors[backend.alias]:
297 response.mustcontain(author)
297 response.mustcontain(author)
298
298
299 def test_file_source_history(self, backend, xhr_header):
299 def test_file_source_history(self, backend, xhr_header):
300 response = self.app.get(
300 response = self.app.get(
301 route_path('repo_file_history',
301 route_path('repo_file_history',
302 repo_name=backend.repo_name,
302 repo_name=backend.repo_name,
303 commit_id='tip', f_path='vcs/nodes.py'),
303 commit_id='tip', f_path='vcs/nodes.py'),
304 extra_environ=xhr_header)
304 extra_environ=xhr_header)
305 assert get_node_history(backend.alias) == json.loads(response.body)
305 assert get_node_history(backend.alias) == json.loads(response.body)
306
306
307 def test_file_source_history_svn(self, backend_svn, xhr_header):
307 def test_file_source_history_svn(self, backend_svn, xhr_header):
308 simple_repo = backend_svn['svn-simple-layout']
308 simple_repo = backend_svn['svn-simple-layout']
309 response = self.app.get(
309 response = self.app.get(
310 route_path('repo_file_history',
310 route_path('repo_file_history',
311 repo_name=simple_repo.repo_name,
311 repo_name=simple_repo.repo_name,
312 commit_id='tip', f_path='trunk/example.py'),
312 commit_id='tip', f_path='trunk/example.py'),
313 extra_environ=xhr_header)
313 extra_environ=xhr_header)
314
314
315 expected_data = json.loads(
315 expected_data = json.loads(
316 fixture.load_resource('svn_node_history_branches.json'))
316 fixture.load_resource('svn_node_history_branches.json'))
317
317
318 assert expected_data == response.json
318 assert expected_data == response.json
319
319
320 def test_file_source_history_with_annotation(self, backend, xhr_header):
320 def test_file_source_history_with_annotation(self, backend, xhr_header):
321 response = self.app.get(
321 response = self.app.get(
322 route_path('repo_file_history',
322 route_path('repo_file_history',
323 repo_name=backend.repo_name,
323 repo_name=backend.repo_name,
324 commit_id='tip', f_path='vcs/nodes.py',
324 commit_id='tip', f_path='vcs/nodes.py',
325 params=dict(annotate=1)),
325 params=dict(annotate=1)),
326
326
327 extra_environ=xhr_header)
327 extra_environ=xhr_header)
328 assert get_node_history(backend.alias) == json.loads(response.body)
328 assert get_node_history(backend.alias) == json.loads(response.body)
329
329
330 def test_tree_search_top_level(self, backend, xhr_header):
330 def test_tree_search_top_level(self, backend, xhr_header):
331 commit = backend.repo.get_commit(commit_idx=173)
331 commit = backend.repo.get_commit(commit_idx=173)
332 response = self.app.get(
332 response = self.app.get(
333 route_path('repo_files_nodelist',
333 route_path('repo_files_nodelist',
334 repo_name=backend.repo_name,
334 repo_name=backend.repo_name,
335 commit_id=commit.raw_id, f_path='/'),
335 commit_id=commit.raw_id, f_path='/'),
336 extra_environ=xhr_header)
336 extra_environ=xhr_header)
337 assert 'nodes' in response.json
337 assert 'nodes' in response.json
338 assert {'name': 'docs', 'type': 'dir'} in response.json['nodes']
338 assert {'name': 'docs', 'type': 'dir'} in response.json['nodes']
339
339
340 def test_tree_search_missing_xhr(self, backend):
340 def test_tree_search_missing_xhr(self, backend):
341 self.app.get(
341 self.app.get(
342 route_path('repo_files_nodelist',
342 route_path('repo_files_nodelist',
343 repo_name=backend.repo_name,
343 repo_name=backend.repo_name,
344 commit_id='tip', f_path='/'),
344 commit_id='tip', f_path='/'),
345 status=404)
345 status=404)
346
346
347 def test_tree_search_at_path(self, backend, xhr_header):
347 def test_tree_search_at_path(self, backend, xhr_header):
348 commit = backend.repo.get_commit(commit_idx=173)
348 commit = backend.repo.get_commit(commit_idx=173)
349 response = self.app.get(
349 response = self.app.get(
350 route_path('repo_files_nodelist',
350 route_path('repo_files_nodelist',
351 repo_name=backend.repo_name,
351 repo_name=backend.repo_name,
352 commit_id=commit.raw_id, f_path='/docs'),
352 commit_id=commit.raw_id, f_path='/docs'),
353 extra_environ=xhr_header)
353 extra_environ=xhr_header)
354 assert 'nodes' in response.json
354 assert 'nodes' in response.json
355 nodes = response.json['nodes']
355 nodes = response.json['nodes']
356 assert {'name': 'docs/api', 'type': 'dir'} in nodes
356 assert {'name': 'docs/api', 'type': 'dir'} in nodes
357 assert {'name': 'docs/index.rst', 'type': 'file'} in nodes
357 assert {'name': 'docs/index.rst', 'type': 'file'} in nodes
358
358
359 def test_tree_search_at_path_2nd_level(self, backend, xhr_header):
359 def test_tree_search_at_path_2nd_level(self, backend, xhr_header):
360 commit = backend.repo.get_commit(commit_idx=173)
360 commit = backend.repo.get_commit(commit_idx=173)
361 response = self.app.get(
361 response = self.app.get(
362 route_path('repo_files_nodelist',
362 route_path('repo_files_nodelist',
363 repo_name=backend.repo_name,
363 repo_name=backend.repo_name,
364 commit_id=commit.raw_id, f_path='/docs/api'),
364 commit_id=commit.raw_id, f_path='/docs/api'),
365 extra_environ=xhr_header)
365 extra_environ=xhr_header)
366 assert 'nodes' in response.json
366 assert 'nodes' in response.json
367 nodes = response.json['nodes']
367 nodes = response.json['nodes']
368 assert {'name': 'docs/api/index.rst', 'type': 'file'} in nodes
368 assert {'name': 'docs/api/index.rst', 'type': 'file'} in nodes
369
369
370 def test_tree_search_at_path_missing_xhr(self, backend):
370 def test_tree_search_at_path_missing_xhr(self, backend):
371 self.app.get(
371 self.app.get(
372 route_path('repo_files_nodelist',
372 route_path('repo_files_nodelist',
373 repo_name=backend.repo_name,
373 repo_name=backend.repo_name,
374 commit_id='tip', f_path='/docs'),
374 commit_id='tip', f_path='/docs'),
375 status=404)
375 status=404)
376
376
377 def test_nodetree(self, backend, xhr_header):
377 def test_nodetree(self, backend, xhr_header):
378 commit = backend.repo.get_commit(commit_idx=173)
378 commit = backend.repo.get_commit(commit_idx=173)
379 response = self.app.get(
379 response = self.app.get(
380 route_path('repo_nodetree_full',
380 route_path('repo_nodetree_full',
381 repo_name=backend.repo_name,
381 repo_name=backend.repo_name,
382 commit_id=commit.raw_id, f_path='/'),
382 commit_id=commit.raw_id, f_path='/'),
383 extra_environ=xhr_header)
383 extra_environ=xhr_header)
384
384
385 assert_response = response.assert_response()
385 assert_response = response.assert_response()
386
386
387 for attr in ['data-commit-id', 'data-date', 'data-author']:
387 for attr in ['data-commit-id', 'data-date', 'data-author']:
388 elements = assert_response.get_elements('[{}]'.format(attr))
388 elements = assert_response.get_elements('[{}]'.format(attr))
389 assert len(elements) > 1
389 assert len(elements) > 1
390
390
391 for element in elements:
391 for element in elements:
392 assert element.get(attr)
392 assert element.get(attr)
393
393
394 def test_nodetree_if_file(self, backend, xhr_header):
394 def test_nodetree_if_file(self, backend, xhr_header):
395 commit = backend.repo.get_commit(commit_idx=173)
395 commit = backend.repo.get_commit(commit_idx=173)
396 response = self.app.get(
396 response = self.app.get(
397 route_path('repo_nodetree_full',
397 route_path('repo_nodetree_full',
398 repo_name=backend.repo_name,
398 repo_name=backend.repo_name,
399 commit_id=commit.raw_id, f_path='README.rst'),
399 commit_id=commit.raw_id, f_path='README.rst'),
400 extra_environ=xhr_header)
400 extra_environ=xhr_header)
401 assert response.body == ''
401 assert response.body == ''
402
402
403 def test_nodetree_wrong_path(self, backend, xhr_header):
403 def test_nodetree_wrong_path(self, backend, xhr_header):
404 commit = backend.repo.get_commit(commit_idx=173)
404 commit = backend.repo.get_commit(commit_idx=173)
405 response = self.app.get(
405 response = self.app.get(
406 route_path('repo_nodetree_full',
406 route_path('repo_nodetree_full',
407 repo_name=backend.repo_name,
407 repo_name=backend.repo_name,
408 commit_id=commit.raw_id, f_path='/dont-exist'),
408 commit_id=commit.raw_id, f_path='/dont-exist'),
409 extra_environ=xhr_header)
409 extra_environ=xhr_header)
410
410
411 err = 'error: There is no file nor ' \
411 err = 'error: There is no file nor ' \
412 'directory at the given path'
412 'directory at the given path'
413 assert err in response.body
413 assert err in response.body
414
414
415 def test_nodetree_missing_xhr(self, backend):
415 def test_nodetree_missing_xhr(self, backend):
416 self.app.get(
416 self.app.get(
417 route_path('repo_nodetree_full',
417 route_path('repo_nodetree_full',
418 repo_name=backend.repo_name,
418 repo_name=backend.repo_name,
419 commit_id='tip', f_path='/'),
419 commit_id='tip', f_path='/'),
420 status=404)
420 status=404)
421
421
422
422
423 @pytest.mark.usefixtures("app", "autologin_user")
423 @pytest.mark.usefixtures("app", "autologin_user")
424 class TestRawFileHandling(object):
424 class TestRawFileHandling(object):
425
425
426 def test_download_file(self, backend):
426 def test_download_file(self, backend):
427 commit = backend.repo.get_commit(commit_idx=173)
427 commit = backend.repo.get_commit(commit_idx=173)
428 response = self.app.get(
428 response = self.app.get(
429 route_path('repo_file_download',
429 route_path('repo_file_download',
430 repo_name=backend.repo_name,
430 repo_name=backend.repo_name,
431 commit_id=commit.raw_id, f_path='vcs/nodes.py'),)
431 commit_id=commit.raw_id, f_path='vcs/nodes.py'),)
432
432
433 assert response.content_disposition == 'attachment; filename="nodes.py"; filename*=UTF-8\'\'nodes.py'
433 assert response.content_disposition == 'attachment; filename="nodes.py"; filename*=UTF-8\'\'nodes.py'
434 assert response.content_type == "text/x-python"
434 assert response.content_type == "text/x-python"
435
435
436 def test_download_file_wrong_cs(self, backend):
436 def test_download_file_wrong_cs(self, backend):
437 raw_id = u'ERRORce30c96924232dffcd24178a07ffeb5dfc'
437 raw_id = u'ERRORce30c96924232dffcd24178a07ffeb5dfc'
438
438
439 response = self.app.get(
439 response = self.app.get(
440 route_path('repo_file_download',
440 route_path('repo_file_download',
441 repo_name=backend.repo_name,
441 repo_name=backend.repo_name,
442 commit_id=raw_id, f_path='vcs/nodes.svg'),
442 commit_id=raw_id, f_path='vcs/nodes.svg'),
443 status=404)
443 status=404)
444
444
445 msg = """No such commit exists for this repository"""
445 msg = """No such commit exists for this repository"""
446 response.mustcontain(msg)
446 response.mustcontain(msg)
447
447
448 def test_download_file_wrong_f_path(self, backend):
448 def test_download_file_wrong_f_path(self, backend):
449 commit = backend.repo.get_commit(commit_idx=173)
449 commit = backend.repo.get_commit(commit_idx=173)
450 f_path = 'vcs/ERRORnodes.py'
450 f_path = 'vcs/ERRORnodes.py'
451
451
452 response = self.app.get(
452 response = self.app.get(
453 route_path('repo_file_download',
453 route_path('repo_file_download',
454 repo_name=backend.repo_name,
454 repo_name=backend.repo_name,
455 commit_id=commit.raw_id, f_path=f_path),
455 commit_id=commit.raw_id, f_path=f_path),
456 status=404)
456 status=404)
457
457
458 msg = (
458 msg = (
459 "There is no file nor directory at the given path: "
459 "There is no file nor directory at the given path: "
460 "`%s` at commit %s" % (f_path, commit.short_id))
460 "`%s` at commit %s" % (f_path, commit.short_id))
461 response.mustcontain(msg)
461 response.mustcontain(msg)
462
462
463 def test_file_raw(self, backend):
463 def test_file_raw(self, backend):
464 commit = backend.repo.get_commit(commit_idx=173)
464 commit = backend.repo.get_commit(commit_idx=173)
465 response = self.app.get(
465 response = self.app.get(
466 route_path('repo_file_raw',
466 route_path('repo_file_raw',
467 repo_name=backend.repo_name,
467 repo_name=backend.repo_name,
468 commit_id=commit.raw_id, f_path='vcs/nodes.py'),)
468 commit_id=commit.raw_id, f_path='vcs/nodes.py'),)
469
469
470 assert response.content_type == "text/plain"
470 assert response.content_type == "text/plain"
471
471
472 def test_file_raw_binary(self, backend):
472 def test_file_raw_binary(self, backend):
473 commit = backend.repo.get_commit()
473 commit = backend.repo.get_commit()
474 response = self.app.get(
474 response = self.app.get(
475 route_path('repo_file_raw',
475 route_path('repo_file_raw',
476 repo_name=backend.repo_name,
476 repo_name=backend.repo_name,
477 commit_id=commit.raw_id,
477 commit_id=commit.raw_id,
478 f_path='docs/theme/ADC/static/breadcrumb_background.png'),)
478 f_path='docs/theme/ADC/static/breadcrumb_background.png'),)
479
479
480 assert response.content_disposition == 'inline'
480 assert response.content_disposition == 'inline'
481
481
482 def test_raw_file_wrong_cs(self, backend):
482 def test_raw_file_wrong_cs(self, backend):
483 raw_id = u'ERRORcce30c96924232dffcd24178a07ffeb5dfc'
483 raw_id = u'ERRORcce30c96924232dffcd24178a07ffeb5dfc'
484
484
485 response = self.app.get(
485 response = self.app.get(
486 route_path('repo_file_raw',
486 route_path('repo_file_raw',
487 repo_name=backend.repo_name,
487 repo_name=backend.repo_name,
488 commit_id=raw_id, f_path='vcs/nodes.svg'),
488 commit_id=raw_id, f_path='vcs/nodes.svg'),
489 status=404)
489 status=404)
490
490
491 msg = """No such commit exists for this repository"""
491 msg = """No such commit exists for this repository"""
492 response.mustcontain(msg)
492 response.mustcontain(msg)
493
493
494 def test_raw_wrong_f_path(self, backend):
494 def test_raw_wrong_f_path(self, backend):
495 commit = backend.repo.get_commit(commit_idx=173)
495 commit = backend.repo.get_commit(commit_idx=173)
496 f_path = 'vcs/ERRORnodes.py'
496 f_path = 'vcs/ERRORnodes.py'
497 response = self.app.get(
497 response = self.app.get(
498 route_path('repo_file_raw',
498 route_path('repo_file_raw',
499 repo_name=backend.repo_name,
499 repo_name=backend.repo_name,
500 commit_id=commit.raw_id, f_path=f_path),
500 commit_id=commit.raw_id, f_path=f_path),
501 status=404)
501 status=404)
502
502
503 msg = (
503 msg = (
504 "There is no file nor directory at the given path: "
504 "There is no file nor directory at the given path: "
505 "`%s` at commit %s" % (f_path, commit.short_id))
505 "`%s` at commit %s" % (f_path, commit.short_id))
506 response.mustcontain(msg)
506 response.mustcontain(msg)
507
507
508 def test_raw_svg_should_not_be_rendered(self, backend):
508 def test_raw_svg_should_not_be_rendered(self, backend):
509 backend.create_repo()
509 backend.create_repo()
510 backend.ensure_file("xss.svg")
510 backend.ensure_file("xss.svg")
511 response = self.app.get(
511 response = self.app.get(
512 route_path('repo_file_raw',
512 route_path('repo_file_raw',
513 repo_name=backend.repo_name,
513 repo_name=backend.repo_name,
514 commit_id='tip', f_path='xss.svg'),)
514 commit_id='tip', f_path='xss.svg'),)
515 # If the content type is image/svg+xml then it allows to render HTML
515 # If the content type is image/svg+xml then it allows to render HTML
516 # and malicious SVG.
516 # and malicious SVG.
517 assert response.content_type == "text/plain"
517 assert response.content_type == "text/plain"
518
518
519
519
520 @pytest.mark.usefixtures("app")
520 @pytest.mark.usefixtures("app")
521 class TestRepositoryArchival(object):
521 class TestRepositoryArchival(object):
522
522
523 def test_archival(self, backend):
523 def test_archival(self, backend):
524 backend.enable_downloads()
524 backend.enable_downloads()
525 commit = backend.repo.get_commit(commit_idx=173)
525 commit = backend.repo.get_commit(commit_idx=173)
526 for a_type, content_type, extension in settings.ARCHIVE_SPECS:
526 for a_type, content_type, extension in settings.ARCHIVE_SPECS:
527
527
528 short = commit.short_id + extension
528 short = commit.short_id + extension
529 fname = commit.raw_id + extension
529 fname = commit.raw_id + extension
530 filename = '%s-%s' % (backend.repo_name, short)
530 filename = '%s-%s' % (backend.repo_name, short)
531 response = self.app.get(
531 response = self.app.get(
532 route_path('repo_archivefile',
532 route_path('repo_archivefile',
533 repo_name=backend.repo_name,
533 repo_name=backend.repo_name,
534 fname=fname))
534 fname=fname))
535
535
536 assert response.status == '200 OK'
536 assert response.status == '200 OK'
537 headers = [
537 headers = [
538 ('Content-Disposition', 'attachment; filename=%s' % filename),
538 ('Content-Disposition', 'attachment; filename=%s' % filename),
539 ('Content-Type', '%s' % content_type),
539 ('Content-Type', '%s' % content_type),
540 ]
540 ]
541
541
542 for header in headers:
542 for header in headers:
543 assert header in response.headers.items()
543 assert header in response.headers.items()
544
544
545 def test_archival_no_hash(self, backend):
545 def test_archival_no_hash(self, backend):
546 backend.enable_downloads()
546 backend.enable_downloads()
547 commit = backend.repo.get_commit(commit_idx=173)
547 commit = backend.repo.get_commit(commit_idx=173)
548 for a_type, content_type, extension in settings.ARCHIVE_SPECS:
548 for a_type, content_type, extension in settings.ARCHIVE_SPECS:
549
549
550 short = 'plain' + extension
550 short = 'plain' + extension
551 fname = commit.raw_id + extension
551 fname = commit.raw_id + extension
552 filename = '%s-%s' % (backend.repo_name, short)
552 filename = '%s-%s' % (backend.repo_name, short)
553 response = self.app.get(
553 response = self.app.get(
554 route_path('repo_archivefile',
554 route_path('repo_archivefile',
555 repo_name=backend.repo_name,
555 repo_name=backend.repo_name,
556 fname=fname, params={'with_hash': 0}))
556 fname=fname, params={'with_hash': 0}))
557
557
558 assert response.status == '200 OK'
558 assert response.status == '200 OK'
559 headers = [
559 headers = [
560 ('Content-Disposition', 'attachment; filename=%s' % filename),
560 ('Content-Disposition', 'attachment; filename=%s' % filename),
561 ('Content-Type', '%s' % content_type),
561 ('Content-Type', '%s' % content_type),
562 ]
562 ]
563
563
564 for header in headers:
564 for header in headers:
565 assert header in response.headers.items()
565 assert header in response.headers.items()
566
566
567 @pytest.mark.parametrize('arch_ext',[
567 @pytest.mark.parametrize('arch_ext',[
568 'tar', 'rar', 'x', '..ax', '.zipz', 'tar.gz.tar'])
568 'tar', 'rar', 'x', '..ax', '.zipz', 'tar.gz.tar'])
569 def test_archival_wrong_ext(self, backend, arch_ext):
569 def test_archival_wrong_ext(self, backend, arch_ext):
570 backend.enable_downloads()
570 backend.enable_downloads()
571 commit = backend.repo.get_commit(commit_idx=173)
571 commit = backend.repo.get_commit(commit_idx=173)
572
572
573 fname = commit.raw_id + '.' + arch_ext
573 fname = commit.raw_id + '.' + arch_ext
574
574
575 response = self.app.get(
575 response = self.app.get(
576 route_path('repo_archivefile',
576 route_path('repo_archivefile',
577 repo_name=backend.repo_name,
577 repo_name=backend.repo_name,
578 fname=fname))
578 fname=fname))
579 response.mustcontain(
579 response.mustcontain(
580 'Unknown archive type for: `{}`'.format(fname))
580 'Unknown archive type for: `{}`'.format(fname))
581
581
582 @pytest.mark.parametrize('commit_id', [
582 @pytest.mark.parametrize('commit_id', [
583 '00x000000', 'tar', 'wrong', '@$@$42413232', '232dffcd'])
583 '00x000000', 'tar', 'wrong', '@$@$42413232', '232dffcd'])
584 def test_archival_wrong_commit_id(self, backend, commit_id):
584 def test_archival_wrong_commit_id(self, backend, commit_id):
585 backend.enable_downloads()
585 backend.enable_downloads()
586 fname = '%s.zip' % commit_id
586 fname = '%s.zip' % commit_id
587
587
588 response = self.app.get(
588 response = self.app.get(
589 route_path('repo_archivefile',
589 route_path('repo_archivefile',
590 repo_name=backend.repo_name,
590 repo_name=backend.repo_name,
591 fname=fname))
591 fname=fname))
592 response.mustcontain('Unknown commit_id')
592 response.mustcontain('Unknown commit_id')
593
593
594
594
595 @pytest.mark.usefixtures("app")
595 @pytest.mark.usefixtures("app")
596 class TestFilesDiff(object):
596 class TestFilesDiff(object):
597
597
598 @pytest.mark.parametrize("diff", ['diff', 'download', 'raw'])
598 @pytest.mark.parametrize("diff", ['diff', 'download', 'raw'])
599 def test_file_full_diff(self, backend, diff):
599 def test_file_full_diff(self, backend, diff):
600 commit1 = backend.repo.get_commit(commit_idx=-1)
600 commit1 = backend.repo.get_commit(commit_idx=-1)
601 commit2 = backend.repo.get_commit(commit_idx=-2)
601 commit2 = backend.repo.get_commit(commit_idx=-2)
602
602
603 response = self.app.get(
603 response = self.app.get(
604 route_path('repo_files_diff',
604 route_path('repo_files_diff',
605 repo_name=backend.repo_name,
605 repo_name=backend.repo_name,
606 f_path='README'),
606 f_path='README'),
607 params={
607 params={
608 'diff1': commit2.raw_id,
608 'diff1': commit2.raw_id,
609 'diff2': commit1.raw_id,
609 'diff2': commit1.raw_id,
610 'fulldiff': '1',
610 'fulldiff': '1',
611 'diff': diff,
611 'diff': diff,
612 })
612 })
613
613
614 if diff == 'diff':
614 if diff == 'diff':
615 # use redirect since this is OLD view redirecting to compare page
615 # use redirect since this is OLD view redirecting to compare page
616 response = response.follow()
616 response = response.follow()
617
617
618 # It's a symlink to README.rst
618 # It's a symlink to README.rst
619 response.mustcontain('README.rst')
619 response.mustcontain('README.rst')
620 response.mustcontain('No newline at end of file')
620 response.mustcontain('No newline at end of file')
621
621
622 def test_file_binary_diff(self, backend):
622 def test_file_binary_diff(self, backend):
623 commits = [
623 commits = [
624 {'message': 'First commit'},
624 {'message': 'First commit'},
625 {'message': 'Commit with binary',
625 {'message': 'Commit with binary',
626 'added': [nodes.FileNode('file.bin', content='\0BINARY\0')]},
626 'added': [nodes.FileNode('file.bin', content='\0BINARY\0')]},
627 ]
627 ]
628 repo = backend.create_repo(commits=commits)
628 repo = backend.create_repo(commits=commits)
629
629
630 response = self.app.get(
630 response = self.app.get(
631 route_path('repo_files_diff',
631 route_path('repo_files_diff',
632 repo_name=backend.repo_name,
632 repo_name=backend.repo_name,
633 f_path='file.bin'),
633 f_path='file.bin'),
634 params={
634 params={
635 'diff1': repo.get_commit(commit_idx=0).raw_id,
635 'diff1': repo.get_commit(commit_idx=0).raw_id,
636 'diff2': repo.get_commit(commit_idx=1).raw_id,
636 'diff2': repo.get_commit(commit_idx=1).raw_id,
637 'fulldiff': '1',
637 'fulldiff': '1',
638 'diff': 'diff',
638 'diff': 'diff',
639 })
639 })
640 # use redirect since this is OLD view redirecting to compare page
640 # use redirect since this is OLD view redirecting to compare page
641 response = response.follow()
641 response = response.follow()
642 response.mustcontain('Collapse 1 commit')
642 response.mustcontain('Collapse 1 commit')
643 file_changes = (1, 0, 0)
643 file_changes = (1, 0, 0)
644
644
645 compare_page = ComparePage(response)
645 compare_page = ComparePage(response)
646 compare_page.contains_change_summary(*file_changes)
646 compare_page.contains_change_summary(*file_changes)
647
647
648 if backend.alias == 'svn':
648 if backend.alias == 'svn':
649 response.mustcontain('new file 10644')
649 response.mustcontain('new file 10644')
650 # TODO(marcink): SVN doesn't yet detect binary changes
650 # TODO(marcink): SVN doesn't yet detect binary changes
651 else:
651 else:
652 response.mustcontain('new file 100644')
652 response.mustcontain('new file 100644')
653 response.mustcontain('binary diff hidden')
653 response.mustcontain('binary diff hidden')
654
654
655 def test_diff_2way(self, backend):
655 def test_diff_2way(self, backend):
656 commit1 = backend.repo.get_commit(commit_idx=-1)
656 commit1 = backend.repo.get_commit(commit_idx=-1)
657 commit2 = backend.repo.get_commit(commit_idx=-2)
657 commit2 = backend.repo.get_commit(commit_idx=-2)
658 response = self.app.get(
658 response = self.app.get(
659 route_path('repo_files_diff_2way_redirect',
659 route_path('repo_files_diff_2way_redirect',
660 repo_name=backend.repo_name,
660 repo_name=backend.repo_name,
661 f_path='README'),
661 f_path='README'),
662 params={
662 params={
663 'diff1': commit2.raw_id,
663 'diff1': commit2.raw_id,
664 'diff2': commit1.raw_id,
664 'diff2': commit1.raw_id,
665 })
665 })
666 # use redirect since this is OLD view redirecting to compare page
666 # use redirect since this is OLD view redirecting to compare page
667 response = response.follow()
667 response = response.follow()
668
668
669 # It's a symlink to README.rst
669 # It's a symlink to README.rst
670 response.mustcontain('README.rst')
670 response.mustcontain('README.rst')
671 response.mustcontain('No newline at end of file')
671 response.mustcontain('No newline at end of file')
672
672
673 def test_requires_one_commit_id(self, backend, autologin_user):
673 def test_requires_one_commit_id(self, backend, autologin_user):
674 response = self.app.get(
674 response = self.app.get(
675 route_path('repo_files_diff',
675 route_path('repo_files_diff',
676 repo_name=backend.repo_name,
676 repo_name=backend.repo_name,
677 f_path='README.rst'),
677 f_path='README.rst'),
678 status=400)
678 status=400)
679 response.mustcontain(
679 response.mustcontain(
680 'Need query parameter', 'diff1', 'diff2', 'to generate a diff.')
680 'Need query parameter', 'diff1', 'diff2', 'to generate a diff.')
681
681
682 def test_returns_no_files_if_file_does_not_exist(self, vcsbackend):
682 def test_returns_no_files_if_file_does_not_exist(self, vcsbackend):
683 repo = vcsbackend.repo
683 repo = vcsbackend.repo
684 response = self.app.get(
684 response = self.app.get(
685 route_path('repo_files_diff',
685 route_path('repo_files_diff',
686 repo_name=repo.name,
686 repo_name=repo.name,
687 f_path='does-not-exist-in-any-commit'),
687 f_path='does-not-exist-in-any-commit'),
688 params={
688 params={
689 'diff1': repo[0].raw_id,
689 'diff1': repo[0].raw_id,
690 'diff2': repo[1].raw_id
690 'diff2': repo[1].raw_id
691 })
691 })
692
692
693 response = response.follow()
693 response = response.follow()
694 response.mustcontain('No files')
694 response.mustcontain('No files')
695
695
696 def test_returns_redirect_if_file_not_changed(self, backend):
696 def test_returns_redirect_if_file_not_changed(self, backend):
        commit = backend.repo.get_commit(commit_idx=-1)
        response = self.app.get(
            route_path('repo_files_diff_2way_redirect',
                       repo_name=backend.repo_name,
                       f_path='README'),
            params={
                'diff1': commit.raw_id,
                'diff2': commit.raw_id,
            })

        response = response.follow()
        response.mustcontain('No files')
        response.mustcontain('No commits in this compare')

    def test_supports_diff_to_different_path_svn(self, backend_svn):
        # TODO: check this case
        pytest.skip('TODO: check this case')

        repo = backend_svn['svn-simple-layout'].scm_instance()
        commit_id_1 = '24'
        commit_id_2 = '26'

        response = self.app.get(
            route_path('repo_files_diff',
                       repo_name=backend_svn.repo_name,
                       f_path='trunk/example.py'),
            params={
                'diff1': 'tags/v0.2/example.py@' + commit_id_1,
                'diff2': commit_id_2,
            })

        response = response.follow()
        response.mustcontain(
            # diff contains this
            "Will print out a useful message on invocation.")

        # Note: we are expected to indicate to the user what is being compared
        response.mustcontain("trunk/example.py")
        response.mustcontain("tags/v0.2/example.py")

    def test_show_rev_redirects_to_svn_path(self, backend_svn):
        # TODO: check this case
        pytest.skip('TODO: check this case')

        repo = backend_svn['svn-simple-layout'].scm_instance()
        commit_id = repo[-1].raw_id

        response = self.app.get(
            route_path('repo_files_diff',
                       repo_name=backend_svn.repo_name,
                       f_path='trunk/example.py'),
            params={
                'diff1': 'branches/argparse/example.py@' + commit_id,
                'diff2': commit_id,
            },
            status=302)
        response = response.follow()
        assert response.headers['Location'].endswith(
            'svn-svn-simple-layout/files/26/branches/argparse/example.py')

    def test_show_rev_and_annotate_redirects_to_svn_path(self, backend_svn):
        # TODO: check this case
        pytest.skip('TODO: check this case')

        repo = backend_svn['svn-simple-layout'].scm_instance()
        commit_id = repo[-1].raw_id
        response = self.app.get(
            route_path('repo_files_diff',
                       repo_name=backend_svn.repo_name,
                       f_path='trunk/example.py'),
            params={
                'diff1': 'branches/argparse/example.py@' + commit_id,
                'diff2': commit_id,
                'show_rev': 'Show at Revision',
                'annotate': 'true',
            },
            status=302)
        response = response.follow()
        assert response.headers['Location'].endswith(
            'svn-svn-simple-layout/annotate/26/branches/argparse/example.py')


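# The tests below drive the add/edit/delete file forms of the web interface;
# they post the same fields the forms submit (content, filename, message,
# csrf_token) and assert on the resulting flash messages.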
@pytest.mark.usefixtures("app", "autologin_user")
class TestModifyFilesWithWebInterface(object):

    def test_add_file_view(self, backend):
        self.app.get(
            route_path('repo_files_add_file',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='/')
        )

    @pytest.mark.xfail_backends("svn", reason="Depends on online editing")
    def test_add_file_into_repo_missing_content(self, backend, csrf_token):
        backend.create_repo()
        filename = 'init.py'
        response = self.app.post(
            route_path('repo_files_create_file',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path='/'),
            params={
                'content': "",
                'filename': filename,
                'csrf_token': csrf_token,
            },
            status=302)
        expected_msg = 'Successfully committed new file `{}`'.format(filename)
        assert_session_flash(response, expected_msg)

    def test_add_file_into_repo_missing_filename(self, backend, csrf_token):
        commit_id = backend.repo.get_commit().raw_id
        response = self.app.post(
            route_path('repo_files_create_file',
                       repo_name=backend.repo_name,
                       commit_id=commit_id, f_path='/'),
            params={
                'content': "foo",
                'csrf_token': csrf_token,
            },
            status=302)

        assert_session_flash(response, 'No filename specified')

    def test_add_file_into_repo_errors_and_no_commits(
            self, backend, csrf_token):
        repo = backend.create_repo()
        # Posting a file without a filename displays an error, even though
        # the repo has no commits yet
        response = self.app.post(
            route_path('repo_files_create_file',
                       repo_name=repo.repo_name,
                       commit_id='tip', f_path='/'),
            params={
                'content': "foo",
                'csrf_token': csrf_token,
            },
            status=302)

        assert_session_flash(response, 'No filename specified')

        # Not allowed, redirect to the summary
        redirected = response.follow()
        summary_url = h.route_path('repo_summary', repo_name=repo.repo_name)

        # As there are no commits, the summary page is displayed with the
        # error about creating a file without a filename

        assert redirected.request.path == summary_url

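    # User-supplied filenames are sanitized before committing: leading
    # slashes and '..' traversal segments are stripped, as the pairs of
    # raw and cleaned names below illustrate.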
    @pytest.mark.parametrize("filename, clean_filename", [
        ('/abs/foo', 'abs/foo'),
        ('../rel/foo', 'rel/foo'),
        ('file/../foo/foo', 'file/foo/foo'),
    ])
    def test_add_file_into_repo_bad_filenames(self, filename, clean_filename, backend, csrf_token):
        repo = backend.create_repo()
        commit_id = repo.get_commit().raw_id

        response = self.app.post(
            route_path('repo_files_create_file',
                       repo_name=repo.repo_name,
                       commit_id=commit_id, f_path='/'),
            params={
                'content': "foo",
                'filename': filename,
                'csrf_token': csrf_token,
            },
            status=302)

        expected_msg = 'Successfully committed new file `{}`'.format(clean_filename)
        assert_session_flash(response, expected_msg)

    @pytest.mark.parametrize("cnt, filename, content", [
        (1, 'foo.txt', "Content"),
        (2, 'dir/foo.rst', "Content"),
        (3, 'dir/foo-second.rst', "Content"),
        (4, 'rel/dir/foo.bar', "Content"),
    ])
    def test_add_file_into_empty_repo(self, cnt, filename, content, backend, csrf_token):
        repo = backend.create_repo()
        commit_id = repo.get_commit().raw_id
        response = self.app.post(
            route_path('repo_files_create_file',
                       repo_name=repo.repo_name,
                       commit_id=commit_id, f_path='/'),
            params={
                'content': content,
                'filename': filename,
                'csrf_token': csrf_token,
            },
            status=302)

        expected_msg = 'Successfully committed new file `{}`'.format(filename)
        assert_session_flash(response, expected_msg)

    def test_edit_file_view(self, backend):
        response = self.app.get(
            route_path('repo_files_edit_file',
                       repo_name=backend.repo_name,
                       commit_id=backend.default_head_id,
                       f_path='vcs/nodes.py'),
            status=200)
        response.mustcontain("Module holding everything related to vcs nodes.")

    def test_edit_file_view_not_on_branch(self, backend):
        repo = backend.create_repo()
        backend.ensure_file("vcs/nodes.py")

        response = self.app.get(
            route_path('repo_files_edit_file',
                       repo_name=repo.repo_name,
                       commit_id='tip',
                       f_path='vcs/nodes.py'),
            status=302)
        assert_session_flash(
            response, 'Cannot modify file. Given commit `tip` is not head of a branch.')

    def test_edit_file_view_commit_changes(self, backend, csrf_token):
        repo = backend.create_repo()
        backend.ensure_file("vcs/nodes.py", content="print 'hello'")

        response = self.app.post(
            route_path('repo_files_update_file',
                       repo_name=repo.repo_name,
                       commit_id=backend.default_head_id,
                       f_path='vcs/nodes.py'),
            params={
                'content': "print 'hello world'",
                'message': 'I committed',
                'filename': "vcs/nodes.py",
                'csrf_token': csrf_token,
            },
            status=302)
        assert_session_flash(
            response, 'Successfully committed changes to file `vcs/nodes.py`')
        tip = repo.get_commit(commit_idx=-1)
        assert tip.message == 'I committed'

    def test_edit_file_view_commit_changes_default_message(self, backend,
                                                           csrf_token):
        repo = backend.create_repo()
        backend.ensure_file("vcs/nodes.py", content="print 'hello'")

        commit_id = (
            backend.default_branch_name or
            backend.repo.scm_instance().commit_ids[-1])

        response = self.app.post(
            route_path('repo_files_update_file',
                       repo_name=repo.repo_name,
                       commit_id=commit_id,
                       f_path='vcs/nodes.py'),
            params={
                'content': "print 'hello world'",
                'message': '',
                'filename': "vcs/nodes.py",
                'csrf_token': csrf_token,
            },
            status=302)
        assert_session_flash(
            response, 'Successfully committed changes to file `vcs/nodes.py`')
        tip = repo.get_commit(commit_idx=-1)
        assert tip.message == 'Edited file vcs/nodes.py via RhodeCode Enterprise'

    def test_delete_file_view(self, backend):
        self.app.get(
            route_path('repo_files_remove_file',
                       repo_name=backend.repo_name,
                       commit_id=backend.default_head_id,
                       f_path='vcs/nodes.py'),
            status=200)

    def test_delete_file_view_not_on_branch(self, backend):
        repo = backend.create_repo()
        backend.ensure_file('vcs/nodes.py')

        response = self.app.get(
            route_path('repo_files_remove_file',
                       repo_name=repo.repo_name,
                       commit_id='tip',
                       f_path='vcs/nodes.py'),
            status=302)
        assert_session_flash(
            response, 'Cannot modify file. Given commit `tip` is not head of a branch.')

    def test_delete_file_view_commit_changes(self, backend, csrf_token):
        repo = backend.create_repo()
        backend.ensure_file("vcs/nodes.py")

        response = self.app.post(
            route_path('repo_files_delete_file',
                       repo_name=repo.repo_name,
                       commit_id=backend.default_head_id,
                       f_path='vcs/nodes.py'),
            params={
                'message': 'i committed',
                'csrf_token': csrf_token,
            },
            status=302)
        assert_session_flash(
            response, 'Successfully deleted file `vcs/nodes.py`')


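# Edge cases of the files view: empty repositories redirecting to the summary
# page, and file names that collide with parts of the route schema.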
@pytest.mark.usefixtures("app")
class TestFilesViewOtherCases(object):

    def test_access_empty_repo_redirect_to_summary_with_alert_write_perms(
            self, backend_stub, autologin_regular_user, user_regular,
            user_util):

        repo = backend_stub.create_repo()
        user_util.grant_user_permission_to_repo(
            repo, user_regular, 'repository.write')
        response = self.app.get(
            route_path('repo_files',
                       repo_name=repo.repo_name,
                       commit_id='tip', f_path='/'))

        repo_file_add_url = route_path(
            'repo_files_add_file',
            repo_name=repo.repo_name,
            commit_id=0, f_path='')

        assert_session_flash(
            response,
            'There are no files yet. <a class="alert-link" '
            'href="{}">Click here to add a new file.</a>'
            .format(repo_file_add_url))

    def test_access_empty_repo_redirect_to_summary_with_alert_no_write_perms(
            self, backend_stub, autologin_regular_user):
        repo = backend_stub.create_repo()
        # init session for anon user
        route_path('repo_summary', repo_name=repo.repo_name)

        repo_file_add_url = route_path(
            'repo_files_add_file',
            repo_name=repo.repo_name,
            commit_id=0, f_path='')

        response = self.app.get(
            route_path('repo_files',
                       repo_name=repo.repo_name,
                       commit_id='tip', f_path='/'))

        assert_session_flash(response, no_=repo_file_add_url)

    @pytest.mark.parametrize('file_node', [
        'archive/file.zip',
        'diff/my-file.txt',
        'render.py',
        'render',
        'remove_file',
        'remove_file/to-delete.txt',
    ])
    def test_file_names_equal_to_routes_parts(self, backend, file_node):
        backend.create_repo()
        backend.ensure_file(file_node)

        self.app.get(
            route_path('repo_files',
                       repo_name=backend.repo_name,
                       commit_id='tip', f_path=file_node),
            status=200)


class TestAdjustFilePathForSvn(object):
    """
    SVN specific adjustments of node history in RepoFilesView.
    """

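    # For example, with a branch named 'trunk', the path 'trunk/file' is
    # adjusted to 'file', while a non-matching path such as 'notes/file'
    # is returned unchanged.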
    def test_returns_path_relative_to_matched_reference(self):
        repo = self._repo(branches=['trunk'])
        self.assert_file_adjustment('trunk/file', 'file', repo)

    def test_does_not_modify_file_if_no_reference_matches(self):
        repo = self._repo(branches=['trunk'])
        self.assert_file_adjustment('notes/file', 'notes/file', repo)

    def test_does_not_adjust_partial_directory_names(self):
        repo = self._repo(branches=['trun'])
        self.assert_file_adjustment('trunk/file', 'trunk/file', repo)

    def test_is_robust_to_patterns_which_prefix_other_patterns(self):
        repo = self._repo(branches=['trunk', 'trunk/new', 'trunk/old'])
        self.assert_file_adjustment('trunk/new/file', 'file', repo)

    def assert_file_adjustment(self, f_path, expected, repo):
        result = RepoFilesView.adjust_file_path_for_svn(f_path, repo)
        assert result == expected

    def _repo(self, branches=None):
        repo = mock.Mock()
        repo.branches = OrderedDict((name, '0') for name in branches or [])
        repo.tags = {}
        return repo
@@ -1,524 +1,524 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2020 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import re

import mock
import pytest

from rhodecode.apps.repository.views.repo_summary import RepoSummaryView
from rhodecode.lib import helpers as h
-from rhodecode.lib.compat import OrderedDict
+from collections import OrderedDict
from rhodecode.lib.utils2 import AttributeDict, safe_str
from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
from rhodecode.model.db import Repository
from rhodecode.model.meta import Session
from rhodecode.model.repo import RepoModel
from rhodecode.model.scm import ScmModel
from rhodecode.tests import assert_session_flash
from rhodecode.tests.fixture import Fixture
from rhodecode.tests.utils import AssertResponse, repo_on_filesystem


fixture = Fixture()


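# Test-local URL builder: resolves a route name against the patterns defined
# below and appends any query parameters, e.g.
#   route_path('repo_stats', repo_name='foo', commit_id='tip')
#   -> '/foo/repo_stats/tip'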
def route_path(name, params=None, **kwargs):
    import urllib.parse

    base_url = {
        'repo_summary': '/{repo_name}',
        'repo_stats': '/{repo_name}/repo_stats/{commit_id}',
        'repo_refs_data': '/{repo_name}/refs-data',
        'repo_refs_changelog_data': '/{repo_name}/refs-data-changelog',
        'repo_creating_check': '/{repo_name}/repo_creating_check',
    }[name].format(**kwargs)

    if params:
        base_url = '{}?{}'.format(base_url, urllib.parse.urlencode(params))
    return base_url


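# Asserts that the summary page renders the expected clone-URL input field;
# disabled=True matches the variant rendered for SVN repositories served
# without the HTTP proxy, which carries an additional 'disabled' attribute.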
def assert_clone_url(response, server, repo, disabled=False):

    response.mustcontain(
        '<input type="text" class="input-monospace clone_url_input" '
        '{disabled}readonly="readonly" '
        'value="http://test_admin@{server}/{repo}"/>'.format(
            server=server, repo=repo, disabled='disabled ' if disabled else ' ')
    )


@pytest.mark.usefixtures('app')
class TestSummaryView(object):
    def test_index(self, autologin_user, backend, http_host_only_stub):
        repo_id = backend.repo.repo_id
        repo_name = backend.repo_name
        with mock.patch('rhodecode.lib.helpers.is_svn_without_proxy',
                        return_value=False):
            response = self.app.get(
                route_path('repo_summary', repo_name=repo_name))

        # repo type
        response.mustcontain(
            '<i class="icon-%s">' % (backend.alias, )
        )
        # public/private
        response.mustcontain(
            """<i class="icon-unlock-alt">"""
        )

        # clone url...
        assert_clone_url(response, http_host_only_stub, repo_name)
        assert_clone_url(response, http_host_only_stub, '_{}'.format(repo_id))

    def test_index_svn_without_proxy(
            self, autologin_user, backend_svn, http_host_only_stub):
        repo_id = backend_svn.repo.repo_id
        repo_name = backend_svn.repo_name
        response = self.app.get(route_path('repo_summary', repo_name=repo_name))
        # clone url...

        assert_clone_url(response, http_host_only_stub, repo_name, disabled=True)
        assert_clone_url(response, http_host_only_stub, '_{}'.format(repo_id), disabled=True)

    def test_index_with_trailing_slash(
            self, autologin_user, backend, http_host_only_stub):

        repo_id = backend.repo.repo_id
        repo_name = backend.repo_name
        with mock.patch('rhodecode.lib.helpers.is_svn_without_proxy',
                        return_value=False):
            response = self.app.get(
                route_path('repo_summary', repo_name=repo_name) + '/',
                status=200)

        # clone url...
        assert_clone_url(response, http_host_only_stub, repo_name)
        assert_clone_url(response, http_host_only_stub, '_{}'.format(repo_id))

    def test_index_by_id(self, autologin_user, backend):
        repo_id = backend.repo.repo_id
        response = self.app.get(
            route_path('repo_summary', repo_name='_%s' % (repo_id,)))

        # repo type
        response.mustcontain(
            '<i class="icon-%s">' % (backend.alias, )
        )
        # public/private
        response.mustcontain(
            """<i class="icon-unlock-alt">"""
        )

    def test_index_by_repo_having_id_path_in_name_hg(self, autologin_user):
        fixture.create_repo(name='repo_1')
        response = self.app.get(route_path('repo_summary', repo_name='repo_1'))

        try:
            response.mustcontain("repo_1")
        finally:
            RepoModel().delete(Repository.get_by_repo_name('repo_1'))
            Session().commit()

    def test_index_with_anonymous_access_disabled(
            self, backend, disable_anonymous_user):
        response = self.app.get(
            route_path('repo_summary', repo_name=backend.repo_name), status=302)
        assert 'login' in response.location

    def _enable_stats(self, repo):
        r = Repository.get_by_repo_name(repo)
        r.enable_statistics = True
        Session().add(r)
        Session().commit()

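    # Expected per-extension code statistics, keyed by backend alias; used by
    # test_repo_stats_code_stats_enabled to validate the 'code_stats' payload
    # returned for each backend's test repository.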
    expected_trending = {
        'hg': {
            "py": {"count": 68, "desc": ["Python"]},
            "rst": {"count": 16, "desc": ["Rst"]},
            "css": {"count": 2, "desc": ["Css"]},
            "sh": {"count": 2, "desc": ["Bash"]},
            "bat": {"count": 1, "desc": ["Batch"]},
            "cfg": {"count": 1, "desc": ["Ini"]},
            "html": {"count": 1, "desc": ["EvoqueHtml", "Html"]},
            "ini": {"count": 1, "desc": ["Ini"]},
            "js": {"count": 1, "desc": ["Javascript"]},
            "makefile": {"count": 1, "desc": ["Makefile", "Makefile"]}
        },
        'git': {
            "py": {"count": 68, "desc": ["Python"]},
            "rst": {"count": 16, "desc": ["Rst"]},
            "css": {"count": 2, "desc": ["Css"]},
            "sh": {"count": 2, "desc": ["Bash"]},
            "bat": {"count": 1, "desc": ["Batch"]},
            "cfg": {"count": 1, "desc": ["Ini"]},
            "html": {"count": 1, "desc": ["EvoqueHtml", "Html"]},
            "ini": {"count": 1, "desc": ["Ini"]},
            "js": {"count": 1, "desc": ["Javascript"]},
            "makefile": {"count": 1, "desc": ["Makefile", "Makefile"]}
        },
        'svn': {
            "py": {"count": 75, "desc": ["Python"]},
            "rst": {"count": 16, "desc": ["Rst"]},
            "html": {"count": 11, "desc": ["EvoqueHtml", "Html"]},
            "css": {"count": 2, "desc": ["Css"]},
            "bat": {"count": 1, "desc": ["Batch"]},
            "cfg": {"count": 1, "desc": ["Ini"]},
            "ini": {"count": 1, "desc": ["Ini"]},
            "js": {"count": 1, "desc": ["Javascript"]},
            "makefile": {"count": 1, "desc": ["Makefile", "Makefile"]},
            "sh": {"count": 1, "desc": ["Bash"]}
        },
    }

    def test_repo_stats(self, autologin_user, backend, xhr_header):
        response = self.app.get(
            route_path(
                'repo_stats', repo_name=backend.repo_name, commit_id='tip'),
            extra_environ=xhr_header,
            status=200)
        assert re.match(r'6[\d\.]+ KiB', response.json['size'])

    def test_repo_stats_code_stats_enabled(self, autologin_user, backend, xhr_header):
        repo_name = backend.repo_name

        # code stats
        self._enable_stats(repo_name)
        ScmModel().mark_for_invalidation(repo_name)

        response = self.app.get(
            route_path(
                'repo_stats', repo_name=backend.repo_name, commit_id='tip'),
            extra_environ=xhr_header,
            status=200)

        expected_data = self.expected_trending[backend.alias]
        returned_stats = response.json['code_stats']
        for k, v in expected_data.items():
            assert v == returned_stats[k]

    def test_repo_refs_data(self, backend):
        response = self.app.get(
            route_path('repo_refs_data', repo_name=backend.repo_name),
            status=200)

        # Ensure that there is the correct number of items in the result
        repo = backend.repo.scm_instance()
        data = response.json['results']
        items = sum(len(section['children']) for section in data)
        repo_refs = len(repo.branches) + len(repo.tags) + len(repo.bookmarks)
        assert items == repo_refs

    def test_index_shows_missing_requirements_message(
            self, backend, autologin_user):
        repo_name = backend.repo_name
        scm_patcher = mock.patch.object(
            Repository, 'scm_instance', side_effect=RepositoryRequirementError)

        with scm_patcher:
            response = self.app.get(
                route_path('repo_summary', repo_name=repo_name))
            assert_response = response.assert_response()
            assert_response.element_contains(
                '.main .alert-warning strong', 'Missing requirements')
            assert_response.element_contains(
                '.main .alert-warning',
                'Commits cannot be displayed, because this repository '
                'uses one or more extensions, which was not enabled.')

    def test_missing_requirements_page_does_not_contains_switch_to(
            self, autologin_user, backend):
        repo_name = backend.repo_name
        scm_patcher = mock.patch.object(
            Repository, 'scm_instance', side_effect=RepositoryRequirementError)

        with scm_patcher:
            response = self.app.get(route_path('repo_summary', repo_name=repo_name))
            response.mustcontain(no='Switch To')


@pytest.mark.usefixtures('app')
class TestRepoLocation(object):

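    # The 'non-ascii' variant exercises repository names containing unicode
    # characters on the filesystem.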
    @pytest.mark.parametrize("suffix", [u'', u'ąęł'], ids=['', 'non-ascii'])
    def test_missing_filesystem_repo(
            self, autologin_user, backend, suffix, csrf_token):
        repo = backend.create_repo(name_suffix=suffix)
        repo_name = repo.repo_name

        # delete from file system
        RepoModel()._delete_filesystem_repo(repo)

        # test if the repo is still in the database
        new_repo = RepoModel().get_by_repo_name(repo_name)
        assert new_repo.repo_name == repo_name

        # check if repo is not in the filesystem
        assert not repo_on_filesystem(repo_name)

        response = self.app.get(
            route_path('repo_summary', repo_name=safe_str(repo_name)), status=302)

        msg = 'The repository `%s` cannot be loaded in filesystem. ' \
              'Please check if it exist, or is not damaged.' % repo_name
        assert_session_flash(response, msg)

    @pytest.mark.parametrize("suffix", [u'', u'ąęł'], ids=['', 'non-ascii'])
    def test_missing_filesystem_repo_on_repo_check(
            self, autologin_user, backend, suffix, csrf_token):
        repo = backend.create_repo(name_suffix=suffix)
        repo_name = repo.repo_name

        # delete from file system
        RepoModel()._delete_filesystem_repo(repo)

        # test if the repo is still in the database
        new_repo = RepoModel().get_by_repo_name(repo_name)
        assert new_repo.repo_name == repo_name

        # check if repo is not in the filesystem
        assert not repo_on_filesystem(repo_name)

        # flush the session
        self.app.get(
            route_path('repo_summary', repo_name=safe_str(repo_name)),
            status=302)

        response = self.app.get(
            route_path('repo_creating_check', repo_name=safe_str(repo_name)),
            status=200)
        msg = 'The repository `%s` cannot be loaded in filesystem. ' \
              'Please check if it exist, or is not damaged.' % repo_name
        assert_session_flash(response, msg)


@pytest.fixture()
def summary_view(context_stub, request_stub, user_util):
    """
    Bootstrap view to test the view functions
    """
    request_stub.matched_route = AttributeDict(name='test_view')

    request_stub.user = user_util.create_user().AuthUser()
    request_stub.db_repo = user_util.create_repo()

    view = RepoSummaryView(context=context_stub, request=request_stub)
    return view


@pytest.mark.usefixtures('app')
class TestCreateReferenceData(object):

    @pytest.fixture()
    def example_refs(self):
        section_1_refs = OrderedDict((('a', 'a_id'), ('b', 'b_id')))
        example_refs = [
            ('section_1', section_1_refs, 't1'),
            ('section_2', {'c': 'c_id'}, 't2'),
        ]
        return example_refs

    def test_generates_refs_based_on_commit_ids(self, example_refs, summary_view):
        repo = mock.Mock()
        repo.name = 'test-repo'
        repo.alias = 'git'
        full_repo_name = 'pytest-repo-group/' + repo.name

        result = summary_view._create_reference_data(
            repo, full_repo_name, example_refs)

        expected_files_url = '/{}/files/'.format(full_repo_name)
        expected_result = [
            {
                'children': [
                    {
                        'id': 'a', 'idx': 0, 'raw_id': 'a_id', 'text': 'a', 'type': 't1',
                        'files_url': expected_files_url + 'a/?at=a',
                    },
                    {
                        'id': 'b', 'idx': 0, 'raw_id': 'b_id', 'text': 'b', 'type': 't1',
                        'files_url': expected_files_url + 'b/?at=b',
                    }
                ],
                'text': 'section_1'
            },
            {
                'children': [
                    {
                        'id': 'c', 'idx': 0, 'raw_id': 'c_id', 'text': 'c', 'type': 't2',
                        'files_url': expected_files_url + 'c/?at=c',
                    }
                ],
                'text': 'section_2'
            }]
        assert result == expected_result

    def test_generates_refs_with_path_for_svn(self, example_refs, summary_view):
        repo = mock.Mock()
        repo.name = 'test-repo'
        repo.alias = 'svn'
        full_repo_name = 'pytest-repo-group/' + repo.name

        result = summary_view._create_reference_data(
            repo, full_repo_name, example_refs)

        expected_files_url = '/{}/files/'.format(full_repo_name)
        expected_result = [
            {
                'children': [
                    {
                        'id': 'a@a_id', 'idx': 0, 'raw_id': 'a_id',
                        'text': 'a', 'type': 't1',
                        'files_url': expected_files_url + 'a_id/a?at=a',
                    },
                    {
                        'id': 'b@b_id', 'idx': 0, 'raw_id': 'b_id',
                        'text': 'b', 'type': 't1',
                        'files_url': expected_files_url + 'b_id/b?at=b',
                    }
                ],
                'text': 'section_1'
            },
            {
                'children': [
                    {
                        'id': 'c@c_id', 'idx': 0, 'raw_id': 'c_id',
                        'text': 'c', 'type': 't2',
                        'files_url': expected_files_url + 'c_id/c?at=c',
                    }
                ],
                'text': 'section_2'
            }
        ]
        assert result == expected_result


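# These tests mock out rhodecode.lib.helpers.route_path and assert on its call
# arguments: non-SVN URLs address files by ref name, SVN URLs pin the raw
# commit id and carry the ref name in the path, and ref names containing
# slashes fall back to the raw commit id.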
class TestCreateFilesUrl(object):

    def test_creates_non_svn_url(self, app, summary_view):
        repo = mock.Mock()
        repo.name = 'abcde'
        full_repo_name = 'test-repo-group/' + repo.name
        ref_name = 'branch1'
        raw_id = 'deadbeef0123456789'
        is_svn = False

        with mock.patch('rhodecode.lib.helpers.route_path') as url_mock:
            result = summary_view._create_files_url(
                repo, full_repo_name, ref_name, raw_id, is_svn)
        url_mock.assert_called_once_with(
            'repo_files', repo_name=full_repo_name, commit_id=ref_name,
            f_path='', _query=dict(at=ref_name))
        assert result == url_mock.return_value

    def test_creates_svn_url(self, app, summary_view):
        repo = mock.Mock()
        repo.name = 'abcde'
        full_repo_name = 'test-repo-group/' + repo.name
        ref_name = 'branch1'
        raw_id = 'deadbeef0123456789'
        is_svn = True

        with mock.patch('rhodecode.lib.helpers.route_path') as url_mock:
            result = summary_view._create_files_url(
                repo, full_repo_name, ref_name, raw_id, is_svn)
        url_mock.assert_called_once_with(
            'repo_files', repo_name=full_repo_name, f_path=ref_name,
            commit_id=raw_id, _query=dict(at=ref_name))
        assert result == url_mock.return_value

    def test_name_has_slashes(self, app, summary_view):
        repo = mock.Mock()
        repo.name = 'abcde'
        full_repo_name = 'test-repo-group/' + repo.name
        ref_name = 'branch1/branch2'
        raw_id = 'deadbeef0123456789'
        is_svn = False

        with mock.patch('rhodecode.lib.helpers.route_path') as url_mock:
            result = summary_view._create_files_url(
                repo, full_repo_name, ref_name, raw_id, is_svn)
        url_mock.assert_called_once_with(
            'repo_files', repo_name=full_repo_name, commit_id=raw_id,
            f_path='', _query=dict(at=ref_name))
        assert result == url_mock.return_value


class TestReferenceItems(object):
    repo = mock.Mock()
    repo.name = 'pytest-repo'
    repo_full_name = 'pytest-repo-group/' + repo.name
    ref_type = 'branch'
    fake_url = '/abcde/'

    @staticmethod
    def _format_function(name, id_):
        return 'format_function_{}_{}'.format(name, id_)

    def test_creates_required_amount_of_items(self, summary_view):
        amount = 100
        refs = {
            'ref{}'.format(i): '{0:040d}'.format(i)
            for i in range(amount)
        }

        url_patcher = mock.patch.object(summary_view, '_create_files_url')
        svn_patcher = mock.patch('rhodecode.lib.helpers.is_svn',
                                 return_value=False)

        with url_patcher as url_mock, svn_patcher:
            result = summary_view._create_reference_items(
                self.repo, self.repo_full_name, refs, self.ref_type,
                self._format_function)
        assert len(result) == amount
        assert url_mock.call_count == amount

    def test_single_item_details(self, summary_view):
        ref_name = 'ref1'
        ref_id = 'deadbeef'
        refs = {
            ref_name: ref_id
        }

        svn_patcher = mock.patch('rhodecode.lib.helpers.is_svn',
                                 return_value=False)

        url_patcher = mock.patch.object(
            summary_view, '_create_files_url', return_value=self.fake_url)

        with url_patcher as url_mock, svn_patcher:
            result = summary_view._create_reference_items(
                self.repo, self.repo_full_name, refs, self.ref_type,
                self._format_function)

        url_mock.assert_called_once_with(
            self.repo, self.repo_full_name, ref_name, ref_id, False)
        expected_result = [
            {
                'text': ref_name,
                'id': self._format_function(ref_name, ref_id),
                'raw_id': ref_id,
                'idx': 0,
                'type': self.ref_type,
                'files_url': self.fake_url
            }
        ]
        assert result == expected_result
@@ -1,819 +1,819 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2020 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import logging
import collections

from pyramid.httpexceptions import (
    HTTPNotFound, HTTPBadRequest, HTTPFound, HTTPForbidden, HTTPConflict)
from pyramid.renderers import render
from pyramid.response import Response

from rhodecode.apps._base import RepoAppView
from rhodecode.apps.file_store import utils as store_utils
from rhodecode.apps.file_store.exceptions import FileNotAllowedException, FileOverSizeException

from rhodecode.lib import diffs, codeblocks, channelstream
from rhodecode.lib.auth import (
    LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous, CSRFRequired)
from rhodecode.lib.ext_json import json
-from rhodecode.lib.compat import OrderedDict
+from collections import OrderedDict
from rhodecode.lib.diffs import (
    cache_diff, load_cached_diff, diff_cache_exist, get_diff_context,
    get_diff_whitespace_flag)
from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError, CommentVersionMismatch
import rhodecode.lib.helpers as h
from rhodecode.lib.utils2 import safe_unicode, str2bool, StrictAttributeDict, safe_str
43 from rhodecode.lib.utils2 import safe_unicode, str2bool, StrictAttributeDict, safe_str
44 from rhodecode.lib.vcs.backends.base import EmptyCommit
44 from rhodecode.lib.vcs.backends.base import EmptyCommit
45 from rhodecode.lib.vcs.exceptions import (
45 from rhodecode.lib.vcs.exceptions import (
46 RepositoryError, CommitDoesNotExistError)
46 RepositoryError, CommitDoesNotExistError)
47 from rhodecode.model.db import ChangesetComment, ChangesetStatus, FileStore, \
47 from rhodecode.model.db import ChangesetComment, ChangesetStatus, FileStore, \
48 ChangesetCommentHistory
48 ChangesetCommentHistory
49 from rhodecode.model.changeset_status import ChangesetStatusModel
49 from rhodecode.model.changeset_status import ChangesetStatusModel
50 from rhodecode.model.comment import CommentsModel
50 from rhodecode.model.comment import CommentsModel
51 from rhodecode.model.meta import Session
51 from rhodecode.model.meta import Session
52 from rhodecode.model.settings import VcsSettingsModel
52 from rhodecode.model.settings import VcsSettingsModel
53
53
54 log = logging.getLogger(__name__)
54 log = logging.getLogger(__name__)
55
55
56
56
57 def _update_with_GET(params, request):
57 def _update_with_GET(params, request):
58 for k in ['diff1', 'diff2', 'diff']:
58 for k in ['diff1', 'diff2', 'diff']:
59 params[k] += request.GET.getall(k)
59 params[k] += request.GET.getall(k)
60
60
61
61
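# A minimal sketch of what _update_with_GET does, using urllib.parse in
# place of a real Pyramid request (names below are illustrative only;
# parse_qs returns {key: [values]}, much like request.GET.getall(key)):
from urllib.parse import parse_qs

def _merge_get_params_sketch(params, query_string):
    parsed = parse_qs(query_string)
    for k in ['diff1', 'diff2', 'diff']:
        params[k] += parsed.get(k, [])
    return params

# repeated diff1 values in the query string are all collected
assert _merge_get_params_sketch(
    {'diff1': [], 'diff2': [], 'diff': []},
    'diff1=abc&diff1=def&diff=full') == {
        'diff1': ['abc', 'def'], 'diff2': [], 'diff': ['full']}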
62 class RepoCommitsView(RepoAppView):
62 class RepoCommitsView(RepoAppView):
63 def load_default_context(self):
63 def load_default_context(self):
64 c = self._get_local_tmpl_context(include_app_defaults=True)
64 c = self._get_local_tmpl_context(include_app_defaults=True)
65 c.rhodecode_repo = self.rhodecode_vcs_repo
65 c.rhodecode_repo = self.rhodecode_vcs_repo
66
66
67 return c
67 return c
68
68
69 def _is_diff_cache_enabled(self, target_repo):
69 def _is_diff_cache_enabled(self, target_repo):
70 caching_enabled = self._get_general_setting(
70 caching_enabled = self._get_general_setting(
71 target_repo, 'rhodecode_diff_cache')
71 target_repo, 'rhodecode_diff_cache')
72 log.debug('Diff caching enabled: %s', caching_enabled)
72 log.debug('Diff caching enabled: %s', caching_enabled)
73 return caching_enabled
73 return caching_enabled
74
74
75 def _commit(self, commit_id_range, method):
75 def _commit(self, commit_id_range, method):
76 _ = self.request.translate
76 _ = self.request.translate
77 c = self.load_default_context()
77 c = self.load_default_context()
78 c.fulldiff = self.request.GET.get('fulldiff')
78 c.fulldiff = self.request.GET.get('fulldiff')
79 redirect_to_combined = str2bool(self.request.GET.get('redirect_combined'))
79 redirect_to_combined = str2bool(self.request.GET.get('redirect_combined'))
80
80
81 # fetch global flags of ignore ws or context lines
81 # fetch global flags of ignore ws or context lines
82 diff_context = get_diff_context(self.request)
82 diff_context = get_diff_context(self.request)
83 hide_whitespace_changes = get_diff_whitespace_flag(self.request)
83 hide_whitespace_changes = get_diff_whitespace_flag(self.request)
84
84
85 # diff_limit cuts off the whole diff once the limit is exceeded;
85 # diff_limit cuts off the whole diff once the limit is exceeded;
86 # otherwise it just hides the big files from the front-end
86 # otherwise it just hides the big files from the front-end
87 diff_limit = c.visual.cut_off_limit_diff
87 diff_limit = c.visual.cut_off_limit_diff
88 file_limit = c.visual.cut_off_limit_file
88 file_limit = c.visual.cut_off_limit_file
89
89
90 # get ranges of commit ids if present
90 # get ranges of commit ids if present
91 commit_range = commit_id_range.split('...')[:2]
91 commit_range = commit_id_range.split('...')[:2]
92
92
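# Hedged illustration of the range parsing above: a single id stays a
# one-element list, 'a...b' yields the two endpoints, extras are dropped.
assert 'deadbeef'.split('...')[:2] == ['deadbeef']
assert 'aaaa...bbbb'.split('...')[:2] == ['aaaa', 'bbbb']
assert 'a...b...c'.split('...')[:2] == ['a', 'b']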
93 try:
93 try:
94 pre_load = ['affected_files', 'author', 'branch', 'date',
94 pre_load = ['affected_files', 'author', 'branch', 'date',
95 'message', 'parents']
95 'message', 'parents']
96 if self.rhodecode_vcs_repo.alias == 'hg':
96 if self.rhodecode_vcs_repo.alias == 'hg':
97 pre_load += ['hidden', 'obsolete', 'phase']
97 pre_load += ['hidden', 'obsolete', 'phase']
98
98
99 if len(commit_range) == 2:
99 if len(commit_range) == 2:
100 commits = self.rhodecode_vcs_repo.get_commits(
100 commits = self.rhodecode_vcs_repo.get_commits(
101 start_id=commit_range[0], end_id=commit_range[1],
101 start_id=commit_range[0], end_id=commit_range[1],
102 pre_load=pre_load, translate_tags=False)
102 pre_load=pre_load, translate_tags=False)
103 commits = list(commits)
103 commits = list(commits)
104 else:
104 else:
105 commits = [self.rhodecode_vcs_repo.get_commit(
105 commits = [self.rhodecode_vcs_repo.get_commit(
106 commit_id=commit_id_range, pre_load=pre_load)]
106 commit_id=commit_id_range, pre_load=pre_load)]
107
107
108 c.commit_ranges = commits
108 c.commit_ranges = commits
109 if not c.commit_ranges:
109 if not c.commit_ranges:
110 raise RepositoryError('The commit range returned an empty result')
110 raise RepositoryError('The commit range returned an empty result')
111 except CommitDoesNotExistError as e:
111 except CommitDoesNotExistError as e:
112 msg = _('No such commit exists. Original exception: `{}`').format(safe_str(e))
112 msg = _('No such commit exists. Original exception: `{}`').format(safe_str(e))
113 h.flash(msg, category='error')
113 h.flash(msg, category='error')
114 raise HTTPNotFound()
114 raise HTTPNotFound()
115 except Exception:
115 except Exception:
116 log.exception("General failure")
116 log.exception("General failure")
117 raise HTTPNotFound()
117 raise HTTPNotFound()
118 single_commit = len(c.commit_ranges) == 1
118 single_commit = len(c.commit_ranges) == 1
119
119
120 if redirect_to_combined and not single_commit:
120 if redirect_to_combined and not single_commit:
121 source_ref = getattr(c.commit_ranges[0].parents[0]
121 source_ref = getattr(c.commit_ranges[0].parents[0]
122 if c.commit_ranges[0].parents else h.EmptyCommit(), 'raw_id')
122 if c.commit_ranges[0].parents else h.EmptyCommit(), 'raw_id')
123 target_ref = c.commit_ranges[-1].raw_id
123 target_ref = c.commit_ranges[-1].raw_id
124 next_url = h.route_path(
124 next_url = h.route_path(
125 'repo_compare',
125 'repo_compare',
126 repo_name=c.repo_name,
126 repo_name=c.repo_name,
127 source_ref_type='rev',
127 source_ref_type='rev',
128 source_ref=source_ref,
128 source_ref=source_ref,
129 target_ref_type='rev',
129 target_ref_type='rev',
130 target_ref=target_ref)
130 target_ref=target_ref)
131 raise HTTPFound(next_url)
131 raise HTTPFound(next_url)
132
132
133 c.changes = OrderedDict()
133 c.changes = OrderedDict()
134 c.lines_added = 0
134 c.lines_added = 0
135 c.lines_deleted = 0
135 c.lines_deleted = 0
136
136
137 # auto-collapse if we have more commits than the limit
137 # auto-collapse if we have more commits than the limit
138 collapse_limit = diffs.DiffProcessor._collapse_commits_over
138 collapse_limit = diffs.DiffProcessor._collapse_commits_over
139 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
139 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
140
140
141 c.commit_statuses = ChangesetStatus.STATUSES
141 c.commit_statuses = ChangesetStatus.STATUSES
142 c.inline_comments = []
142 c.inline_comments = []
143 c.files = []
143 c.files = []
144
144
145 c.comments = []
145 c.comments = []
146 c.unresolved_comments = []
146 c.unresolved_comments = []
147 c.resolved_comments = []
147 c.resolved_comments = []
148
148
149 # Single commit
149 # Single commit
150 if single_commit:
150 if single_commit:
151 commit = c.commit_ranges[0]
151 commit = c.commit_ranges[0]
152 c.comments = CommentsModel().get_comments(
152 c.comments = CommentsModel().get_comments(
153 self.db_repo.repo_id,
153 self.db_repo.repo_id,
154 revision=commit.raw_id)
154 revision=commit.raw_id)
155
155
156 # comments from PR
156 # comments from PR
157 statuses = ChangesetStatusModel().get_statuses(
157 statuses = ChangesetStatusModel().get_statuses(
158 self.db_repo.repo_id, commit.raw_id,
158 self.db_repo.repo_id, commit.raw_id,
159 with_revisions=True)
159 with_revisions=True)
160
160
161 prs = set()
161 prs = set()
162 reviewers = list()
162 reviewers = list()
163 reviewers_duplicates = set() # avoid duplicate entries from multiple votes
163 reviewers_duplicates = set() # avoid duplicate entries from multiple votes
164 for c_status in statuses:
164 for c_status in statuses:
165
165
166 # extract associated pull-requests from votes
166 # extract associated pull-requests from votes
167 if c_status.pull_request:
167 if c_status.pull_request:
168 prs.add(c_status.pull_request)
168 prs.add(c_status.pull_request)
169
169
170 # extract reviewers
170 # extract reviewers
171 _user_id = c_status.author.user_id
171 _user_id = c_status.author.user_id
172 if _user_id not in reviewers_duplicates:
172 if _user_id not in reviewers_duplicates:
173 reviewers.append(
173 reviewers.append(
174 StrictAttributeDict({
174 StrictAttributeDict({
175 'user': c_status.author,
175 'user': c_status.author,
176
176
177 # fake attributes for the commit page that we don't have,
177 # fake attributes for the commit page that we don't have,
178 # but we share the display with the PR page
178 # but we share the display with the PR page
179 'mandatory': False,
179 'mandatory': False,
180 'reasons': [],
180 'reasons': [],
181 'rule_user_group_data': lambda: None
181 'rule_user_group_data': lambda: None
182 })
182 })
183 )
183 )
184 reviewers_duplicates.add(_user_id)
184 reviewers_duplicates.add(_user_id)
185
185
186 c.reviewers_count = len(reviewers)
186 c.reviewers_count = len(reviewers)
187 c.observers_count = 0
187 c.observers_count = 0
188
188
189 # from associated statuses, check the pull requests, and
189 # from associated statuses, check the pull requests, and
190 # show comments from them
190 # show comments from them
191 for pr in prs:
191 for pr in prs:
192 c.comments.extend(pr.comments)
192 c.comments.extend(pr.comments)
193
193
194 c.unresolved_comments = CommentsModel()\
194 c.unresolved_comments = CommentsModel()\
195 .get_commit_unresolved_todos(commit.raw_id)
195 .get_commit_unresolved_todos(commit.raw_id)
196 c.resolved_comments = CommentsModel()\
196 c.resolved_comments = CommentsModel()\
197 .get_commit_resolved_todos(commit.raw_id)
197 .get_commit_resolved_todos(commit.raw_id)
198
198
199 c.inline_comments_flat = CommentsModel()\
199 c.inline_comments_flat = CommentsModel()\
200 .get_commit_inline_comments(commit.raw_id)
200 .get_commit_inline_comments(commit.raw_id)
201
201
202 review_statuses = ChangesetStatusModel().aggregate_votes_by_user(
202 review_statuses = ChangesetStatusModel().aggregate_votes_by_user(
203 statuses, reviewers)
203 statuses, reviewers)
204
204
205 c.commit_review_status = ChangesetStatus.STATUS_NOT_REVIEWED
205 c.commit_review_status = ChangesetStatus.STATUS_NOT_REVIEWED
206
206
207 c.commit_set_reviewers_data_json = collections.OrderedDict({'reviewers': []})
207 c.commit_set_reviewers_data_json = collections.OrderedDict({'reviewers': []})
208
208
209 for review_obj, member, reasons, mandatory, status in review_statuses:
209 for review_obj, member, reasons, mandatory, status in review_statuses:
210 member_reviewer = h.reviewer_as_json(
210 member_reviewer = h.reviewer_as_json(
211 member, reasons=reasons, mandatory=mandatory, role=None,
211 member, reasons=reasons, mandatory=mandatory, role=None,
212 user_group=None
212 user_group=None
213 )
213 )
214
214
215 current_review_status = status[0][1].status if status else ChangesetStatus.STATUS_NOT_REVIEWED
215 current_review_status = status[0][1].status if status else ChangesetStatus.STATUS_NOT_REVIEWED
216 member_reviewer['review_status'] = current_review_status
216 member_reviewer['review_status'] = current_review_status
217 member_reviewer['review_status_label'] = h.commit_status_lbl(current_review_status)
217 member_reviewer['review_status_label'] = h.commit_status_lbl(current_review_status)
218 member_reviewer['allowed_to_update'] = False
218 member_reviewer['allowed_to_update'] = False
219 c.commit_set_reviewers_data_json['reviewers'].append(member_reviewer)
219 c.commit_set_reviewers_data_json['reviewers'].append(member_reviewer)
220
220
221 c.commit_set_reviewers_data_json = json.dumps(c.commit_set_reviewers_data_json)
221 c.commit_set_reviewers_data_json = json.dumps(c.commit_set_reviewers_data_json)
222
222
223 # NOTE(marcink): this uses the same voting logic as in pull-requests
223 # NOTE(marcink): this uses the same voting logic as in pull-requests
224 c.commit_review_status = ChangesetStatusModel().calculate_status(review_statuses)
224 c.commit_review_status = ChangesetStatusModel().calculate_status(review_statuses)
225 c.commit_broadcast_channel = channelstream.comment_channel(c.repo_name, commit_obj=commit)
225 c.commit_broadcast_channel = channelstream.comment_channel(c.repo_name, commit_obj=commit)
226
226
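# Hedged sketch of the reviewer de-duplication in the loop above: each
# voting user contributes exactly one reviewer entry no matter how many
# status votes they cast (the dict shape here is illustrative, not the
# real ChangesetStatus model):
def dedup_reviewers_sketch(statuses):
    seen, reviewers = set(), []
    for status in statuses:
        if status['user_id'] not in seen:
            reviewers.append(status['user_id'])
            seen.add(status['user_id'])
    return reviewers

assert dedup_reviewers_sketch(
    [{'user_id': 1}, {'user_id': 1}, {'user_id': 2}]) == [1, 2]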
227 diff = None
227 diff = None
228 # Iterate over ranges (default commit view is always one commit)
228 # Iterate over ranges (default commit view is always one commit)
229 for commit in c.commit_ranges:
229 for commit in c.commit_ranges:
230 c.changes[commit.raw_id] = []
230 c.changes[commit.raw_id] = []
231
231
232 commit2 = commit
232 commit2 = commit
233 commit1 = commit.first_parent
233 commit1 = commit.first_parent
234
234
235 if method == 'show':
235 if method == 'show':
236 inline_comments = CommentsModel().get_inline_comments(
236 inline_comments = CommentsModel().get_inline_comments(
237 self.db_repo.repo_id, revision=commit.raw_id)
237 self.db_repo.repo_id, revision=commit.raw_id)
238 c.inline_cnt = len(CommentsModel().get_inline_comments_as_list(
238 c.inline_cnt = len(CommentsModel().get_inline_comments_as_list(
239 inline_comments))
239 inline_comments))
240 c.inline_comments = inline_comments
240 c.inline_comments = inline_comments
241
241
242 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(
242 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(
243 self.db_repo)
243 self.db_repo)
244 cache_file_path = diff_cache_exist(
244 cache_file_path = diff_cache_exist(
245 cache_path, 'diff', commit.raw_id,
245 cache_path, 'diff', commit.raw_id,
246 hide_whitespace_changes, diff_context, c.fulldiff)
246 hide_whitespace_changes, diff_context, c.fulldiff)
247
247
248 caching_enabled = self._is_diff_cache_enabled(self.db_repo)
248 caching_enabled = self._is_diff_cache_enabled(self.db_repo)
249 force_recache = str2bool(self.request.GET.get('force_recache'))
249 force_recache = str2bool(self.request.GET.get('force_recache'))
250
250
251 cached_diff = None
251 cached_diff = None
252 if caching_enabled:
252 if caching_enabled:
253 cached_diff = load_cached_diff(cache_file_path)
253 cached_diff = load_cached_diff(cache_file_path)
254
254
255 has_proper_diff_cache = cached_diff and cached_diff.get('diff')
255 has_proper_diff_cache = cached_diff and cached_diff.get('diff')
256 if not force_recache and has_proper_diff_cache:
256 if not force_recache and has_proper_diff_cache:
257 diffset = cached_diff['diff']
257 diffset = cached_diff['diff']
258 else:
258 else:
259 vcs_diff = self.rhodecode_vcs_repo.get_diff(
259 vcs_diff = self.rhodecode_vcs_repo.get_diff(
260 commit1, commit2,
260 commit1, commit2,
261 ignore_whitespace=hide_whitespace_changes,
261 ignore_whitespace=hide_whitespace_changes,
262 context=diff_context)
262 context=diff_context)
263
263
264 diff_processor = diffs.DiffProcessor(
264 diff_processor = diffs.DiffProcessor(
265 vcs_diff, format='newdiff', diff_limit=diff_limit,
265 vcs_diff, format='newdiff', diff_limit=diff_limit,
266 file_limit=file_limit, show_full_diff=c.fulldiff)
266 file_limit=file_limit, show_full_diff=c.fulldiff)
267
267
268 _parsed = diff_processor.prepare()
268 _parsed = diff_processor.prepare()
269
269
270 diffset = codeblocks.DiffSet(
270 diffset = codeblocks.DiffSet(
271 repo_name=self.db_repo_name,
271 repo_name=self.db_repo_name,
272 source_node_getter=codeblocks.diffset_node_getter(commit1),
272 source_node_getter=codeblocks.diffset_node_getter(commit1),
273 target_node_getter=codeblocks.diffset_node_getter(commit2))
273 target_node_getter=codeblocks.diffset_node_getter(commit2))
274
274
275 diffset = self.path_filter.render_patchset_filtered(
275 diffset = self.path_filter.render_patchset_filtered(
276 diffset, _parsed, commit1.raw_id, commit2.raw_id)
276 diffset, _parsed, commit1.raw_id, commit2.raw_id)
277
277
278 # save cached diff
278 # save cached diff
279 if caching_enabled:
279 if caching_enabled:
280 cache_diff(cache_file_path, diffset, None)
280 cache_diff(cache_file_path, diffset, None)
281
281
282 c.limited_diff = diffset.limited_diff
282 c.limited_diff = diffset.limited_diff
283 c.changes[commit.raw_id] = diffset
283 c.changes[commit.raw_id] = diffset
284 else:
284 else:
285 # TODO(marcink): no cache usage here...
285 # TODO(marcink): no cache usage here...
286 _diff = self.rhodecode_vcs_repo.get_diff(
286 _diff = self.rhodecode_vcs_repo.get_diff(
287 commit1, commit2,
287 commit1, commit2,
288 ignore_whitespace=hide_whitespace_changes, context=diff_context)
288 ignore_whitespace=hide_whitespace_changes, context=diff_context)
289 diff_processor = diffs.DiffProcessor(
289 diff_processor = diffs.DiffProcessor(
290 _diff, format='newdiff', diff_limit=diff_limit,
290 _diff, format='newdiff', diff_limit=diff_limit,
291 file_limit=file_limit, show_full_diff=c.fulldiff)
291 file_limit=file_limit, show_full_diff=c.fulldiff)
292 # for downloads/raw we only need the raw diff, nothing else
292 # for downloads/raw we only need the raw diff, nothing else
293 diff = self.path_filter.get_raw_patch(diff_processor)
293 diff = self.path_filter.get_raw_patch(diff_processor)
294 c.changes[commit.raw_id] = [None, None, None, None, diff, None, None]
294 c.changes[commit.raw_id] = [None, None, None, None, diff, None, None]
295
295
296 # sort comments by how they were generated
296 # sort comments by how they were generated
297 c.comments = sorted(c.comments, key=lambda x: x.comment_id)
297 c.comments = sorted(c.comments, key=lambda x: x.comment_id)
298 c.at_version_num = None
298 c.at_version_num = None
299
299
300 if len(c.commit_ranges) == 1:
300 if len(c.commit_ranges) == 1:
301 c.commit = c.commit_ranges[0]
301 c.commit = c.commit_ranges[0]
302 c.parent_tmpl = ''.join(
302 c.parent_tmpl = ''.join(
303 '# Parent %s\n' % x.raw_id for x in c.commit.parents)
303 '# Parent %s\n' % x.raw_id for x in c.commit.parents)
304
304
305 if method == 'download':
305 if method == 'download':
306 response = Response(diff)
306 response = Response(diff)
307 response.content_type = 'text/plain'
307 response.content_type = 'text/plain'
308 response.content_disposition = (
308 response.content_disposition = (
309 'attachment; filename=%s.diff' % commit_id_range[:12])
309 'attachment; filename=%s.diff' % commit_id_range[:12])
310 return response
310 return response
311 elif method == 'patch':
311 elif method == 'patch':
312 c.diff = safe_unicode(diff)
312 c.diff = safe_unicode(diff)
313 patch = render(
313 patch = render(
314 'rhodecode:templates/changeset/patch_changeset.mako',
314 'rhodecode:templates/changeset/patch_changeset.mako',
315 self._get_template_context(c), self.request)
315 self._get_template_context(c), self.request)
316 response = Response(patch)
316 response = Response(patch)
317 response.content_type = 'text/plain'
317 response.content_type = 'text/plain'
318 return response
318 return response
319 elif method == 'raw':
319 elif method == 'raw':
320 response = Response(diff)
320 response = Response(diff)
321 response.content_type = 'text/plain'
321 response.content_type = 'text/plain'
322 return response
322 return response
323 elif method == 'show':
323 elif method == 'show':
324 if len(c.commit_ranges) == 1:
324 if len(c.commit_ranges) == 1:
325 html = render(
325 html = render(
326 'rhodecode:templates/changeset/changeset.mako',
326 'rhodecode:templates/changeset/changeset.mako',
327 self._get_template_context(c), self.request)
327 self._get_template_context(c), self.request)
328 return Response(html)
328 return Response(html)
329 else:
329 else:
330 c.ancestor = None
330 c.ancestor = None
331 c.target_repo = self.db_repo
331 c.target_repo = self.db_repo
332 html = render(
332 html = render(
333 'rhodecode:templates/changeset/changeset_range.mako',
333 'rhodecode:templates/changeset/changeset_range.mako',
334 self._get_template_context(c), self.request)
334 self._get_template_context(c), self.request)
335 return Response(html)
335 return Response(html)
336
336
337 raise HTTPBadRequest()
337 raise HTTPBadRequest()
338
338
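# Illustrative note on the 'download' branch above: the attachment name is
# built from the first 12 characters of the requested commit id (or range
# string), e.g. with a made-up id:
commit_id_range = 'abcdef0123456789abcdef0123456789abcdef01'
assert ('attachment; filename=%s.diff' % commit_id_range[:12]
        == 'attachment; filename=abcdef012345.diff')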
339 @LoginRequired()
339 @LoginRequired()
340 @HasRepoPermissionAnyDecorator(
340 @HasRepoPermissionAnyDecorator(
341 'repository.read', 'repository.write', 'repository.admin')
341 'repository.read', 'repository.write', 'repository.admin')
342 def repo_commit_show(self):
342 def repo_commit_show(self):
343 commit_id = self.request.matchdict['commit_id']
343 commit_id = self.request.matchdict['commit_id']
344 return self._commit(commit_id, method='show')
344 return self._commit(commit_id, method='show')
345
345
346 @LoginRequired()
346 @LoginRequired()
347 @HasRepoPermissionAnyDecorator(
347 @HasRepoPermissionAnyDecorator(
348 'repository.read', 'repository.write', 'repository.admin')
348 'repository.read', 'repository.write', 'repository.admin')
349 def repo_commit_raw(self):
349 def repo_commit_raw(self):
350 commit_id = self.request.matchdict['commit_id']
350 commit_id = self.request.matchdict['commit_id']
351 return self._commit(commit_id, method='raw')
351 return self._commit(commit_id, method='raw')
352
352
353 @LoginRequired()
353 @LoginRequired()
354 @HasRepoPermissionAnyDecorator(
354 @HasRepoPermissionAnyDecorator(
355 'repository.read', 'repository.write', 'repository.admin')
355 'repository.read', 'repository.write', 'repository.admin')
356 def repo_commit_patch(self):
356 def repo_commit_patch(self):
357 commit_id = self.request.matchdict['commit_id']
357 commit_id = self.request.matchdict['commit_id']
358 return self._commit(commit_id, method='patch')
358 return self._commit(commit_id, method='patch')
359
359
360 @LoginRequired()
360 @LoginRequired()
361 @HasRepoPermissionAnyDecorator(
361 @HasRepoPermissionAnyDecorator(
362 'repository.read', 'repository.write', 'repository.admin')
362 'repository.read', 'repository.write', 'repository.admin')
363 def repo_commit_download(self):
363 def repo_commit_download(self):
364 commit_id = self.request.matchdict['commit_id']
364 commit_id = self.request.matchdict['commit_id']
365 return self._commit(commit_id, method='download')
365 return self._commit(commit_id, method='download')
366
366
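# The four thin views above differ only in the method string passed to
# _commit; summarizing the dispatch (response types as implemented above):
#   repo_commit_show     -> 'show'     -> rendered HTML changeset page
#   repo_commit_raw      -> 'raw'      -> text/plain raw diff
#   repo_commit_patch    -> 'patch'    -> text/plain rendered patch
#   repo_commit_download -> 'download' -> .diff file attachment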
367 def _commit_comments_create(self, commit_id, comments):
367 def _commit_comments_create(self, commit_id, comments):
368 _ = self.request.translate
368 _ = self.request.translate
369 data = {}
369 data = {}
370 if not comments:
370 if not comments:
371 return
371 return
372
372
373 commit = self.db_repo.get_commit(commit_id)
373 commit = self.db_repo.get_commit(commit_id)
374
374
375 all_drafts = len([x for x in comments if str2bool(x['is_draft'])]) == len(comments)
375 all_drafts = len([x for x in comments if str2bool(x['is_draft'])]) == len(comments)
376 for entry in comments:
376 for entry in comments:
377 c = self.load_default_context()
377 c = self.load_default_context()
378 comment_type = entry['comment_type']
378 comment_type = entry['comment_type']
379 text = entry['text']
379 text = entry['text']
380 status = entry['status']
380 status = entry['status']
381 is_draft = str2bool(entry['is_draft'])
381 is_draft = str2bool(entry['is_draft'])
382 resolves_comment_id = entry['resolves_comment_id']
382 resolves_comment_id = entry['resolves_comment_id']
383 f_path = entry['f_path']
383 f_path = entry['f_path']
384 line_no = entry['line']
384 line_no = entry['line']
385 target_elem_id = 'file-{}'.format(h.safeid(h.safe_unicode(f_path)))
385 target_elem_id = 'file-{}'.format(h.safeid(h.safe_unicode(f_path)))
386
386
387 if status:
387 if status:
388 text = text or (_('Status change %(transition_icon)s %(status)s')
388 text = text or (_('Status change %(transition_icon)s %(status)s')
389 % {'transition_icon': '>',
389 % {'transition_icon': '>',
390 'status': ChangesetStatus.get_status_lbl(status)})
390 'status': ChangesetStatus.get_status_lbl(status)})
391
391
392 comment = CommentsModel().create(
392 comment = CommentsModel().create(
393 text=text,
393 text=text,
394 repo=self.db_repo.repo_id,
394 repo=self.db_repo.repo_id,
395 user=self._rhodecode_db_user.user_id,
395 user=self._rhodecode_db_user.user_id,
396 commit_id=commit_id,
396 commit_id=commit_id,
397 f_path=f_path,
397 f_path=f_path,
398 line_no=line_no,
398 line_no=line_no,
399 status_change=(ChangesetStatus.get_status_lbl(status)
399 status_change=(ChangesetStatus.get_status_lbl(status)
400 if status else None),
400 if status else None),
401 status_change_type=status,
401 status_change_type=status,
402 comment_type=comment_type,
402 comment_type=comment_type,
403 is_draft=is_draft,
403 is_draft=is_draft,
404 resolves_comment_id=resolves_comment_id,
404 resolves_comment_id=resolves_comment_id,
405 auth_user=self._rhodecode_user,
405 auth_user=self._rhodecode_user,
406 send_email=not is_draft, # skip notification for draft comments
406 send_email=not is_draft, # skip notification for draft comments
407 )
407 )
408 is_inline = comment.is_inline
408 is_inline = comment.is_inline
409
409
410 # get status if set !
410 # get status if set !
411 if status:
411 if status:
412 # `dont_allow_on_closed_pull_request = True` means
412 # `dont_allow_on_closed_pull_request = True` means
413 # if latest status was from pull request and it's closed
413 # if latest status was from pull request and it's closed
414 # disallow changing status !
414 # disallow changing status !
415
415
416 try:
416 try:
417 ChangesetStatusModel().set_status(
417 ChangesetStatusModel().set_status(
418 self.db_repo.repo_id,
418 self.db_repo.repo_id,
419 status,
419 status,
420 self._rhodecode_db_user.user_id,
420 self._rhodecode_db_user.user_id,
421 comment,
421 comment,
422 revision=commit_id,
422 revision=commit_id,
423 dont_allow_on_closed_pull_request=True
423 dont_allow_on_closed_pull_request=True
424 )
424 )
425 except StatusChangeOnClosedPullRequestError:
425 except StatusChangeOnClosedPullRequestError:
426 msg = _('Changing the status of a commit associated with '
426 msg = _('Changing the status of a commit associated with '
427 'a closed pull request is not allowed')
427 'a closed pull request is not allowed')
428 log.exception(msg)
428 log.exception(msg)
429 h.flash(msg, category='warning')
429 h.flash(msg, category='warning')
430 raise HTTPFound(h.route_path(
430 raise HTTPFound(h.route_path(
431 'repo_commit', repo_name=self.db_repo_name,
431 'repo_commit', repo_name=self.db_repo_name,
432 commit_id=commit_id))
432 commit_id=commit_id))
433
433
434 Session().flush()
434 Session().flush()
435 # a flush plus refresh is required so relationships
435 # a flush plus refresh is required so relationships
436 # loaded on the comment become accessible
436 # loaded on the comment become accessible
437 Session().refresh(comment)
437 Session().refresh(comment)
438
438
439 # skip notifications for drafts
439 # skip notifications for drafts
440 if not is_draft:
440 if not is_draft:
441 CommentsModel().trigger_commit_comment_hook(
441 CommentsModel().trigger_commit_comment_hook(
442 self.db_repo, self._rhodecode_user, 'create',
442 self.db_repo, self._rhodecode_user, 'create',
443 data={'comment': comment, 'commit': commit})
443 data={'comment': comment, 'commit': commit})
444
444
445 comment_id = comment.comment_id
445 comment_id = comment.comment_id
446 data[comment_id] = {
446 data[comment_id] = {
447 'target_id': target_elem_id
447 'target_id': target_elem_id
448 }
448 }
449 Session().flush()
449 Session().flush()
450
450
451 c.co = comment
451 c.co = comment
452 c.at_version_num = 0
452 c.at_version_num = 0
453 c.is_new = True
453 c.is_new = True
454 rendered_comment = render(
454 rendered_comment = render(
455 'rhodecode:templates/changeset/changeset_comment_block.mako',
455 'rhodecode:templates/changeset/changeset_comment_block.mako',
456 self._get_template_context(c), self.request)
456 self._get_template_context(c), self.request)
457
457
458 data[comment_id].update(comment.get_dict())
458 data[comment_id].update(comment.get_dict())
459 data[comment_id].update({'rendered_text': rendered_comment})
459 data[comment_id].update({'rendered_text': rendered_comment})
460
460
461 # finalize, commit and redirect
461 # finalize, commit and redirect
462 Session().commit()
462 Session().commit()
463
463
464 # skip channelstream for draft comments
464 # skip channelstream for draft comments
465 if not all_drafts:
465 if not all_drafts:
466 comment_broadcast_channel = channelstream.comment_channel(
466 comment_broadcast_channel = channelstream.comment_channel(
467 self.db_repo_name, commit_obj=commit)
467 self.db_repo_name, commit_obj=commit)
468
468
469 comment_data = data
469 comment_data = data
470 posted_comment_type = 'inline' if is_inline else 'general'
470 posted_comment_type = 'inline' if is_inline else 'general'
471 if len(data) == 1:
471 if len(data) == 1:
472 msg = _('posted {} new {} comment').format(len(data), posted_comment_type)
472 msg = _('posted {} new {} comment').format(len(data), posted_comment_type)
473 else:
473 else:
474 msg = _('posted {} new {} comments').format(len(data), posted_comment_type)
474 msg = _('posted {} new {} comments').format(len(data), posted_comment_type)
475
475
476 channelstream.comment_channelstream_push(
476 channelstream.comment_channelstream_push(
477 self.request, comment_broadcast_channel, self._rhodecode_user, msg,
477 self.request, comment_broadcast_channel, self._rhodecode_user, msg,
478 comment_data=comment_data)
478 comment_data=comment_data)
479
479
480 return data
480 return data
481
481
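# A minimal sketch of the all_drafts flag computed at the top of
# _commit_comments_create (str2bool replaced by a tiny stand-in): it is
# True only when every posted comment is marked as a draft.
def str2bool_sketch(v):
    return str(v).lower() in ('true', '1', 'yes', 'on')

comments = [{'is_draft': 'true'}, {'is_draft': 'false'}]
all_drafts = len([x for x in comments
                  if str2bool_sketch(x['is_draft'])]) == len(comments)
assert all_drafts is False  # one non-draft comment disables the draft path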
482 @LoginRequired()
482 @LoginRequired()
483 @NotAnonymous()
483 @NotAnonymous()
484 @HasRepoPermissionAnyDecorator(
484 @HasRepoPermissionAnyDecorator(
485 'repository.read', 'repository.write', 'repository.admin')
485 'repository.read', 'repository.write', 'repository.admin')
486 @CSRFRequired()
486 @CSRFRequired()
487 def repo_commit_comment_create(self):
487 def repo_commit_comment_create(self):
488 _ = self.request.translate
488 _ = self.request.translate
489 commit_id = self.request.matchdict['commit_id']
489 commit_id = self.request.matchdict['commit_id']
490
490
491 multi_commit_ids = []
491 multi_commit_ids = []
492 for _commit_id in self.request.POST.get('commit_ids', '').split(','):
492 for _commit_id in self.request.POST.get('commit_ids', '').split(','):
493 if _commit_id not in ['', None, EmptyCommit.raw_id]:
493 if _commit_id not in ['', None, EmptyCommit.raw_id]:
494 if _commit_id not in multi_commit_ids:
494 if _commit_id not in multi_commit_ids:
495 multi_commit_ids.append(_commit_id)
495 multi_commit_ids.append(_commit_id)
496
496
497 commit_ids = multi_commit_ids or [commit_id]
497 commit_ids = multi_commit_ids or [commit_id]
498
498
499 data = []
499 data = []
500 # Multiple comments for each passed commit id
500 # Multiple comments for each passed commit id
501 for current_id in filter(None, commit_ids):
501 for current_id in filter(None, commit_ids):
502 comment_data = {
502 comment_data = {
503 'comment_type': self.request.POST.get('comment_type'),
503 'comment_type': self.request.POST.get('comment_type'),
504 'text': self.request.POST.get('text'),
504 'text': self.request.POST.get('text'),
505 'status': self.request.POST.get('changeset_status', None),
505 'status': self.request.POST.get('changeset_status', None),
506 'is_draft': self.request.POST.get('draft'),
506 'is_draft': self.request.POST.get('draft'),
507 'resolves_comment_id': self.request.POST.get('resolves_comment_id', None),
507 'resolves_comment_id': self.request.POST.get('resolves_comment_id', None),
508 'close_pull_request': self.request.POST.get('close_pull_request'),
508 'close_pull_request': self.request.POST.get('close_pull_request'),
509 'f_path': self.request.POST.get('f_path'),
509 'f_path': self.request.POST.get('f_path'),
510 'line': self.request.POST.get('line'),
510 'line': self.request.POST.get('line'),
511 }
511 }
512 comment = self._commit_comments_create(commit_id=current_id, comments=[comment_data])
512 comment = self._commit_comments_create(commit_id=current_id, comments=[comment_data])
513 data.append(comment)
513 data.append(comment)
514
514
515 return data if len(data) > 1 else data[0]
515 return data if len(data) > 1 else data[0]
516
516
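# Hedged sketch of the commit-id collection above: empty and duplicate ids
# posted in 'commit_ids' are dropped, and the commit id from the URL is
# the fallback when nothing valid was posted (empty_raw_id stands in for
# EmptyCommit.raw_id):
def collect_commit_ids_sketch(posted, url_commit_id, empty_raw_id=''):
    multi = []
    for cid in posted.split(','):
        if cid not in ['', None, empty_raw_id] and cid not in multi:
            multi.append(cid)
    return multi or [url_commit_id]

assert collect_commit_ids_sketch('a,b,a,', 'x') == ['a', 'b']
assert collect_commit_ids_sketch('', 'x') == ['x']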
517 @LoginRequired()
517 @LoginRequired()
518 @NotAnonymous()
518 @NotAnonymous()
519 @HasRepoPermissionAnyDecorator(
519 @HasRepoPermissionAnyDecorator(
520 'repository.read', 'repository.write', 'repository.admin')
520 'repository.read', 'repository.write', 'repository.admin')
521 @CSRFRequired()
521 @CSRFRequired()
522 def repo_commit_comment_preview(self):
522 def repo_commit_comment_preview(self):
523 # Technically a CSRF token is not needed as no state changes with this
523 # Technically a CSRF token is not needed as no state changes with this
524 # call. However, since this is a POST it is better to have one, so automated
524 # call. However, since this is a POST it is better to have one, so automated
525 # tools don't flag it as potential CSRF.
525 # tools don't flag it as potential CSRF.
526 # POST is required because the payload could be bigger than the maximum
526 # POST is required because the payload could be bigger than the maximum
527 # allowed by GET.
527 # allowed by GET.
528
528
529 text = self.request.POST.get('text')
529 text = self.request.POST.get('text')
530 renderer = self.request.POST.get('renderer') or 'rst'
530 renderer = self.request.POST.get('renderer') or 'rst'
531 if text:
531 if text:
532 return h.render(text, renderer=renderer, mentions=True,
532 return h.render(text, renderer=renderer, mentions=True,
533 repo_name=self.db_repo_name)
533 repo_name=self.db_repo_name)
534 return ''
534 return ''
535
535
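# Hedged sketch of the preview contract above: empty text yields an empty
# body, otherwise the markup is rendered server-side (the render stand-in
# below is illustrative, not the real h.render):
def preview_sketch(text, renderer='rst',
                   render=lambda t, r: '<p>%s</p>' % t):
    return render(text, renderer) if text else ''

assert preview_sketch('') == ''
assert preview_sketch('hello') == '<p>hello</p>'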
536 @LoginRequired()
536 @LoginRequired()
537 @HasRepoPermissionAnyDecorator(
537 @HasRepoPermissionAnyDecorator(
538 'repository.read', 'repository.write', 'repository.admin')
538 'repository.read', 'repository.write', 'repository.admin')
539 @CSRFRequired()
539 @CSRFRequired()
540 def repo_commit_comment_history_view(self):
540 def repo_commit_comment_history_view(self):
541 c = self.load_default_context()
541 c = self.load_default_context()
542 comment_id = self.request.matchdict['comment_id']
542 comment_id = self.request.matchdict['comment_id']
543 comment_history_id = self.request.matchdict['comment_history_id']
543 comment_history_id = self.request.matchdict['comment_history_id']
544
544
545 comment = ChangesetComment.get_or_404(comment_id)
545 comment = ChangesetComment.get_or_404(comment_id)
546 comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id)
546 comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id)
547 if comment.draft and not comment_owner:
547 if comment.draft and not comment_owner:
548 # draft comment history is only visible to the comment owner
548 # draft comment history is only visible to the comment owner
549 raise HTTPNotFound()
549 raise HTTPNotFound()
550
550
551 comment_history = ChangesetCommentHistory.get_or_404(comment_history_id)
551 comment_history = ChangesetCommentHistory.get_or_404(comment_history_id)
552 is_repo_comment = comment_history.comment.repo.repo_id == self.db_repo.repo_id
552 is_repo_comment = comment_history.comment.repo.repo_id == self.db_repo.repo_id
553
553
554 if is_repo_comment:
554 if is_repo_comment:
555 c.comment_history = comment_history
555 c.comment_history = comment_history
556
556
557 rendered_comment = render(
557 rendered_comment = render(
558 'rhodecode:templates/changeset/comment_history.mako',
558 'rhodecode:templates/changeset/comment_history.mako',
559 self._get_template_context(c), self.request)
559 self._get_template_context(c), self.request)
560 return rendered_comment
560 return rendered_comment
561 else:
561 else:
562 log.warning('No permissions for user %s to show comment_history_id: %s',
562 log.warning('No permissions for user %s to show comment_history_id: %s',
563 self._rhodecode_db_user, comment_history_id)
563 self._rhodecode_db_user, comment_history_id)
564 raise HTTPNotFound()
564 raise HTTPNotFound()
565
565
566 @LoginRequired()
566 @LoginRequired()
567 @NotAnonymous()
567 @NotAnonymous()
568 @HasRepoPermissionAnyDecorator(
568 @HasRepoPermissionAnyDecorator(
569 'repository.read', 'repository.write', 'repository.admin')
569 'repository.read', 'repository.write', 'repository.admin')
570 @CSRFRequired()
570 @CSRFRequired()
571 def repo_commit_comment_attachment_upload(self):
571 def repo_commit_comment_attachment_upload(self):
572 c = self.load_default_context()
572 c = self.load_default_context()
573 upload_key = 'attachment'
573 upload_key = 'attachment'
574
574
575 file_obj = self.request.POST.get(upload_key)
575 file_obj = self.request.POST.get(upload_key)
576
576
577 if file_obj is None:
577 if file_obj is None:
578 self.request.response.status = 400
578 self.request.response.status = 400
579 return {'store_fid': None,
579 return {'store_fid': None,
580 'access_path': None,
580 'access_path': None,
581 'error': '{} data field is missing'.format(upload_key)}
581 'error': '{} data field is missing'.format(upload_key)}
582
582
583 if not hasattr(file_obj, 'filename'):
583 if not hasattr(file_obj, 'filename'):
584 self.request.response.status = 400
584 self.request.response.status = 400
585 return {'store_fid': None,
585 return {'store_fid': None,
586 'access_path': None,
586 'access_path': None,
587 'error': 'filename cannot be read from the data field'}
587 'error': 'filename cannot be read from the data field'}
588
588
589 filename = file_obj.filename
589 filename = file_obj.filename
590 file_display_name = filename
590 file_display_name = filename
591
591
592 metadata = {
592 metadata = {
593 'user_uploaded': {'username': self._rhodecode_user.username,
593 'user_uploaded': {'username': self._rhodecode_user.username,
594 'user_id': self._rhodecode_user.user_id,
594 'user_id': self._rhodecode_user.user_id,
595 'ip': self._rhodecode_user.ip_addr}}
595 'ip': self._rhodecode_user.ip_addr}}
596
596
597 # TODO(marcink): allow .ini configuration for allowed_extensions, and file-size
597 # TODO(marcink): allow .ini configuration for allowed_extensions, and file-size
598 allowed_extensions = [
598 allowed_extensions = [
599 '.gif', '.jpeg', '.jpg', '.png', '.docx', '.gz', '.log', '.pdf',
599 '.gif', '.jpeg', '.jpg', '.png', '.docx', '.gz', '.log', '.pdf',
600 '.pptx', '.txt', '.xlsx', '.zip']
600 '.pptx', '.txt', '.xlsx', '.zip']
601 max_file_size = 10 * 1024 * 1024 # 10MB, also validated via dropzone.js
601 max_file_size = 10 * 1024 * 1024 # 10MB, also validated via dropzone.js
602
602
603 try:
603 try:
604 storage = store_utils.get_file_storage(self.request.registry.settings)
604 storage = store_utils.get_file_storage(self.request.registry.settings)
605 store_uid, metadata = storage.save_file(
605 store_uid, metadata = storage.save_file(
606 file_obj.file, filename, extra_metadata=metadata,
606 file_obj.file, filename, extra_metadata=metadata,
607 extensions=allowed_extensions, max_filesize=max_file_size)
607 extensions=allowed_extensions, max_filesize=max_file_size)
608 except FileNotAllowedException:
608 except FileNotAllowedException:
609 self.request.response.status = 400
609 self.request.response.status = 400
610 permitted_extensions = ', '.join(allowed_extensions)
610 permitted_extensions = ', '.join(allowed_extensions)
611 error_msg = 'File `{}` is not allowed. ' \
611 error_msg = 'File `{}` is not allowed. ' \
612 'Only the following extensions are permitted: {}'.format(
612 'Only the following extensions are permitted: {}'.format(
613 filename, permitted_extensions)
613 filename, permitted_extensions)
614 return {'store_fid': None,
614 return {'store_fid': None,
615 'access_path': None,
615 'access_path': None,
616 'error': error_msg}
616 'error': error_msg}
617 except FileOverSizeException:
617 except FileOverSizeException:
618 self.request.response.status = 400
618 self.request.response.status = 400
619 limit_mb = h.format_byte_size_binary(max_file_size)
619 limit_mb = h.format_byte_size_binary(max_file_size)
620 return {'store_fid': None,
620 return {'store_fid': None,
621 'access_path': None,
621 'access_path': None,
622 'error': 'File {} exceeds the allowed limit of {}.'.format(
622 'error': 'File {} exceeds the allowed limit of {}.'.format(
623 filename, limit_mb)}
623 filename, limit_mb)}
624
624
625 try:
625 try:
626 entry = FileStore.create(
626 entry = FileStore.create(
627 file_uid=store_uid, filename=metadata["filename"],
627 file_uid=store_uid, filename=metadata["filename"],
628 file_hash=metadata["sha256"], file_size=metadata["size"],
628 file_hash=metadata["sha256"], file_size=metadata["size"],
629 file_display_name=file_display_name,
629 file_display_name=file_display_name,
630 file_description=u'comment attachment `{}`'.format(safe_unicode(filename)),
630 file_description=u'comment attachment `{}`'.format(safe_unicode(filename)),
631 hidden=True, check_acl=True, user_id=self._rhodecode_user.user_id,
631 hidden=True, check_acl=True, user_id=self._rhodecode_user.user_id,
632 scope_repo_id=self.db_repo.repo_id
632 scope_repo_id=self.db_repo.repo_id
633 )
633 )
634 Session().add(entry)
634 Session().add(entry)
635 Session().commit()
635 Session().commit()
636 log.debug('Stored upload in DB as %s', entry)
636 log.debug('Stored upload in DB as %s', entry)
637 except Exception:
637 except Exception:
638 log.exception('Failed to store file %s', filename)
638 log.exception('Failed to store file %s', filename)
639 self.request.response.status = 400
639 self.request.response.status = 400
640 return {'store_fid': None,
640 return {'store_fid': None,
641 'access_path': None,
641 'access_path': None,
642 'error': 'Failed to store file {} in DB.'.format(filename)}
642 'error': 'Failed to store file {} in DB.'.format(filename)}
643
643
644 Session().commit()
644 Session().commit()
645
645
646 return {
646 return {
647 'store_fid': store_uid,
647 'store_fid': store_uid,
648 'access_path': h.route_path(
648 'access_path': h.route_path(
649 'download_file', fid=store_uid),
649 'download_file', fid=store_uid),
650 'fqn_access_path': h.route_url(
650 'fqn_access_path': h.route_url(
651 'download_file', fid=store_uid),
651 'download_file', fid=store_uid),
652 'repo_access_path': h.route_path(
652 'repo_access_path': h.route_path(
653 'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid),
653 'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid),
654 'repo_fqn_access_path': h.route_url(
654 'repo_fqn_access_path': h.route_url(
655 'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid),
655 'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid),
656 }
656 }
657
657
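# The upload view above reports every failure with the same three-key
# payload plus an HTTP 400 status; a hedged sketch of that contract:
def upload_error_sketch(message):
    return {'store_fid': None, 'access_path': None, 'error': message}

assert upload_error_sketch('attachment data field is missing') == {
    'store_fid': None, 'access_path': None,
    'error': 'attachment data field is missing'}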
658 @LoginRequired()
658 @LoginRequired()
659 @NotAnonymous()
659 @NotAnonymous()
660 @HasRepoPermissionAnyDecorator(
660 @HasRepoPermissionAnyDecorator(
661 'repository.read', 'repository.write', 'repository.admin')
661 'repository.read', 'repository.write', 'repository.admin')
662 @CSRFRequired()
662 @CSRFRequired()
663 def repo_commit_comment_delete(self):
663 def repo_commit_comment_delete(self):
664 commit_id = self.request.matchdict['commit_id']
664 commit_id = self.request.matchdict['commit_id']
665 comment_id = self.request.matchdict['comment_id']
665 comment_id = self.request.matchdict['comment_id']
666
666
667 comment = ChangesetComment.get_or_404(comment_id)
667 comment = ChangesetComment.get_or_404(comment_id)
668 if not comment:
668 if not comment:
669 log.debug('Comment with id:%s not found, skipping', comment_id)
669 log.debug('Comment with id:%s not found, skipping', comment_id)
670 # the comment was probably already deleted in another call
670 # the comment was probably already deleted in another call
671 return True
671 return True
672
672
673 if comment.immutable:
673 if comment.immutable:
674 # don't allow deleting comments that are immutable
674 # don't allow deleting comments that are immutable
675 raise HTTPForbidden()
675 raise HTTPForbidden()
676
676
677 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
677 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
678 super_admin = h.HasPermissionAny('hg.admin')()
678 super_admin = h.HasPermissionAny('hg.admin')()
679 comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id)
679 comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id)
680 is_repo_comment = comment.repo.repo_id == self.db_repo.repo_id
680 is_repo_comment = comment.repo.repo_id == self.db_repo.repo_id
681 comment_repo_admin = is_repo_admin and is_repo_comment
681 comment_repo_admin = is_repo_admin and is_repo_comment
682
682
683 if comment.draft and not comment_owner:
683 if comment.draft and not comment_owner:
684 # we never allow anyone but the owner to delete draft comments
684 # we never allow anyone but the owner to delete draft comments
685 raise HTTPNotFound()
685 raise HTTPNotFound()
686
686
687 if super_admin or comment_owner or comment_repo_admin:
687 if super_admin or comment_owner or comment_repo_admin:
688 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
688 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
689 Session().commit()
689 Session().commit()
690 return True
690 return True
691 else:
691 else:
692 log.warning('No permissions for user %s to delete comment_id: %s',
692 log.warning('No permissions for user %s to delete comment_id: %s',
693 self._rhodecode_db_user, comment_id)
693 self._rhodecode_db_user, comment_id)
694 raise HTTPNotFound()
694 raise HTTPNotFound()
695
695
696 @LoginRequired()
696 @LoginRequired()
697 @NotAnonymous()
697 @NotAnonymous()
698 @HasRepoPermissionAnyDecorator(
698 @HasRepoPermissionAnyDecorator(
699 'repository.read', 'repository.write', 'repository.admin')
699 'repository.read', 'repository.write', 'repository.admin')
700 @CSRFRequired()
700 @CSRFRequired()
701 def repo_commit_comment_edit(self):
701 def repo_commit_comment_edit(self):
702 self.load_default_context()
702 self.load_default_context()
703
703
704 commit_id = self.request.matchdict['commit_id']
704 commit_id = self.request.matchdict['commit_id']
705 comment_id = self.request.matchdict['comment_id']
705 comment_id = self.request.matchdict['comment_id']
706 comment = ChangesetComment.get_or_404(comment_id)
706 comment = ChangesetComment.get_or_404(comment_id)
707
707
708 if comment.immutable:
708 if comment.immutable:
709 # don't allow editing comments that are immutable
709 # don't allow editing comments that are immutable
710 raise HTTPForbidden()
710 raise HTTPForbidden()
711
711
712 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
712 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
713 super_admin = h.HasPermissionAny('hg.admin')()
713 super_admin = h.HasPermissionAny('hg.admin')()
714 comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id)
714 comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id)
715 is_repo_comment = comment.repo.repo_id == self.db_repo.repo_id
715 is_repo_comment = comment.repo.repo_id == self.db_repo.repo_id
716 comment_repo_admin = is_repo_admin and is_repo_comment
716 comment_repo_admin = is_repo_admin and is_repo_comment
717
717
718 if super_admin or comment_owner or comment_repo_admin:
718 if super_admin or comment_owner or comment_repo_admin:
719 text = self.request.POST.get('text')
719 text = self.request.POST.get('text')
720 version = self.request.POST.get('version')
720 version = self.request.POST.get('version')
721 if text == comment.text:
721 if text == comment.text:
722 log.warning(
722 log.warning(
723 'Comment(repo): '
723 'Comment(repo): '
724 'Trying to create a new version with the same '
724 'Trying to create a new version with the same '
725 'comment body for comment {}'.format(
725 'comment body for comment {}'.format(
726 comment_id,
726 comment_id,
727 )
727 )
728 )
728 )
729 raise HTTPNotFound()
729 raise HTTPNotFound()
730
730
731 if version.isdigit():
731 if version.isdigit():
732 version = int(version)
732 version = int(version)
733 else:
733 else:
734 log.warning(
734 log.warning(
735 'Comment(repo): Wrong version type {} {} '
735 'Comment(repo): Wrong version type {} {} '
736 'for comment {}'.format(
736 'for comment {}'.format(
737 version,
737 version,
738 type(version),
738 type(version),
739 comment_id,
739 comment_id,
740 )
740 )
741 )
741 )
742 raise HTTPNotFound()
742 raise HTTPNotFound()
743
743
744 try:
744 try:
745 comment_history = CommentsModel().edit(
745 comment_history = CommentsModel().edit(
746 comment_id=comment_id,
746 comment_id=comment_id,
747 text=text,
747 text=text,
748 auth_user=self._rhodecode_user,
748 auth_user=self._rhodecode_user,
749 version=version,
749 version=version,
750 )
750 )
751 except CommentVersionMismatch:
751 except CommentVersionMismatch:
752 raise HTTPConflict()
752 raise HTTPConflict()
753
753
754 if not comment_history:
754 if not comment_history:
755 raise HTTPNotFound()
755 raise HTTPNotFound()
756
756
757 if not comment.draft:
757 if not comment.draft:
758 commit = self.db_repo.get_commit(commit_id)
758 commit = self.db_repo.get_commit(commit_id)
759 CommentsModel().trigger_commit_comment_hook(
759 CommentsModel().trigger_commit_comment_hook(
760 self.db_repo, self._rhodecode_user, 'edit',
760 self.db_repo, self._rhodecode_user, 'edit',
761 data={'comment': comment, 'commit': commit})
761 data={'comment': comment, 'commit': commit})
762
762
763 Session().commit()
763 Session().commit()
764 return {
764 return {
765 'comment_history_id': comment_history.comment_history_id,
765 'comment_history_id': comment_history.comment_history_id,
766 'comment_id': comment.comment_id,
766 'comment_id': comment.comment_id,
767 'comment_version': comment_history.version,
767 'comment_version': comment_history.version,
768 'comment_author_username': comment_history.author.username,
768 'comment_author_username': comment_history.author.username,
769 'comment_author_gravatar': h.gravatar_url(comment_history.author.email, 16),
769 'comment_author_gravatar': h.gravatar_url(comment_history.author.email, 16),
770 'comment_created_on': h.age_component(comment_history.created_on,
770 'comment_created_on': h.age_component(comment_history.created_on,
771 time_is_local=True),
771 time_is_local=True),
772 }
772 }
773 else:
773 else:
774 log.warning('No permissions for user %s to edit comment_id: %s',
774 log.warning('No permissions for user %s to edit comment_id: %s',
775 self._rhodecode_db_user, comment_id)
775 self._rhodecode_db_user, comment_id)
776 raise HTTPNotFound()
776 raise HTTPNotFound()
777
777
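# Hedged sketch of the version validation above: only digit strings are
# accepted, anything else aborts before CommentsModel().edit runs.
def parse_comment_version_sketch(version):
    if isinstance(version, str) and version.isdigit():
        return int(version)
    raise ValueError('invalid comment version: {!r}'.format(version))

assert parse_comment_version_sketch('3') == 3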
778 @LoginRequired()
778 @LoginRequired()
779 @HasRepoPermissionAnyDecorator(
779 @HasRepoPermissionAnyDecorator(
780 'repository.read', 'repository.write', 'repository.admin')
780 'repository.read', 'repository.write', 'repository.admin')
781 def repo_commit_data(self):
781 def repo_commit_data(self):
782 commit_id = self.request.matchdict['commit_id']
782 commit_id = self.request.matchdict['commit_id']
783 self.load_default_context()
783 self.load_default_context()
784
784
785 try:
785 try:
786 return self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
786 return self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
787 except CommitDoesNotExistError as e:
787 except CommitDoesNotExistError as e:
788 return EmptyCommit(message=str(e))
788 return EmptyCommit(message=str(e))
789
789
790 @LoginRequired()
790 @LoginRequired()
791 @HasRepoPermissionAnyDecorator(
791 @HasRepoPermissionAnyDecorator(
792 'repository.read', 'repository.write', 'repository.admin')
792 'repository.read', 'repository.write', 'repository.admin')
793 def repo_commit_children(self):
793 def repo_commit_children(self):
794 commit_id = self.request.matchdict['commit_id']
794 commit_id = self.request.matchdict['commit_id']
795 self.load_default_context()
795 self.load_default_context()
796
796
797 try:
797 try:
798 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
798 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
799 children = commit.children
799 children = commit.children
800 except CommitDoesNotExistError:
800 except CommitDoesNotExistError:
801 children = []
801 children = []
802
802
803 result = {"results": children}
803 result = {"results": children}
804 return result
804 return result
805
805
806 @LoginRequired()
806 @LoginRequired()
807 @HasRepoPermissionAnyDecorator(
807 @HasRepoPermissionAnyDecorator(
808 'repository.read', 'repository.write', 'repository.admin')
808 'repository.read', 'repository.write', 'repository.admin')
809 def repo_commit_parents(self):
809 def repo_commit_parents(self):
810 commit_id = self.request.matchdict['commit_id']
810 commit_id = self.request.matchdict['commit_id']
811 self.load_default_context()
811 self.load_default_context()
812
812
813 try:
813 try:
814 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
814 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
815 parents = commit.parents
815 parents = commit.parents
816 except CommitDoesNotExistError:
816 except CommitDoesNotExistError:
817 parents = []
817 parents = []
818 result = {"results": parents}
818 result = {"results": parents}
819 return result
819 return result
@@ -1,149 +1,148 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22 import errno
22 import errno
23
23
24 from multiprocessing.util import Finalize
24 from multiprocessing.util import Finalize
25
25
26 from rhodecode.lib.compat import kill
27
26
28
27
29 class LockHeld(Exception):
28 class LockHeld(Exception):
30 pass
29 pass
31
30
32
31
33 class DaemonLock(object):
32 class DaemonLock(object):
34 """daemon locking
33 """daemon locking
35 USAGE:
34 USAGE:
36 try:
35 try:
37 l = DaemonLock(file_='/path/to/lockfile', desc='test lock')
36 l = DaemonLock(file_='/path/to/lockfile', desc='test lock')
38 main()
37 main()
39 l.release()
38 l.release()
40 except LockHeld:
39 except LockHeld:
41 sys.exit(1)
40 sys.exit(1)
42 """
41 """
43
42
44 def __init__(self, file_=None, callbackfn=None,
43 def __init__(self, file_=None, callbackfn=None,
45 desc='daemon lock', debug=False):
44 desc='daemon lock', debug=False):
46
45
47 lock_name = os.path.join(os.path.dirname(__file__), 'running.lock')
46 lock_name = os.path.join(os.path.dirname(__file__), 'running.lock')
48 self.pidfile = file_ if file_ else lock_name
47 self.pidfile = file_ if file_ else lock_name
49 self.callbackfn = callbackfn
48 self.callbackfn = callbackfn
50 self.desc = desc
49 self.desc = desc
51 self.debug = debug
50 self.debug = debug
52 self.held = False
51 self.held = False
53 # run the lock automatically!
52 # run the lock automatically!
54 self.lock()
53 self.lock()
55 self._finalize = Finalize(self, DaemonLock._on_finalize,
54 self._finalize = Finalize(self, DaemonLock._on_finalize,
56 args=(self, debug), exitpriority=10)
55 args=(self, debug), exitpriority=10)
57
56
58 @staticmethod
57 @staticmethod
59 def _on_finalize(lock, debug):
58 def _on_finalize(lock, debug):
60 if lock.held:
59 if lock.held:
61 if debug:
60 if debug:
62 print('lock held, finalizing and running lock.release()')
61 print('lock held, finalizing and running lock.release()')
63 lock.release()
62 lock.release()
64
63
65 def lock(self):
64 def lock(self):
66 """
65 """
67 locking function, if lock is present it
66 locking function, if lock is present it
68 will raise LockHeld exception
67 will raise LockHeld exception
69 """
68 """
70 lockname = '%s' % (os.getpid())
69 lockname = '%s' % (os.getpid())
71 if self.debug:
70 if self.debug:
72 print('running lock')
71 print('running lock')
73 self.trylock()
72 self.trylock()
74 self.makelock(lockname, self.pidfile)
73 self.makelock(lockname, self.pidfile)
75 return True
74 return True
76
75
77 def trylock(self):
76 def trylock(self):
78 running_pid = False
77 running_pid = False
79 if self.debug:
78 if self.debug:
80 print('checking for already running process')
79 print('checking for already running process')
81 try:
80 try:
82 with open(self.pidfile, 'r') as f:
81 with open(self.pidfile, 'r') as f:
83 try:
82 try:
84 running_pid = int(f.readline())
83 running_pid = int(f.readline())
85 except ValueError:
84 except ValueError:
86 running_pid = -1
85 running_pid = -1
87
86
88 if self.debug:
87 if self.debug:
89 print('lock file present running_pid: %s, '
88 print('lock file present running_pid: %s, '
90 'checking for execution' % (running_pid,))
89 'checking for execution' % (running_pid,))
91 # Now we check the PID from lock file matches to the current
90 # Now we check the PID from lock file matches to the current
92 # process PID
91 # process PID
93 if running_pid:
92 if running_pid:
94 try:
93 try:
95 kill(running_pid, 0)
94 os.kill(running_pid, 0)
96 except OSError as exc:
95 except OSError as exc:
97 if exc.errno in (errno.ESRCH, errno.EPERM):
96 if exc.errno in (errno.ESRCH, errno.EPERM):
98 print("Lock File is there but"
97 print("Lock File is there but"
99 " the program is not running")
98 " the program is not running")
100 print("Removing lock file for the: %s" % running_pid)
99 print("Removing lock file for the: %s" % running_pid)
101 self.release()
100 self.release()
102 else:
101 else:
103 raise
102 raise
104 else:
103 else:
105 print("You already have an instance of the program running")
104 print("You already have an instance of the program running")
106 print("It is running as process %s" % running_pid)
105 print("It is running as process %s" % running_pid)
107 raise LockHeld()
106 raise LockHeld()
108
107
109 except IOError as e:
108 except IOError as e:
110 if e.errno != errno.ENOENT:
109 if e.errno != errno.ENOENT:
111 raise
110 raise
112
111
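A minimal standalone sketch of the liveness probe ``trylock`` relies on: sending signal 0 via ``os.kill`` performs only the existence and permission checks, without delivering a signal. The helper name ``pid_alive`` is illustrative and not part of this module.

    import errno
    import os

    def pid_alive(pid):
        try:
            os.kill(pid, 0)  # signal 0: probe the process without touching it
        except OSError as exc:
            # ESRCH: no such process; EPERM: it exists but we may not signal it
            return exc.errno == errno.EPERM
        return True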
113 def release(self):
112 def release(self):
114 """releases the pid by removing the pidfile
113 """releases the pid by removing the pidfile
115 """
114 """
116 if self.debug:
115 if self.debug:
117 print('trying to release the pidlock')
116 print('trying to release the pidlock')
118
117
119 if self.callbackfn:
118 if self.callbackfn:
120 # execute callback function on release
119 # execute callback function on release
121 if self.debug:
120 if self.debug:
122 print('executing callback function %s' % self.callbackfn)
121 print('executing callback function %s' % self.callbackfn)
123 self.callbackfn()
122 self.callbackfn()
124 try:
123 try:
125 if self.debug:
124 if self.debug:
126 print('removing pidfile %s' % self.pidfile)
125 print('removing pidfile %s' % self.pidfile)
127 os.remove(self.pidfile)
126 os.remove(self.pidfile)
128 self.held = False
127 self.held = False
129 except OSError as e:
128 except OSError as e:
130 if self.debug:
129 if self.debug:
131 print('removing pidfile failed %s' % e)
130 print('removing pidfile failed %s' % e)
132 pass
131 pass
133
132
134 def makelock(self, lockname, pidfile):
133 def makelock(self, lockname, pidfile):
135 """
134 """
136 this function will make an actual lock
135 this function will make an actual lock
137
136
138 :param lockname: actual pid to write into the file
137 :param lockname: actual pid to write into the file
139 :param pidfile: the file to write the pid in
138 :param pidfile: the file to write the pid in
140 """
139 """
141 if self.debug:
140 if self.debug:
142 print('creating a file %s and pid: %s' % (pidfile, lockname))
141 print('creating a file %s and pid: %s' % (pidfile, lockname))
143
142
144 dir_, file_ = os.path.split(pidfile)
143 dir_, file_ = os.path.split(pidfile)
145 if not os.path.isdir(dir_):
144 if not os.path.isdir(dir_):
146 os.makedirs(dir_)
145 os.makedirs(dir_)
147 with open(self.pidfile, 'w') as f:
146 with open(self.pidfile, 'w') as f:
148 f.write(lockname)
147 f.write(lockname)
149 self.held = True
148 self.held = True
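For illustration, a hedged usage sketch of the ``callbackfn`` hook, which ``release()`` invokes before removing the pidfile; the lock path and the ``cleanup`` function below are hypothetical:

    import sys

    def cleanup():
        print('lock released, running cleanup')

    try:
        lock = DaemonLock(file_='/tmp/example-daemon.lock',
                          desc='example lock', callbackfn=cleanup)
        # ... exclusive work happens here ...
        lock.release()  # runs cleanup() and then removes the pidfile
    except LockHeld:
        sys.exit(1)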
@@ -1,1052 +1,1052 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2020 RhodeCode GmbH
3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 GIT repository module
22 GIT repository module
23 """
23 """
24
24
25 import logging
25 import logging
26 import os
26 import os
27 import re
27 import re
28
28
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from rhodecode.lib.compat import OrderedDict
31 from collections import OrderedDict
32 from rhodecode.lib.datelib import (
32 from rhodecode.lib.datelib import (
33 utcdate_fromtimestamp, makedate, date_astimestamp)
33 utcdate_fromtimestamp, makedate, date_astimestamp)
34 from rhodecode.lib.utils import safe_unicode, safe_str
34 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.utils2 import CachedProperty
35 from rhodecode.lib.utils2 import CachedProperty
36 from rhodecode.lib.vcs import connection, path as vcspath
36 from rhodecode.lib.vcs import connection, path as vcspath
37 from rhodecode.lib.vcs.backends.base import (
37 from rhodecode.lib.vcs.backends.base import (
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 MergeFailureReason, Reference)
39 MergeFailureReason, Reference)
40 from rhodecode.lib.vcs.backends.git.commit import GitCommit
40 from rhodecode.lib.vcs.backends.git.commit import GitCommit
41 from rhodecode.lib.vcs.backends.git.diff import GitDiff
41 from rhodecode.lib.vcs.backends.git.diff import GitDiff
42 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
42 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
43 from rhodecode.lib.vcs.exceptions import (
43 from rhodecode.lib.vcs.exceptions import (
44 CommitDoesNotExistError, EmptyRepositoryError,
44 CommitDoesNotExistError, EmptyRepositoryError,
45 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError, UnresolvedFilesInRepo)
45 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError, UnresolvedFilesInRepo)
46
46
47
47
48 SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')
48 SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')
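A quick sanity check of what the pattern above accepts: exactly the 12-character short ids and 40-character full ids, nothing else:

    assert SHA_PATTERN.match('deadbeefcafe')   # 12 hex chars: short id
    assert SHA_PATTERN.match('a' * 40)         # 40 hex chars: full id
    assert not SHA_PATTERN.match('deadbeef')   # 8 chars: rejected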
49
49
50 log = logging.getLogger(__name__)
50 log = logging.getLogger(__name__)
51
51
52
52
53 class GitRepository(BaseRepository):
53 class GitRepository(BaseRepository):
54 """
54 """
55 Git repository backend.
55 Git repository backend.
56 """
56 """
57 DEFAULT_BRANCH_NAME = os.environ.get('GIT_DEFAULT_BRANCH_NAME') or 'master'
57 DEFAULT_BRANCH_NAME = os.environ.get('GIT_DEFAULT_BRANCH_NAME') or 'master'
58 DEFAULT_REF = 'branch:{}'.format(DEFAULT_BRANCH_NAME)
58 DEFAULT_REF = 'branch:{}'.format(DEFAULT_BRANCH_NAME)
59
59
60 contact = BaseRepository.DEFAULT_CONTACT
60 contact = BaseRepository.DEFAULT_CONTACT
61
61
62 def __init__(self, repo_path, config=None, create=False, src_url=None,
62 def __init__(self, repo_path, config=None, create=False, src_url=None,
63 do_workspace_checkout=False, with_wire=None, bare=False):
63 do_workspace_checkout=False, with_wire=None, bare=False):
64
64
65 self.path = safe_str(os.path.abspath(repo_path))
65 self.path = safe_str(os.path.abspath(repo_path))
66 self.config = config if config else self.get_default_config()
66 self.config = config if config else self.get_default_config()
67 self.with_wire = with_wire or {"cache": False} # default should not use cache
67 self.with_wire = with_wire or {"cache": False} # default should not use cache
68
68
69 self._init_repo(create, src_url, do_workspace_checkout, bare)
69 self._init_repo(create, src_url, do_workspace_checkout, bare)
70
70
71 # caches
71 # caches
72 self._commit_ids = {}
72 self._commit_ids = {}
73
73
74 @LazyProperty
74 @LazyProperty
75 def _remote(self):
75 def _remote(self):
76 repo_id = self.path
76 repo_id = self.path
77 return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)
77 return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)
78
78
79 @LazyProperty
79 @LazyProperty
80 def bare(self):
80 def bare(self):
81 return self._remote.bare()
81 return self._remote.bare()
82
82
83 @LazyProperty
83 @LazyProperty
84 def head(self):
84 def head(self):
85 return self._remote.head()
85 return self._remote.head()
86
86
87 @CachedProperty
87 @CachedProperty
88 def commit_ids(self):
88 def commit_ids(self):
89 """
89 """
90 Returns list of commit ids, in ascending order. Being lazy
90 Returns list of commit ids, in ascending order. Being lazy
91 attribute allows external tools to inject commit ids from cache.
91 attribute allows external tools to inject commit ids from cache.
92 """
92 """
93 commit_ids = self._get_all_commit_ids()
93 commit_ids = self._get_all_commit_ids()
94 self._rebuild_cache(commit_ids)
94 self._rebuild_cache(commit_ids)
95 return commit_ids
95 return commit_ids
96
96
97 def _rebuild_cache(self, commit_ids):
97 def _rebuild_cache(self, commit_ids):
98 self._commit_ids = {commit_id: index
98 self._commit_ids = {commit_id: index
99 for index, commit_id in enumerate(commit_ids)}
99 for index, commit_id in enumerate(commit_ids)}
100
100
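The cache built above is just an id-to-position mapping, so positional lookups avoid remote calls. A tiny runnable illustration with stand-in ids:

    commit_ids = ['aaa111', 'bbb222']  # stand-ins for real 40-char ids
    index_by_id = {cid: idx for idx, cid in enumerate(commit_ids)}
    assert index_by_id == {'aaa111': 0, 'bbb222': 1}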
101 def run_git_command(self, cmd, **opts):
101 def run_git_command(self, cmd, **opts):
102 """
102 """
103 Runs given ``cmd`` as git command and returns tuple
103 Runs given ``cmd`` as git command and returns tuple
104 (stdout, stderr).
104 (stdout, stderr).
105
105
106 :param cmd: git command to be executed
106 :param cmd: git command to be executed
107 :param opts: env options to pass into Subprocess command
107 :param opts: env options to pass into Subprocess command
108 """
108 """
109 if not isinstance(cmd, list):
109 if not isinstance(cmd, list):
110 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
110 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
111
111
112 skip_stderr_log = opts.pop('skip_stderr_log', False)
112 skip_stderr_log = opts.pop('skip_stderr_log', False)
113 out, err = self._remote.run_git_command(cmd, **opts)
113 out, err = self._remote.run_git_command(cmd, **opts)
114 if err and not skip_stderr_log:
114 if err and not skip_stderr_log:
115 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
115 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
116 return out, err
116 return out, err
117
117
118 @staticmethod
118 @staticmethod
119 def check_url(url, config):
119 def check_url(url, config):
120 """
120 """
121 Function will check the given url and try to verify if it's a valid
121 Function will check the given url and try to verify if it's a valid
122 link. Sometimes it may happen that git will issue a basic
122 link. Sometimes it may happen that git will issue a basic
123 auth request, which can cause the whole API to hang when used from python
123 auth request, which can cause the whole API to hang when used from python
124 or other external calls.
124 or other external calls.
125
125
126 On failures it'll raise urllib.error.HTTPError; the exception is also
126 On failures it'll raise urllib.error.HTTPError; the exception is also
127 thrown when the return code is not 200
127 thrown when the return code is not 200
128 """
128 """
129 # check first if it's not a url
129 # check first if it's not a url
130 if os.path.isdir(url) or url.startswith('file:'):
130 if os.path.isdir(url) or url.startswith('file:'):
131 return True
131 return True
132
132
133 if '+' in url.split('://', 1)[0]:
133 if '+' in url.split('://', 1)[0]:
134 url = url.split('+', 1)[1]
134 url = url.split('+', 1)[1]
135
135
136 # Request the _remote to verify the url
136 # Request the _remote to verify the url
137 return connection.Git.check_url(url, config.serialize())
137 return connection.Git.check_url(url, config.serialize())
138
138
139 @staticmethod
139 @staticmethod
140 def is_valid_repository(path):
140 def is_valid_repository(path):
141 if os.path.isdir(os.path.join(path, '.git')):
141 if os.path.isdir(os.path.join(path, '.git')):
142 return True
142 return True
143 # check case of bare repository
143 # check case of bare repository
144 try:
144 try:
145 GitRepository(path)
145 GitRepository(path)
146 return True
146 return True
147 except VCSError:
147 except VCSError:
148 pass
148 pass
149 return False
149 return False
150
150
151 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
151 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
152 bare=False):
152 bare=False):
153 if create and os.path.exists(self.path):
153 if create and os.path.exists(self.path):
154 raise RepositoryError(
154 raise RepositoryError(
155 "Cannot create repository at %s, location already exist"
155 "Cannot create repository at %s, location already exist"
156 % self.path)
156 % self.path)
157
157
158 if bare and do_workspace_checkout:
158 if bare and do_workspace_checkout:
159 raise RepositoryError("Cannot update a bare repository")
159 raise RepositoryError("Cannot update a bare repository")
160 try:
160 try:
161
161
162 if src_url:
162 if src_url:
163 # check URL before any actions
163 # check URL before any actions
164 GitRepository.check_url(src_url, self.config)
164 GitRepository.check_url(src_url, self.config)
165
165
166 if create:
166 if create:
167 os.makedirs(self.path, mode=0o755)
167 os.makedirs(self.path, mode=0o755)
168
168
169 if bare:
169 if bare:
170 self._remote.init_bare()
170 self._remote.init_bare()
171 else:
171 else:
172 self._remote.init()
172 self._remote.init()
173
173
174 if src_url and bare:
174 if src_url and bare:
175 # bare repository only allows a fetch and checkout is not allowed
175 # bare repository only allows a fetch and checkout is not allowed
176 self.fetch(src_url, commit_ids=None)
176 self.fetch(src_url, commit_ids=None)
177 elif src_url:
177 elif src_url:
178 self.pull(src_url, commit_ids=None,
178 self.pull(src_url, commit_ids=None,
179 update_after=do_workspace_checkout)
179 update_after=do_workspace_checkout)
180
180
181 else:
181 else:
182 if not self._remote.assert_correct_path():
182 if not self._remote.assert_correct_path():
183 raise RepositoryError(
183 raise RepositoryError(
184 'Path "%s" does not contain a Git repository' %
184 'Path "%s" does not contain a Git repository' %
185 (self.path,))
185 (self.path,))
186
186
187 # TODO: johbo: check if we have to translate the OSError here
187 # TODO: johbo: check if we have to translate the OSError here
188 except OSError as err:
188 except OSError as err:
189 raise RepositoryError(err)
189 raise RepositoryError(err)
190
190
191 def _get_all_commit_ids(self):
191 def _get_all_commit_ids(self):
192 return self._remote.get_all_commit_ids()
192 return self._remote.get_all_commit_ids()
193
193
194 def _get_commit_ids(self, filters=None):
194 def _get_commit_ids(self, filters=None):
195 # we must check if this repo is not empty, since later command
195 # we must check if this repo is not empty, since later command
196 # fails if it is. And it's cheaper to ask than to catch the subprocess
196 # fails if it is. And it's cheaper to ask than to catch the subprocess
197 # errors
197 # errors
198
198
199 head = self._remote.head(show_exc=False)
199 head = self._remote.head(show_exc=False)
200
200
201 if not head:
201 if not head:
202 return []
202 return []
203
203
204 rev_filter = ['--branches', '--tags']
204 rev_filter = ['--branches', '--tags']
205 extra_filter = []
205 extra_filter = []
206
206
207 if filters:
207 if filters:
208 if filters.get('since'):
208 if filters.get('since'):
209 extra_filter.append('--since=%s' % (filters['since']))
209 extra_filter.append('--since=%s' % (filters['since']))
210 if filters.get('until'):
210 if filters.get('until'):
211 extra_filter.append('--until=%s' % (filters['until']))
211 extra_filter.append('--until=%s' % (filters['until']))
212 if filters.get('branch_name'):
212 if filters.get('branch_name'):
213 rev_filter = []
213 rev_filter = []
214 extra_filter.append(filters['branch_name'])
214 extra_filter.append(filters['branch_name'])
215 rev_filter.extend(extra_filter)
215 rev_filter.extend(extra_filter)
216
216
217 # if filters.get('start') or filters.get('end'):
217 # if filters.get('start') or filters.get('end'):
218 # # skip is offset, max-count is limit
218 # # skip is offset, max-count is limit
219 # if filters.get('start'):
219 # if filters.get('start'):
220 # extra_filter += ' --skip=%s' % filters['start']
220 # extra_filter += ' --skip=%s' % filters['start']
221 # if filters.get('end'):
221 # if filters.get('end'):
222 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
222 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
223
223
224 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
224 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
225 try:
225 try:
226 output, __ = self.run_git_command(cmd)
226 output, __ = self.run_git_command(cmd)
227 except RepositoryError:
227 except RepositoryError:
228 # Can be raised for empty repositories
228 # Can be raised for empty repositories
229 return []
229 return []
230 return output.splitlines()
230 return output.splitlines()
231
231
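For orientation, the filter handling above assembles a plain ``git rev-list`` invocation; with date bounds and no branch it scans all branches and tags. A sketch of the resulting command, with example date strings in the format ``get_commits`` passes down:

    filters = {'since': '01/01/20 00:00:00', 'until': '02/01/20 00:00:00'}
    cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags',
           '--since=%s' % filters['since'], '--until=%s' % filters['until']]
    # equivalent shell form:
    # git rev-list --reverse --date-order --branches --tags --since=... --until=...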
232 def _lookup_commit(self, commit_id_or_idx, translate_tag=True, maybe_unreachable=False, reference_obj=None):
232 def _lookup_commit(self, commit_id_or_idx, translate_tag=True, maybe_unreachable=False, reference_obj=None):
233
233
234 def is_null(value):
234 def is_null(value):
235 return len(value) == commit_id_or_idx.count('0')
235 return len(value) == commit_id_or_idx.count('0')
236
236
237 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
237 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
238 return self.commit_ids[-1]
238 return self.commit_ids[-1]
239
239
240 commit_missing_err = "Commit {} does not exist for `{}`".format(
240 commit_missing_err = "Commit {} does not exist for `{}`".format(
241 *map(safe_str, [commit_id_or_idx, self.name]))
241 *map(safe_str, [commit_id_or_idx, self.name]))
242
242
243 is_bstr = isinstance(commit_id_or_idx, str)
243 is_bstr = isinstance(commit_id_or_idx, str)
244 is_branch = reference_obj and reference_obj.branch
244 is_branch = reference_obj and reference_obj.branch
245
245
246 lookup_ok = False
246 lookup_ok = False
247 if is_bstr:
247 if is_bstr:
248 # Need to call remote to translate id for tagging scenarios,
248 # Need to call remote to translate id for tagging scenarios,
249 # or branches that are numeric
249 # or branches that are numeric
250 try:
250 try:
251 remote_data = self._remote.get_object(commit_id_or_idx,
251 remote_data = self._remote.get_object(commit_id_or_idx,
252 maybe_unreachable=maybe_unreachable)
252 maybe_unreachable=maybe_unreachable)
253 commit_id_or_idx = remote_data["commit_id"]
253 commit_id_or_idx = remote_data["commit_id"]
254 lookup_ok = True
254 lookup_ok = True
255 except (CommitDoesNotExistError,):
255 except (CommitDoesNotExistError,):
256 lookup_ok = False
256 lookup_ok = False
257
257
258 if lookup_ok is False:
258 if lookup_ok is False:
259 is_numeric_idx = \
259 is_numeric_idx = \
260 (is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12) \
260 (is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12) \
261 or isinstance(commit_id_or_idx, int)
261 or isinstance(commit_id_or_idx, int)
262 if not is_branch and (is_numeric_idx or is_null(commit_id_or_idx)):
262 if not is_branch and (is_numeric_idx or is_null(commit_id_or_idx)):
263 try:
263 try:
264 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
264 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
265 lookup_ok = True
265 lookup_ok = True
266 except Exception:
266 except Exception:
267 raise CommitDoesNotExistError(commit_missing_err)
267 raise CommitDoesNotExistError(commit_missing_err)
268
268
269 # we failed regular lookup, and by integer number lookup
269 # we failed regular lookup, and by integer number lookup
270 if lookup_ok is False:
270 if lookup_ok is False:
271 raise CommitDoesNotExistError(commit_missing_err)
271 raise CommitDoesNotExistError(commit_missing_err)
272
272
273 # Ensure we return full id
273 # Ensure we return full id
274 if not SHA_PATTERN.match(str(commit_id_or_idx)):
274 if not SHA_PATTERN.match(str(commit_id_or_idx)):
275 raise CommitDoesNotExistError(
275 raise CommitDoesNotExistError(
276 "Given commit id %s not recognized" % commit_id_or_idx)
276 "Given commit id %s not recognized" % commit_id_or_idx)
277 return commit_id_or_idx
277 return commit_id_or_idx
278
278
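In short, the lookup above tries three strategies in order: symbolic names resolve to the newest commit, string ids are resolved by the remote, and short numeric strings fall back to positional lookup in ``commit_ids``. A hedged illustration of the numeric fallback, with made-up ids standing in for real ones:

    commit_ids = ['aaa111', 'bbb222', 'ccc333']  # stand-ins for real ids
    commit_id_or_idx = '1'
    if commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12:
        commit_id_or_idx = commit_ids[int(commit_id_or_idx)]
    assert commit_id_or_idx == 'bbb222'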
279 def get_hook_location(self):
279 def get_hook_location(self):
280 """
280 """
281 returns absolute path to location where hooks are stored
281 returns absolute path to location where hooks are stored
282 """
282 """
283 loc = os.path.join(self.path, 'hooks')
283 loc = os.path.join(self.path, 'hooks')
284 if not self.bare:
284 if not self.bare:
285 loc = os.path.join(self.path, '.git', 'hooks')
285 loc = os.path.join(self.path, '.git', 'hooks')
286 return loc
286 return loc
287
287
288 @LazyProperty
288 @LazyProperty
289 def last_change(self):
289 def last_change(self):
290 """
290 """
291 Returns last change made on this repository as
291 Returns last change made on this repository as
292 `datetime.datetime` object.
292 `datetime.datetime` object.
293 """
293 """
294 try:
294 try:
295 return self.get_commit().date
295 return self.get_commit().date
296 except RepositoryError:
296 except RepositoryError:
297 tzoffset = makedate()[1]
297 tzoffset = makedate()[1]
298 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
298 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
299
299
300 def _get_fs_mtime(self):
300 def _get_fs_mtime(self):
301 idx_loc = '' if self.bare else '.git'
301 idx_loc = '' if self.bare else '.git'
302 # fallback to filesystem
302 # fallback to filesystem
303 in_path = os.path.join(self.path, idx_loc, "index")
303 in_path = os.path.join(self.path, idx_loc, "index")
304 he_path = os.path.join(self.path, idx_loc, "HEAD")
304 he_path = os.path.join(self.path, idx_loc, "HEAD")
305 if os.path.exists(in_path):
305 if os.path.exists(in_path):
306 return os.stat(in_path).st_mtime
306 return os.stat(in_path).st_mtime
307 else:
307 else:
308 return os.stat(he_path).st_mtime
308 return os.stat(he_path).st_mtime
309
309
310 @LazyProperty
310 @LazyProperty
311 def description(self):
311 def description(self):
312 description = self._remote.get_description()
312 description = self._remote.get_description()
313 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
313 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
314
314
315 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
315 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
316 if self.is_empty():
316 if self.is_empty():
317 return OrderedDict()
317 return OrderedDict()
318
318
319 result = []
319 result = []
320 for ref, sha in self._refs.items():
320 for ref, sha in self._refs.items():
321 if ref.startswith(prefix):
321 if ref.startswith(prefix):
322 ref_name = ref
322 ref_name = ref
323 if strip_prefix:
323 if strip_prefix:
324 ref_name = ref[len(prefix):]
324 ref_name = ref[len(prefix):]
325 result.append((safe_unicode(ref_name), sha))
325 result.append((safe_unicode(ref_name), sha))
326
326
327 def get_name(entry):
327 def get_name(entry):
328 return entry[0]
328 return entry[0]
329
329
330 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
330 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
331
331
332 def _get_branches(self):
332 def _get_branches(self):
333 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
333 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
334
334
335 @CachedProperty
335 @CachedProperty
336 def branches(self):
336 def branches(self):
337 return self._get_branches()
337 return self._get_branches()
338
338
339 @CachedProperty
339 @CachedProperty
340 def branches_closed(self):
340 def branches_closed(self):
341 return {}
341 return {}
342
342
343 @CachedProperty
343 @CachedProperty
344 def bookmarks(self):
344 def bookmarks(self):
345 return {}
345 return {}
346
346
347 @CachedProperty
347 @CachedProperty
348 def branches_all(self):
348 def branches_all(self):
349 all_branches = {}
349 all_branches = {}
350 all_branches.update(self.branches)
350 all_branches.update(self.branches)
351 all_branches.update(self.branches_closed)
351 all_branches.update(self.branches_closed)
352 return all_branches
352 return all_branches
353
353
354 @CachedProperty
354 @CachedProperty
355 def tags(self):
355 def tags(self):
356 return self._get_tags()
356 return self._get_tags()
357
357
358 def _get_tags(self):
358 def _get_tags(self):
359 return self._get_refs_entries(prefix='refs/tags/', strip_prefix=True, reverse=True)
359 return self._get_refs_entries(prefix='refs/tags/', strip_prefix=True, reverse=True)
360
360
361 def tag(self, name, user, commit_id=None, message=None, date=None,
361 def tag(self, name, user, commit_id=None, message=None, date=None,
362 **kwargs):
362 **kwargs):
363 # TODO: fix this method to apply annotated tags correctly with message
363 # TODO: fix this method to apply annotated tags correctly with message
364 """
364 """
365 Creates and returns a tag for the given ``commit_id``.
365 Creates and returns a tag for the given ``commit_id``.
366
366
367 :param name: name for new tag
367 :param name: name for new tag
368 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
368 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
369 :param commit_id: commit id for which new tag would be created
369 :param commit_id: commit id for which new tag would be created
370 :param message: message of the tag's commit
370 :param message: message of the tag's commit
371 :param date: date of tag's commit
371 :param date: date of tag's commit
372
372
373 :raises TagAlreadyExistError: if tag with same name already exists
373 :raises TagAlreadyExistError: if tag with same name already exists
374 """
374 """
375 if name in self.tags:
375 if name in self.tags:
376 raise TagAlreadyExistError("Tag %s already exists" % name)
376 raise TagAlreadyExistError("Tag %s already exists" % name)
377 commit = self.get_commit(commit_id=commit_id)
377 commit = self.get_commit(commit_id=commit_id)
378 message = message or "Added tag %s for commit %s" % (name, commit.raw_id)
378 message = message or "Added tag %s for commit %s" % (name, commit.raw_id)
379
379
380 self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)
380 self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)
381
381
382 self._invalidate_prop_cache('tags')
382 self._invalidate_prop_cache('tags')
383 self._invalidate_prop_cache('_refs')
383 self._invalidate_prop_cache('_refs')
384
384
385 return commit
385 return commit
386
386
387 def remove_tag(self, name, user, message=None, date=None):
387 def remove_tag(self, name, user, message=None, date=None):
388 """
388 """
389 Removes tag with the given ``name``.
389 Removes tag with the given ``name``.
390
390
391 :param name: name of the tag to be removed
391 :param name: name of the tag to be removed
392 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
392 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
393 :param message: message of the tag's removal commit
393 :param message: message of the tag's removal commit
394 :param date: date of tag's removal commit
394 :param date: date of tag's removal commit
395
395
396 :raises TagDoesNotExistError: if tag with given name does not exist
396 :raises TagDoesNotExistError: if tag with given name does not exist
397 """
397 """
398 if name not in self.tags:
398 if name not in self.tags:
399 raise TagDoesNotExistError("Tag %s does not exist" % name)
399 raise TagDoesNotExistError("Tag %s does not exist" % name)
400
400
401 self._remote.tag_remove(name)
401 self._remote.tag_remove(name)
402 self._invalidate_prop_cache('tags')
402 self._invalidate_prop_cache('tags')
403 self._invalidate_prop_cache('_refs')
403 self._invalidate_prop_cache('_refs')
404
404
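A hedged usage sketch of the two tagging methods above; the repository path, tag name, and user string are illustrative:

    repo = GitRepository('/tmp/example-repo')  # assumed existing repo
    repo.tag('v1.0.0', user='Joe Doe <joe.doe@example.com>',
             commit_id=repo.commit_ids[-1])
    assert 'v1.0.0' in repo.tags
    repo.remove_tag('v1.0.0', user='Joe Doe <joe.doe@example.com>')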
405 def _get_refs(self):
405 def _get_refs(self):
406 return self._remote.get_refs()
406 return self._remote.get_refs()
407
407
408 @CachedProperty
408 @CachedProperty
409 def _refs(self):
409 def _refs(self):
410 return self._get_refs()
410 return self._get_refs()
411
411
412 @property
412 @property
413 def _ref_tree(self):
413 def _ref_tree(self):
414 node = tree = {}
414 node = tree = {}
415 for ref, sha in self._refs.items():
415 for ref, sha in self._refs.items():
416 path = ref.split('/')
416 path = ref.split('/')
417 for bit in path[:-1]:
417 for bit in path[:-1]:
418 node = node.setdefault(bit, {})
418 node = node.setdefault(bit, {})
419 node[path[-1]] = sha
419 node[path[-1]] = sha
420 node = tree
420 node = tree
421 return tree
421 return tree
422
422
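To make the nesting concrete, the loop above turns flat ref names into a tree keyed by path segments. A runnable sketch of the same transformation with an assumed refs mapping:

    refs = {'refs/heads/master': 'aaa111', 'refs/tags/v1': 'bbb222'}
    node = tree = {}
    for ref, sha in refs.items():
        path = ref.split('/')
        for bit in path[:-1]:
            node = node.setdefault(bit, {})
        node[path[-1]] = sha
        node = tree
    assert tree == {'refs': {'heads': {'master': 'aaa111'},
                             'tags': {'v1': 'bbb222'}}}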
423 def get_remote_ref(self, ref_name):
423 def get_remote_ref(self, ref_name):
424 ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
424 ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
425 try:
425 try:
426 return self._refs[ref_key]
426 return self._refs[ref_key]
427 except Exception:
427 except Exception:
428 return
428 return
429
429
430 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
430 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
431 translate_tag=True, maybe_unreachable=False, reference_obj=None):
431 translate_tag=True, maybe_unreachable=False, reference_obj=None):
432 """
432 """
433 Returns `GitCommit` object representing commit from git repository
433 Returns `GitCommit` object representing commit from git repository
434 at the given `commit_id` or head (most recent commit) if None given.
434 at the given `commit_id` or head (most recent commit) if None given.
435 """
435 """
436
436
437 if self.is_empty():
437 if self.is_empty():
438 raise EmptyRepositoryError("There are no commits yet")
438 raise EmptyRepositoryError("There are no commits yet")
439
439
440 if commit_id is not None:
440 if commit_id is not None:
441 self._validate_commit_id(commit_id)
441 self._validate_commit_id(commit_id)
442 try:
442 try:
443 # we have cached idx, use it without contacting the remote
443 # we have cached idx, use it without contacting the remote
444 idx = self._commit_ids[commit_id]
444 idx = self._commit_ids[commit_id]
445 return GitCommit(self, commit_id, idx, pre_load=pre_load)
445 return GitCommit(self, commit_id, idx, pre_load=pre_load)
446 except KeyError:
446 except KeyError:
447 pass
447 pass
448
448
449 elif commit_idx is not None:
449 elif commit_idx is not None:
450 self._validate_commit_idx(commit_idx)
450 self._validate_commit_idx(commit_idx)
451 try:
451 try:
452 _commit_id = self.commit_ids[commit_idx]
452 _commit_id = self.commit_ids[commit_idx]
453 if commit_idx < 0:
453 if commit_idx < 0:
454 commit_idx = self.commit_ids.index(_commit_id)
454 commit_idx = self.commit_ids.index(_commit_id)
455 return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
455 return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
456 except IndexError:
456 except IndexError:
457 commit_id = commit_idx
457 commit_id = commit_idx
458 else:
458 else:
459 commit_id = "tip"
459 commit_id = "tip"
460
460
461 if translate_tag:
461 if translate_tag:
462 commit_id = self._lookup_commit(
462 commit_id = self._lookup_commit(
463 commit_id, maybe_unreachable=maybe_unreachable,
463 commit_id, maybe_unreachable=maybe_unreachable,
464 reference_obj=reference_obj)
464 reference_obj=reference_obj)
465
465
466 try:
466 try:
467 idx = self._commit_ids[commit_id]
467 idx = self._commit_ids[commit_id]
468 except KeyError:
468 except KeyError:
469 idx = -1
469 idx = -1
470
470
471 return GitCommit(self, commit_id, idx, pre_load=pre_load)
471 return GitCommit(self, commit_id, idx, pre_load=pre_load)
472
472
473 def get_commits(
473 def get_commits(
474 self, start_id=None, end_id=None, start_date=None, end_date=None,
474 self, start_id=None, end_id=None, start_date=None, end_date=None,
475 branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
475 branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
476 """
476 """
477 Returns generator of `GitCommit` objects from start to end (both
477 Returns generator of `GitCommit` objects from start to end (both
478 are inclusive), in ascending date order.
478 are inclusive), in ascending date order.
479
479
480 :param start_id: None, str(commit_id)
480 :param start_id: None, str(commit_id)
481 :param end_id: None, str(commit_id)
481 :param end_id: None, str(commit_id)
482 :param start_date: if specified, commits with commit date less than
482 :param start_date: if specified, commits with commit date less than
483 ``start_date`` would be filtered out from returned set
483 ``start_date`` would be filtered out from returned set
484 :param end_date: if specified, commits with commit date greater than
484 :param end_date: if specified, commits with commit date greater than
485 ``end_date`` would be filtered out from returned set
485 ``end_date`` would be filtered out from returned set
486 :param branch_name: if specified, commits not reachable from given
486 :param branch_name: if specified, commits not reachable from given
487 branch would be filtered out from returned set
487 branch would be filtered out from returned set
488 :param show_hidden: Show hidden commits such as obsolete or hidden from
488 :param show_hidden: Show hidden commits such as obsolete or hidden from
489 Mercurial evolve
489 Mercurial evolve
490 :raise BranchDoesNotExistError: If given `branch_name` does not
490 :raise BranchDoesNotExistError: If given `branch_name` does not
491 exist.
491 exist.
492 :raise CommitDoesNotExistError: If commits for given `start` or
492 :raise CommitDoesNotExistError: If commits for given `start` or
493 `end` could not be found.
493 `end` could not be found.
494
494
495 """
495 """
496 if self.is_empty():
496 if self.is_empty():
497 raise EmptyRepositoryError("There are no commits yet")
497 raise EmptyRepositoryError("There are no commits yet")
498
498
499 self._validate_branch_name(branch_name)
499 self._validate_branch_name(branch_name)
500
500
501 if start_id is not None:
501 if start_id is not None:
502 self._validate_commit_id(start_id)
502 self._validate_commit_id(start_id)
503 if end_id is not None:
503 if end_id is not None:
504 self._validate_commit_id(end_id)
504 self._validate_commit_id(end_id)
505
505
506 start_raw_id = self._lookup_commit(start_id)
506 start_raw_id = self._lookup_commit(start_id)
507 start_pos = self._commit_ids[start_raw_id] if start_id else None
507 start_pos = self._commit_ids[start_raw_id] if start_id else None
508 end_raw_id = self._lookup_commit(end_id)
508 end_raw_id = self._lookup_commit(end_id)
509 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
509 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
510
510
511 if None not in [start_id, end_id] and start_pos > end_pos:
511 if None not in [start_id, end_id] and start_pos > end_pos:
512 raise RepositoryError(
512 raise RepositoryError(
513 "Start commit '%s' cannot be after end commit '%s'" %
513 "Start commit '%s' cannot be after end commit '%s'" %
514 (start_id, end_id))
514 (start_id, end_id))
515
515
516 if end_pos is not None:
516 if end_pos is not None:
517 end_pos += 1
517 end_pos += 1
518
518
519 filter_ = []
519 filter_ = []
520 if branch_name:
520 if branch_name:
521 filter_.append({'branch_name': branch_name})
521 filter_.append({'branch_name': branch_name})
522 if start_date and not end_date:
522 if start_date and not end_date:
523 filter_.append({'since': start_date})
523 filter_.append({'since': start_date})
524 if end_date and not start_date:
524 if end_date and not start_date:
525 filter_.append({'until': end_date})
525 filter_.append({'until': end_date})
526 if start_date and end_date:
526 if start_date and end_date:
527 filter_.append({'since': start_date})
527 filter_.append({'since': start_date})
528 filter_.append({'until': end_date})
528 filter_.append({'until': end_date})
529
529
530 # if start_pos or end_pos:
530 # if start_pos or end_pos:
531 # filter_.append({'start': start_pos})
531 # filter_.append({'start': start_pos})
532 # filter_.append({'end': end_pos})
532 # filter_.append({'end': end_pos})
533
533
534 if filter_:
534 if filter_:
535 revfilters = {
535 revfilters = {
536 'branch_name': branch_name,
536 'branch_name': branch_name,
537 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
537 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
538 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
538 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
539 'start': start_pos,
539 'start': start_pos,
540 'end': end_pos,
540 'end': end_pos,
541 }
541 }
542 commit_ids = self._get_commit_ids(filters=revfilters)
542 commit_ids = self._get_commit_ids(filters=revfilters)
543
543
544 else:
544 else:
545 commit_ids = self.commit_ids
545 commit_ids = self.commit_ids
546
546
547 if start_pos or end_pos:
547 if start_pos or end_pos:
548 commit_ids = commit_ids[start_pos: end_pos]
548 commit_ids = commit_ids[start_pos: end_pos]
549
549
550 return CollectionGenerator(self, commit_ids, pre_load=pre_load,
550 return CollectionGenerator(self, commit_ids, pre_load=pre_load,
551 translate_tag=translate_tags)
551 translate_tag=translate_tags)
552
552
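A hedged example of slicing history with the generator above; the branch name and dates are illustrative, and ``repo`` is assumed to be an existing ``GitRepository``:

    import datetime

    commits = repo.get_commits(
        branch_name='master',
        start_date=datetime.datetime(2020, 1, 1),
        end_date=datetime.datetime(2020, 2, 1))
    for commit in commits:  # lazy generator, ascending date order
        print(commit.raw_id, commit.message)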
553 def get_diff(
553 def get_diff(
554 self, commit1, commit2, path='', ignore_whitespace=False,
554 self, commit1, commit2, path='', ignore_whitespace=False,
555 context=3, path1=None):
555 context=3, path1=None):
556 """
556 """
557 Returns (git like) *diff*, as plain text. Shows changes introduced by
557 Returns (git like) *diff*, as plain text. Shows changes introduced by
558 ``commit2`` since ``commit1``.
558 ``commit2`` since ``commit1``.
559
559
560 :param commit1: Entry point from which diff is shown. Can be
560 :param commit1: Entry point from which diff is shown. Can be
561 ``self.EMPTY_COMMIT`` - in this case, patch showing all
561 ``self.EMPTY_COMMIT`` - in this case, patch showing all
562 the changes since empty state of the repository until ``commit2``
562 the changes since empty state of the repository until ``commit2``
563 :param commit2: Until which commit the changes should be shown.
563 :param commit2: Until which commit the changes should be shown.
564 :param ignore_whitespace: If set to ``True``, would not show whitespace
564 :param ignore_whitespace: If set to ``True``, would not show whitespace
565 changes. Defaults to ``False``.
565 changes. Defaults to ``False``.
566 :param context: How many lines before/after changed lines should be
566 :param context: How many lines before/after changed lines should be
567 shown. Defaults to ``3``.
567 shown. Defaults to ``3``.
568 """
568 """
569 self._validate_diff_commits(commit1, commit2)
569 self._validate_diff_commits(commit1, commit2)
570 if path1 is not None and path1 != path:
570 if path1 is not None and path1 != path:
571 raise ValueError("Diff of two different paths not supported.")
571 raise ValueError("Diff of two different paths not supported.")
572
572
573 if path:
573 if path:
574 file_filter = path
574 file_filter = path
575 else:
575 else:
576 file_filter = None
576 file_filter = None
577
577
578 diff = self._remote.diff(
578 diff = self._remote.diff(
579 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
579 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
580 opt_ignorews=ignore_whitespace,
580 opt_ignorews=ignore_whitespace,
581 context=context)
581 context=context)
582 return GitDiff(diff)
582 return GitDiff(diff)
583
583
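And a sketch of diffing two commits with the method above, assuming ``repo`` from the earlier sketch and that the returned ``GitDiff`` exposes the patch text via ``raw``:

    commit1 = repo.get_commit(commit_idx=0)
    commit2 = repo.get_commit(commit_idx=1)
    diff = repo.get_diff(commit1, commit2, ignore_whitespace=True, context=5)
    print(diff.raw)  # plain-text, git-style patch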
584 def strip(self, commit_id, branch_name):
584 def strip(self, commit_id, branch_name):
585 commit = self.get_commit(commit_id=commit_id)
585 commit = self.get_commit(commit_id=commit_id)
586 if commit.merge:
586 if commit.merge:
587 raise Exception('Cannot reset to merge commit')
587 raise Exception('Cannot reset to merge commit')
588
588
589 # parent is going to be the new head now
589 # parent is going to be the new head now
590 commit = commit.parents[0]
590 commit = commit.parents[0]
591 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
591 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
592
592
593 # clear cached properties
593 # clear cached properties
594 self._invalidate_prop_cache('commit_ids')
594 self._invalidate_prop_cache('commit_ids')
595 self._invalidate_prop_cache('_refs')
595 self._invalidate_prop_cache('_refs')
596 self._invalidate_prop_cache('branches')
596 self._invalidate_prop_cache('branches')
597
597
598 return len(self.commit_ids)
598 return len(self.commit_ids)
599
599
600 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
600 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
601 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
601 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
602 self, commit_id1, repo2, commit_id2)
602 self, commit_id1, repo2, commit_id2)
603
603
604 if commit_id1 == commit_id2:
604 if commit_id1 == commit_id2:
605 return commit_id1
605 return commit_id1
606
606
607 if self != repo2:
607 if self != repo2:
608 commits = self._remote.get_missing_revs(
608 commits = self._remote.get_missing_revs(
609 commit_id1, commit_id2, repo2.path)
609 commit_id1, commit_id2, repo2.path)
610 if commits:
610 if commits:
611 commit = repo2.get_commit(commits[-1])
611 commit = repo2.get_commit(commits[-1])
612 if commit.parents:
612 if commit.parents:
613 ancestor_id = commit.parents[0].raw_id
613 ancestor_id = commit.parents[0].raw_id
614 else:
614 else:
615 ancestor_id = None
615 ancestor_id = None
616 else:
616 else:
617 # no commits from other repo, ancestor_id is the commit_id2
617 # no commits from other repo, ancestor_id is the commit_id2
618 ancestor_id = commit_id2
618 ancestor_id = commit_id2
619 else:
619 else:
620 output, __ = self.run_git_command(
620 output, __ = self.run_git_command(
621 ['merge-base', commit_id1, commit_id2])
621 ['merge-base', commit_id1, commit_id2])
622 ancestor_id = self.COMMIT_ID_PAT.findall(output)[0]
622 ancestor_id = self.COMMIT_ID_PAT.findall(output)[0]
623
623
624 log.debug('Found common ancestor with sha: %s', ancestor_id)
624 log.debug('Found common ancestor with sha: %s', ancestor_id)
625
625
626 return ancestor_id
626 return ancestor_id
627
627
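When both commits live in the same repository, the ancestor search above reduces to a single ``git merge-base`` call; a simplified sketch with hypothetical commit ids (the real method extracts the id with ``COMMIT_ID_PAT``):

    output, _ = repo.run_git_command(['merge-base', 'aaa111', 'bbb222'])
    ancestor_id = output.strip()  # best common ancestor, simplified extraction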
628 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
628 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
629 repo1 = self
629 repo1 = self
630 ancestor_id = None
630 ancestor_id = None
631
631
632 if commit_id1 == commit_id2:
632 if commit_id1 == commit_id2:
633 commits = []
633 commits = []
634 elif repo1 != repo2:
634 elif repo1 != repo2:
635 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
635 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
636 repo2.path)
636 repo2.path)
637 commits = [
637 commits = [
638 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
638 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
639 for commit_id in reversed(missing_ids)]
639 for commit_id in reversed(missing_ids)]
640 else:
640 else:
641 output, __ = repo1.run_git_command(
641 output, __ = repo1.run_git_command(
642 ['log', '--reverse', '--pretty=format: %H', '-s',
642 ['log', '--reverse', '--pretty=format: %H', '-s',
643 '%s..%s' % (commit_id1, commit_id2)])
643 '%s..%s' % (commit_id1, commit_id2)])
644 commits = [
644 commits = [
645 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
645 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
646 for commit_id in self.COMMIT_ID_PAT.findall(output)]
646 for commit_id in self.COMMIT_ID_PAT.findall(output)]
647
647
648 return commits
648 return commits
649
649
650 @LazyProperty
650 @LazyProperty
651 def in_memory_commit(self):
651 def in_memory_commit(self):
652 """
652 """
653 Returns ``GitInMemoryCommit`` object for this repository.
653 Returns ``GitInMemoryCommit`` object for this repository.
654 """
654 """
655 return GitInMemoryCommit(self)
655 return GitInMemoryCommit(self)
656
656
657 def pull(self, url, commit_ids=None, update_after=False):
657 def pull(self, url, commit_ids=None, update_after=False):
658 """
658 """
659 Pull changes from external location. Pull is different in GIT
659 Pull changes from external location. Pull is different in GIT
660 than fetch, since it does a checkout
660 than fetch, since it does a checkout
661
661
662 :param commit_ids: Optional. Can be set to a list of commit ids
662 :param commit_ids: Optional. Can be set to a list of commit ids
663 which shall be pulled from the other repository.
663 which shall be pulled from the other repository.
664 """
664 """
665 refs = None
665 refs = None
666 if commit_ids is not None:
666 if commit_ids is not None:
667 remote_refs = self._remote.get_remote_refs(url)
667 remote_refs = self._remote.get_remote_refs(url)
668 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
668 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
669 self._remote.pull(url, refs=refs, update_after=update_after)
669 self._remote.pull(url, refs=refs, update_after=update_after)
670 self._remote.invalidate_vcs_cache()
670 self._remote.invalidate_vcs_cache()
671
671
672 def fetch(self, url, commit_ids=None):
672 def fetch(self, url, commit_ids=None):
673 """
673 """
674 Fetch all git objects from external location.
674 Fetch all git objects from external location.
675 """
675 """
676 self._remote.sync_fetch(url, refs=commit_ids)
676 self._remote.sync_fetch(url, refs=commit_ids)
677 self._remote.invalidate_vcs_cache()
677 self._remote.invalidate_vcs_cache()
678
678
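To summarize the distinction drawn above: ``fetch`` only transfers objects and refs, while ``pull`` can also update the working copy. A hedged sketch, assuming ``bare_repo`` and ``work_repo`` are existing ``GitRepository`` instances and the source URL is illustrative:

    bare_repo.fetch('file:///tmp/source-repo')                    # objects and refs only
    work_repo.pull('file:///tmp/source-repo', update_after=True)  # plus a checkout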
679 def push(self, url):
679 def push(self, url):
680 refs = None
680 refs = None
681 self._remote.sync_push(url, refs=refs)
681 self._remote.sync_push(url, refs=refs)
682
682
683 def set_refs(self, ref_name, commit_id):
683 def set_refs(self, ref_name, commit_id):
684 self._remote.set_refs(ref_name, commit_id)
684 self._remote.set_refs(ref_name, commit_id)
685 self._invalidate_prop_cache('_refs')
685 self._invalidate_prop_cache('_refs')
686
686
687 def remove_ref(self, ref_name):
687 def remove_ref(self, ref_name):
688 self._remote.remove_ref(ref_name)
688 self._remote.remove_ref(ref_name)
689 self._invalidate_prop_cache('_refs')
689 self._invalidate_prop_cache('_refs')
690
690
691 def run_gc(self, prune=True):
691 def run_gc(self, prune=True):
692 cmd = ['gc', '--aggressive']
692 cmd = ['gc', '--aggressive']
693 if prune:
693 if prune:
694 cmd += ['--prune=now']
694 cmd += ['--prune=now']
695 _stdout, stderr = self.run_git_command(cmd, fail_on_stderr=False)
695 _stdout, stderr = self.run_git_command(cmd, fail_on_stderr=False)
696 return stderr
696 return stderr
697
697
698 def _update_server_info(self):
698 def _update_server_info(self):
699 """
699 """
700 runs git's update-server-info command in this repo instance
700 runs git's update-server-info command in this repo instance
701 """
701 """
702 self._remote.update_server_info()
702 self._remote.update_server_info()
703
703
704 def _current_branch(self):
704 def _current_branch(self):
705 """
705 """
706 Return the name of the current branch.
706 Return the name of the current branch.
707
707
708 It only works for non bare repositories (i.e. repositories with a
708 It only works for non bare repositories (i.e. repositories with a
709 working copy)
709 working copy)
710 """
710 """
711 if self.bare:
711 if self.bare:
712 raise RepositoryError('Bare git repos do not have active branches')
712 raise RepositoryError('Bare git repos do not have active branches')
713
713
714 if self.is_empty():
714 if self.is_empty():
715 return None
715 return None
716
716
717 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
717 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
718 return stdout.strip()
718 return stdout.strip()
719
719
720 def _checkout(self, branch_name, create=False, force=False):
720 def _checkout(self, branch_name, create=False, force=False):
721 """
721 """
722 Checkout a branch in the working directory.
722 Checkout a branch in the working directory.
723
723
724 It tries to create the branch if create is True, failing if the branch
724 It tries to create the branch if create is True, failing if the branch
725 already exists.
725 already exists.
726
726
727 It only works for non bare repositories (i.e. repositories with a
727 It only works for non bare repositories (i.e. repositories with a
728 working copy)
728 working copy)
729 """
729 """
730 if self.bare:
730 if self.bare:
731 raise RepositoryError('Cannot checkout branches in a bare git repo')
731 raise RepositoryError('Cannot checkout branches in a bare git repo')
732
732
733 cmd = ['checkout']
733 cmd = ['checkout']
734 if force:
734 if force:
735 cmd.append('-f')
735 cmd.append('-f')
736 if create:
736 if create:
737 cmd.append('-b')
737 cmd.append('-b')
738 cmd.append(branch_name)
738 cmd.append(branch_name)
739 self.run_git_command(cmd, fail_on_stderr=False)
739 self.run_git_command(cmd, fail_on_stderr=False)
740
740
741 def _create_branch(self, branch_name, commit_id):
741 def _create_branch(self, branch_name, commit_id):
742 """
742 """
743 creates a branch in a GIT repo
743 creates a branch in a GIT repo
744 """
744 """
745 self._remote.create_branch(branch_name, commit_id)
745 self._remote.create_branch(branch_name, commit_id)
746
746
747 def _identify(self):
747 def _identify(self):
748 """
748 """
749 Return the current state of the working directory.
749 Return the current state of the working directory.
750 """
750 """
751 if self.bare:
751 if self.bare:
752 raise RepositoryError('Bare git repos do not have active branches')
752 raise RepositoryError('Bare git repos do not have active branches')
753
753
754 if self.is_empty():
754 if self.is_empty():
755 return None
755 return None
756
756
757 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
757 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
758 return stdout.strip()
758 return stdout.strip()
759
759
760 def _local_clone(self, clone_path, branch_name, source_branch=None):
760 def _local_clone(self, clone_path, branch_name, source_branch=None):
761 """
761 """
762 Create a local clone of the current repo.
762 Create a local clone of the current repo.
763 """
763 """
764 # N.B.(skreft): the --branch option is required as otherwise the shallow
764 # N.B.(skreft): the --branch option is required as otherwise the shallow
765 # clone will only fetch the active branch.
765 # clone will only fetch the active branch.
766 cmd = ['clone', '--branch', branch_name,
766 cmd = ['clone', '--branch', branch_name,
767 self.path, os.path.abspath(clone_path)]
767 self.path, os.path.abspath(clone_path)]
768
768
769 self.run_git_command(cmd, fail_on_stderr=False)
769 self.run_git_command(cmd, fail_on_stderr=False)
770
770
771 # if we were given a different source branch, make sure we also fetch it
771 # if we were given a different source branch, make sure we also fetch it
772 # so merge conditions can be tested
772 # so merge conditions can be tested
773 if source_branch and source_branch != branch_name:
773 if source_branch and source_branch != branch_name:
774 # check if the ref exists.
774 # check if the ref exists.
775 shadow_repo = GitRepository(os.path.abspath(clone_path))
775 shadow_repo = GitRepository(os.path.abspath(clone_path))
776 if shadow_repo.get_remote_ref(source_branch):
776 if shadow_repo.get_remote_ref(source_branch):
777 cmd = ['fetch', self.path, source_branch]
777 cmd = ['fetch', self.path, source_branch]
778 self.run_git_command(cmd, fail_on_stderr=False)
778 self.run_git_command(cmd, fail_on_stderr=False)
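# Editorial sketch (hypothetical paths): for branch_name='master' and
# source_branch='feature' the two calls above are equivalent to:
#
#   git clone --branch master /srv/repo /tmp/shadow
#   git -C /tmp/shadow fetch /srv/repo feature    # only if the ref exists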
779
779
780 def _local_fetch(self, repository_path, branch_name, use_origin=False):
780 def _local_fetch(self, repository_path, branch_name, use_origin=False):
781 """
781 """
782 Fetch a branch from a local repository.
782 Fetch a branch from a local repository.
783 """
783 """
784 repository_path = os.path.abspath(repository_path)
784 repository_path = os.path.abspath(repository_path)
785 if repository_path == self.path:
785 if repository_path == self.path:
786 raise ValueError('Cannot fetch from the same repository')
786 raise ValueError('Cannot fetch from the same repository')
787
787
788 if use_origin:
788 if use_origin:
789 branch_name = '+{branch}:refs/heads/{branch}'.format(
789 branch_name = '+{branch}:refs/heads/{branch}'.format(
790 branch=branch_name)
790 branch=branch_name)
791
791
792 cmd = ['fetch', '--no-tags', '--update-head-ok',
792 cmd = ['fetch', '--no-tags', '--update-head-ok',
793 repository_path, branch_name]
793 repository_path, branch_name]
794 self.run_git_command(cmd, fail_on_stderr=False)
794 self.run_git_command(cmd, fail_on_stderr=False)
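# Editorial sketch (assumed values): with use_origin=True and
# branch_name='master' the command becomes:
#
#   git fetch --no-tags --update-head-ok /srv/other-repo \
#       '+master:refs/heads/master'
#
# The leading '+' permits non-fast-forward updates of the local ref.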
795
795
796 def _local_reset(self, branch_name):
796 def _local_reset(self, branch_name):
797 branch_name = '{}'.format(branch_name)
797 branch_name = '{}'.format(branch_name)
798 cmd = ['reset', '--hard', branch_name, '--']
798 cmd = ['reset', '--hard', branch_name, '--']
799 self.run_git_command(cmd, fail_on_stderr=False)
799 self.run_git_command(cmd, fail_on_stderr=False)
800
800
801 def _last_fetch_heads(self):
801 def _last_fetch_heads(self):
802 """
802 """
803 Return the last fetched heads that need merging.
803 Return the last fetched heads that need merging.
804
804
805 The algorithm is defined at
805 The algorithm is defined at
806 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
806 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
807 """
807 """
808 if not self.bare:
808 if not self.bare:
809 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
809 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
810 else:
810 else:
811 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
811 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
812
812
813 heads = []
813 heads = []
814 with open(fetch_heads_path) as f:
814 with open(fetch_heads_path) as f:
815 for line in f:
815 for line in f:
816 if ' not-for-merge ' in line:
816 if ' not-for-merge ' in line:
817 continue
817 continue
818 line = re.sub('\t.*', '', line, flags=re.DOTALL)
818 line = re.sub('\t.*', '', line, flags=re.DOTALL)
819 heads.append(line)
819 heads.append(line)
820
820
821 return heads
821 return heads
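# Editorial sketch (synthetic FETCH_HEAD content, standalone): the same
# filtering can be exercised without a repository:
#
#   import re
#   sample = ('deadbeef\t\tbranch master of /srv/repo\n'
#             'cafebabe\t not-for-merge \tbranch dev of /srv/repo\n')
#   heads = []
#   for line in sample.splitlines(True):
#       if ' not-for-merge ' in line:
#           continue
#       heads.append(re.sub('\t.*', '', line, flags=re.DOTALL))
#   # heads == ['deadbeef']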
822
822
823 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
823 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
824 return GitRepository(shadow_repository_path, with_wire={"cache": cache})
824 return GitRepository(shadow_repository_path, with_wire={"cache": cache})
825
825
826 def _local_pull(self, repository_path, branch_name, ff_only=True):
826 def _local_pull(self, repository_path, branch_name, ff_only=True):
827 """
827 """
828 Pull a branch from a local repository.
828 Pull a branch from a local repository.
829 """
829 """
830 if self.bare:
830 if self.bare:
831 raise RepositoryError('Cannot pull into a bare git repository')
831 raise RepositoryError('Cannot pull into a bare git repository')
832 # N.B.(skreft): The --ff-only option is to make sure this is a
832 # N.B.(skreft): The --ff-only option is to make sure this is a
833 # fast-forward (i.e., we are only pulling new changes and there are no
833 # fast-forward (i.e., we are only pulling new changes and there are no
834 # conflicts with our current branch)
834 # conflicts with our current branch)
835 # Additionally, that option needs to go before --no-tags, otherwise git
835 # Additionally, that option needs to go before --no-tags, otherwise git
836 # pull complains about it being an unknown flag.
836 # pull complains about it being an unknown flag.
837 cmd = ['pull']
837 cmd = ['pull']
838 if ff_only:
838 if ff_only:
839 cmd.append('--ff-only')
839 cmd.append('--ff-only')
840 cmd.extend(['--no-tags', repository_path, branch_name])
840 cmd.extend(['--no-tags', repository_path, branch_name])
841 self.run_git_command(cmd, fail_on_stderr=False)
841 self.run_git_command(cmd, fail_on_stderr=False)
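# Editorial sketch (assumed values): the resulting invocation, with
# --ff-only placed before --no-tags as the comment above requires:
#
#   git pull --ff-only --no-tags /srv/other-repo master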
842
842
843 def _local_merge(self, merge_message, user_name, user_email, heads):
843 def _local_merge(self, merge_message, user_name, user_email, heads):
844 """
844 """
845 Merge the given head into the checked out branch.
845 Merge the given head into the checked out branch.
846
846
847 It will force a merge commit.
847 It will force a merge commit.
848
848
849 Currently it raises an error if the repo is empty, as it is not possible
849 Currently it raises an error if the repo is empty, as it is not possible
850 to create a merge commit in an empty repo.
850 to create a merge commit in an empty repo.
851
851
852 :param merge_message: The message to use for the merge commit.
852 :param merge_message: The message to use for the merge commit.
853 :param heads: the heads to merge.
853 :param heads: the heads to merge.
854 """
854 """
855 if self.bare:
855 if self.bare:
856 raise RepositoryError('Cannot merge into a bare git repository')
856 raise RepositoryError('Cannot merge into a bare git repository')
857
857
858 if not heads:
858 if not heads:
859 return
859 return
860
860
861 if self.is_empty():
861 if self.is_empty():
862 # TODO(skreft): do something more robust in this case.
862 # TODO(skreft): do something more robust in this case.
863 raise RepositoryError('Do not know how to merge into empty repositories yet')
863 raise RepositoryError('Do not know how to merge into empty repositories yet')
864 unresolved = None
864 unresolved = None
865
865
866 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
866 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
867 # merge commit and its message. We also specify the user who is doing the merge.
867 # merge commit and its message. We also specify the user who is doing the merge.
868 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
868 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
869 '-c', 'user.email=%s' % safe_str(user_email),
869 '-c', 'user.email=%s' % safe_str(user_email),
870 'merge', '--no-ff', '-m', safe_str(merge_message)]
870 'merge', '--no-ff', '-m', safe_str(merge_message)]
871
871
872 merge_cmd = cmd + heads
872 merge_cmd = cmd + heads
873
873
874 try:
874 try:
875 self.run_git_command(merge_cmd, fail_on_stderr=False)
875 self.run_git_command(merge_cmd, fail_on_stderr=False)
876 except RepositoryError:
876 except RepositoryError:
877 files = self.run_git_command(['diff', '--name-only', '--diff-filter', 'U'],
877 files = self.run_git_command(['diff', '--name-only', '--diff-filter', 'U'],
878 fail_on_stderr=False)[0].splitlines()
878 fail_on_stderr=False)[0].splitlines()
879 # NOTE(marcink): we add the U notation for consistency with HG backend output
879 # NOTE(marcink): we add the U notation for consistency with HG backend output
880 unresolved = ['U {}'.format(f) for f in files]
880 unresolved = ['U {}'.format(f) for f in files]
881
881
882 # Cleanup any merge leftovers
882 # Cleanup any merge leftovers
883 self._remote.invalidate_vcs_cache()
883 self._remote.invalidate_vcs_cache()
884 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
884 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
885
885
886 if unresolved:
886 if unresolved:
887 raise UnresolvedFilesInRepo(unresolved)
887 raise UnresolvedFilesInRepo(unresolved)
888 else:
888 else:
889 raise
889 raise
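# Editorial sketch (hypothetical identity and message): the happy path
# above is equivalent to:
#
#   git -c user.name="Jane Doe" -c user.email=jane@example.com \
#       merge --no-ff -m "merge message" <head-sha>
#
# while on failure the conflicting paths are collected with the equivalent
# of `git diff --name-only --diff-filter=U` before `git merge --abort`
# cleans the working copy up again.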
890
890
891 def _local_push(
891 def _local_push(
892 self, source_branch, repository_path, target_branch,
892 self, source_branch, repository_path, target_branch,
893 enable_hooks=False, rc_scm_data=None):
893 enable_hooks=False, rc_scm_data=None):
894 """
894 """
895 Push the source_branch to the given repository and target_branch.
895 Push the source_branch to the given repository and target_branch.
896
896
897 Currently, if the target_branch is not master and the target repo is
897 Currently, if the target_branch is not master and the target repo is
898 empty, the push will work, but then GitRepository won't be able to find
898 empty, the push will work, but then GitRepository won't be able to find
899 the pushed branch or the commits, as HEAD will be corrupted (i.e.,
899 the pushed branch or the commits, as HEAD will be corrupted (i.e.,
900 pointing to master, which does not exist).
900 pointing to master, which does not exist).
901
901
902 It does not run the hooks in the target repo.
902 It does not run the hooks in the target repo.
903 """
903 """
904 # TODO(skreft): deal with the case in which the target repo is empty,
904 # TODO(skreft): deal with the case in which the target repo is empty,
905 # and the target_branch is not master.
905 # and the target_branch is not master.
906 target_repo = GitRepository(repository_path)
906 target_repo = GitRepository(repository_path)
907 if (not target_repo.bare and
907 if (not target_repo.bare and
908 target_repo._current_branch() == target_branch):
908 target_repo._current_branch() == target_branch):
909 # Git prevents pushing to the checked out branch, so simulate it by
909 # Git prevents pushing to the checked out branch, so simulate it by
910 # pulling into the target repository.
910 # pulling into the target repository.
911 target_repo._local_pull(self.path, source_branch)
911 target_repo._local_pull(self.path, source_branch)
912 else:
912 else:
913 cmd = ['push', os.path.abspath(repository_path),
913 cmd = ['push', os.path.abspath(repository_path),
914 '%s:%s' % (source_branch, target_branch)]
914 '%s:%s' % (source_branch, target_branch)]
915 gitenv = {}
915 gitenv = {}
916 if rc_scm_data:
916 if rc_scm_data:
917 gitenv.update({'RC_SCM_DATA': rc_scm_data})
917 gitenv.update({'RC_SCM_DATA': rc_scm_data})
918
918
919 if not enable_hooks:
919 if not enable_hooks:
920 gitenv['RC_SKIP_HOOKS'] = '1'
920 gitenv['RC_SKIP_HOOKS'] = '1'
921 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
921 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
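# Editorial sketch (assumed values): with hooks disabled the push is
# roughly equivalent to:
#
#   RC_SKIP_HOOKS=1 git push /srv/target-repo feature:master
#
# with the RC_* variables consumed by the hook machinery via gitenv above.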
922
922
923 def _get_new_pr_branch(self, source_branch, target_branch):
923 def _get_new_pr_branch(self, source_branch, target_branch):
924 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
924 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
925 pr_branches = []
925 pr_branches = []
926 for branch in self.branches:
926 for branch in self.branches:
927 if branch.startswith(prefix):
927 if branch.startswith(prefix):
928 pr_branches.append(int(branch[len(prefix):]))
928 pr_branches.append(int(branch[len(prefix):]))
929
929
930 if not pr_branches:
930 if not pr_branches:
931 branch_id = 0
931 branch_id = 0
932 else:
932 else:
933 branch_id = max(pr_branches) + 1
933 branch_id = max(pr_branches) + 1
934
934
935 return '%s%d' % (prefix, branch_id)
935 return '%s%d' % (prefix, branch_id)
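# Editorial sketch (standalone, synthetic branch list): the naming scheme
# without the repository dependency:
#
#   def next_pr_branch(branches, source, target):
#       prefix = 'pr_%s-%s_' % (source, target)
#       taken = [int(b[len(prefix):]) for b in branches if b.startswith(prefix)]
#       return '%s%d' % (prefix, max(taken) + 1 if taken else 0)
#
#   next_pr_branch(['pr_a-b_0', 'pr_a-b_1'], 'a', 'b')   # -> 'pr_a-b_2'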
936
936
937 def _maybe_prepare_merge_workspace(
937 def _maybe_prepare_merge_workspace(
938 self, repo_id, workspace_id, target_ref, source_ref):
938 self, repo_id, workspace_id, target_ref, source_ref):
939 shadow_repository_path = self._get_shadow_repository_path(
939 shadow_repository_path = self._get_shadow_repository_path(
940 self.path, repo_id, workspace_id)
940 self.path, repo_id, workspace_id)
941 if not os.path.exists(shadow_repository_path):
941 if not os.path.exists(shadow_repository_path):
942 self._local_clone(
942 self._local_clone(
943 shadow_repository_path, target_ref.name, source_ref.name)
943 shadow_repository_path, target_ref.name, source_ref.name)
944 log.debug('Prepared %s shadow repository in %s',
944 log.debug('Prepared %s shadow repository in %s',
945 self.alias, shadow_repository_path)
945 self.alias, shadow_repository_path)
946
946
947 return shadow_repository_path
947 return shadow_repository_path
948
948
949 def _merge_repo(self, repo_id, workspace_id, target_ref,
949 def _merge_repo(self, repo_id, workspace_id, target_ref,
950 source_repo, source_ref, merge_message,
950 source_repo, source_ref, merge_message,
951 merger_name, merger_email, dry_run=False,
951 merger_name, merger_email, dry_run=False,
952 use_rebase=False, close_branch=False):
952 use_rebase=False, close_branch=False):
953
953
954 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
954 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
955 'rebase' if use_rebase else 'merge', dry_run)
955 'rebase' if use_rebase else 'merge', dry_run)
956 if target_ref.commit_id != self.branches[target_ref.name]:
956 if target_ref.commit_id != self.branches[target_ref.name]:
957 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
957 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
958 target_ref.commit_id, self.branches[target_ref.name])
958 target_ref.commit_id, self.branches[target_ref.name])
959 return MergeResponse(
959 return MergeResponse(
960 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
960 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
961 metadata={'target_ref': target_ref})
961 metadata={'target_ref': target_ref})
962
962
963 shadow_repository_path = self._maybe_prepare_merge_workspace(
963 shadow_repository_path = self._maybe_prepare_merge_workspace(
964 repo_id, workspace_id, target_ref, source_ref)
964 repo_id, workspace_id, target_ref, source_ref)
965 shadow_repo = self.get_shadow_instance(shadow_repository_path)
965 shadow_repo = self.get_shadow_instance(shadow_repository_path)
966
966
967 # checkout source, if it's different. Otherwise we could not
967 # checkout source, if it's different. Otherwise we could not
968 # fetch proper commits for merge testing
968 # fetch proper commits for merge testing
969 if source_ref.name != target_ref.name:
969 if source_ref.name != target_ref.name:
970 if shadow_repo.get_remote_ref(source_ref.name):
970 if shadow_repo.get_remote_ref(source_ref.name):
971 shadow_repo._checkout(source_ref.name, force=True)
971 shadow_repo._checkout(source_ref.name, force=True)
972
972
973 # checkout target, and fetch changes
973 # checkout target, and fetch changes
974 shadow_repo._checkout(target_ref.name, force=True)
974 shadow_repo._checkout(target_ref.name, force=True)
975
975
976 # fetch and reset the target branch, in case it changed;
976 # fetch and reset the target branch, in case it changed;
977 # this handles even force-pushed changes
977 # this handles even force-pushed changes
978 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
978 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
979 shadow_repo._local_reset(target_ref.name)
979 shadow_repo._local_reset(target_ref.name)
980
980
981 # Need to reload repo to invalidate the cache, or otherwise we cannot
981 # Need to reload repo to invalidate the cache, or otherwise we cannot
982 # retrieve the last target commit.
982 # retrieve the last target commit.
983 shadow_repo = self.get_shadow_instance(shadow_repository_path)
983 shadow_repo = self.get_shadow_instance(shadow_repository_path)
984 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
984 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
985 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
985 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
986 target_ref, target_ref.commit_id,
986 target_ref, target_ref.commit_id,
987 shadow_repo.branches[target_ref.name])
987 shadow_repo.branches[target_ref.name])
988 return MergeResponse(
988 return MergeResponse(
989 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
989 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
990 metadata={'target_ref': target_ref})
990 metadata={'target_ref': target_ref})
991
991
992 # calculate new branch
992 # calculate new branch
993 pr_branch = shadow_repo._get_new_pr_branch(
993 pr_branch = shadow_repo._get_new_pr_branch(
994 source_ref.name, target_ref.name)
994 source_ref.name, target_ref.name)
995 log.debug('using pull-request merge branch: `%s`', pr_branch)
995 log.debug('using pull-request merge branch: `%s`', pr_branch)
996 # checkout to temp branch, and fetch changes
996 # checkout to temp branch, and fetch changes
997 shadow_repo._checkout(pr_branch, create=True)
997 shadow_repo._checkout(pr_branch, create=True)
998 try:
998 try:
999 shadow_repo._local_fetch(source_repo.path, source_ref.name)
999 shadow_repo._local_fetch(source_repo.path, source_ref.name)
1000 except RepositoryError:
1000 except RepositoryError:
1001 log.exception('Failure when doing local fetch on '
1001 log.exception('Failure when doing local fetch on '
1002 'shadow repo: %s', shadow_repo)
1002 'shadow repo: %s', shadow_repo)
1003 return MergeResponse(
1003 return MergeResponse(
1004 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
1004 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
1005 metadata={'source_ref': source_ref})
1005 metadata={'source_ref': source_ref})
1006
1006
1007 merge_ref = None
1007 merge_ref = None
1008 merge_failure_reason = MergeFailureReason.NONE
1008 merge_failure_reason = MergeFailureReason.NONE
1009 metadata = {}
1009 metadata = {}
1010 try:
1010 try:
1011 shadow_repo._local_merge(merge_message, merger_name, merger_email,
1011 shadow_repo._local_merge(merge_message, merger_name, merger_email,
1012 [source_ref.commit_id])
1012 [source_ref.commit_id])
1013 merge_possible = True
1013 merge_possible = True
1014
1014
1015 # Need to invalidate the cache, or otherwise we
1015 # Need to invalidate the cache, or otherwise we
1016 # cannot retrieve the merge commit.
1016 # cannot retrieve the merge commit.
1017 shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path)
1017 shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path)
1018 merge_commit_id = shadow_repo.branches[pr_branch]
1018 merge_commit_id = shadow_repo.branches[pr_branch]
1019
1019
1020 # Set a reference pointing to the merge commit. This reference may
1020 # Set a reference pointing to the merge commit. This reference may
1021 # be used to easily identify the last successful merge commit in
1021 # be used to easily identify the last successful merge commit in
1022 # the shadow repository.
1022 # the shadow repository.
1023 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
1023 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
1024 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
1024 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
1025 except RepositoryError as e:
1025 except RepositoryError as e:
1026 log.exception('Failure when doing local merge on git shadow repo')
1026 log.exception('Failure when doing local merge on git shadow repo')
1027 if isinstance(e, UnresolvedFilesInRepo):
1027 if isinstance(e, UnresolvedFilesInRepo):
1028 metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))
1028 metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))
1029
1029
1030 merge_possible = False
1030 merge_possible = False
1031 merge_failure_reason = MergeFailureReason.MERGE_FAILED
1031 merge_failure_reason = MergeFailureReason.MERGE_FAILED
1032
1032
1033 if merge_possible and not dry_run:
1033 if merge_possible and not dry_run:
1034 try:
1034 try:
1035 shadow_repo._local_push(
1035 shadow_repo._local_push(
1036 pr_branch, self.path, target_ref.name, enable_hooks=True,
1036 pr_branch, self.path, target_ref.name, enable_hooks=True,
1037 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
1037 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
1038 merge_succeeded = True
1038 merge_succeeded = True
1039 except RepositoryError:
1039 except RepositoryError:
1040 log.exception(
1040 log.exception(
1041 'Failure when doing local push from the shadow '
1041 'Failure when doing local push from the shadow '
1042 'repository to the target repository at %s.', self.path)
1042 'repository to the target repository at %s.', self.path)
1043 merge_succeeded = False
1043 merge_succeeded = False
1044 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1044 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1045 metadata['target'] = 'git shadow repo'
1045 metadata['target'] = 'git shadow repo'
1046 metadata['merge_commit'] = pr_branch
1046 metadata['merge_commit'] = pr_branch
1047 else:
1047 else:
1048 merge_succeeded = False
1048 merge_succeeded = False
1049
1049
1050 return MergeResponse(
1050 return MergeResponse(
1051 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
1051 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
1052 metadata=metadata)
1052 metadata=metadata)
@@ -1,1012 +1,1012 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2020 RhodeCode GmbH
3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 HG repository module
22 HG repository module
23 """
23 """
24 import os
24 import os
25 import logging
25 import logging
26 import binascii
26 import binascii
27 import configparser
27 import configparser
28 import urllib.request, urllib.parse, urllib.error
28 import urllib.request, urllib.parse, urllib.error
29
29
30 from zope.cachedescriptors.property import Lazy as LazyProperty
30 from zope.cachedescriptors.property import Lazy as LazyProperty
31
31
32 from rhodecode.lib.compat import OrderedDict
32 from collections import OrderedDict
33 from rhodecode.lib.datelib import (
33 from rhodecode.lib.datelib import (
34 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
35 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.utils import safe_unicode, safe_str
36 from rhodecode.lib.utils2 import CachedProperty
36 from rhodecode.lib.utils2 import CachedProperty
37 from rhodecode.lib.vcs import connection, exceptions
37 from rhodecode.lib.vcs import connection, exceptions
38 from rhodecode.lib.vcs.backends.base import (
38 from rhodecode.lib.vcs.backends.base import (
39 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 BaseRepository, CollectionGenerator, Config, MergeResponse,
40 MergeFailureReason, Reference, BasePathPermissionChecker)
40 MergeFailureReason, Reference, BasePathPermissionChecker)
41 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
41 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
42 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
42 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
43 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
43 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
44 from rhodecode.lib.vcs.exceptions import (
44 from rhodecode.lib.vcs.exceptions import (
45 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
45 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
46 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
46 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
47
47
48 hexlify = binascii.hexlify
48 hexlify = binascii.hexlify
49 nullid = "\0" * 20
49 nullid = "\0" * 20
50
50
51 log = logging.getLogger(__name__)
51 log = logging.getLogger(__name__)
52
52
53
53
54 class MercurialRepository(BaseRepository):
54 class MercurialRepository(BaseRepository):
55 """
55 """
56 Mercurial repository backend
56 Mercurial repository backend
57 """
57 """
58 DEFAULT_BRANCH_NAME = 'default'
58 DEFAULT_BRANCH_NAME = 'default'
59
59
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 do_workspace_checkout=False, with_wire=None, bare=False):
61 do_workspace_checkout=False, with_wire=None, bare=False):
62 """
62 """
63 Raises RepositoryError if the repository could not be found at the given
63 Raises RepositoryError if the repository could not be found at the given
64 ``repo_path``.
64 ``repo_path``.
65
65
66 :param repo_path: local path of the repository
66 :param repo_path: local path of the repository
67 :param config: config object containing the repo configuration
67 :param config: config object containing the repo configuration
68 :param create=False: if set to True, would try to create repository if
68 :param create=False: if set to True, would try to create repository if
69 it does not exist rather than raising exception
69 it does not exist rather than raising exception
70 :param src_url=None: would try to clone repository from given location
70 :param src_url=None: would try to clone repository from given location
71 :param do_workspace_checkout=False: sets update of working copy after
71 :param do_workspace_checkout=False: sets update of working copy after
72 making a clone
72 making a clone
73 :param bare: not used, compatible with other VCS
73 :param bare: not used, compatible with other VCS
74 """
74 """
75
75
76 self.path = safe_str(os.path.abspath(repo_path))
76 self.path = safe_str(os.path.abspath(repo_path))
77 # mercurial since 4.4.X requires certain configuration to be present;
77 # mercurial since 4.4.X requires certain configuration to be present;
78 # because we sometimes init repos with custom config, we need to meet
78 # because we sometimes init repos with custom config, we need to meet
79 # these special requirements
79 # these special requirements
80 self.config = config if config else self.get_default_config(
80 self.config = config if config else self.get_default_config(
81 default=[('extensions', 'largefiles', '1')])
81 default=[('extensions', 'largefiles', '1')])
82 self.with_wire = with_wire or {"cache": False} # default should not use cache
82 self.with_wire = with_wire or {"cache": False} # default should not use cache
83
83
84 self._init_repo(create, src_url, do_workspace_checkout)
84 self._init_repo(create, src_url, do_workspace_checkout)
85
85
86 # caches
86 # caches
87 self._commit_ids = {}
87 self._commit_ids = {}
88
88
89 @LazyProperty
89 @LazyProperty
90 def _remote(self):
90 def _remote(self):
91 repo_id = self.path
91 repo_id = self.path
92 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
92 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
93
93
94 @CachedProperty
94 @CachedProperty
95 def commit_ids(self):
95 def commit_ids(self):
96 """
96 """
97 Returns list of commit ids, in ascending order. Being a lazy
97 Returns list of commit ids, in ascending order. Being a lazy
98 attribute allows external tools to inject shas from a cache.
98 attribute allows external tools to inject shas from a cache.
99 """
99 """
100 commit_ids = self._get_all_commit_ids()
100 commit_ids = self._get_all_commit_ids()
101 self._rebuild_cache(commit_ids)
101 self._rebuild_cache(commit_ids)
102 return commit_ids
102 return commit_ids
103
103
104 def _rebuild_cache(self, commit_ids):
104 def _rebuild_cache(self, commit_ids):
105 self._commit_ids = dict((commit_id, index)
105 self._commit_ids = dict((commit_id, index)
106 for index, commit_id in enumerate(commit_ids))
106 for index, commit_id in enumerate(commit_ids))
107
107
108 @CachedProperty
108 @CachedProperty
109 def branches(self):
109 def branches(self):
110 return self._get_branches()
110 return self._get_branches()
111
111
112 @CachedProperty
112 @CachedProperty
113 def branches_closed(self):
113 def branches_closed(self):
114 return self._get_branches(active=False, closed=True)
114 return self._get_branches(active=False, closed=True)
115
115
116 @CachedProperty
116 @CachedProperty
117 def branches_all(self):
117 def branches_all(self):
118 all_branches = {}
118 all_branches = {}
119 all_branches.update(self.branches)
119 all_branches.update(self.branches)
120 all_branches.update(self.branches_closed)
120 all_branches.update(self.branches_closed)
121 return all_branches
121 return all_branches
122
122
123 def _get_branches(self, active=True, closed=False):
123 def _get_branches(self, active=True, closed=False):
124 """
124 """
125 Gets branches for this repository
125 Gets branches for this repository
126 Returns only active (not closed) branches by default
126 Returns only active (not closed) branches by default
127
127
128 :param active: return also active branches
128 :param active: return also active branches
129 :param closed: return also closed branches
129 :param closed: return also closed branches
130
130
131 """
131 """
132 if self.is_empty():
132 if self.is_empty():
133 return {}
133 return {}
134
134
135 def get_name(ctx):
135 def get_name(ctx):
136 return ctx[0]
136 return ctx[0]
137
137
138 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
138 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
139 self._remote.branches(active, closed).items()]
139 self._remote.branches(active, closed).items()]
140
140
141 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
141 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
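# Editorial sketch (synthetic data): the (name, hex-sha) pairs end up in a
# name-sorted OrderedDict:
#
#   from collections import OrderedDict
#   pairs = [(u'stable', b'aa'), (u'default', b'bb')]
#   OrderedDict(sorted(pairs, key=lambda ctx: ctx[0]))
#   # OrderedDict([(u'default', b'bb'), (u'stable', b'aa')])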
142
142
143 @CachedProperty
143 @CachedProperty
144 def tags(self):
144 def tags(self):
145 """
145 """
146 Gets tags for this repository
146 Gets tags for this repository
147 """
147 """
148 return self._get_tags()
148 return self._get_tags()
149
149
150 def _get_tags(self):
150 def _get_tags(self):
151 if self.is_empty():
151 if self.is_empty():
152 return {}
152 return {}
153
153
154 def get_name(ctx):
154 def get_name(ctx):
155 return ctx[0]
155 return ctx[0]
156
156
157 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
157 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
158 self._remote.tags().items()]
158 self._remote.tags().items()]
159
159
160 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
160 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
161
161
162 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
162 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
163 """
163 """
164 Creates and returns a tag for the given ``commit_id``.
164 Creates and returns a tag for the given ``commit_id``.
165
165
166 :param name: name for new tag
166 :param name: name for new tag
167 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
167 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
168 :param commit_id: commit id for which new tag would be created
168 :param commit_id: commit id for which new tag would be created
169 :param message: message of the tag's commit
169 :param message: message of the tag's commit
170 :param date: date of tag's commit
170 :param date: date of tag's commit
171
171
172 :raises TagAlreadyExistError: if tag with same name already exists
172 :raises TagAlreadyExistError: if tag with same name already exists
173 """
173 """
174 if name in self.tags:
174 if name in self.tags:
175 raise TagAlreadyExistError("Tag %s already exists" % name)
175 raise TagAlreadyExistError("Tag %s already exists" % name)
176
176
177 commit = self.get_commit(commit_id=commit_id)
177 commit = self.get_commit(commit_id=commit_id)
178 local = kwargs.setdefault('local', False)
178 local = kwargs.setdefault('local', False)
179
179
180 if message is None:
180 if message is None:
181 message = "Added tag %s for commit %s" % (name, commit.short_id)
181 message = "Added tag %s for commit %s" % (name, commit.short_id)
182
182
183 date, tz = date_to_timestamp_plus_offset(date)
183 date, tz = date_to_timestamp_plus_offset(date)
184
184
185 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
185 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
186 self._remote.invalidate_vcs_cache()
186 self._remote.invalidate_vcs_cache()
187
187
188 # Reinitialize tags
188 # Reinitialize tags
189 self._invalidate_prop_cache('tags')
189 self._invalidate_prop_cache('tags')
190 tag_id = self.tags[name]
190 tag_id = self.tags[name]
191
191
192 return self.get_commit(commit_id=tag_id)
192 return self.get_commit(commit_id=tag_id)
193
193
194 def remove_tag(self, name, user, message=None, date=None):
194 def remove_tag(self, name, user, message=None, date=None):
195 """
195 """
196 Removes tag with the given `name`.
196 Removes tag with the given `name`.
197
197
198 :param name: name of the tag to be removed
198 :param name: name of the tag to be removed
199 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
199 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
200 :param message: message of the tag's removal commit
200 :param message: message of the tag's removal commit
201 :param date: date of tag's removal commit
201 :param date: date of tag's removal commit
202
202
203 :raises TagDoesNotExistError: if tag with given name does not exist
203 :raises TagDoesNotExistError: if tag with given name does not exist
204 """
204 """
205 if name not in self.tags:
205 if name not in self.tags:
206 raise TagDoesNotExistError("Tag %s does not exist" % name)
206 raise TagDoesNotExistError("Tag %s does not exist" % name)
207
207
208 if message is None:
208 if message is None:
209 message = "Removed tag %s" % name
209 message = "Removed tag %s" % name
210 local = False
210 local = False
211
211
212 date, tz = date_to_timestamp_plus_offset(date)
212 date, tz = date_to_timestamp_plus_offset(date)
213
213
214 self._remote.tag(name, nullid, message, local, user, date, tz)
214 self._remote.tag(name, nullid, message, local, user, date, tz)
215 self._remote.invalidate_vcs_cache()
215 self._remote.invalidate_vcs_cache()
216 self._invalidate_prop_cache('tags')
216 self._invalidate_prop_cache('tags')
217
217
218 @LazyProperty
218 @LazyProperty
219 def bookmarks(self):
219 def bookmarks(self):
220 """
220 """
221 Gets bookmarks for this repository
221 Gets bookmarks for this repository
222 """
222 """
223 return self._get_bookmarks()
223 return self._get_bookmarks()
224
224
225 def _get_bookmarks(self):
225 def _get_bookmarks(self):
226 if self.is_empty():
226 if self.is_empty():
227 return {}
227 return {}
228
228
229 def get_name(ctx):
229 def get_name(ctx):
230 return ctx[0]
230 return ctx[0]
231
231
232 _bookmarks = [
232 _bookmarks = [
233 (safe_unicode(n), hexlify(h)) for n, h in
233 (safe_unicode(n), hexlify(h)) for n, h in
234 self._remote.bookmarks().items()]
234 self._remote.bookmarks().items()]
235
235
236 return OrderedDict(sorted(_bookmarks, key=get_name))
236 return OrderedDict(sorted(_bookmarks, key=get_name))
237
237
238 def _get_all_commit_ids(self):
238 def _get_all_commit_ids(self):
239 return self._remote.get_all_commit_ids('visible')
239 return self._remote.get_all_commit_ids('visible')
240
240
241 def get_diff(
241 def get_diff(
242 self, commit1, commit2, path='', ignore_whitespace=False,
242 self, commit1, commit2, path='', ignore_whitespace=False,
243 context=3, path1=None):
243 context=3, path1=None):
244 """
244 """
245 Returns (git like) *diff*, as plain text. Shows changes introduced by
245 Returns (git like) *diff*, as plain text. Shows changes introduced by
246 `commit2` since `commit1`.
246 `commit2` since `commit1`.
247
247
248 :param commit1: Entry point from which diff is shown. Can be
248 :param commit1: Entry point from which diff is shown. Can be
249 ``self.EMPTY_COMMIT`` - in this case, patch showing all
249 ``self.EMPTY_COMMIT`` - in this case, patch showing all
250 the changes since empty state of the repository until `commit2`
250 the changes since empty state of the repository until `commit2`
251 :param commit2: Until which commit changes should be shown.
251 :param commit2: Until which commit changes should be shown.
252 :param ignore_whitespace: If set to ``True``, would not show whitespace
252 :param ignore_whitespace: If set to ``True``, would not show whitespace
253 changes. Defaults to ``False``.
253 changes. Defaults to ``False``.
254 :param context: How many lines before/after changed lines should be
254 :param context: How many lines before/after changed lines should be
255 shown. Defaults to ``3``.
255 shown. Defaults to ``3``.
256 """
256 """
257 self._validate_diff_commits(commit1, commit2)
257 self._validate_diff_commits(commit1, commit2)
258 if path1 is not None and path1 != path:
258 if path1 is not None and path1 != path:
259 raise ValueError("Diff of two different paths not supported.")
259 raise ValueError("Diff of two different paths not supported.")
260
260
261 if path:
261 if path:
262 file_filter = [self.path, path]
262 file_filter = [self.path, path]
263 else:
263 else:
264 file_filter = None
264 file_filter = None
265
265
266 diff = self._remote.diff(
266 diff = self._remote.diff(
267 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
267 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
268 opt_git=True, opt_ignorews=ignore_whitespace,
268 opt_git=True, opt_ignorews=ignore_whitespace,
269 context=context)
269 context=context)
270 return MercurialDiff(diff)
270 return MercurialDiff(diff)
271
271
272 def strip(self, commit_id, branch=None):
272 def strip(self, commit_id, branch=None):
273 self._remote.strip(commit_id, update=False, backup="none")
273 self._remote.strip(commit_id, update=False, backup="none")
274
274
275 self._remote.invalidate_vcs_cache()
275 self._remote.invalidate_vcs_cache()
276 # clear cache
276 # clear cache
277 self._invalidate_prop_cache('commit_ids')
277 self._invalidate_prop_cache('commit_ids')
278
278
279 return len(self.commit_ids)
279 return len(self.commit_ids)
280
280
281 def verify(self):
281 def verify(self):
282 verify = self._remote.verify()
282 verify = self._remote.verify()
283
283
284 self._remote.invalidate_vcs_cache()
284 self._remote.invalidate_vcs_cache()
285 return verify
285 return verify
286
286
287 def hg_update_cache(self):
287 def hg_update_cache(self):
288 update_cache = self._remote.hg_update_cache()
288 update_cache = self._remote.hg_update_cache()
289
289
290 self._remote.invalidate_vcs_cache()
290 self._remote.invalidate_vcs_cache()
291 return update_cache
291 return update_cache
292
292
293 def hg_rebuild_fn_cache(self):
293 def hg_rebuild_fn_cache(self):
294 update_cache = self._remote.hg_rebuild_fn_cache()
294 update_cache = self._remote.hg_rebuild_fn_cache()
295
295
296 self._remote.invalidate_vcs_cache()
296 self._remote.invalidate_vcs_cache()
297 return update_cache
297 return update_cache
298
298
299 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
299 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
300 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
300 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
301 self, commit_id1, repo2, commit_id2)
301 self, commit_id1, repo2, commit_id2)
302
302
303 if commit_id1 == commit_id2:
303 if commit_id1 == commit_id2:
304 return commit_id1
304 return commit_id1
305
305
306 ancestors = self._remote.revs_from_revspec(
306 ancestors = self._remote.revs_from_revspec(
307 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
307 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
308 other_path=repo2.path)
308 other_path=repo2.path)
309
309
310 ancestor_id = repo2[ancestors[0]].raw_id if ancestors else None
310 ancestor_id = repo2[ancestors[0]].raw_id if ancestors else None
311
311
312 log.debug('Found common ancestor with sha: %s', ancestor_id)
312 log.debug('Found common ancestor with sha: %s', ancestor_id)
313 return ancestor_id
313 return ancestor_id
314
314
315 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
315 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
316 if commit_id1 == commit_id2:
316 if commit_id1 == commit_id2:
317 commits = []
317 commits = []
318 else:
318 else:
319 if merge:
319 if merge:
320 indexes = self._remote.revs_from_revspec(
320 indexes = self._remote.revs_from_revspec(
321 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
321 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
322 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
322 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
323 else:
323 else:
324 indexes = self._remote.revs_from_revspec(
324 indexes = self._remote.revs_from_revspec(
325 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
325 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
326 commit_id1, other_path=repo2.path)
326 commit_id1, other_path=repo2.path)
327
327
328 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
328 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
329 for idx in indexes]
329 for idx in indexes]
330
330
331 return commits
331 return commits
332
332
333 @staticmethod
333 @staticmethod
334 def check_url(url, config):
334 def check_url(url, config):
335 """
335 """
336 Function will check the given url and try to verify that it's a valid
336 Function will check the given url and try to verify that it's a valid
337 link. Sometimes it may happen that mercurial issues a basic
337 link. Sometimes it may happen that mercurial issues a basic
338 auth request that can cause the whole API to hang when used from python
338 auth request that can cause the whole API to hang when used from python
339 or other external calls.
339 or other external calls.
340
340
341 On failure it'll raise urllib.error.HTTPError; the exception is also
341 On failure it'll raise urllib.error.HTTPError; the exception is also
342 raised when the return code is not 200
342 raised when the return code is not 200
343 """
343 """
344 # check first if it's not a local url
344 # check first if it's not a local url
345 if os.path.isdir(url) or url.startswith('file:'):
345 if os.path.isdir(url) or url.startswith('file:'):
346 return True
346 return True
347
347
348 # Request the _remote to verify the url
348 # Request the _remote to verify the url
349 return connection.Hg.check_url(url, config.serialize())
349 return connection.Hg.check_url(url, config.serialize())
350
350
351 @staticmethod
351 @staticmethod
352 def is_valid_repository(path):
352 def is_valid_repository(path):
353 return os.path.isdir(os.path.join(path, '.hg'))
353 return os.path.isdir(os.path.join(path, '.hg'))
354
354
355 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
355 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
356 """
356 """
357 Function will check for mercurial repository in given path. If there
357 Function will check for mercurial repository in given path. If there
358 is no repository in that path it will raise an exception unless
358 is no repository in that path it will raise an exception unless
359 `create` parameter is set to True - in that case repository would
359 `create` parameter is set to True - in that case repository would
360 be created.
360 be created.
361
361
362 If `src_url` is given, would try to clone the repository from that
362 If `src_url` is given, would try to clone the repository from that
363 location. Additionally it'll update the working copy according to
363 location. Additionally it'll update the working copy according to
364 the `do_workspace_checkout` flag.
364 the `do_workspace_checkout` flag.
365 """
365 """
366 if create and os.path.exists(self.path):
366 if create and os.path.exists(self.path):
367 raise RepositoryError(
367 raise RepositoryError(
368 "Cannot create repository at %s, location already exist"
368 "Cannot create repository at %s, location already exist"
369 % self.path)
369 % self.path)
370
370
371 if src_url:
371 if src_url:
372 url = str(self._get_url(src_url))
372 url = str(self._get_url(src_url))
373 MercurialRepository.check_url(url, self.config)
373 MercurialRepository.check_url(url, self.config)
374
374
375 self._remote.clone(url, self.path, do_workspace_checkout)
375 self._remote.clone(url, self.path, do_workspace_checkout)
376
376
377 # Don't try to create if we've already cloned repo
377 # Don't try to create if we've already cloned repo
378 create = False
378 create = False
379
379
380 if create:
380 if create:
381 os.makedirs(self.path, mode=0o755)
381 os.makedirs(self.path, mode=0o755)
382 self._remote.localrepository(create)
382 self._remote.localrepository(create)
383
383
384 @LazyProperty
384 @LazyProperty
385 def in_memory_commit(self):
385 def in_memory_commit(self):
386 return MercurialInMemoryCommit(self)
386 return MercurialInMemoryCommit(self)
387
387
388 @LazyProperty
388 @LazyProperty
389 def description(self):
389 def description(self):
390 description = self._remote.get_config_value(
390 description = self._remote.get_config_value(
391 'web', 'description', untrusted=True)
391 'web', 'description', untrusted=True)
392 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
392 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
393
393
394 @LazyProperty
394 @LazyProperty
395 def contact(self):
395 def contact(self):
396 contact = (
396 contact = (
397 self._remote.get_config_value("web", "contact") or
397 self._remote.get_config_value("web", "contact") or
398 self._remote.get_config_value("ui", "username"))
398 self._remote.get_config_value("ui", "username"))
399 return safe_unicode(contact or self.DEFAULT_CONTACT)
399 return safe_unicode(contact or self.DEFAULT_CONTACT)
400
400
401 @LazyProperty
401 @LazyProperty
402 def last_change(self):
402 def last_change(self):
403 """
403 """
404 Returns last change made on this repository as
404 Returns last change made on this repository as
405 `datetime.datetime` object.
405 `datetime.datetime` object.
406 """
406 """
407 try:
407 try:
408 return self.get_commit().date
408 return self.get_commit().date
409 except RepositoryError:
409 except RepositoryError:
410 tzoffset = makedate()[1]
410 tzoffset = makedate()[1]
411 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
411 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
412
412
413 def _get_fs_mtime(self):
413 def _get_fs_mtime(self):
414 # fallback to filesystem
414 # fallback to filesystem
415 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
415 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
416 st_path = os.path.join(self.path, '.hg', "store")
416 st_path = os.path.join(self.path, '.hg', "store")
417 if os.path.exists(cl_path):
417 if os.path.exists(cl_path):
418 return os.stat(cl_path).st_mtime
418 return os.stat(cl_path).st_mtime
419 else:
419 else:
420 return os.stat(st_path).st_mtime
420 return os.stat(st_path).st_mtime
421
421
422 def _get_url(self, url):
422 def _get_url(self, url):
423 """
423 """
424 Returns normalized url. If scheme is not given, falls back
424 Returns normalized url. If scheme is not given, falls back
425 to the filesystem
425 to the filesystem
426 (``file:///``) scheme.
426 (``file:///``) scheme.
427 """
427 """
428 url = url.decode('utf8') if isinstance(url, bytes) else url  # py3: the checks below need str
428 url = url.decode('utf8') if isinstance(url, bytes) else url  # py3: the checks below need str
429 if url != 'default' and '://' not in url:
429 if url != 'default' and '://' not in url:
430 url = "file:" + urllib.request.pathname2url(url)
430 url = "file:" + urllib.request.pathname2url(url)
431 return url
431 return url
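# Editorial sketch (POSIX path): how a plain filesystem path is normalized:
#
#   import urllib.request
#   'file:' + urllib.request.pathname2url('/srv/repos/foo')
#   # -> 'file:/srv/repos/foo'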
432
432
433 def get_hook_location(self):
433 def get_hook_location(self):
434 """
434 """
435 returns absolute path to location where hooks are stored
435 returns absolute path to location where hooks are stored
436 """
436 """
437 return os.path.join(self.path, '.hg', '.hgrc')
437 return os.path.join(self.path, '.hg', '.hgrc')
438
438
439 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
439 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
440 translate_tag=None, maybe_unreachable=False, reference_obj=None):
440 translate_tag=None, maybe_unreachable=False, reference_obj=None):
441 """
441 """
442 Returns ``MercurialCommit`` object representing repository's
442 Returns ``MercurialCommit`` object representing repository's
443 commit at the given `commit_id` or `commit_idx`.
443 commit at the given `commit_id` or `commit_idx`.
444 """
444 """
445 if self.is_empty():
445 if self.is_empty():
446 raise EmptyRepositoryError("There are no commits yet")
446 raise EmptyRepositoryError("There are no commits yet")
447
447
448 if commit_id is not None:
448 if commit_id is not None:
449 self._validate_commit_id(commit_id)
449 self._validate_commit_id(commit_id)
450 try:
450 try:
451 # we have cached idx, use it without contacting the remote
451 # we have cached idx, use it without contacting the remote
452 idx = self._commit_ids[commit_id]
452 idx = self._commit_ids[commit_id]
453 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
453 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
454 except KeyError:
454 except KeyError:
455 pass
455 pass
456
456
457 elif commit_idx is not None:
457 elif commit_idx is not None:
458 self._validate_commit_idx(commit_idx)
458 self._validate_commit_idx(commit_idx)
459 try:
459 try:
460 _commit_id = self.commit_ids[commit_idx]
460 _commit_id = self.commit_ids[commit_idx]
461 if commit_idx < 0:
461 if commit_idx < 0:
462 commit_idx = self.commit_ids.index(_commit_id)
462 commit_idx = self.commit_ids.index(_commit_id)
463
463
464 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
464 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
465 except IndexError:
465 except IndexError:
466 commit_id = commit_idx
466 commit_id = commit_idx
467 else:
467 else:
468 commit_id = "tip"
468 commit_id = "tip"
469
469
470 if isinstance(commit_id, str):
470 if isinstance(commit_id, str):
471 commit_id = safe_str(commit_id)
471 commit_id = safe_str(commit_id)
472
472
473 try:
473 try:
474 raw_id, idx = self._remote.lookup(commit_id, both=True)
474 raw_id, idx = self._remote.lookup(commit_id, both=True)
475 except CommitDoesNotExistError:
475 except CommitDoesNotExistError:
476 msg = "Commit {} does not exist for `{}`".format(
476 msg = "Commit {} does not exist for `{}`".format(
477 *map(safe_str, [commit_id, self.name]))
477 *map(safe_str, [commit_id, self.name]))
478 raise CommitDoesNotExistError(msg)
478 raise CommitDoesNotExistError(msg)
479
479
480 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
480 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
481
481
482 def get_commits(
482 def get_commits(
483 self, start_id=None, end_id=None, start_date=None, end_date=None,
483 self, start_id=None, end_id=None, start_date=None, end_date=None,
484 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
484 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
485 """
485 """
486 Returns generator of ``MercurialCommit`` objects from start to end
486 Returns generator of ``MercurialCommit`` objects from start to end
487 (both are inclusive)
487 (both are inclusive)
488
488
489 :param start_id: None, str(commit_id)
489 :param start_id: None, str(commit_id)
490 :param end_id: None, str(commit_id)
490 :param end_id: None, str(commit_id)
491 :param start_date: if specified, commits with commit date less than
491 :param start_date: if specified, commits with commit date less than
492 ``start_date`` would be filtered out from returned set
492 ``start_date`` would be filtered out from returned set
493 :param end_date: if specified, commits with commit date greater than
493 :param end_date: if specified, commits with commit date greater than
494 ``end_date`` would be filtered out from returned set
494 ``end_date`` would be filtered out from returned set
495 :param branch_name: if specified, commits not reachable from given
495 :param branch_name: if specified, commits not reachable from given
496 branch would be filtered out from returned set
496 branch would be filtered out from returned set
497 :param show_hidden: Show hidden commits such as obsolete or hidden from
497 :param show_hidden: Show hidden commits such as obsolete or hidden from
498 Mercurial evolve
498 Mercurial evolve
499 :raise BranchDoesNotExistError: If given ``branch_name`` does not
499 :raise BranchDoesNotExistError: If given ``branch_name`` does not
500 exist.
500 exist.
501 :raise CommitDoesNotExistError: If commit for given ``start`` or
501 :raise CommitDoesNotExistError: If commit for given ``start`` or
502 ``end`` could not be found.
502 ``end`` could not be found.
503 """
503 """
504 # actually we should check now if it's not an empty repo
504 # actually we should check now if it's not an empty repo
505 if self.is_empty():
505 if self.is_empty():
506 raise EmptyRepositoryError("There are no commits yet")
506 raise EmptyRepositoryError("There are no commits yet")
507 self._validate_branch_name(branch_name)
507 self._validate_branch_name(branch_name)
508
508
509 branch_ancestors = False
509 branch_ancestors = False
510 if start_id is not None:
510 if start_id is not None:
511 self._validate_commit_id(start_id)
511 self._validate_commit_id(start_id)
512 c_start = self.get_commit(commit_id=start_id)
512 c_start = self.get_commit(commit_id=start_id)
513 start_pos = self._commit_ids[c_start.raw_id]
513 start_pos = self._commit_ids[c_start.raw_id]
514 else:
514 else:
515 start_pos = None
515 start_pos = None
516
516
517 if end_id is not None:
517 if end_id is not None:
518 self._validate_commit_id(end_id)
518 self._validate_commit_id(end_id)
519 c_end = self.get_commit(commit_id=end_id)
519 c_end = self.get_commit(commit_id=end_id)
520 end_pos = max(0, self._commit_ids[c_end.raw_id])
520 end_pos = max(0, self._commit_ids[c_end.raw_id])
521 else:
521 else:
522 end_pos = None
522 end_pos = None
523
523
524 if None not in [start_id, end_id] and start_pos > end_pos:
524 if None not in [start_id, end_id] and start_pos > end_pos:
525 raise RepositoryError(
525 raise RepositoryError(
526 "Start commit '%s' cannot be after end commit '%s'" %
526 "Start commit '%s' cannot be after end commit '%s'" %
527 (start_id, end_id))
527 (start_id, end_id))
528
528
529 if end_pos is not None:
529 if end_pos is not None:
530 end_pos += 1
530 end_pos += 1
531
531
532 commit_filter = []
532 commit_filter = []
533
533
534 if branch_name and not branch_ancestors:
534 if branch_name and not branch_ancestors:
535 commit_filter.append('branch("%s")' % (branch_name,))
535 commit_filter.append('branch("%s")' % (branch_name,))
536 elif branch_name and branch_ancestors:
536 elif branch_name and branch_ancestors:
537 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
537 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
538
538
539 if start_date and not end_date:
539 if start_date and not end_date:
540 commit_filter.append('date(">%s")' % (start_date,))
540 commit_filter.append('date(">%s")' % (start_date,))
541 if end_date and not start_date:
541 if end_date and not start_date:
542 commit_filter.append('date("<%s")' % (end_date,))
542 commit_filter.append('date("<%s")' % (end_date,))
543 if start_date and end_date:
543 if start_date and end_date:
544 commit_filter.append(
544 commit_filter.append(
545 'date(">%s") and date("<%s")' % (start_date, end_date))
545 'date(">%s") and date("<%s")' % (start_date, end_date))
546
546
547 if not show_hidden:
547 if not show_hidden:
548 commit_filter.append('not obsolete()')
548 commit_filter.append('not obsolete()')
549 commit_filter.append('not hidden()')
549 commit_filter.append('not hidden()')
550
550
551 # TODO: johbo: Figure out a simpler way for this solution
551 # TODO: johbo: Figure out a simpler way for this solution
552 collection_generator = CollectionGenerator
552 collection_generator = CollectionGenerator
553 if commit_filter:
553 if commit_filter:
554 commit_filter = ' and '.join(map(safe_str, commit_filter))
554 commit_filter = ' and '.join(map(safe_str, commit_filter))
555 revisions = self._remote.rev_range([commit_filter])
555 revisions = self._remote.rev_range([commit_filter])
556 collection_generator = MercurialIndexBasedCollectionGenerator
556 collection_generator = MercurialIndexBasedCollectionGenerator
557 else:
557 else:
558 revisions = self.commit_ids
558 revisions = self.commit_ids
559
559
560 if start_pos or end_pos:
560 if start_pos or end_pos:
561 revisions = revisions[start_pos:end_pos]
561 revisions = revisions[start_pos:end_pos]
562
562
563 return collection_generator(self, revisions, pre_load=pre_load)
563 return collection_generator(self, revisions, pre_load=pre_load)
564
564
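# --- Editor's illustrative sketch (not part of the original module): one way
# a caller might combine the get_commits() filters documented above. The
# repository path and dates are assumptions.
#
#   import datetime
#   repo = MercurialRepository('/srv/repos/example')
#   commits = repo.get_commits(
#       branch_name='default',
#       start_date=datetime.datetime(2020, 1, 1),
#       show_hidden=False)
#   for commit in commits:  # lazily yields MercurialCommit objects
#       print(commit.raw_id, commit.message)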
565 def pull(self, url, commit_ids=None):
565 def pull(self, url, commit_ids=None):
566 """
566 """
567 Pull changes from external location.
567 Pull changes from external location.
568
568
569 :param commit_ids: Optional. Can be set to a list of commit ids
569 :param commit_ids: Optional. Can be set to a list of commit ids
570 which shall be pulled from the other repository.
570 which shall be pulled from the other repository.
571 """
571 """
572 url = self._get_url(url)
572 url = self._get_url(url)
573 self._remote.pull(url, commit_ids=commit_ids)
573 self._remote.pull(url, commit_ids=commit_ids)
574 self._remote.invalidate_vcs_cache()
574 self._remote.invalidate_vcs_cache()
575
575
576 def fetch(self, url, commit_ids=None):
576 def fetch(self, url, commit_ids=None):
577 """
577 """
578 Backward compatibility with Git, where fetch == pull
578 Backward compatibility with Git, where fetch == pull
579 """
579 """
580 return self.pull(url, commit_ids=commit_ids)
580 return self.pull(url, commit_ids=commit_ids)
581
581
582 def push(self, url):
582 def push(self, url):
583 url = self._get_url(url)
583 url = self._get_url(url)
584 self._remote.sync_push(url)
584 self._remote.sync_push(url)
585
585
586 def _local_clone(self, clone_path):
586 def _local_clone(self, clone_path):
587 """
587 """
588 Create a local clone of the current repo.
588 Create a local clone of the current repo.
589 """
589 """
590 self._remote.clone(self.path, clone_path, update_after_clone=True,
590 self._remote.clone(self.path, clone_path, update_after_clone=True,
591 hooks=False)
591 hooks=False)
592
592
593 def _update(self, revision, clean=False):
593 def _update(self, revision, clean=False):
594 """
594 """
595 Update the working copy to the specified revision.
595 Update the working copy to the specified revision.
596 """
596 """
597 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
597 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
598 self._remote.update(revision, clean=clean)
598 self._remote.update(revision, clean=clean)
599
599
600 def _identify(self):
600 def _identify(self):
601 """
601 """
602 Return the current state of the working directory.
602 Return the current state of the working directory.
603 """
603 """
604 return self._remote.identify().strip().rstrip('+')
604 return self._remote.identify().strip().rstrip('+')
605
605
606 def _heads(self, branch=None):
606 def _heads(self, branch=None):
607 """
607 """
608 Return the commit ids of the repository heads.
608 Return the commit ids of the repository heads.
609 """
609 """
610 return self._remote.heads(branch=branch).strip().split(' ')
610 return self._remote.heads(branch=branch).strip().split(' ')
611
611
612 def _ancestor(self, revision1, revision2):
612 def _ancestor(self, revision1, revision2):
613 """
613 """
614 Return the common ancestor of the two revisions.
614 Return the common ancestor of the two revisions.
615 """
615 """
616 return self._remote.ancestor(revision1, revision2)
616 return self._remote.ancestor(revision1, revision2)
617
617
618 def _local_push(
618 def _local_push(
619 self, revision, repository_path, push_branches=False,
619 self, revision, repository_path, push_branches=False,
620 enable_hooks=False):
620 enable_hooks=False):
621 """
621 """
622 Push the given revision to the specified repository.
622 Push the given revision to the specified repository.
623
623
624 :param push_branches: allow creating branches in the target repo.
624 :param push_branches: allow creating branches in the target repo.
625 """
625 """
626 self._remote.push(
626 self._remote.push(
627 [revision], repository_path, hooks=enable_hooks,
627 [revision], repository_path, hooks=enable_hooks,
628 push_branches=push_branches)
628 push_branches=push_branches)
629
629
630 def _local_merge(self, target_ref, merge_message, user_name, user_email,
630 def _local_merge(self, target_ref, merge_message, user_name, user_email,
631 source_ref, use_rebase=False, close_commit_id=None, dry_run=False):
631 source_ref, use_rebase=False, close_commit_id=None, dry_run=False):
632 """
632 """
633 Merge the given source_revision into the checked out revision.
633 Merge the given source_revision into the checked out revision.
634
634
635 Returns the commit id of the merge and a boolean indicating if the
635 Returns the commit id of the merge and a boolean indicating if the
636 commit needs to be pushed.
636 commit needs to be pushed.
637 """
637 """
638 source_ref_commit_id = source_ref.commit_id
638 source_ref_commit_id = source_ref.commit_id
639 target_ref_commit_id = target_ref.commit_id
639 target_ref_commit_id = target_ref.commit_id
640
640
641 # update our workdir to target ref, for proper merge
641 # update our workdir to target ref, for proper merge
642 self._update(target_ref_commit_id, clean=True)
642 self._update(target_ref_commit_id, clean=True)
643
643
644 ancestor = self._ancestor(target_ref_commit_id, source_ref_commit_id)
644 ancestor = self._ancestor(target_ref_commit_id, source_ref_commit_id)
645 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
645 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
646
646
647 if close_commit_id:
647 if close_commit_id:
648 # NOTE(marcink): if we get the close commit, this is our new source
648 # NOTE(marcink): if we get the close commit, this is our new source
649 # which will include the close commit itself.
649 # which will include the close commit itself.
650 source_ref_commit_id = close_commit_id
650 source_ref_commit_id = close_commit_id
651
651
652 if ancestor == source_ref_commit_id:
652 if ancestor == source_ref_commit_id:
653 # Nothing to do, the changes were already integrated
653 # Nothing to do, the changes were already integrated
654 return target_ref_commit_id, False
654 return target_ref_commit_id, False
655
655
656 elif ancestor == target_ref_commit_id and is_the_same_branch:
656 elif ancestor == target_ref_commit_id and is_the_same_branch:
657 # In this case we should force a commit message
657 # In this case we should force a commit message
658 return source_ref_commit_id, True
658 return source_ref_commit_id, True
659
659
660 unresolved = None
660 unresolved = None
661 if use_rebase:
661 if use_rebase:
662 try:
662 try:
663 bookmark_name = 'rcbook%s%s' % (source_ref_commit_id, target_ref_commit_id)
663 bookmark_name = 'rcbook%s%s' % (source_ref_commit_id, target_ref_commit_id)
664 self.bookmark(bookmark_name, revision=source_ref.commit_id)
664 self.bookmark(bookmark_name, revision=source_ref.commit_id)
665 self._remote.rebase(
665 self._remote.rebase(
666 source=source_ref_commit_id, dest=target_ref_commit_id)
666 source=source_ref_commit_id, dest=target_ref_commit_id)
667 self._remote.invalidate_vcs_cache()
667 self._remote.invalidate_vcs_cache()
668 self._update(bookmark_name, clean=True)
668 self._update(bookmark_name, clean=True)
669 return self._identify(), True
669 return self._identify(), True
670 except RepositoryError as e:
670 except RepositoryError as e:
671 # The rebase-abort may raise another exception which 'hides'
671 # The rebase-abort may raise another exception which 'hides'
672 # the original one, therefore we log it here.
672 # the original one, therefore we log it here.
673 log.exception('Error while rebasing shadow repo during merge.')
673 log.exception('Error while rebasing shadow repo during merge.')
674 if 'unresolved conflicts' in safe_str(e):
674 if 'unresolved conflicts' in safe_str(e):
675 unresolved = self._remote.get_unresolved_files()
675 unresolved = self._remote.get_unresolved_files()
676 log.debug('unresolved files: %s', unresolved)
676 log.debug('unresolved files: %s', unresolved)
677
677
678 # Clean up any rebase leftovers
678 # Clean up any rebase leftovers
679 self._remote.invalidate_vcs_cache()
679 self._remote.invalidate_vcs_cache()
680 self._remote.rebase(abort=True)
680 self._remote.rebase(abort=True)
681 self._remote.invalidate_vcs_cache()
681 self._remote.invalidate_vcs_cache()
682 self._remote.update(clean=True)
682 self._remote.update(clean=True)
683 if unresolved:
683 if unresolved:
684 raise UnresolvedFilesInRepo(unresolved)
684 raise UnresolvedFilesInRepo(unresolved)
685 else:
685 else:
686 raise
686 raise
687 else:
687 else:
688 try:
688 try:
689 self._remote.merge(source_ref_commit_id)
689 self._remote.merge(source_ref_commit_id)
690 self._remote.invalidate_vcs_cache()
690 self._remote.invalidate_vcs_cache()
691 self._remote.commit(
691 self._remote.commit(
692 message=safe_str(merge_message),
692 message=safe_str(merge_message),
693 username=safe_str('%s <%s>' % (user_name, user_email)))
693 username=safe_str('%s <%s>' % (user_name, user_email)))
694 self._remote.invalidate_vcs_cache()
694 self._remote.invalidate_vcs_cache()
695 return self._identify(), True
695 return self._identify(), True
696 except RepositoryError as e:
696 except RepositoryError as e:
697 # The merge-abort may raise another exception which 'hides'
697 # The merge-abort may raise another exception which 'hides'
698 # the original one, therefore we log it here.
698 # the original one, therefore we log it here.
699 log.exception('Error while merging shadow repo during merge.')
699 log.exception('Error while merging shadow repo during merge.')
700 if 'unresolved merge conflicts' in safe_str(e):
700 if 'unresolved merge conflicts' in safe_str(e):
701 unresolved = self._remote.get_unresolved_files()
701 unresolved = self._remote.get_unresolved_files()
702 log.debug('unresolved files: %s', unresolved)
702 log.debug('unresolved files: %s', unresolved)
703
703
704 # Clean up any merge leftovers
704 # Clean up any merge leftovers
705 self._remote.update(clean=True)
705 self._remote.update(clean=True)
706 if unresolved:
706 if unresolved:
707 raise UnresolvedFilesInRepo(unresolved)
707 raise UnresolvedFilesInRepo(unresolved)
708 else:
708 else:
709 raise
709 raise
710
710
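# --- Editor's note: both the rebase and the merge paths above honour the
# same contract and return ``(commit_id, needs_push)``. A hypothetical call
# (names and message are assumptions):
#
#   merge_id, needs_push = shadow_repo._local_merge(
#       target_ref, 'Merged feature-x', 'Alice', 'alice@example.com',
#       source_ref, use_rebase=False)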
711 def _local_close(self, target_ref, user_name, user_email,
711 def _local_close(self, target_ref, user_name, user_email,
712 source_ref, close_message=''):
712 source_ref, close_message=''):
713 """
713 """
714 Close the branch of the given source_revision
714 Close the branch of the given source_revision
715
715
716 Returns the commit id of the close and a boolean indicating if the
716 Returns the commit id of the close and a boolean indicating if the
717 commit needs to be pushed.
717 commit needs to be pushed.
718 """
718 """
719 self._update(source_ref.commit_id)
719 self._update(source_ref.commit_id)
720 message = close_message or "Closing branch: `{}`".format(source_ref.name)
720 message = close_message or "Closing branch: `{}`".format(source_ref.name)
721 try:
721 try:
722 self._remote.commit(
722 self._remote.commit(
723 message=safe_str(message),
723 message=safe_str(message),
724 username=safe_str('%s <%s>' % (user_name, user_email)),
724 username=safe_str('%s <%s>' % (user_name, user_email)),
725 close_branch=True)
725 close_branch=True)
726 self._remote.invalidate_vcs_cache()
726 self._remote.invalidate_vcs_cache()
727 return self._identify(), True
727 return self._identify(), True
728 except RepositoryError:
728 except RepositoryError:
729 # Clean up any commit leftovers
729 # Clean up any commit leftovers
730 self._remote.update(clean=True)
730 self._remote.update(clean=True)
731 raise
731 raise
732
732
733 def _is_the_same_branch(self, target_ref, source_ref):
733 def _is_the_same_branch(self, target_ref, source_ref):
734 return (
734 return (
735 self._get_branch_name(target_ref) ==
735 self._get_branch_name(target_ref) ==
736 self._get_branch_name(source_ref))
736 self._get_branch_name(source_ref))
737
737
738 def _get_branch_name(self, ref):
738 def _get_branch_name(self, ref):
739 if ref.type == 'branch':
739 if ref.type == 'branch':
740 return ref.name
740 return ref.name
741 return self._remote.ctx_branch(ref.commit_id)
741 return self._remote.ctx_branch(ref.commit_id)
742
742
743 def _maybe_prepare_merge_workspace(
743 def _maybe_prepare_merge_workspace(
744 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
744 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
745 shadow_repository_path = self._get_shadow_repository_path(
745 shadow_repository_path = self._get_shadow_repository_path(
746 self.path, repo_id, workspace_id)
746 self.path, repo_id, workspace_id)
747 if not os.path.exists(shadow_repository_path):
747 if not os.path.exists(shadow_repository_path):
748 self._local_clone(shadow_repository_path)
748 self._local_clone(shadow_repository_path)
749 log.debug(
749 log.debug(
750 'Prepared shadow repository in %s', shadow_repository_path)
750 'Prepared shadow repository in %s', shadow_repository_path)
751
751
752 return shadow_repository_path
752 return shadow_repository_path
753
753
754 def _merge_repo(self, repo_id, workspace_id, target_ref,
754 def _merge_repo(self, repo_id, workspace_id, target_ref,
755 source_repo, source_ref, merge_message,
755 source_repo, source_ref, merge_message,
756 merger_name, merger_email, dry_run=False,
756 merger_name, merger_email, dry_run=False,
757 use_rebase=False, close_branch=False):
757 use_rebase=False, close_branch=False):
758
758
759 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
759 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
760 'rebase' if use_rebase else 'merge', dry_run)
760 'rebase' if use_rebase else 'merge', dry_run)
761 if target_ref.commit_id not in self._heads():
761 if target_ref.commit_id not in self._heads():
762 return MergeResponse(
762 return MergeResponse(
763 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
763 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
764 metadata={'target_ref': target_ref})
764 metadata={'target_ref': target_ref})
765
765
766 try:
766 try:
767 if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
767 if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
768 heads_all = self._heads(target_ref.name)
768 heads_all = self._heads(target_ref.name)
769 max_heads = 10
769 max_heads = 10
770 if len(heads_all) > max_heads:
770 if len(heads_all) > max_heads:
771 heads = ',\n'.join(
771 heads = ',\n'.join(
772 heads_all[:max_heads] +
772 heads_all[:max_heads] +
773 ['and {} more.'.format(len(heads_all)-max_heads)])
773 ['and {} more.'.format(len(heads_all)-max_heads)])
774 else:
774 else:
775 heads = ',\n'.join(heads_all)
775 heads = ',\n'.join(heads_all)
776 metadata = {
776 metadata = {
777 'target_ref': target_ref,
777 'target_ref': target_ref,
778 'source_ref': source_ref,
778 'source_ref': source_ref,
779 'heads': heads
779 'heads': heads
780 }
780 }
781 return MergeResponse(
781 return MergeResponse(
782 False, False, None,
782 False, False, None,
783 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
783 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
784 metadata=metadata)
784 metadata=metadata)
785 except CommitDoesNotExistError:
785 except CommitDoesNotExistError:
786 log.exception('Failure when looking up branch heads on hg target')
786 log.exception('Failure when looking up branch heads on hg target')
787 return MergeResponse(
787 return MergeResponse(
788 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
788 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
789 metadata={'target_ref': target_ref})
789 metadata={'target_ref': target_ref})
790
790
791 shadow_repository_path = self._maybe_prepare_merge_workspace(
791 shadow_repository_path = self._maybe_prepare_merge_workspace(
792 repo_id, workspace_id, target_ref, source_ref)
792 repo_id, workspace_id, target_ref, source_ref)
793 shadow_repo = self.get_shadow_instance(shadow_repository_path)
793 shadow_repo = self.get_shadow_instance(shadow_repository_path)
794
794
795 log.debug('Pulling in target reference %s', target_ref)
795 log.debug('Pulling in target reference %s', target_ref)
796 self._validate_pull_reference(target_ref)
796 self._validate_pull_reference(target_ref)
797 shadow_repo._local_pull(self.path, target_ref)
797 shadow_repo._local_pull(self.path, target_ref)
798
798
799 try:
799 try:
800 log.debug('Pulling in source reference %s', source_ref)
800 log.debug('Pulling in source reference %s', source_ref)
801 source_repo._validate_pull_reference(source_ref)
801 source_repo._validate_pull_reference(source_ref)
802 shadow_repo._local_pull(source_repo.path, source_ref)
802 shadow_repo._local_pull(source_repo.path, source_ref)
803 except CommitDoesNotExistError:
803 except CommitDoesNotExistError:
804 log.exception('Failure when doing local pull on hg shadow repo')
804 log.exception('Failure when doing local pull on hg shadow repo')
805 return MergeResponse(
805 return MergeResponse(
806 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
806 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
807 metadata={'source_ref': source_ref})
807 metadata={'source_ref': source_ref})
808
808
809 merge_ref = None
809 merge_ref = None
810 merge_commit_id = None
810 merge_commit_id = None
811 close_commit_id = None
811 close_commit_id = None
812 merge_failure_reason = MergeFailureReason.NONE
812 merge_failure_reason = MergeFailureReason.NONE
813 metadata = {}
813 metadata = {}
814
814
815 # enforce that close-branch is used only when the source is
815 # enforce that close-branch is used only when the source is
816 # an actual branch
816 # an actual branch
817 close_branch = close_branch and source_ref.type == 'branch'
817 close_branch = close_branch and source_ref.type == 'branch'
818
818
819 # don't allow closing the branch if source and target are the same
819 # don't allow closing the branch if source and target are the same
820 close_branch = close_branch and source_ref.name != target_ref.name
820 close_branch = close_branch and source_ref.name != target_ref.name
821
821
822 needs_push_on_close = False
822 needs_push_on_close = False
823 if close_branch and not use_rebase and not dry_run:
823 if close_branch and not use_rebase and not dry_run:
824 try:
824 try:
825 close_commit_id, needs_push_on_close = shadow_repo._local_close(
825 close_commit_id, needs_push_on_close = shadow_repo._local_close(
826 target_ref, merger_name, merger_email, source_ref)
826 target_ref, merger_name, merger_email, source_ref)
827 merge_possible = True
827 merge_possible = True
828 except RepositoryError:
828 except RepositoryError:
829 log.exception('Failure when doing close branch on '
829 log.exception('Failure when doing close branch on '
830 'shadow repo: %s', shadow_repo)
830 'shadow repo: %s', shadow_repo)
831 merge_possible = False
831 merge_possible = False
832 merge_failure_reason = MergeFailureReason.MERGE_FAILED
832 merge_failure_reason = MergeFailureReason.MERGE_FAILED
833 else:
833 else:
834 merge_possible = True
834 merge_possible = True
835
835
836 needs_push = False
836 needs_push = False
837 if merge_possible:
837 if merge_possible:
838
838
839 try:
839 try:
840 merge_commit_id, needs_push = shadow_repo._local_merge(
840 merge_commit_id, needs_push = shadow_repo._local_merge(
841 target_ref, merge_message, merger_name, merger_email,
841 target_ref, merge_message, merger_name, merger_email,
842 source_ref, use_rebase=use_rebase,
842 source_ref, use_rebase=use_rebase,
843 close_commit_id=close_commit_id, dry_run=dry_run)
843 close_commit_id=close_commit_id, dry_run=dry_run)
844 merge_possible = True
844 merge_possible = True
845
845
846 # read the state of the close action; it may have
846 # read the state of the close action; it may have
847 # required a push
847 # required a push
848 needs_push = needs_push or needs_push_on_close
848 needs_push = needs_push or needs_push_on_close
849
849
850 # Set a bookmark pointing to the merge commit. This bookmark
850 # Set a bookmark pointing to the merge commit. This bookmark
851 # may be used to easily identify the last successful merge
851 # may be used to easily identify the last successful merge
852 # commit in the shadow repository.
852 # commit in the shadow repository.
853 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
853 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
854 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
854 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
855 except SubrepoMergeError:
855 except SubrepoMergeError:
856 log.exception(
856 log.exception(
857 'Subrepo merge error during local merge on hg shadow repo.')
857 'Subrepo merge error during local merge on hg shadow repo.')
858 merge_possible = False
858 merge_possible = False
859 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
859 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
860 needs_push = False
860 needs_push = False
861 except RepositoryError as e:
861 except RepositoryError as e:
862 log.exception('Failure when doing local merge on hg shadow repo')
862 log.exception('Failure when doing local merge on hg shadow repo')
863 if isinstance(e, UnresolvedFilesInRepo):
863 if isinstance(e, UnresolvedFilesInRepo):
864 all_conflicts = list(e.args[0])
864 all_conflicts = list(e.args[0])
865 max_conflicts = 20
865 max_conflicts = 20
866 if len(all_conflicts) > max_conflicts:
866 if len(all_conflicts) > max_conflicts:
867 conflicts = all_conflicts[:max_conflicts] \
867 conflicts = all_conflicts[:max_conflicts] \
868 + ['and {} more.'.format(len(all_conflicts)-max_conflicts)]
868 + ['and {} more.'.format(len(all_conflicts)-max_conflicts)]
869 else:
869 else:
870 conflicts = all_conflicts
870 conflicts = all_conflicts
871 metadata['unresolved_files'] = \
871 metadata['unresolved_files'] = \
872 '\n* conflict: ' + \
872 '\n* conflict: ' + \
873 ('\n * conflict: '.join(conflicts))
873 ('\n * conflict: '.join(conflicts))
874
874
875 merge_possible = False
875 merge_possible = False
876 merge_failure_reason = MergeFailureReason.MERGE_FAILED
876 merge_failure_reason = MergeFailureReason.MERGE_FAILED
877 needs_push = False
877 needs_push = False
878
878
879 if merge_possible and not dry_run:
879 if merge_possible and not dry_run:
880 if needs_push:
880 if needs_push:
881 # In case the target is a bookmark, update it, so after pushing
881 # In case the target is a bookmark, update it, so after pushing
882 # the bookmark is also updated in the target.
882 # the bookmark is also updated in the target.
883 if target_ref.type == 'book':
883 if target_ref.type == 'book':
884 shadow_repo.bookmark(
884 shadow_repo.bookmark(
885 target_ref.name, revision=merge_commit_id)
885 target_ref.name, revision=merge_commit_id)
886 try:
886 try:
887 shadow_repo_with_hooks = self.get_shadow_instance(
887 shadow_repo_with_hooks = self.get_shadow_instance(
888 shadow_repository_path,
888 shadow_repository_path,
889 enable_hooks=True)
889 enable_hooks=True)
890 # This is the actual merge action: we push from shadow
890 # This is the actual merge action: we push from shadow
891 # into origin.
891 # into origin.
892 # Note: the push_branches option will push any new branch
892 # Note: the push_branches option will push any new branch
893 # defined in the source repository to the target. This may
893 # defined in the source repository to the target. This may
894 # be dangerous as branches are permanent in Mercurial.
894 # be dangerous as branches are permanent in Mercurial.
895 # This feature was requested in issue #441.
895 # This feature was requested in issue #441.
896 shadow_repo_with_hooks._local_push(
896 shadow_repo_with_hooks._local_push(
897 merge_commit_id, self.path, push_branches=True,
897 merge_commit_id, self.path, push_branches=True,
898 enable_hooks=True)
898 enable_hooks=True)
899
899
900 # maybe we also need to push the close_commit_id
900 # maybe we also need to push the close_commit_id
901 if close_commit_id:
901 if close_commit_id:
902 shadow_repo_with_hooks._local_push(
902 shadow_repo_with_hooks._local_push(
903 close_commit_id, self.path, push_branches=True,
903 close_commit_id, self.path, push_branches=True,
904 enable_hooks=True)
904 enable_hooks=True)
905 merge_succeeded = True
905 merge_succeeded = True
906 except RepositoryError:
906 except RepositoryError:
907 log.exception(
907 log.exception(
908 'Failure when doing local push from the shadow '
908 'Failure when doing local push from the shadow '
909 'repository to the target repository at %s.', self.path)
909 'repository to the target repository at %s.', self.path)
910 merge_succeeded = False
910 merge_succeeded = False
911 merge_failure_reason = MergeFailureReason.PUSH_FAILED
911 merge_failure_reason = MergeFailureReason.PUSH_FAILED
912 metadata['target'] = 'hg shadow repo'
912 metadata['target'] = 'hg shadow repo'
913 metadata['merge_commit'] = merge_commit_id
913 metadata['merge_commit'] = merge_commit_id
914 else:
914 else:
915 merge_succeeded = True
915 merge_succeeded = True
916 else:
916 else:
917 merge_succeeded = False
917 merge_succeeded = False
918
918
919 return MergeResponse(
919 return MergeResponse(
920 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
920 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
921 metadata=metadata)
921 metadata=metadata)
922
922
923 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
923 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
924 config = self.config.copy()
924 config = self.config.copy()
925 if not enable_hooks:
925 if not enable_hooks:
926 config.clear_section('hooks')
926 config.clear_section('hooks')
927 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
927 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
928
928
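# --- Editor's note: clearing the 'hooks' config section above keeps
# RhodeCode hooks from firing inside the shadow repository; _merge_repo
# re-creates the instance with enable_hooks=True only for the final push
# back into the origin repository.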
929 def _validate_pull_reference(self, reference):
929 def _validate_pull_reference(self, reference):
930 if not (reference.name in self.bookmarks or
930 if not (reference.name in self.bookmarks or
931 reference.name in self.branches or
931 reference.name in self.branches or
932 self.get_commit(reference.commit_id)):
932 self.get_commit(reference.commit_id)):
933 raise CommitDoesNotExistError(
933 raise CommitDoesNotExistError(
934 'Unknown branch, bookmark or commit id')
934 'Unknown branch, bookmark or commit id')
935
935
936 def _local_pull(self, repository_path, reference):
936 def _local_pull(self, repository_path, reference):
937 """
937 """
938 Fetch a branch, bookmark or commit from a local repository.
938 Fetch a branch, bookmark or commit from a local repository.
939 """
939 """
940 repository_path = os.path.abspath(repository_path)
940 repository_path = os.path.abspath(repository_path)
941 if repository_path == self.path:
941 if repository_path == self.path:
942 raise ValueError('Cannot pull from the same repository')
942 raise ValueError('Cannot pull from the same repository')
943
943
944 reference_type_to_option_name = {
944 reference_type_to_option_name = {
945 'book': 'bookmark',
945 'book': 'bookmark',
946 'branch': 'branch',
946 'branch': 'branch',
947 }
947 }
948 option_name = reference_type_to_option_name.get(
948 option_name = reference_type_to_option_name.get(
949 reference.type, 'revision')
949 reference.type, 'revision')
950
950
951 if option_name == 'revision':
951 if option_name == 'revision':
952 ref = reference.commit_id
952 ref = reference.commit_id
953 else:
953 else:
954 ref = reference.name
954 ref = reference.name
955
955
956 options = {option_name: [ref]}
956 options = {option_name: [ref]}
957 self._remote.pull_cmd(repository_path, hooks=False, **options)
957 self._remote.pull_cmd(repository_path, hooks=False, **options)
958 self._remote.invalidate_vcs_cache()
958 self._remote.invalidate_vcs_cache()
959
959
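# --- Editor's sketch of the reference-to-option mapping above (reference
# names are hypothetical):
#
#   Reference('book', 'feature-x', <id>)   -> pull_cmd(..., bookmark=['feature-x'])
#   Reference('branch', 'default', <id>)   -> pull_cmd(..., branch=['default'])
#   any other reference type               -> pull_cmd(..., revision=['<commit_id>'])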
960 def bookmark(self, bookmark, revision=None):
960 def bookmark(self, bookmark, revision=None):
961 if isinstance(bookmark, str):
961 if isinstance(bookmark, str):
962 bookmark = safe_str(bookmark)
962 bookmark = safe_str(bookmark)
963 self._remote.bookmark(bookmark, revision=revision)
963 self._remote.bookmark(bookmark, revision=revision)
964 self._remote.invalidate_vcs_cache()
964 self._remote.invalidate_vcs_cache()
965
965
966 def get_path_permissions(self, username):
966 def get_path_permissions(self, username):
967 hgacl_file = os.path.join(self.path, '.hg/hgacl')
967 hgacl_file = os.path.join(self.path, '.hg/hgacl')
968
968
969 def read_patterns(suffix):
969 def read_patterns(suffix):
970 svalue = None
970 svalue = None
971 for section, option in [
971 for section, option in [
972 ('narrowacl', username + suffix),
972 ('narrowacl', username + suffix),
973 ('narrowacl', 'default' + suffix),
973 ('narrowacl', 'default' + suffix),
974 ('narrowhgacl', username + suffix),
974 ('narrowhgacl', username + suffix),
975 ('narrowhgacl', 'default' + suffix)
975 ('narrowhgacl', 'default' + suffix)
976 ]:
976 ]:
977 try:
977 try:
978 svalue = hgacl.get(section, option)
978 svalue = hgacl.get(section, option)
979 break # stop at the first value we find
979 break # stop at the first value we find
980 except configparser.NoOptionError:
980 except configparser.NoOptionError:
981 pass
981 pass
982 if not svalue:
982 if not svalue:
983 return None
983 return None
984 result = ['/']
984 result = ['/']
985 for pattern in svalue.split():
985 for pattern in svalue.split():
986 result.append(pattern)
986 result.append(pattern)
987 if '*' not in pattern and '?' not in pattern:
987 if '*' not in pattern and '?' not in pattern:
988 result.append(pattern + '/*')
988 result.append(pattern + '/*')
989 return result
989 return result
990
990
991 if os.path.exists(hgacl_file):
991 if os.path.exists(hgacl_file):
992 try:
992 try:
993 hgacl = configparser.RawConfigParser()
993 hgacl = configparser.RawConfigParser()
994 hgacl.read(hgacl_file)
994 hgacl.read(hgacl_file)
995
995
996 includes = read_patterns('.includes')
996 includes = read_patterns('.includes')
997 excludes = read_patterns('.excludes')
997 excludes = read_patterns('.excludes')
998 return BasePathPermissionChecker.create_from_patterns(
998 return BasePathPermissionChecker.create_from_patterns(
999 includes, excludes)
999 includes, excludes)
1000 except BaseException as e:
1000 except BaseException as e:
1001 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
1001 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
1002 hgacl_file, self.name, e)
1002 hgacl_file, self.name, e)
1003 raise exceptions.RepositoryRequirementError(msg)
1003 raise exceptions.RepositoryRequirementError(msg)
1004 else:
1004 else:
1005 return None
1005 return None
1006
1006
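# --- Editor's illustrative .hg/hgacl file for the parser above (section and
# option names follow read_patterns; the user name and paths are assumptions):
#
#   [narrowacl]
#   default.includes = docs src/core
#   alice.includes = *
#
# For 'default.includes', read_patterns('.includes') would return
# ['/', 'docs', 'docs/*', 'src/core', 'src/core/*'] - non-wildcard patterns
# get a trailing '/*' variant appended.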
1007
1007
1008 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
1008 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
1009
1009
1010 def _commit_factory(self, commit_id):
1010 def _commit_factory(self, commit_id):
1011 return self.repo.get_commit(
1011 return self.repo.get_commit(
1012 commit_idx=commit_id, pre_load=self.pre_load)
1012 commit_idx=commit_id, pre_load=self.pre_load)
@@ -1,370 +1,370 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2020 RhodeCode GmbH
3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 SVN repository module
22 SVN repository module
23 """
23 """
24
24
25 import logging
25 import logging
26 import os
26 import os
27 import urllib.request, urllib.parse, urllib.error
27 import urllib.request, urllib.parse, urllib.error
28
28
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from rhodecode.lib.compat import OrderedDict
31 from collections import OrderedDict
32 from rhodecode.lib.datelib import date_astimestamp
32 from rhodecode.lib.datelib import date_astimestamp
33 from rhodecode.lib.utils import safe_str, safe_unicode
33 from rhodecode.lib.utils import safe_str, safe_unicode
34 from rhodecode.lib.utils2 import CachedProperty
34 from rhodecode.lib.utils2 import CachedProperty
35 from rhodecode.lib.vcs import connection, path as vcspath
35 from rhodecode.lib.vcs import connection, path as vcspath
36 from rhodecode.lib.vcs.backends import base
36 from rhodecode.lib.vcs.backends import base
37 from rhodecode.lib.vcs.backends.svn.commit import (
37 from rhodecode.lib.vcs.backends.svn.commit import (
38 SubversionCommit, _date_from_svn_properties)
38 SubversionCommit, _date_from_svn_properties)
39 from rhodecode.lib.vcs.backends.svn.diff import SubversionDiff
39 from rhodecode.lib.vcs.backends.svn.diff import SubversionDiff
40 from rhodecode.lib.vcs.backends.svn.inmemory import SubversionInMemoryCommit
40 from rhodecode.lib.vcs.backends.svn.inmemory import SubversionInMemoryCommit
41 from rhodecode.lib.vcs.conf import settings
41 from rhodecode.lib.vcs.conf import settings
42 from rhodecode.lib.vcs.exceptions import (
42 from rhodecode.lib.vcs.exceptions import (
43 CommitDoesNotExistError, EmptyRepositoryError, RepositoryError,
43 CommitDoesNotExistError, EmptyRepositoryError, RepositoryError,
44 VCSError, NodeDoesNotExistError)
44 VCSError, NodeDoesNotExistError)
45
45
46
46
47 log = logging.getLogger(__name__)
47 log = logging.getLogger(__name__)
48
48
49
49
50 class SubversionRepository(base.BaseRepository):
50 class SubversionRepository(base.BaseRepository):
51 """
51 """
52 Subversion backend implementation
52 Subversion backend implementation
53
53
54 .. important::
54 .. important::
55
55
56 It is very important to distinguish the commit index and the commit id
56 It is very important to distinguish the commit index and the commit id
57 which is assigned by Subversion. The first one is always handled as an
57 which is assigned by Subversion. The first one is always handled as an
58 `int` by this implementation. The commit id assigned by Subversion on
58 `int` by this implementation. The commit id assigned by Subversion on
59 the other hand will always be a `str`.
59 the other hand will always be a `str`.
60
60
61 There is a specific trap since the first commit will have the index
61 There is a specific trap since the first commit will have the index
62 ``0`` but the svn id will be ``"1"``.
62 ``0`` but the svn id will be ``"1"``.
63
63
64 """
64 """
65
65
66 # Note: Subversion does not really have a default branch name.
66 # Note: Subversion does not really have a default branch name.
67 DEFAULT_BRANCH_NAME = None
67 DEFAULT_BRANCH_NAME = None
68
68
69 contact = base.BaseRepository.DEFAULT_CONTACT
69 contact = base.BaseRepository.DEFAULT_CONTACT
70 description = base.BaseRepository.DEFAULT_DESCRIPTION
70 description = base.BaseRepository.DEFAULT_DESCRIPTION
71
71
72 def __init__(self, repo_path, config=None, create=False, src_url=None, with_wire=None,
72 def __init__(self, repo_path, config=None, create=False, src_url=None, with_wire=None,
73 bare=False, **kwargs):
73 bare=False, **kwargs):
74 self.path = safe_str(os.path.abspath(repo_path))
74 self.path = safe_str(os.path.abspath(repo_path))
75 self.config = config if config else self.get_default_config()
75 self.config = config if config else self.get_default_config()
76 self.with_wire = with_wire or {"cache": False} # default should not use cache
76 self.with_wire = with_wire or {"cache": False} # default should not use cache
77
77
78 self._init_repo(create, src_url)
78 self._init_repo(create, src_url)
79
79
80 # caches
80 # caches
81 self._commit_ids = {}
81 self._commit_ids = {}
82
82
83 @LazyProperty
83 @LazyProperty
84 def _remote(self):
84 def _remote(self):
85 repo_id = self.path
85 repo_id = self.path
86 return connection.Svn(self.path, repo_id, self.config, with_wire=self.with_wire)
86 return connection.Svn(self.path, repo_id, self.config, with_wire=self.with_wire)
87
87
88 def _init_repo(self, create, src_url):
88 def _init_repo(self, create, src_url):
89 if create and os.path.exists(self.path):
89 if create and os.path.exists(self.path):
90 raise RepositoryError(
90 raise RepositoryError(
91 "Cannot create repository at %s, location already exist"
91 "Cannot create repository at %s, location already exist"
92 % self.path)
92 % self.path)
93
93
94 if create:
94 if create:
95 self._remote.create_repository(settings.SVN_COMPATIBLE_VERSION)
95 self._remote.create_repository(settings.SVN_COMPATIBLE_VERSION)
96 if src_url:
96 if src_url:
97 src_url = _sanitize_url(src_url)
97 src_url = _sanitize_url(src_url)
98 self._remote.import_remote_repository(src_url)
98 self._remote.import_remote_repository(src_url)
99 else:
99 else:
100 self._check_path()
100 self._check_path()
101
101
102 @CachedProperty
102 @CachedProperty
103 def commit_ids(self):
103 def commit_ids(self):
104 head = self._remote.lookup(None)
104 head = self._remote.lookup(None)
105 return [str(r) for r in range(1, head + 1)]
105 return [str(r) for r in range(1, head + 1)]
106
106
107 def _rebuild_cache(self, commit_ids):
107 def _rebuild_cache(self, commit_ids):
108 pass
108 pass
109
109
110 def run_svn_command(self, cmd, **opts):
110 def run_svn_command(self, cmd, **opts):
111 """
111 """
112 Runs given ``cmd`` as svn command and returns tuple
112 Runs given ``cmd`` as svn command and returns tuple
113 (stdout, stderr).
113 (stdout, stderr).
114
114
115 :param cmd: full svn command to be executed
115 :param cmd: full svn command to be executed
116 :param opts: env options to pass into Subprocess command
116 :param opts: env options to pass into Subprocess command
117 """
117 """
118 if not isinstance(cmd, list):
118 if not isinstance(cmd, list):
119 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
119 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
120
120
121 skip_stderr_log = opts.pop('skip_stderr_log', False)
121 skip_stderr_log = opts.pop('skip_stderr_log', False)
122 out, err = self._remote.run_svn_command(cmd, **opts)
122 out, err = self._remote.run_svn_command(cmd, **opts)
123 if err and not skip_stderr_log:
123 if err and not skip_stderr_log:
124 log.debug('Stderr output of svn command "%s":\n%s', cmd, err)
124 log.debug('Stderr output of svn command "%s":\n%s', cmd, err)
125 return out, err
125 return out, err
126
126
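# --- Editor's illustrative call (the command itself is an assumption):
#   stdout, stderr = repo.run_svn_command(
#       ['svn', 'info', repo.path], skip_stderr_log=True)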
127 @LazyProperty
127 @LazyProperty
128 def branches(self):
128 def branches(self):
129 return self._tags_or_branches('vcs_svn_branch')
129 return self._tags_or_branches('vcs_svn_branch')
130
130
131 @LazyProperty
131 @LazyProperty
132 def branches_closed(self):
132 def branches_closed(self):
133 return {}
133 return {}
134
134
135 @LazyProperty
135 @LazyProperty
136 def bookmarks(self):
136 def bookmarks(self):
137 return {}
137 return {}
138
138
139 @LazyProperty
139 @LazyProperty
140 def branches_all(self):
140 def branches_all(self):
141 # TODO: johbo: Implement proper branch support
141 # TODO: johbo: Implement proper branch support
142 all_branches = {}
142 all_branches = {}
143 all_branches.update(self.branches)
143 all_branches.update(self.branches)
144 all_branches.update(self.branches_closed)
144 all_branches.update(self.branches_closed)
145 return all_branches
145 return all_branches
146
146
147 @LazyProperty
147 @LazyProperty
148 def tags(self):
148 def tags(self):
149 return self._tags_or_branches('vcs_svn_tag')
149 return self._tags_or_branches('vcs_svn_tag')
150
150
151 def _tags_or_branches(self, config_section):
151 def _tags_or_branches(self, config_section):
152 found_items = {}
152 found_items = {}
153
153
154 if self.is_empty():
154 if self.is_empty():
155 return {}
155 return {}
156
156
157 for pattern in self._patterns_from_section(config_section):
157 for pattern in self._patterns_from_section(config_section):
158 pattern = vcspath.sanitize(pattern)
158 pattern = vcspath.sanitize(pattern)
159 tip = self.get_commit()
159 tip = self.get_commit()
160 try:
160 try:
161 if pattern.endswith('*'):
161 if pattern.endswith('*'):
162 basedir = tip.get_node(vcspath.dirname(pattern))
162 basedir = tip.get_node(vcspath.dirname(pattern))
163 directories = basedir.dirs
163 directories = basedir.dirs
164 else:
164 else:
165 directories = (tip.get_node(pattern), )
165 directories = (tip.get_node(pattern), )
166 except NodeDoesNotExistError:
166 except NodeDoesNotExistError:
167 continue
167 continue
168 found_items.update(
168 found_items.update(
169 (safe_unicode(n.path),
169 (safe_unicode(n.path),
170 self.commit_ids[-1])
170 self.commit_ids[-1])
171 for n in directories)
171 for n in directories)
172
172
173 def get_name(item):
173 def get_name(item):
174 return item[0]
174 return item[0]
175
175
176 return OrderedDict(sorted(found_items.items(), key=get_name))
176 return OrderedDict(sorted(found_items.items(), key=get_name))
177
177
178 def _patterns_from_section(self, section):
178 def _patterns_from_section(self, section):
179 return (pattern for key, pattern in self.config.items(section))
179 return (pattern for key, pattern in self.config.items(section))
180
180
181 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
181 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
182 if self != repo2:
182 if self != repo2:
183 raise ValueError(
183 raise ValueError(
184 "Subversion does not support getting common ancestor of"
184 "Subversion does not support getting common ancestor of"
185 " different repositories.")
185 " different repositories.")
186
186
187 if int(commit_id1) < int(commit_id2):
187 if int(commit_id1) < int(commit_id2):
188 return commit_id1
188 return commit_id1
189 return commit_id2
189 return commit_id2
190
190
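# --- Editor's note: Subversion history within a single repository is linear,
# so the common ancestor is simply the lower revision number, e.g.
#   repo.get_common_ancestor('42', '100', repo)  ->  '42'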
191 def verify(self):
191 def verify(self):
192 verify = self._remote.verify()
192 verify = self._remote.verify()
193
193
194 self._remote.invalidate_vcs_cache()
194 self._remote.invalidate_vcs_cache()
195 return verify
195 return verify
196
196
197 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
197 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
198 # TODO: johbo: Implement better comparison, this is a very naive
198 # TODO: johbo: Implement better comparison, this is a very naive
199 # version which does not allow to compare branches, tags or folders
199 # version which does not allow to compare branches, tags or folders
200 # at all.
200 # at all.
201 if repo2 != self:
201 if repo2 != self:
202 raise ValueError(
202 raise ValueError(
203 "Subversion does not support comparison of of different "
203 "Subversion does not support comparison of of different "
204 "repositories.")
204 "repositories.")
205
205
206 if commit_id1 == commit_id2:
206 if commit_id1 == commit_id2:
207 return []
207 return []
208
208
209 commit_idx1 = self._get_commit_idx(commit_id1)
209 commit_idx1 = self._get_commit_idx(commit_id1)
210 commit_idx2 = self._get_commit_idx(commit_id2)
210 commit_idx2 = self._get_commit_idx(commit_id2)
211
211
212 commits = [
212 commits = [
213 self.get_commit(commit_idx=idx)
213 self.get_commit(commit_idx=idx)
214 for idx in range(commit_idx1 + 1, commit_idx2 + 1)]
214 for idx in range(commit_idx1 + 1, commit_idx2 + 1)]
215
215
216 return commits
216 return commits
217
217
218 def _get_commit_idx(self, commit_id):
218 def _get_commit_idx(self, commit_id):
219 try:
219 try:
220 svn_rev = int(commit_id)
220 svn_rev = int(commit_id)
221 except (TypeError, ValueError):
221 except (TypeError, ValueError):
222 # TODO: johbo: this might be only one case, HEAD, check this
222 # TODO: johbo: this might be only one case, HEAD, check this
223 svn_rev = self._remote.lookup(commit_id)
223 svn_rev = self._remote.lookup(commit_id)
224 commit_idx = svn_rev - 1
224 commit_idx = svn_rev - 1
225 if commit_idx >= len(self.commit_ids):
225 if commit_idx >= len(self.commit_ids):
226 raise CommitDoesNotExistError(
226 raise CommitDoesNotExistError(
227 "Commit at index %s does not exist." % (commit_idx, ))
227 "Commit at index %s does not exist." % (commit_idx, ))
228 return commit_idx
228 return commit_idx
229
229
230 @staticmethod
230 @staticmethod
231 def check_url(url, config):
231 def check_url(url, config):
232 """
232 """
233 Check if `url` is a valid source to import a Subversion repository.
233 Check if `url` is a valid source to import a Subversion repository.
234 """
234 """
235 # convert to URL if it's a local directory
235 # convert to URL if it's a local directory
236 if os.path.isdir(url):
236 if os.path.isdir(url):
237 url = 'file://' + urllib.request.pathname2url(url)
237 url = 'file://' + urllib.request.pathname2url(url)
238 return connection.Svn.check_url(url, config.serialize())
238 return connection.Svn.check_url(url, config.serialize())
239
239
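# --- Editor's sketch: a local directory is converted to a file:// URL before
# the remote-side check, e.g. on POSIX (path is an assumption):
#   '/srv/svn/repo'  ->  'file:///srv/svn/repo'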
240 @staticmethod
240 @staticmethod
241 def is_valid_repository(path):
241 def is_valid_repository(path):
242 try:
242 try:
243 SubversionRepository(path)
243 SubversionRepository(path)
244 return True
244 return True
245 except VCSError:
245 except VCSError:
246 pass
246 pass
247 return False
247 return False
248
248
249 def _check_path(self):
249 def _check_path(self):
250 if not os.path.exists(self.path):
250 if not os.path.exists(self.path):
251 raise VCSError('Path "%s" does not exist!' % (self.path, ))
251 raise VCSError('Path "%s" does not exist!' % (self.path, ))
252 if not self._remote.is_path_valid_repository(self.path):
252 if not self._remote.is_path_valid_repository(self.path):
253 raise VCSError(
253 raise VCSError(
254 'Path "%s" does not contain a Subversion repository' %
254 'Path "%s" does not contain a Subversion repository' %
255 (self.path, ))
255 (self.path, ))
256
256
257 @LazyProperty
257 @LazyProperty
258 def last_change(self):
258 def last_change(self):
259 """
259 """
260 Returns last change made on this repository as
260 Returns last change made on this repository as
261 `datetime.datetime` object.
261 `datetime.datetime` object.
262 """
262 """
263 # Subversion always has a first commit which has id "0" and contains
263 # Subversion always has a first commit which has id "0" and contains
264 # what we are looking for.
264 # what we are looking for.
265 last_id = len(self.commit_ids)
265 last_id = len(self.commit_ids)
266 properties = self._remote.revision_properties(last_id)
266 properties = self._remote.revision_properties(last_id)
267 return _date_from_svn_properties(properties)
267 return _date_from_svn_properties(properties)
268
268
269 @LazyProperty
269 @LazyProperty
270 def in_memory_commit(self):
270 def in_memory_commit(self):
271 return SubversionInMemoryCommit(self)
271 return SubversionInMemoryCommit(self)
272
272
273 def get_hook_location(self):
273 def get_hook_location(self):
274 """
274 """
275 returns absolute path to location where hooks are stored
275 returns absolute path to location where hooks are stored
276 """
276 """
277 return os.path.join(self.path, 'hooks')
277 return os.path.join(self.path, 'hooks')
278
278
279 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
279 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
280 translate_tag=None, maybe_unreachable=False, reference_obj=None):
280 translate_tag=None, maybe_unreachable=False, reference_obj=None):
281 if self.is_empty():
281 if self.is_empty():
282 raise EmptyRepositoryError("There are no commits yet")
282 raise EmptyRepositoryError("There are no commits yet")
283 if commit_id is not None:
283 if commit_id is not None:
284 self._validate_commit_id(commit_id)
284 self._validate_commit_id(commit_id)
285 elif commit_idx is not None:
285 elif commit_idx is not None:
286 self._validate_commit_idx(commit_idx)
286 self._validate_commit_idx(commit_idx)
287 try:
287 try:
288 commit_id = self.commit_ids[commit_idx]
288 commit_id = self.commit_ids[commit_idx]
289 except IndexError:
289 except IndexError:
290 raise CommitDoesNotExistError('No commit with idx: {}'.format(commit_idx))
290 raise CommitDoesNotExistError('No commit with idx: {}'.format(commit_idx))
291
291
292 commit_id = self._sanitize_commit_id(commit_id)
292 commit_id = self._sanitize_commit_id(commit_id)
293 commit = SubversionCommit(repository=self, commit_id=commit_id)
293 commit = SubversionCommit(repository=self, commit_id=commit_id)
294 return commit
294 return commit
295
295
296 def get_commits(
296 def get_commits(
297 self, start_id=None, end_id=None, start_date=None, end_date=None,
297 self, start_id=None, end_id=None, start_date=None, end_date=None,
298 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
298 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
299 if self.is_empty():
299 if self.is_empty():
300 raise EmptyRepositoryError("There are no commit_ids yet")
300 raise EmptyRepositoryError("There are no commit_ids yet")
301 self._validate_branch_name(branch_name)
301 self._validate_branch_name(branch_name)
302
302
303 if start_id is not None:
303 if start_id is not None:
304 self._validate_commit_id(start_id)
304 self._validate_commit_id(start_id)
305 if end_id is not None:
305 if end_id is not None:
306 self._validate_commit_id(end_id)
306 self._validate_commit_id(end_id)
307
307
308 start_raw_id = self._sanitize_commit_id(start_id)
308 start_raw_id = self._sanitize_commit_id(start_id)
309 start_pos = self.commit_ids.index(start_raw_id) if start_id else None
309 start_pos = self.commit_ids.index(start_raw_id) if start_id else None
310 end_raw_id = self._sanitize_commit_id(end_id)
310 end_raw_id = self._sanitize_commit_id(end_id)
311 end_pos = max(0, self.commit_ids.index(end_raw_id)) if end_id else None
311 end_pos = max(0, self.commit_ids.index(end_raw_id)) if end_id else None
312
312
313 if None not in [start_id, end_id] and start_pos > end_pos:
313 if None not in [start_id, end_id] and start_pos > end_pos:
314 raise RepositoryError(
314 raise RepositoryError(
315 "Start commit '%s' cannot be after end commit '%s'" %
315 "Start commit '%s' cannot be after end commit '%s'" %
316 (start_id, end_id))
316 (start_id, end_id))
317 if end_pos is not None:
317 if end_pos is not None:
318 end_pos += 1
318 end_pos += 1
319
319
320 # Date based filtering
320 # Date based filtering
321 if start_date or end_date:
321 if start_date or end_date:
322 start_raw_id, end_raw_id = self._remote.lookup_interval(
322 start_raw_id, end_raw_id = self._remote.lookup_interval(
323 date_astimestamp(start_date) if start_date else None,
323 date_astimestamp(start_date) if start_date else None,
324 date_astimestamp(end_date) if end_date else None)
324 date_astimestamp(end_date) if end_date else None)
325 start_pos = start_raw_id - 1
325 start_pos = start_raw_id - 1
326 end_pos = end_raw_id
326 end_pos = end_raw_id
327
327
328 commit_ids = self.commit_ids
328 commit_ids = self.commit_ids
329
329
330 # TODO: johbo: Reconsider impact of DEFAULT_BRANCH_NAME here
330 # TODO: johbo: Reconsider impact of DEFAULT_BRANCH_NAME here
331 if branch_name not in [None, self.DEFAULT_BRANCH_NAME]:
331 if branch_name not in [None, self.DEFAULT_BRANCH_NAME]:
332 svn_rev = int(self.commit_ids[-1])
332 svn_rev = int(self.commit_ids[-1])
333 commit_ids = self._remote.node_history(
333 commit_ids = self._remote.node_history(
334 path=branch_name, revision=svn_rev, limit=None)
334 path=branch_name, revision=svn_rev, limit=None)
335 commit_ids = [str(i) for i in reversed(commit_ids)]
335 commit_ids = [str(i) for i in reversed(commit_ids)]
336
336
337 if start_pos or end_pos:
337 if start_pos or end_pos:
338 commit_ids = commit_ids[start_pos:end_pos]
338 commit_ids = commit_ids[start_pos:end_pos]
339 return base.CollectionGenerator(self, commit_ids, pre_load=pre_load)
339 return base.CollectionGenerator(self, commit_ids, pre_load=pre_load)
340
340
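# --- Editor's illustrative usage: in this backend ``branch_name`` is a
# repository path resolved via node_history (see _tags_or_branches); the
# path below is an assumption:
#
#   commits = repo.get_commits(branch_name='branches/feature-x')
#   ids = [c.raw_id for c in commits]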
341 def _sanitize_commit_id(self, commit_id):
341 def _sanitize_commit_id(self, commit_id):
342 if commit_id and commit_id.isdigit():
342 if commit_id and commit_id.isdigit():
343 if int(commit_id) <= len(self.commit_ids):
343 if int(commit_id) <= len(self.commit_ids):
344 return commit_id
344 return commit_id
345 else:
345 else:
346 raise CommitDoesNotExistError(
346 raise CommitDoesNotExistError(
347 "Commit %s does not exist." % (commit_id, ))
347 "Commit %s does not exist." % (commit_id, ))
348 if commit_id not in [
348 if commit_id not in [
349 None, 'HEAD', 'tip', self.DEFAULT_BRANCH_NAME]:
349 None, 'HEAD', 'tip', self.DEFAULT_BRANCH_NAME]:
350 raise CommitDoesNotExistError(
350 raise CommitDoesNotExistError(
351 "Commit id %s not understood." % (commit_id, ))
351 "Commit id %s not understood." % (commit_id, ))
352 svn_rev = self._remote.lookup('HEAD')
352 svn_rev = self._remote.lookup('HEAD')
353 return str(svn_rev)
353 return str(svn_rev)
354
354
355 def get_diff(
355 def get_diff(
356 self, commit1, commit2, path=None, ignore_whitespace=False,
356 self, commit1, commit2, path=None, ignore_whitespace=False,
357 context=3, path1=None):
357 context=3, path1=None):
358 self._validate_diff_commits(commit1, commit2)
358 self._validate_diff_commits(commit1, commit2)
359 svn_rev1 = int(commit1.raw_id)
359 svn_rev1 = int(commit1.raw_id)
360 svn_rev2 = int(commit2.raw_id)
360 svn_rev2 = int(commit2.raw_id)
361 diff = self._remote.diff(
361 diff = self._remote.diff(
362 svn_rev1, svn_rev2, path1=path1, path2=path,
362 svn_rev1, svn_rev2, path1=path1, path2=path,
363 ignore_whitespace=ignore_whitespace, context=context)
363 ignore_whitespace=ignore_whitespace, context=context)
364 return SubversionDiff(diff)
364 return SubversionDiff(diff)
365
365
366
366
367 def _sanitize_url(url):
367 def _sanitize_url(url):
368 if '://' not in url:
368 if '://' not in url:
369 url = 'file://' + urllib.request.pathname2url(url)
369 url = 'file://' + urllib.request.pathname2url(url)
370 return url
370 return url
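A quick usage sketch of the helper above; the paths are made up, and the printed results assume a POSIX filesystem.

import urllib.request

def sanitize_url(url):
    # bare filesystem paths become file:// URLs; URLs with a scheme pass through
    if '://' not in url:
        url = 'file://' + urllib.request.pathname2url(url)
    return url

print(sanitize_url('/srv/svn/myrepo'))         # file:///srv/svn/myrepo
print(sanitize_url('http://svn.example.com'))  # unchanged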
@@ -1,2380 +1,2380 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2012-2020 RhodeCode GmbH
3 # Copyright (C) 2012-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 pull request model for RhodeCode
23 pull request model for RhodeCode
24 """
24 """
25
25
26
26
27 import json
27 import json
28 import logging
28 import logging
29 import os
29 import os
30
30
31 import datetime
31 import datetime
32 import urllib.request, urllib.parse, urllib.error
32 import urllib.request, urllib.parse, urllib.error
33 import collections
33 import collections
34
34
35 from pyramid.threadlocal import get_current_request
35 from pyramid.threadlocal import get_current_request
36
36
37 from rhodecode.lib.vcs.nodes import FileNode
37 from rhodecode.lib.vcs.nodes import FileNode
38 from rhodecode.translation import lazy_ugettext
38 from rhodecode.translation import lazy_ugettext
39 from rhodecode.lib import helpers as h, hooks_utils, diffs
39 from rhodecode.lib import helpers as h, hooks_utils, diffs
40 from rhodecode.lib import audit_logger
40 from rhodecode.lib import audit_logger
41 from rhodecode.lib.compat import OrderedDict
41 from collections import OrderedDict
42 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
42 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
43 from rhodecode.lib.markup_renderer import (
43 from rhodecode.lib.markup_renderer import (
44 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
44 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
45 from rhodecode.lib.utils2 import (
45 from rhodecode.lib.utils2 import (
46 safe_unicode, safe_str, md5_safe, AttributeDict, safe_int,
46 safe_unicode, safe_str, md5_safe, AttributeDict, safe_int,
47 get_current_rhodecode_user)
47 get_current_rhodecode_user)
48 from rhodecode.lib.vcs.backends.base import (
48 from rhodecode.lib.vcs.backends.base import (
49 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason,
49 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason,
50 TargetRefMissing, SourceRefMissing)
50 TargetRefMissing, SourceRefMissing)
51 from rhodecode.lib.vcs.conf import settings as vcs_settings
51 from rhodecode.lib.vcs.conf import settings as vcs_settings
52 from rhodecode.lib.vcs.exceptions import (
52 from rhodecode.lib.vcs.exceptions import (
53 CommitDoesNotExistError, EmptyRepositoryError)
53 CommitDoesNotExistError, EmptyRepositoryError)
54 from rhodecode.model import BaseModel
54 from rhodecode.model import BaseModel
55 from rhodecode.model.changeset_status import ChangesetStatusModel
55 from rhodecode.model.changeset_status import ChangesetStatusModel
56 from rhodecode.model.comment import CommentsModel
56 from rhodecode.model.comment import CommentsModel
57 from rhodecode.model.db import (
57 from rhodecode.model.db import (
58 aliased, null, lazyload, and_, or_, func, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
58 aliased, null, lazyload, and_, or_, func, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
59 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User)
59 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User)
60 from rhodecode.model.meta import Session
60 from rhodecode.model.meta import Session
61 from rhodecode.model.notification import NotificationModel, \
61 from rhodecode.model.notification import NotificationModel, \
62 EmailNotificationModel
62 EmailNotificationModel
63 from rhodecode.model.scm import ScmModel
63 from rhodecode.model.scm import ScmModel
64 from rhodecode.model.settings import VcsSettingsModel
64 from rhodecode.model.settings import VcsSettingsModel
65
65
66
66
67 log = logging.getLogger(__name__)
67 log = logging.getLogger(__name__)
68
68
69
69
70 # Data structure to hold the response data when updating commits during a pull
70 # Data structure to hold the response data when updating commits during a pull
71 # request update.
71 # request update.
72 class UpdateResponse(object):
72 class UpdateResponse(object):
73
73
74 def __init__(self, executed, reason, new, old, common_ancestor_id,
74 def __init__(self, executed, reason, new, old, common_ancestor_id,
75 commit_changes, source_changed, target_changed):
75 commit_changes, source_changed, target_changed):
76
76
77 self.executed = executed
77 self.executed = executed
78 self.reason = reason
78 self.reason = reason
79 self.new = new
79 self.new = new
80 self.old = old
80 self.old = old
81 self.common_ancestor_id = common_ancestor_id
81 self.common_ancestor_id = common_ancestor_id
82 self.changes = commit_changes
82 self.changes = commit_changes
83 self.source_changed = source_changed
83 self.source_changed = source_changed
84 self.target_changed = target_changed
84 self.target_changed = target_changed
85
85
86
86
87 def get_diff_info(
87 def get_diff_info(
88 source_repo, source_ref, target_repo, target_ref, get_authors=False,
88 source_repo, source_ref, target_repo, target_ref, get_authors=False,
89 get_commit_authors=True):
89 get_commit_authors=True):
90 """
90 """
91 Calculates detailed diff information for use in the preview when creating a pull-request.
91 Calculates detailed diff information for use in the preview when creating a pull-request.
92 This is also used for the default reviewers logic.
92 This is also used for the default reviewers logic.
93 """
93 """
94
94
95 source_scm = source_repo.scm_instance()
95 source_scm = source_repo.scm_instance()
96 target_scm = target_repo.scm_instance()
96 target_scm = target_repo.scm_instance()
97
97
98 ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm)
98 ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm)
99 if not ancestor_id:
99 if not ancestor_id:
100 raise ValueError(
100 raise ValueError(
101 'cannot calculate diff info without a common ancestor. '
101 'cannot calculate diff info without a common ancestor. '
102 'Make sure both repositories are related, and have a common forking commit.')
102 'Make sure both repositories are related, and have a common forking commit.')
103
103
104 # the case here is that we want a simple diff without incoming commits,
104 # the case here is that we want a simple diff without incoming commits,
105 # previewing what will be merged based only on commits in the source.
105 # previewing what will be merged based only on commits in the source.
106 log.debug('Using ancestor %s as source_ref instead of %s',
106 log.debug('Using ancestor %s as source_ref instead of %s',
107 ancestor_id, source_ref)
107 ancestor_id, source_ref)
108
108
109 # source of changes now is the common ancestor
109 # source of changes now is the common ancestor
110 source_commit = source_scm.get_commit(commit_id=ancestor_id)
110 source_commit = source_scm.get_commit(commit_id=ancestor_id)
111 # the target commit becomes the source ref, as it is the last commit;
111 # the target commit becomes the source ref, as it is the last commit;
112 # for diff generation this logic gives a proper diff
112 # for diff generation this logic gives a proper diff
113 target_commit = source_scm.get_commit(commit_id=source_ref)
113 target_commit = source_scm.get_commit(commit_id=source_ref)
114
114
115 vcs_diff = \
115 vcs_diff = \
116 source_scm.get_diff(commit1=source_commit, commit2=target_commit,
116 source_scm.get_diff(commit1=source_commit, commit2=target_commit,
117 ignore_whitespace=False, context=3)
117 ignore_whitespace=False, context=3)
118
118
119 diff_processor = diffs.DiffProcessor(
119 diff_processor = diffs.DiffProcessor(
120 vcs_diff, format='newdiff', diff_limit=None,
120 vcs_diff, format='newdiff', diff_limit=None,
121 file_limit=None, show_full_diff=True)
121 file_limit=None, show_full_diff=True)
122
122
123 _parsed = diff_processor.prepare()
123 _parsed = diff_processor.prepare()
124
124
125 all_files = []
125 all_files = []
126 all_files_changes = []
126 all_files_changes = []
127 changed_lines = {}
127 changed_lines = {}
128 stats = [0, 0]
128 stats = [0, 0]
129 for f in _parsed:
129 for f in _parsed:
130 all_files.append(f['filename'])
130 all_files.append(f['filename'])
131 all_files_changes.append({
131 all_files_changes.append({
132 'filename': f['filename'],
132 'filename': f['filename'],
133 'stats': f['stats']
133 'stats': f['stats']
134 })
134 })
135 stats[0] += f['stats']['added']
135 stats[0] += f['stats']['added']
136 stats[1] += f['stats']['deleted']
136 stats[1] += f['stats']['deleted']
137
137
138 changed_lines[f['filename']] = []
138 changed_lines[f['filename']] = []
139 if len(f['chunks']) < 2:
139 if len(f['chunks']) < 2:
140 continue
140 continue
141 # first line is "context" information
141 # first line is "context" information
142 for chunks in f['chunks'][1:]:
142 for chunks in f['chunks'][1:]:
143 for chunk in chunks['lines']:
143 for chunk in chunks['lines']:
144 if chunk['action'] not in ('del', 'mod'):
144 if chunk['action'] not in ('del', 'mod'):
145 continue
145 continue
146 changed_lines[f['filename']].append(chunk['old_lineno'])
146 changed_lines[f['filename']].append(chunk['old_lineno'])
147
147
148 commit_authors = []
148 commit_authors = []
149 user_counts = {}
149 user_counts = {}
150 email_counts = {}
150 email_counts = {}
151 author_counts = {}
151 author_counts = {}
152 _commit_cache = {}
152 _commit_cache = {}
153
153
154 commits = []
154 commits = []
155 if get_commit_authors:
155 if get_commit_authors:
156 log.debug('Obtaining commit authors from set of commits')
156 log.debug('Obtaining commit authors from set of commits')
157 _compare_data = target_scm.compare(
157 _compare_data = target_scm.compare(
158 target_ref, source_ref, source_scm, merge=True,
158 target_ref, source_ref, source_scm, merge=True,
159 pre_load=["author", "date", "message"]
159 pre_load=["author", "date", "message"]
160 )
160 )
161
161
162 for commit in _compare_data:
162 for commit in _compare_data:
163 # NOTE(marcink): we serialize here, so we don't produce more vcsserver calls on the data
163 # NOTE(marcink): we serialize here, so we don't produce more vcsserver calls on the data
164 # returned by this function, which is later passed through JSON serialization
164 # returned by this function, which is later passed through JSON serialization
165 serialized_commit = dict(
165 serialized_commit = dict(
166 author=commit.author,
166 author=commit.author,
167 date=commit.date,
167 date=commit.date,
168 message=commit.message,
168 message=commit.message,
169 commit_id=commit.raw_id,
169 commit_id=commit.raw_id,
170 raw_id=commit.raw_id
170 raw_id=commit.raw_id
171 )
171 )
172 commits.append(serialized_commit)
172 commits.append(serialized_commit)
173 user = User.get_from_cs_author(serialized_commit['author'])
173 user = User.get_from_cs_author(serialized_commit['author'])
174 if user and user not in commit_authors:
174 if user and user not in commit_authors:
175 commit_authors.append(user)
175 commit_authors.append(user)
176
176
177 # lines
177 # lines
178 if get_authors:
178 if get_authors:
179 log.debug('Calculating authors of changed files')
179 log.debug('Calculating authors of changed files')
180 target_commit = source_repo.get_commit(ancestor_id)
180 target_commit = source_repo.get_commit(ancestor_id)
181
181
182 for fname, lines in changed_lines.items():
182 for fname, lines in changed_lines.items():
183
183
184 try:
184 try:
185 node = target_commit.get_node(fname, pre_load=["is_binary"])
185 node = target_commit.get_node(fname, pre_load=["is_binary"])
186 except Exception:
186 except Exception:
187 log.exception("Failed to load node with path %s", fname)
187 log.exception("Failed to load node with path %s", fname)
188 continue
188 continue
189
189
190 if not isinstance(node, FileNode):
190 if not isinstance(node, FileNode):
191 continue
191 continue
192
192
193 # NOTE(marcink): for binary node we don't do annotation, just use last author
193 # NOTE(marcink): for binary node we don't do annotation, just use last author
194 if node.is_binary:
194 if node.is_binary:
195 author = node.last_commit.author
195 author = node.last_commit.author
196 email = node.last_commit.author_email
196 email = node.last_commit.author_email
197
197
198 user = User.get_from_cs_author(author)
198 user = User.get_from_cs_author(author)
199 if user:
199 if user:
200 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
200 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
201 author_counts[author] = author_counts.get(author, 0) + 1
201 author_counts[author] = author_counts.get(author, 0) + 1
202 email_counts[email] = email_counts.get(email, 0) + 1
202 email_counts[email] = email_counts.get(email, 0) + 1
203
203
204 continue
204 continue
205
205
206 for annotation in node.annotate:
206 for annotation in node.annotate:
207 line_no, commit_id, get_commit_func, line_text = annotation
207 line_no, commit_id, get_commit_func, line_text = annotation
208 if line_no in lines:
208 if line_no in lines:
209 if commit_id not in _commit_cache:
209 if commit_id not in _commit_cache:
210 _commit_cache[commit_id] = get_commit_func()
210 _commit_cache[commit_id] = get_commit_func()
211 commit = _commit_cache[commit_id]
211 commit = _commit_cache[commit_id]
212 author = commit.author
212 author = commit.author
213 email = commit.author_email
213 email = commit.author_email
214 user = User.get_from_cs_author(author)
214 user = User.get_from_cs_author(author)
215 if user:
215 if user:
216 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
216 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
217 author_counts[author] = author_counts.get(author, 0) + 1
217 author_counts[author] = author_counts.get(author, 0) + 1
218 email_counts[email] = email_counts.get(email, 0) + 1
218 email_counts[email] = email_counts.get(email, 0) + 1
219
219
220 log.debug('Default reviewers processing finished')
220 log.debug('Default reviewers processing finished')
221
221
222 return {
222 return {
223 'commits': commits,
223 'commits': commits,
224 'files': all_files_changes,
224 'files': all_files_changes,
225 'stats': stats,
225 'stats': stats,
226 'ancestor': ancestor_id,
226 'ancestor': ancestor_id,
227 # original authors of modified files
227 # original authors of modified files
228 'original_authors': {
228 'original_authors': {
229 'users': user_counts,
229 'users': user_counts,
230 'authors': author_counts,
230 'authors': author_counts,
231 'emails': email_counts,
231 'emails': email_counts,
232 },
232 },
233 'commit_authors': commit_authors
233 'commit_authors': commit_authors
234 }
234 }
235
235
236
236
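For orientation, a sketch of the shape of the dictionary returned by get_diff_info(); every value below is made up, and 'commit_authors' would hold User objects in a real call.

import datetime

diff_info = {
    'commits': [{
        'author': 'Jane Doe <jane@example.com>',    # hypothetical author
        'date': datetime.datetime(2020, 1, 1),
        'message': 'fix typo',
        'commit_id': 'deadbeef',
        'raw_id': 'deadbeef',
    }],
    'files': [{'filename': 'setup.py', 'stats': {'added': 3, 'deleted': 1}}],
    'stats': [3, 1],                  # [total added, total deleted]
    'ancestor': 'cafebabe',
    'original_authors': {
        'users': {1: 2},              # user_id -> occurrence count
        'authors': {'Jane Doe <jane@example.com>': 2},
        'emails': {'jane@example.com': 2},
    },
    'commit_authors': [],             # User objects in the real call
}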
237 class PullRequestModel(BaseModel):
237 class PullRequestModel(BaseModel):
238
238
239 cls = PullRequest
239 cls = PullRequest
240
240
241 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
241 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
242
242
243 UPDATE_STATUS_MESSAGES = {
243 UPDATE_STATUS_MESSAGES = {
244 UpdateFailureReason.NONE: lazy_ugettext(
244 UpdateFailureReason.NONE: lazy_ugettext(
245 'Pull request update successful.'),
245 'Pull request update successful.'),
246 UpdateFailureReason.UNKNOWN: lazy_ugettext(
246 UpdateFailureReason.UNKNOWN: lazy_ugettext(
247 'Pull request update failed because of an unknown error.'),
247 'Pull request update failed because of an unknown error.'),
248 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
248 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
249 'No update needed because the source and target have not changed.'),
249 'No update needed because the source and target have not changed.'),
250 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
250 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
251 'Pull request cannot be updated because the reference type is '
251 'Pull request cannot be updated because the reference type is '
252 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
252 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
253 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
253 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
254 'This pull request cannot be updated because the target '
254 'This pull request cannot be updated because the target '
255 'reference is missing.'),
255 'reference is missing.'),
256 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
256 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
257 'This pull request cannot be updated because the source '
257 'This pull request cannot be updated because the source '
258 'reference is missing.'),
258 'reference is missing.'),
259 }
259 }
260 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
260 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
261 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
261 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
262
262
263 def __get_pull_request(self, pull_request):
263 def __get_pull_request(self, pull_request):
264 return self._get_instance((
264 return self._get_instance((
265 PullRequest, PullRequestVersion), pull_request)
265 PullRequest, PullRequestVersion), pull_request)
266
266
267 def _check_perms(self, perms, pull_request, user, api=False):
267 def _check_perms(self, perms, pull_request, user, api=False):
268 if not api:
268 if not api:
269 return h.HasRepoPermissionAny(*perms)(
269 return h.HasRepoPermissionAny(*perms)(
270 user=user, repo_name=pull_request.target_repo.repo_name)
270 user=user, repo_name=pull_request.target_repo.repo_name)
271 else:
271 else:
272 return h.HasRepoPermissionAnyApi(*perms)(
272 return h.HasRepoPermissionAnyApi(*perms)(
273 user=user, repo_name=pull_request.target_repo.repo_name)
273 user=user, repo_name=pull_request.target_repo.repo_name)
274
274
275 def check_user_read(self, pull_request, user, api=False):
275 def check_user_read(self, pull_request, user, api=False):
276 _perms = ('repository.admin', 'repository.write', 'repository.read',)
276 _perms = ('repository.admin', 'repository.write', 'repository.read',)
277 return self._check_perms(_perms, pull_request, user, api)
277 return self._check_perms(_perms, pull_request, user, api)
278
278
279 def check_user_merge(self, pull_request, user, api=False):
279 def check_user_merge(self, pull_request, user, api=False):
280 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
280 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
281 return self._check_perms(_perms, pull_request, user, api)
281 return self._check_perms(_perms, pull_request, user, api)
282
282
283 def check_user_update(self, pull_request, user, api=False):
283 def check_user_update(self, pull_request, user, api=False):
284 owner = user.user_id == pull_request.user_id
284 owner = user.user_id == pull_request.user_id
285 return self.check_user_merge(pull_request, user, api) or owner
285 return self.check_user_merge(pull_request, user, api) or owner
286
286
287 def check_user_delete(self, pull_request, user):
287 def check_user_delete(self, pull_request, user):
288 owner = user.user_id == pull_request.user_id
288 owner = user.user_id == pull_request.user_id
289 _perms = ('repository.admin',)
289 _perms = ('repository.admin',)
290 return self._check_perms(_perms, pull_request, user) or owner
290 return self._check_perms(_perms, pull_request, user) or owner
291
291
292 def is_user_reviewer(self, pull_request, user):
292 def is_user_reviewer(self, pull_request, user):
293 return user.user_id in [
293 return user.user_id in [
294 x.user_id for x in
294 x.user_id for x in
295 pull_request.get_pull_request_reviewers(PullRequestReviewers.ROLE_REVIEWER)
295 pull_request.get_pull_request_reviewers(PullRequestReviewers.ROLE_REVIEWER)
296 if x.user
296 if x.user
297 ]
297 ]
298
298
299 def check_user_change_status(self, pull_request, user, api=False):
299 def check_user_change_status(self, pull_request, user, api=False):
300 return self.check_user_update(pull_request, user, api) \
300 return self.check_user_update(pull_request, user, api) \
301 or self.is_user_reviewer(pull_request, user)
301 or self.is_user_reviewer(pull_request, user)
302
302
303 def check_user_comment(self, pull_request, user):
303 def check_user_comment(self, pull_request, user):
304 owner = user.user_id == pull_request.user_id
304 owner = user.user_id == pull_request.user_id
305 return self.check_user_read(pull_request, user) or owner
305 return self.check_user_read(pull_request, user) or owner
306
306
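The check_user_* methods above all compose the same rule: a set of repository permissions OR ownership of the pull request. A minimal standalone sketch of that pattern (not the RhodeCode API):

def can_delete(user_id, owner_id, granted_perms):
    # mirrors check_user_delete: required permission set OR ownership
    required = {'repository.admin'}
    return bool(required & set(granted_perms)) or user_id == owner_id

assert can_delete(1, 1, [])                        # the owner may delete
assert can_delete(2, 1, ['repository.admin'])      # admins may delete
assert not can_delete(2, 1, ['repository.read'])   # readers may not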
307 def get(self, pull_request):
307 def get(self, pull_request):
308 return self.__get_pull_request(pull_request)
308 return self.__get_pull_request(pull_request)
309
309
310 def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
310 def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
311 statuses=None, opened_by=None, order_by=None,
311 statuses=None, opened_by=None, order_by=None,
312 order_dir='desc', only_created=False):
312 order_dir='desc', only_created=False):
313 repo = None
313 repo = None
314 if repo_name:
314 if repo_name:
315 repo = self._get_repo(repo_name)
315 repo = self._get_repo(repo_name)
316
316
317 q = PullRequest.query()
317 q = PullRequest.query()
318
318
319 if search_q:
319 if search_q:
320 like_expression = u'%{}%'.format(safe_unicode(search_q))
320 like_expression = u'%{}%'.format(safe_unicode(search_q))
321 q = q.join(User, User.user_id == PullRequest.user_id)
321 q = q.join(User, User.user_id == PullRequest.user_id)
322 q = q.filter(or_(
322 q = q.filter(or_(
323 cast(PullRequest.pull_request_id, String).ilike(like_expression),
323 cast(PullRequest.pull_request_id, String).ilike(like_expression),
324 User.username.ilike(like_expression),
324 User.username.ilike(like_expression),
325 PullRequest.title.ilike(like_expression),
325 PullRequest.title.ilike(like_expression),
326 PullRequest.description.ilike(like_expression),
326 PullRequest.description.ilike(like_expression),
327 ))
327 ))
328
328
329 # source or target
329 # source or target
330 if repo and source:
330 if repo and source:
331 q = q.filter(PullRequest.source_repo == repo)
331 q = q.filter(PullRequest.source_repo == repo)
332 elif repo:
332 elif repo:
333 q = q.filter(PullRequest.target_repo == repo)
333 q = q.filter(PullRequest.target_repo == repo)
334
334
335 # closed,opened
335 # closed,opened
336 if statuses:
336 if statuses:
337 q = q.filter(PullRequest.status.in_(statuses))
337 q = q.filter(PullRequest.status.in_(statuses))
338
338
339 # opened by filter
339 # opened by filter
340 if opened_by:
340 if opened_by:
341 q = q.filter(PullRequest.user_id.in_(opened_by))
341 q = q.filter(PullRequest.user_id.in_(opened_by))
342
342
343 # only get those that are in "created" state
343 # only get those that are in "created" state
344 if only_created:
344 if only_created:
345 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
345 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
346
346
347 order_map = {
347 order_map = {
348 'name_raw': PullRequest.pull_request_id,
348 'name_raw': PullRequest.pull_request_id,
349 'id': PullRequest.pull_request_id,
349 'id': PullRequest.pull_request_id,
350 'title': PullRequest.title,
350 'title': PullRequest.title,
351 'updated_on_raw': PullRequest.updated_on,
351 'updated_on_raw': PullRequest.updated_on,
352 'target_repo': PullRequest.target_repo_id
352 'target_repo': PullRequest.target_repo_id
353 }
353 }
354 if order_by and order_by in order_map:
354 if order_by and order_by in order_map:
355 if order_dir == 'asc':
355 if order_dir == 'asc':
356 q = q.order_by(order_map[order_by].asc())
356 q = q.order_by(order_map[order_by].asc())
357 else:
357 else:
358 q = q.order_by(order_map[order_by].desc())
358 q = q.order_by(order_map[order_by].desc())
359
359
360 return q
360 return q
361
361
362 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
362 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
363 opened_by=None):
363 opened_by=None):
364 """
364 """
365 Count the number of pull requests for a specific repository.
365 Count the number of pull requests for a specific repository.
366
366
367 :param repo_name: target or source repo
367 :param repo_name: target or source repo
368 :param search_q: filter by text
368 :param search_q: filter by text
369 :param source: boolean flag to specify if repo_name refers to source
369 :param source: boolean flag to specify if repo_name refers to source
370 :param statuses: list of pull request statuses
370 :param statuses: list of pull request statuses
371 :param opened_by: author user of the pull request
371 :param opened_by: author user of the pull request
372 :returns: int number of pull requests
372 :returns: int number of pull requests
373 """
373 """
374 q = self._prepare_get_all_query(
374 q = self._prepare_get_all_query(
375 repo_name, search_q=search_q, source=source, statuses=statuses,
375 repo_name, search_q=search_q, source=source, statuses=statuses,
376 opened_by=opened_by)
376 opened_by=opened_by)
377
377
378 return q.count()
378 return q.count()
379
379
380 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
380 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
381 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
381 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
382 """
382 """
383 Get all pull requests for a specific repository.
383 Get all pull requests for a specific repository.
384
384
385 :param repo_name: target or source repo
385 :param repo_name: target or source repo
386 :param search_q: filter by text
386 :param search_q: filter by text
387 :param source: boolean flag to specify if repo_name refers to source
387 :param source: boolean flag to specify if repo_name refers to source
388 :param statuses: list of pull request statuses
388 :param statuses: list of pull request statuses
389 :param opened_by: author user of the pull request
389 :param opened_by: author user of the pull request
390 :param offset: pagination offset
390 :param offset: pagination offset
391 :param length: length of returned list
391 :param length: length of returned list
392 :param order_by: order of the returned list
392 :param order_by: order of the returned list
393 :param order_dir: 'asc' or 'desc' ordering direction
393 :param order_dir: 'asc' or 'desc' ordering direction
394 :returns: list of pull requests
394 :returns: list of pull requests
395 """
395 """
396 q = self._prepare_get_all_query(
396 q = self._prepare_get_all_query(
397 repo_name, search_q=search_q, source=source, statuses=statuses,
397 repo_name, search_q=search_q, source=source, statuses=statuses,
398 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
398 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
399
399
400 if length:
400 if length:
401 pull_requests = q.limit(length).offset(offset).all()
401 pull_requests = q.limit(length).offset(offset).all()
402 else:
402 else:
403 pull_requests = q.all()
403 pull_requests = q.all()
404
404
405 return pull_requests
405 return pull_requests
406
406
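A hedged usage sketch of the two calls above for a paginated listing; the repository name is made up, and 'new'/'open' are assumed to be valid status values.

model = PullRequestModel()
total = model.count_all('acme/website', statuses=['new', 'open'])
page = model.get_all(
    'acme/website', statuses=['new', 'open'],
    offset=0, length=20, order_by='updated_on_raw', order_dir='desc')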
407 def count_awaiting_review(self, repo_name, search_q=None, statuses=None):
407 def count_awaiting_review(self, repo_name, search_q=None, statuses=None):
408 """
408 """
409 Count the number of pull requests for a specific repository that are
409 Count the number of pull requests for a specific repository that are
410 awaiting review.
410 awaiting review.
411
411
412 :param repo_name: target or source repo
412 :param repo_name: target or source repo
413 :param search_q: filter by text
413 :param search_q: filter by text
414 :param statuses: list of pull request statuses
414 :param statuses: list of pull request statuses
415 :returns: int number of pull requests
415 :returns: int number of pull requests
416 """
416 """
417 pull_requests = self.get_awaiting_review(
417 pull_requests = self.get_awaiting_review(
418 repo_name, search_q=search_q, statuses=statuses)
418 repo_name, search_q=search_q, statuses=statuses)
419
419
420 return len(pull_requests)
420 return len(pull_requests)
421
421
422 def get_awaiting_review(self, repo_name, search_q=None, statuses=None,
422 def get_awaiting_review(self, repo_name, search_q=None, statuses=None,
423 offset=0, length=None, order_by=None, order_dir='desc'):
423 offset=0, length=None, order_by=None, order_dir='desc'):
424 """
424 """
425 Get all pull requests for a specific repository that are awaiting
425 Get all pull requests for a specific repository that are awaiting
426 review.
426 review.
427
427
428 :param repo_name: target or source repo
428 :param repo_name: target or source repo
429 :param search_q: filter by text
429 :param search_q: filter by text
430 :param statuses: list of pull request statuses
430 :param statuses: list of pull request statuses
431 :param offset: pagination offset
431 :param offset: pagination offset
432 :param length: length of returned list
432 :param length: length of returned list
433 :param order_by: order of the returned list
433 :param order_by: order of the returned list
434 :param order_dir: 'asc' or 'desc' ordering direction
434 :param order_dir: 'asc' or 'desc' ordering direction
435 :returns: list of pull requests
435 :returns: list of pull requests
436 """
436 """
437 pull_requests = self.get_all(
437 pull_requests = self.get_all(
438 repo_name, search_q=search_q, statuses=statuses,
438 repo_name, search_q=search_q, statuses=statuses,
439 order_by=order_by, order_dir=order_dir)
439 order_by=order_by, order_dir=order_dir)
440
440
441 _filtered_pull_requests = []
441 _filtered_pull_requests = []
442 for pr in pull_requests:
442 for pr in pull_requests:
443 status = pr.calculated_review_status()
443 status = pr.calculated_review_status()
444 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
444 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
445 ChangesetStatus.STATUS_UNDER_REVIEW]:
445 ChangesetStatus.STATUS_UNDER_REVIEW]:
446 _filtered_pull_requests.append(pr)
446 _filtered_pull_requests.append(pr)
447 if length:
447 if length:
448 return _filtered_pull_requests[offset:offset+length]
448 return _filtered_pull_requests[offset:offset+length]
449 else:
449 else:
450 return _filtered_pull_requests
450 return _filtered_pull_requests
451
451
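Unlike get_all(), the review filter above runs in Python, since calculated_review_status() is derived per pull request rather than stored in a queryable column; pagination is therefore a plain list slice after filtering. A standalone sketch of that shape:

prs = ['pr-1', 'pr-2', 'pr-3', 'pr-4']          # made-up, already ordered
awaiting = [p for p in prs if p != 'pr-3']      # stand-in for the status check
offset, length = 1, 2
print(awaiting[offset:offset + length])         # ['pr-2', 'pr-4']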
452 def _prepare_awaiting_my_review_review_query(
452 def _prepare_awaiting_my_review_review_query(
453 self, repo_name, user_id, search_q=None, statuses=None,
453 self, repo_name, user_id, search_q=None, statuses=None,
454 order_by=None, order_dir='desc'):
454 order_by=None, order_dir='desc'):
455
455
456 for_review_statuses = [
456 for_review_statuses = [
457 ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED
457 ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED
458 ]
458 ]
459
459
460 pull_request_alias = aliased(PullRequest)
460 pull_request_alias = aliased(PullRequest)
461 status_alias = aliased(ChangesetStatus)
461 status_alias = aliased(ChangesetStatus)
462 reviewers_alias = aliased(PullRequestReviewers)
462 reviewers_alias = aliased(PullRequestReviewers)
463 repo_alias = aliased(Repository)
463 repo_alias = aliased(Repository)
464
464
465 last_ver_subq = Session()\
465 last_ver_subq = Session()\
466 .query(func.min(ChangesetStatus.version)) \
466 .query(func.min(ChangesetStatus.version)) \
467 .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\
467 .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\
468 .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \
468 .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \
469 .subquery()
469 .subquery()
470
470
471 q = Session().query(pull_request_alias) \
471 q = Session().query(pull_request_alias) \
472 .options(lazyload(pull_request_alias.author)) \
472 .options(lazyload(pull_request_alias.author)) \
473 .join(reviewers_alias,
473 .join(reviewers_alias,
474 reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \
474 reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \
475 .join(repo_alias,
475 .join(repo_alias,
476 repo_alias.repo_id == pull_request_alias.target_repo_id) \
476 repo_alias.repo_id == pull_request_alias.target_repo_id) \
477 .outerjoin(status_alias,
477 .outerjoin(status_alias,
478 and_(status_alias.user_id == reviewers_alias.user_id,
478 and_(status_alias.user_id == reviewers_alias.user_id,
479 status_alias.pull_request_id == reviewers_alias.pull_request_id)) \
479 status_alias.pull_request_id == reviewers_alias.pull_request_id)) \
480 .filter(or_(status_alias.version == null(),
480 .filter(or_(status_alias.version == null(),
481 status_alias.version == last_ver_subq)) \
481 status_alias.version == last_ver_subq)) \
482 .filter(reviewers_alias.user_id == user_id) \
482 .filter(reviewers_alias.user_id == user_id) \
483 .filter(repo_alias.repo_name == repo_name) \
483 .filter(repo_alias.repo_name == repo_name) \
484 .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \
484 .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \
485 .group_by(pull_request_alias)
485 .group_by(pull_request_alias)
486
486
487 # closed,opened
487 # closed,opened
488 if statuses:
488 if statuses:
489 q = q.filter(pull_request_alias.status.in_(statuses))
489 q = q.filter(pull_request_alias.status.in_(statuses))
490
490
491 if search_q:
491 if search_q:
492 like_expression = u'%{}%'.format(safe_unicode(search_q))
492 like_expression = u'%{}%'.format(safe_unicode(search_q))
493 q = q.join(User, User.user_id == pull_request_alias.user_id)
493 q = q.join(User, User.user_id == pull_request_alias.user_id)
494 q = q.filter(or_(
494 q = q.filter(or_(
495 cast(pull_request_alias.pull_request_id, String).ilike(like_expression),
495 cast(pull_request_alias.pull_request_id, String).ilike(like_expression),
496 User.username.ilike(like_expression),
496 User.username.ilike(like_expression),
497 pull_request_alias.title.ilike(like_expression),
497 pull_request_alias.title.ilike(like_expression),
498 pull_request_alias.description.ilike(like_expression),
498 pull_request_alias.description.ilike(like_expression),
499 ))
499 ))
500
500
501 order_map = {
501 order_map = {
502 'name_raw': pull_request_alias.pull_request_id,
502 'name_raw': pull_request_alias.pull_request_id,
503 'title': pull_request_alias.title,
503 'title': pull_request_alias.title,
504 'updated_on_raw': pull_request_alias.updated_on,
504 'updated_on_raw': pull_request_alias.updated_on,
505 'target_repo': pull_request_alias.target_repo_id
505 'target_repo': pull_request_alias.target_repo_id
506 }
506 }
507 if order_by and order_by in order_map:
507 if order_by and order_by in order_map:
508 if order_dir == 'asc':
508 if order_dir == 'asc':
509 q = q.order_by(order_map[order_by].asc())
509 q = q.order_by(order_map[order_by].asc())
510 else:
510 else:
511 q = q.order_by(order_map[order_by].desc())
511 q = q.order_by(order_map[order_by].desc())
512
512
513 return q
513 return q
514
514
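The query above pairs each reviewer with at most one ChangesetStatus row: the correlated subquery selects the smallest version number per (pull request, reviewer), which — assuming, as the min() suggests, that the smallest version is the most recent — is the latest recorded status; a missing row counts as not reviewed. The same selection, expressed over plain Python data as a standalone sketch:

rows = [
    # (pull_request_id, user_id, version, status) -- made-up status rows
    (1, 10, 2, 'approved'),
    (1, 10, 0, 'under_review'),   # smallest version == latest status
    (2, 10, 0, 'approved'),
]

latest = {}
for pr_id, user_id, version, status in rows:
    key = (pr_id, user_id)
    if key not in latest or version < latest[key][0]:
        latest[key] = (version, status)

awaiting = [k for k, (_, s) in latest.items()
            if s in ('under_review', 'not_reviewed')]
print(awaiting)   # [(1, 10)]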
515 def count_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None):
515 def count_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None):
516 """
516 """
517 Count the number of pull requests for a specific repository that are
517 Count the number of pull requests for a specific repository that are
518 awaiting review from a specific user.
518 awaiting review from a specific user.
519
519
520 :param repo_name: target or source repo
520 :param repo_name: target or source repo
521 :param user_id: reviewer user of the pull request
521 :param user_id: reviewer user of the pull request
522 :param search_q: filter by text
522 :param search_q: filter by text
523 :param statuses: list of pull request statuses
523 :param statuses: list of pull request statuses
524 :returns: int number of pull requests
524 :returns: int number of pull requests
525 """
525 """
526 q = self._prepare_awaiting_my_review_review_query(
526 q = self._prepare_awaiting_my_review_review_query(
527 repo_name, user_id, search_q=search_q, statuses=statuses)
527 repo_name, user_id, search_q=search_q, statuses=statuses)
528 return q.count()
528 return q.count()
529
529
530 def get_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None,
530 def get_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None,
531 offset=0, length=None, order_by=None, order_dir='desc'):
531 offset=0, length=None, order_by=None, order_dir='desc'):
532 """
532 """
533 Get all pull requests for a specific repository that are awaiting
533 Get all pull requests for a specific repository that are awaiting
534 review from a specific user.
534 review from a specific user.
535
535
536 :param repo_name: target or source repo
536 :param repo_name: target or source repo
537 :param user_id: reviewer user of the pull request
537 :param user_id: reviewer user of the pull request
538 :param search_q: filter by text
538 :param search_q: filter by text
539 :param statuses: list of pull request statuses
539 :param statuses: list of pull request statuses
540 :param offset: pagination offset
540 :param offset: pagination offset
541 :param length: length of returned list
541 :param length: length of returned list
542 :param order_by: order of the returned list
542 :param order_by: order of the returned list
543 :param order_dir: 'asc' or 'desc' ordering direction
543 :param order_dir: 'asc' or 'desc' ordering direction
544 :returns: list of pull requests
544 :returns: list of pull requests
545 """
545 """
546
546
547 q = self._prepare_awaiting_my_review_review_query(
547 q = self._prepare_awaiting_my_review_review_query(
548 repo_name, user_id, search_q=search_q, statuses=statuses,
548 repo_name, user_id, search_q=search_q, statuses=statuses,
549 order_by=order_by, order_dir=order_dir)
549 order_by=order_by, order_dir=order_dir)
550
550
551 if length:
551 if length:
552 pull_requests = q.limit(length).offset(offset).all()
552 pull_requests = q.limit(length).offset(offset).all()
553 else:
553 else:
554 pull_requests = q.all()
554 pull_requests = q.all()
555
555
556 return pull_requests
556 return pull_requests
557
557
558 def _prepare_im_participating_query(self, user_id=None, statuses=None, query='',
558 def _prepare_im_participating_query(self, user_id=None, statuses=None, query='',
559 order_by=None, order_dir='desc'):
559 order_by=None, order_dir='desc'):
560 """
560 """
561 return a query of pull-requests where the user is the creator, or is added as a reviewer
561 return a query of pull-requests where the user is the creator, or is added as a reviewer
562 """
562 """
563 q = PullRequest.query()
563 q = PullRequest.query()
564 if user_id:
564 if user_id:
565 reviewers_subquery = Session().query(
565 reviewers_subquery = Session().query(
566 PullRequestReviewers.pull_request_id).filter(
566 PullRequestReviewers.pull_request_id).filter(
567 PullRequestReviewers.user_id == user_id).subquery()
567 PullRequestReviewers.user_id == user_id).subquery()
568 user_filter = or_(
568 user_filter = or_(
569 PullRequest.user_id == user_id,
569 PullRequest.user_id == user_id,
570 PullRequest.pull_request_id.in_(reviewers_subquery)
570 PullRequest.pull_request_id.in_(reviewers_subquery)
571 )
571 )
572 q = PullRequest.query().filter(user_filter)
572 q = PullRequest.query().filter(user_filter)
573
573
574 # closed,opened
574 # closed,opened
575 if statuses:
575 if statuses:
576 q = q.filter(PullRequest.status.in_(statuses))
576 q = q.filter(PullRequest.status.in_(statuses))
577
577
578 if query:
578 if query:
579 like_expression = u'%{}%'.format(safe_unicode(query))
579 like_expression = u'%{}%'.format(safe_unicode(query))
580 q = q.join(User, User.user_id == PullRequest.user_id)
580 q = q.join(User, User.user_id == PullRequest.user_id)
581 q = q.filter(or_(
581 q = q.filter(or_(
582 cast(PullRequest.pull_request_id, String).ilike(like_expression),
582 cast(PullRequest.pull_request_id, String).ilike(like_expression),
583 User.username.ilike(like_expression),
583 User.username.ilike(like_expression),
584 PullRequest.title.ilike(like_expression),
584 PullRequest.title.ilike(like_expression),
585 PullRequest.description.ilike(like_expression),
585 PullRequest.description.ilike(like_expression),
586 ))
586 ))
587
587
588 order_map = {
588 order_map = {
589 'name_raw': PullRequest.pull_request_id,
589 'name_raw': PullRequest.pull_request_id,
590 'title': PullRequest.title,
590 'title': PullRequest.title,
591 'updated_on_raw': PullRequest.updated_on,
591 'updated_on_raw': PullRequest.updated_on,
592 'target_repo': PullRequest.target_repo_id
592 'target_repo': PullRequest.target_repo_id
593 }
593 }
594 if order_by and order_by in order_map:
594 if order_by and order_by in order_map:
595 if order_dir == 'asc':
595 if order_dir == 'asc':
596 q = q.order_by(order_map[order_by].asc())
596 q = q.order_by(order_map[order_by].asc())
597 else:
597 else:
598 q = q.order_by(order_map[order_by].desc())
598 q = q.order_by(order_map[order_by].desc())
599
599
600 return q
600 return q
601
601
602 def count_im_participating_in(self, user_id=None, statuses=None, query=''):
602 def count_im_participating_in(self, user_id=None, statuses=None, query=''):
603 q = self._prepare_im_participating_query(user_id, statuses=statuses, query=query)
603 q = self._prepare_im_participating_query(user_id, statuses=statuses, query=query)
604 return q.count()
604 return q.count()
605
605
606 def get_im_participating_in(
606 def get_im_participating_in(
607 self, user_id=None, statuses=None, query='', offset=0,
607 self, user_id=None, statuses=None, query='', offset=0,
608 length=None, order_by=None, order_dir='desc'):
608 length=None, order_by=None, order_dir='desc'):
609 """
609 """
610 Get all pull requests that I'm participating in as a reviewer, or have opened myself
610 Get all pull requests that I'm participating in as a reviewer, or have opened myself
611 """
611 """
612
612
613 q = self._prepare_im_participating_query(
613 q = self._prepare_im_participating_query(
614 user_id, statuses=statuses, query=query, order_by=order_by,
614 user_id, statuses=statuses, query=query, order_by=order_by,
615 order_dir=order_dir)
615 order_dir=order_dir)
616
616
617 if length:
617 if length:
618 pull_requests = q.limit(length).offset(offset).all()
618 pull_requests = q.limit(length).offset(offset).all()
619 else:
619 else:
620 pull_requests = q.all()
620 pull_requests = q.all()
621
621
622 return pull_requests
622 return pull_requests
623
623
624 def _prepare_participating_in_for_review_query(
624 def _prepare_participating_in_for_review_query(
625 self, user_id, statuses=None, query='', order_by=None, order_dir='desc'):
625 self, user_id, statuses=None, query='', order_by=None, order_dir='desc'):
626
626
627 for_review_statuses = [
627 for_review_statuses = [
628 ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED
628 ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED
629 ]
629 ]
630
630
631 pull_request_alias = aliased(PullRequest)
631 pull_request_alias = aliased(PullRequest)
632 status_alias = aliased(ChangesetStatus)
632 status_alias = aliased(ChangesetStatus)
633 reviewers_alias = aliased(PullRequestReviewers)
633 reviewers_alias = aliased(PullRequestReviewers)
634
634
635 last_ver_subq = Session()\
635 last_ver_subq = Session()\
636 .query(func.min(ChangesetStatus.version)) \
636 .query(func.min(ChangesetStatus.version)) \
637 .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\
637 .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\
638 .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \
638 .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \
639 .subquery()
639 .subquery()
640
640
641 q = Session().query(pull_request_alias) \
641 q = Session().query(pull_request_alias) \
642 .options(lazyload(pull_request_alias.author)) \
642 .options(lazyload(pull_request_alias.author)) \
643 .join(reviewers_alias,
643 .join(reviewers_alias,
644 reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \
644 reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \
645 .outerjoin(status_alias,
645 .outerjoin(status_alias,
646 and_(status_alias.user_id == reviewers_alias.user_id,
646 and_(status_alias.user_id == reviewers_alias.user_id,
647 status_alias.pull_request_id == reviewers_alias.pull_request_id)) \
647 status_alias.pull_request_id == reviewers_alias.pull_request_id)) \
648 .filter(or_(status_alias.version == null(),
648 .filter(or_(status_alias.version == null(),
649 status_alias.version == last_ver_subq)) \
649 status_alias.version == last_ver_subq)) \
650 .filter(reviewers_alias.user_id == user_id) \
650 .filter(reviewers_alias.user_id == user_id) \
651 .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \
651 .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \
652 .group_by(pull_request_alias)
652 .group_by(pull_request_alias)
653
653
654 # closed,opened
654 # closed,opened
655 if statuses:
655 if statuses:
656 q = q.filter(pull_request_alias.status.in_(statuses))
656 q = q.filter(pull_request_alias.status.in_(statuses))
657
657
658 if query:
658 if query:
659 like_expression = u'%{}%'.format(safe_unicode(query))
659 like_expression = u'%{}%'.format(safe_unicode(query))
660 q = q.join(User, User.user_id == pull_request_alias.user_id)
660 q = q.join(User, User.user_id == pull_request_alias.user_id)
661 q = q.filter(or_(
661 q = q.filter(or_(
662 cast(pull_request_alias.pull_request_id, String).ilike(like_expression),
662 cast(pull_request_alias.pull_request_id, String).ilike(like_expression),
663 User.username.ilike(like_expression),
663 User.username.ilike(like_expression),
664 pull_request_alias.title.ilike(like_expression),
664 pull_request_alias.title.ilike(like_expression),
665 pull_request_alias.description.ilike(like_expression),
665 pull_request_alias.description.ilike(like_expression),
666 ))
666 ))
667
667
668 order_map = {
668 order_map = {
669 'name_raw': pull_request_alias.pull_request_id,
669 'name_raw': pull_request_alias.pull_request_id,
670 'title': pull_request_alias.title,
670 'title': pull_request_alias.title,
671 'updated_on_raw': pull_request_alias.updated_on,
671 'updated_on_raw': pull_request_alias.updated_on,
672 'target_repo': pull_request_alias.target_repo_id
672 'target_repo': pull_request_alias.target_repo_id
673 }
673 }
674 if order_by and order_by in order_map:
674 if order_by and order_by in order_map:
675 if order_dir == 'asc':
675 if order_dir == 'asc':
676 q = q.order_by(order_map[order_by].asc())
676 q = q.order_by(order_map[order_by].asc())
677 else:
677 else:
678 q = q.order_by(order_map[order_by].desc())
678 q = q.order_by(order_map[order_by].desc())
679
679
680 return q
680 return q
681
681
682 def count_im_participating_in_for_review(self, user_id, statuses=None, query=''):
682 def count_im_participating_in_for_review(self, user_id, statuses=None, query=''):
683 q = self._prepare_participating_in_for_review_query(user_id, statuses=statuses, query=query)
683 q = self._prepare_participating_in_for_review_query(user_id, statuses=statuses, query=query)
684 return q.count()
684 return q.count()
685
685
686 def get_im_participating_in_for_review(
686 def get_im_participating_in_for_review(
687 self, user_id, statuses=None, query='', offset=0,
687 self, user_id, statuses=None, query='', offset=0,
688 length=None, order_by=None, order_dir='desc'):
688 length=None, order_by=None, order_dir='desc'):
689 """
689 """
690 Get all pull requests that need the user's approval or rejection
690 Get all pull requests that need the user's approval or rejection
691 """
691 """
692
692
693 q = self._prepare_participating_in_for_review_query(
693 q = self._prepare_participating_in_for_review_query(
694 user_id, statuses=statuses, query=query, order_by=order_by,
694 user_id, statuses=statuses, query=query, order_by=order_by,
695 order_dir=order_dir)
695 order_dir=order_dir)
696
696
697 if length:
697 if length:
698 pull_requests = q.limit(length).offset(offset).all()
698 pull_requests = q.limit(length).offset(offset).all()
699 else:
699 else:
700 pull_requests = q.all()
700 pull_requests = q.all()
701
701
702 return pull_requests
702 return pull_requests
703
703
704 def get_versions(self, pull_request):
704 def get_versions(self, pull_request):
705 """
705 """
706 returns versions of a pull request sorted by version ID ascending
706 returns versions of a pull request sorted by version ID ascending
707 """
707 """
708 return PullRequestVersion.query()\
708 return PullRequestVersion.query()\
709 .filter(PullRequestVersion.pull_request == pull_request)\
709 .filter(PullRequestVersion.pull_request == pull_request)\
710 .order_by(PullRequestVersion.pull_request_version_id.asc())\
710 .order_by(PullRequestVersion.pull_request_version_id.asc())\
711 .all()
711 .all()
712
712
713 def get_pr_version(self, pull_request_id, version=None):
713 def get_pr_version(self, pull_request_id, version=None):
714 at_version = None
714 at_version = None
715
715
716 if version and version == 'latest':
716 if version and version == 'latest':
717 pull_request_ver = PullRequest.get(pull_request_id)
717 pull_request_ver = PullRequest.get(pull_request_id)
718 pull_request_obj = pull_request_ver
718 pull_request_obj = pull_request_ver
719 _org_pull_request_obj = pull_request_obj
719 _org_pull_request_obj = pull_request_obj
720 at_version = 'latest'
720 at_version = 'latest'
721 elif version:
721 elif version:
722 pull_request_ver = PullRequestVersion.get_or_404(version)
722 pull_request_ver = PullRequestVersion.get_or_404(version)
723 pull_request_obj = pull_request_ver
723 pull_request_obj = pull_request_ver
724 _org_pull_request_obj = pull_request_ver.pull_request
724 _org_pull_request_obj = pull_request_ver.pull_request
725 at_version = pull_request_ver.pull_request_version_id
725 at_version = pull_request_ver.pull_request_version_id
726 else:
726 else:
727 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
727 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
728 pull_request_id)
728 pull_request_id)
729
729
730 pull_request_display_obj = PullRequest.get_pr_display_object(
730 pull_request_display_obj = PullRequest.get_pr_display_object(
731 pull_request_obj, _org_pull_request_obj)
731 pull_request_obj, _org_pull_request_obj)
732
732
733 return _org_pull_request_obj, pull_request_obj, \
733 return _org_pull_request_obj, pull_request_obj, \
734 pull_request_display_obj, at_version
734 pull_request_display_obj, at_version
735
735
736 def pr_commits_versions(self, versions):
736 def pr_commits_versions(self, versions):
737 """
737 """
738 Maps the pull-request commits onto all known PR versions. This way we can obtain,
738 Maps the pull-request commits onto all known PR versions. This way we can obtain,
739 for each commit, the PR versions in which it appears.
739 for each commit, the PR versions in which it appears.
740 """
740 """
741 commit_versions = collections.defaultdict(list)
741 commit_versions = collections.defaultdict(list)
742 num_versions = [x.pull_request_version_id for x in versions]
742 num_versions = [x.pull_request_version_id for x in versions]
743 for ver in versions:
743 for ver in versions:
744 for commit_id in ver.revisions:
744 for commit_id in ver.revisions:
745 ver_idx = ChangesetComment.get_index_from_version(
745 ver_idx = ChangesetComment.get_index_from_version(
746 ver.pull_request_version_id, num_versions=num_versions)
746 ver.pull_request_version_id, num_versions=num_versions)
747 commit_versions[commit_id].append(ver_idx)
747 commit_versions[commit_id].append(ver_idx)
748 return commit_versions
748 return commit_versions
749
749
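A small standalone illustration of the mapping built above; the version ids and commit hashes are made up, and the 1-based index is a stand-in for ChangesetComment.get_index_from_version(), whose exact numbering is an assumption here.

import collections

versions = [(7, ['aaa', 'bbb']), (9, ['aaa', 'bbb', 'ccc'])]
num_versions = [ver_id for ver_id, _ in versions]

commit_versions = collections.defaultdict(list)
for ver_id, revisions in versions:
    ver_idx = num_versions.index(ver_id) + 1   # stand-in index computation
    for commit_id in revisions:
        commit_versions[commit_id].append(ver_idx)

print(dict(commit_versions))   # {'aaa': [1, 2], 'bbb': [1, 2], 'ccc': [2]}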
750 def create(self, created_by, source_repo, source_ref, target_repo,
750 def create(self, created_by, source_repo, source_ref, target_repo,
751 target_ref, revisions, reviewers, observers, title, description=None,
751 target_ref, revisions, reviewers, observers, title, description=None,
752 common_ancestor_id=None,
752 common_ancestor_id=None,
753 description_renderer=None,
753 description_renderer=None,
754 reviewer_data=None, translator=None, auth_user=None):
754 reviewer_data=None, translator=None, auth_user=None):
755 translator = translator or get_current_request().translate
755 translator = translator or get_current_request().translate
756
756
757 created_by_user = self._get_user(created_by)
757 created_by_user = self._get_user(created_by)
758 auth_user = auth_user or created_by_user.AuthUser()
758 auth_user = auth_user or created_by_user.AuthUser()
759 source_repo = self._get_repo(source_repo)
759 source_repo = self._get_repo(source_repo)
760 target_repo = self._get_repo(target_repo)
760 target_repo = self._get_repo(target_repo)
761
761
762 pull_request = PullRequest()
762 pull_request = PullRequest()
763 pull_request.source_repo = source_repo
763 pull_request.source_repo = source_repo
764 pull_request.source_ref = source_ref
764 pull_request.source_ref = source_ref
765 pull_request.target_repo = target_repo
765 pull_request.target_repo = target_repo
766 pull_request.target_ref = target_ref
766 pull_request.target_ref = target_ref
767 pull_request.revisions = revisions
767 pull_request.revisions = revisions
768 pull_request.title = title
768 pull_request.title = title
769 pull_request.description = description
769 pull_request.description = description
770 pull_request.description_renderer = description_renderer
770 pull_request.description_renderer = description_renderer
771 pull_request.author = created_by_user
771 pull_request.author = created_by_user
772 pull_request.reviewer_data = reviewer_data
772 pull_request.reviewer_data = reviewer_data
773 pull_request.pull_request_state = pull_request.STATE_CREATING
773 pull_request.pull_request_state = pull_request.STATE_CREATING
774 pull_request.common_ancestor_id = common_ancestor_id
774 pull_request.common_ancestor_id = common_ancestor_id
775
775
776 Session().add(pull_request)
776 Session().add(pull_request)
777 Session().flush()
777 Session().flush()
778
778
779 reviewer_ids = set()
779 reviewer_ids = set()
780 # members / reviewers
780 # members / reviewers
        for reviewer_object in reviewers:
            user_id, reasons, mandatory, role, rules = reviewer_object
            user = self._get_user(user_id)

            # skip duplicates
            if user.user_id in reviewer_ids:
                continue

            reviewer_ids.add(user.user_id)

            reviewer = PullRequestReviewers()
            reviewer.user = user
            reviewer.pull_request = pull_request
            reviewer.reasons = reasons
            reviewer.mandatory = mandatory
            reviewer.role = role

            # NOTE(marcink): pick only first rule for now
            rule_id = list(rules)[0] if rules else None
            rule = RepoReviewRule.get(rule_id) if rule_id else None
            if rule:
                review_group = rule.user_group_vote_rule(user_id)
                # we check if this particular reviewer is a member of a voting group
                if review_group:
                    # NOTE(marcink):
                    # the user can be a member of more than one group, but we
                    # pick only the first, same as the default reviewers algo
                    review_group = review_group[0]

                    rule_data = {
                        'rule_name':
                            rule.review_rule_name,
                        'rule_user_group_entry_id':
                            review_group.repo_review_rule_users_group_id,
                        'rule_user_group_name':
                            review_group.users_group.users_group_name,
                        'rule_user_group_members':
                            [x.user.username for x in review_group.users_group.members],
                        'rule_user_group_members_id':
                            [x.user.user_id for x in review_group.users_group.members],
                    }
                    # e.g. {'vote_rule': -1, 'mandatory': True}
                    rule_data.update(review_group.rule_data())

                    reviewer.rule_data = rule_data

            Session().add(reviewer)
            Session().flush()

        for observer_object in observers:
            user_id, reasons, mandatory, role, rules = observer_object
            user = self._get_user(user_id)

            # skip duplicates from reviewers
            if user.user_id in reviewer_ids:
                continue

            # reviewer_ids.add(user.user_id)

            observer = PullRequestReviewers()
            observer.user = user
            observer.pull_request = pull_request
            observer.reasons = reasons
            observer.mandatory = mandatory
            observer.role = role

            # NOTE(marcink): pick only first rule for now
            rule_id = list(rules)[0] if rules else None
            rule = RepoReviewRule.get(rule_id) if rule_id else None
            if rule:
                # TODO(marcink): do we need this for observers ??
                pass

            Session().add(observer)
            Session().flush()

        # Set approval status to "Under Review" for all commits which are
        # part of this pull request.
        ChangesetStatusModel().set_status(
            repo=target_repo,
            status=ChangesetStatus.STATUS_UNDER_REVIEW,
            user=created_by_user,
            pull_request=pull_request
        )
        # we commit early at this point. The queries above take row locks,
        # so we need to commit and finish the transaction before the validate
        # call below, which for large repos can be long-running and would
        # otherwise keep those rows locked.
        Session().commit()

        # prepare workspace, and run initial merge simulation. Set state during that
        # operation
        pull_request = PullRequest.get(pull_request.pull_request_id)

        # set state to "merging" for the merge simulation, and to "created"
        # once it finishes, to mark that the simulation went fine
        with pull_request.set_state(PullRequest.STATE_MERGING,
                                    final_state=PullRequest.STATE_CREATED) as state_obj:
            MergeCheck.validate(
                pull_request, auth_user=auth_user, translator=translator)

        self.notify_reviewers(pull_request, reviewer_ids, created_by_user)
        self.trigger_pull_request_hook(pull_request, created_by_user, 'create')

        creation_data = pull_request.get_api_data(with_merge_state=False)
        self._log_audit_action(
            'repo.pull_request.create', {'data': creation_data},
            auth_user, pull_request)

        return pull_request

    def trigger_pull_request_hook(self, pull_request, user, action, data=None):
        pull_request = self.__get_pull_request(pull_request)
        target_scm = pull_request.target_repo.scm_instance()
        if action == 'create':
            trigger_hook = hooks_utils.trigger_create_pull_request_hook
        elif action == 'merge':
            trigger_hook = hooks_utils.trigger_merge_pull_request_hook
        elif action == 'close':
            trigger_hook = hooks_utils.trigger_close_pull_request_hook
        elif action == 'review_status_change':
            trigger_hook = hooks_utils.trigger_review_pull_request_hook
        elif action == 'update':
            trigger_hook = hooks_utils.trigger_update_pull_request_hook
        elif action == 'comment':
            trigger_hook = hooks_utils.trigger_comment_pull_request_hook
        elif action == 'comment_edit':
            trigger_hook = hooks_utils.trigger_comment_pull_request_edit_hook
        else:
            return

        log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s',
                  pull_request, action, trigger_hook)
        trigger_hook(
            username=user.username,
            repo_name=pull_request.target_repo.repo_name,
            repo_type=target_scm.alias,
            pull_request=pull_request,
            data=data)
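
    # Usage sketch (assuming `model` is an instance of this model class and
    # `pr` an existing PullRequest):
    #   model.trigger_pull_request_hook(pr, pr.author, 'comment', data=None)
    # Action strings not listed above fall through and are ignored silently.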

    def _get_commit_ids(self, pull_request):
        """
        Return the commit ids of the merged pull request.

        This method does not yet deal correctly with the lack of autoupdates
        nor with implicit target updates.
        For example: if a commit in the source repo is already in the target,
        it will be reported anyway.
        """
        merge_rev = pull_request.merge_rev
        if merge_rev is None:
            raise ValueError('This pull request was not merged yet')

        commit_ids = list(pull_request.revisions)
        if merge_rev not in commit_ids:
            commit_ids.append(merge_rev)

        return commit_ids

    def merge_repo(self, pull_request, user, extras):
        repo_type = pull_request.source_repo.repo_type
        log.debug("Merging pull request %s", pull_request.pull_request_id)
        extras['user_agent'] = '{}/internal-merge'.format(repo_type)
        merge_state = self._merge_pull_request(pull_request, user, extras)
        if merge_state.executed:
            log.debug("Merge was successful, updating the pull request comments.")
            self._comment_and_close_pr(pull_request, user, merge_state)

            self._log_audit_action(
                'repo.pull_request.merge',
                {'merge_state': merge_state.__dict__},
                user, pull_request)

        else:
            log.warning("Merge failed, not updating the pull request.")
        return merge_state

    def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
        target_vcs = pull_request.target_repo.scm_instance()
        source_vcs = pull_request.source_repo.scm_instance()

        message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
            pr_id=pull_request.pull_request_id,
            pr_title=pull_request.title,
            pr_desc=pull_request.description,
            source_repo=source_vcs.name,
            source_ref_name=pull_request.source_ref_parts.name,
            target_repo=target_vcs.name,
            target_ref_name=pull_request.target_ref_parts.name,
        )
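        # NOTE: MERGE_MESSAGE_TMPL is expected to reference (a subset of) the
        # placeholders passed above, e.g. a hypothetical template such as
        # 'merged pull request !{pr_id} from {source_repo}:{source_ref_name}'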

        workspace_id = self._workspace_id(pull_request)
        repo_id = pull_request.target_repo.repo_id
        use_rebase = self._use_rebase_for_merging(pull_request)
        close_branch = self._close_branch_before_merging(pull_request)
        user_name = self._user_name_for_merging(pull_request, user)

        target_ref = self._refresh_reference(
            pull_request.target_ref_parts, target_vcs)

        callback_daemon, extras = prepare_callback_daemon(
            extras, protocol=vcs_settings.HOOKS_PROTOCOL,
            host=vcs_settings.HOOKS_HOST,
            use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)

        with callback_daemon:
            # TODO: johbo: Implement a clean way to run a config_override
            # for a single call.
            target_vcs.config.set(
                'rhodecode', 'RC_SCM_DATA', json.dumps(extras))

            merge_state = target_vcs.merge(
                repo_id, workspace_id, target_ref, source_vcs,
                pull_request.source_ref_parts,
                user_name=user_name, user_email=user.email,
                message=message, use_rebase=use_rebase,
                close_branch=close_branch)
        return merge_state

    def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
        pull_request.merge_rev = merge_state.merge_ref.commit_id
        pull_request.updated_on = datetime.datetime.now()
        close_msg = close_msg or 'Pull request merged and closed'

        CommentsModel().create(
            text=safe_unicode(close_msg),
            repo=pull_request.target_repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            f_path=None,
            line_no=None,
            closing_pr=True
        )

        Session().add(pull_request)
        Session().flush()
        # TODO: paris: replace invalidation with less radical solution
        ScmModel().mark_for_invalidation(
            pull_request.target_repo.repo_name)
        self.trigger_pull_request_hook(pull_request, user, 'merge')

    def has_valid_update_type(self, pull_request):
        source_ref_type = pull_request.source_ref_parts.type
        return source_ref_type in self.REF_TYPES

    def get_flow_commits(self, pull_request):
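        """
        Resolve and return the (source_commit, target_commit) pair for the
        given pull request; raises SourceRefMissing / TargetRefMissing when
        the respective ref cannot be resolved to a commit.
        """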

        # source repo
        source_ref_name = pull_request.source_ref_parts.name
        source_ref_type = pull_request.source_ref_parts.type
        source_ref_id = pull_request.source_ref_parts.commit_id
        source_repo = pull_request.source_repo.scm_instance()

        try:
            if source_ref_type in self.REF_TYPES:
                source_commit = source_repo.get_commit(
                    source_ref_name, reference_obj=pull_request.source_ref_parts)
            else:
                source_commit = source_repo.get_commit(source_ref_id)
        except CommitDoesNotExistError:
            raise SourceRefMissing()

        # target repo
        target_ref_name = pull_request.target_ref_parts.name
        target_ref_type = pull_request.target_ref_parts.type
        target_ref_id = pull_request.target_ref_parts.commit_id
        target_repo = pull_request.target_repo.scm_instance()

        try:
            if target_ref_type in self.REF_TYPES:
                target_commit = target_repo.get_commit(
                    target_ref_name, reference_obj=pull_request.target_ref_parts)
            else:
                target_commit = target_repo.get_commit(target_ref_id)
        except CommitDoesNotExistError:
            raise TargetRefMissing()

        return source_commit, target_commit

    def update_commits(self, pull_request, updating_user):
        """
        Get the updated list of commits for the pull request
        and return the new pull request version and the list
        of commits processed by this update action

        updating_user is the user object who triggered the update
        """
        pull_request = self.__get_pull_request(pull_request)
        source_ref_type = pull_request.source_ref_parts.type
        source_ref_name = pull_request.source_ref_parts.name
        source_ref_id = pull_request.source_ref_parts.commit_id

        target_ref_type = pull_request.target_ref_parts.type
        target_ref_name = pull_request.target_ref_parts.name
        target_ref_id = pull_request.target_ref_parts.commit_id

        if not self.has_valid_update_type(pull_request):
            log.debug("Skipping update of pull request %s due to ref type: %s",
                      pull_request, source_ref_type)
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.WRONG_REF_TYPE,
                old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
                source_changed=False, target_changed=False)

        try:
            source_commit, target_commit = self.get_flow_commits(pull_request)
        except SourceRefMissing:
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.MISSING_SOURCE_REF,
                old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
                source_changed=False, target_changed=False)
        except TargetRefMissing:
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.MISSING_TARGET_REF,
                old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
                source_changed=False, target_changed=False)

        source_changed = source_ref_id != source_commit.raw_id
        target_changed = target_ref_id != target_commit.raw_id

        if not (source_changed or target_changed):
            log.debug("Nothing changed in pull request %s", pull_request)
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.NO_CHANGE,
                old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
                source_changed=source_changed, target_changed=target_changed)

        change_in_found = 'target repo' if target_changed else 'source repo'
        log.debug('Updating pull request because of change in %s detected',
                  change_in_found)

        # Finally there is a need for an update; in case of a source change
        # we create a new version, otherwise we just update in place
        if source_changed:
            pull_request_version = self._create_version_from_snapshot(pull_request)
            self._link_comments_to_version(pull_request_version)
        else:
            try:
                ver = pull_request.versions[-1]
            except IndexError:
                ver = None

            pull_request.pull_request_version_id = \
                ver.pull_request_version_id if ver else None
            pull_request_version = pull_request

        source_repo = pull_request.source_repo.scm_instance()
        target_repo = pull_request.target_repo.scm_instance()

        # re-compute commit ids
        old_commit_ids = pull_request.revisions
        pre_load = ["author", "date", "message", "branch"]
        commit_ranges = target_repo.compare(
            target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
            pre_load=pre_load)

        target_ref = target_commit.raw_id
        source_ref = source_commit.raw_id
        ancestor_commit_id = target_repo.get_common_ancestor(
            target_ref, source_ref, source_repo)

        if not ancestor_commit_id:
            raise ValueError(
                'cannot calculate diff info without a common ancestor. '
                'Make sure both repositories are related, and have a common forking commit.')

        pull_request.common_ancestor_id = ancestor_commit_id

        pull_request.source_ref = '%s:%s:%s' % (
            source_ref_type, source_ref_name, source_commit.raw_id)
        pull_request.target_ref = '%s:%s:%s' % (
            target_ref_type, target_ref_name, ancestor_commit_id)
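        # NOTE: as written above, the commit part of target_ref is pinned to
        # the common ancestor, not to the current target head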

        pull_request.revisions = [
            commit.raw_id for commit in reversed(commit_ranges)]
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
        new_commit_ids = pull_request.revisions

        old_diff_data, new_diff_data = self._generate_update_diffs(
            pull_request, pull_request_version)

        # calculate commit and file changes
        commit_changes = self._calculate_commit_id_changes(
            old_commit_ids, new_commit_ids)
        file_changes = self._calculate_file_changes(
            old_diff_data, new_diff_data)

        # set comments as outdated if DIFFS changed
        CommentsModel().outdate_comments(
            pull_request, old_diff_data=old_diff_data,
            new_diff_data=new_diff_data)

        valid_commit_changes = (commit_changes.added or commit_changes.removed)
        file_node_changes = (
            file_changes.added or file_changes.modified or file_changes.removed)
        pr_has_changes = valid_commit_changes or file_node_changes

        # Add an automatic comment to the pull request, in case
        # anything has changed
        if pr_has_changes:
            update_comment = CommentsModel().create(
                text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes),
                repo=pull_request.target_repo,
                user=pull_request.author,
                pull_request=pull_request,
                send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)

            # Update status to "Under Review" for added commits
            for commit_id in commit_changes.added:
                ChangesetStatusModel().set_status(
                    repo=pull_request.source_repo,
                    status=ChangesetStatus.STATUS_UNDER_REVIEW,
                    comment=update_comment,
                    user=pull_request.author,
                    pull_request=pull_request,
                    revision=commit_id)

        # initial commit
        Session().commit()

        if pr_has_changes:
            # send update email to users
            try:
                self.notify_users(pull_request=pull_request, updating_user=updating_user,
                                  ancestor_commit_id=ancestor_commit_id,
                                  commit_changes=commit_changes,
                                  file_changes=file_changes)
                Session().commit()
            except Exception:
                log.exception('Failed to send email notification to users')
                Session().rollback()

        log.debug(
            'Updated pull request %s, added_ids: %s, common_ids: %s, '
            'removed_ids: %s', pull_request.pull_request_id,
            commit_changes.added, commit_changes.common, commit_changes.removed)
        log.debug(
            'Updated pull request with the following file changes: %s',
            file_changes)

        log.info(
            "Updated pull request %s from commit %s to commit %s, "
            "stored new version %s of this pull request.",
            pull_request.pull_request_id, source_ref_id,
            pull_request.source_ref_parts.commit_id,
            pull_request_version.pull_request_version_id)

        self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')

        return UpdateResponse(
            executed=True, reason=UpdateFailureReason.NONE,
            old=pull_request, new=pull_request_version,
            common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes,
            source_changed=source_changed, target_changed=target_changed)

    def _create_version_from_snapshot(self, pull_request):
        version = PullRequestVersion()
        version.title = pull_request.title
        version.description = pull_request.description
        version.status = pull_request.status
        version.pull_request_state = pull_request.pull_request_state
        version.created_on = datetime.datetime.now()
        version.updated_on = pull_request.updated_on
        version.user_id = pull_request.user_id
        version.source_repo = pull_request.source_repo
        version.source_ref = pull_request.source_ref
        version.target_repo = pull_request.target_repo
        version.target_ref = pull_request.target_ref

        version._last_merge_source_rev = pull_request._last_merge_source_rev
        version._last_merge_target_rev = pull_request._last_merge_target_rev
        version.last_merge_status = pull_request.last_merge_status
        version.last_merge_metadata = pull_request.last_merge_metadata
        version.shadow_merge_ref = pull_request.shadow_merge_ref
        version.merge_rev = pull_request.merge_rev
        version.reviewer_data = pull_request.reviewer_data

        version.revisions = pull_request.revisions
        version.common_ancestor_id = pull_request.common_ancestor_id
        version.pull_request = pull_request
        Session().add(version)
        Session().flush()

        return version

    def _generate_update_diffs(self, pull_request, pull_request_version):
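        # the old diff is computed from the saved PR version, the new diff
        # from the current (just updated) pull request state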

        diff_context = (
            self.DIFF_CONTEXT +
            CommentsModel.needed_extra_diff_context())
        hide_whitespace_changes = False
        source_repo = pull_request_version.source_repo
        source_ref_id = pull_request_version.source_ref_parts.commit_id
        target_ref_id = pull_request_version.target_ref_parts.commit_id
        old_diff = self._get_diff_from_pr_or_version(
            source_repo, source_ref_id, target_ref_id,
            hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)

        source_repo = pull_request.source_repo
        source_ref_id = pull_request.source_ref_parts.commit_id
        target_ref_id = pull_request.target_ref_parts.commit_id

        new_diff = self._get_diff_from_pr_or_version(
            source_repo, source_ref_id, target_ref_id,
            hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)

        old_diff_data = diffs.DiffProcessor(old_diff)
        old_diff_data.prepare()
        new_diff_data = diffs.DiffProcessor(new_diff)
        new_diff_data.prepare()

        return old_diff_data, new_diff_data

    def _link_comments_to_version(self, pull_request_version):
        """
        Link all unlinked comments of this pull request to the given version.

        :param pull_request_version: The `PullRequestVersion` to which
            the comments shall be linked.

        """
        pull_request = pull_request_version.pull_request
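        # NOTE: the '== None' comparison below is intentional: SQLAlchemy
        # turns it into an SQL 'IS NULL' filter ('is None' would not work here)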
        comments = ChangesetComment.query()\
            .filter(
                # TODO: johbo: Should we query for the repo at all here?
                # Pending decision on how comments of PRs are to be related
                # to either the source repo, the target repo or no repo at all.
                ChangesetComment.repo_id == pull_request.target_repo.repo_id,
                ChangesetComment.pull_request == pull_request,
                ChangesetComment.pull_request_version == None)\
            .order_by(ChangesetComment.comment_id.asc())

        # TODO: johbo: Find out why this breaks if it is done in a bulk
        # operation.
        for comment in comments:
            comment.pull_request_version_id = (
                pull_request_version.pull_request_version_id)
            Session().add(comment)

    def _calculate_commit_id_changes(self, old_ids, new_ids):
        added = [x for x in new_ids if x not in old_ids]
        common = [x for x in new_ids if x in old_ids]
        removed = [x for x in old_ids if x not in new_ids]
        total = new_ids
        return ChangeTuple(added, common, removed, total)
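
    # A minimal sketch of the resulting ChangeTuple, e.g. for
    # old_ids=['a', 'b'] and new_ids=['b', 'c']:
    #   added=['c'], common=['b'], removed=['a'], total=['b', 'c']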

    def _calculate_file_changes(self, old_diff_data, new_diff_data):

        old_files = OrderedDict()
        for diff_data in old_diff_data.parsed_diff:
            old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])

        added_files = []
        modified_files = []
        removed_files = []
        for diff_data in new_diff_data.parsed_diff:
            new_filename = diff_data['filename']
            new_hash = md5_safe(diff_data['raw_diff'])

            old_hash = old_files.get(new_filename)
            if not old_hash:
                # file is not present in the old diff, so we have to figure the
                # operation (ADD/REMOVE) out from the parsed diff
                operations_dict = diff_data['stats']['ops']
                if diffs.DEL_FILENODE in operations_dict:
                    removed_files.append(new_filename)
                else:
                    added_files.append(new_filename)
            else:
                if new_hash != old_hash:
                    modified_files.append(new_filename)
                # now remove the file from old, since we have seen it already
                del old_files[new_filename]

        # removed files are those present in the old diff but not in the new
        # one; since we delete old entries that appear in the new diff, any
        # left-overs are the removed files
        removed_files.extend(old_files.keys())

        return FileChangeTuple(added_files, modified_files, removed_files)
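
    # Sketch of the result: with old diffs for files {a, b} and new diffs for
    # {b (content changed), c}, this yields
    #   FileChangeTuple(added=['c'], modified=['b'], removed=['a'])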

    def _render_update_message(self, ancestor_commit_id, changes, file_changes):
        """
        render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
        so it always looks the same regardless of which default renderer
        the system is using.

        :param ancestor_commit_id: ancestor raw_id
        :param changes: changes named tuple
        :param file_changes: file changes named tuple

        """
        new_status = ChangesetStatus.get_status_lbl(
            ChangesetStatus.STATUS_UNDER_REVIEW)

        changed_files = (
            file_changes.added + file_changes.modified + file_changes.removed)

        params = {
            'under_review_label': new_status,
            'added_commits': changes.added,
            'removed_commits': changes.removed,
            'changed_files': changed_files,
            'added_files': file_changes.added,
            'modified_files': file_changes.modified,
            'removed_files': file_changes.removed,
            'ancestor_commit_id': ancestor_commit_id
        }
        renderer = RstTemplateRenderer()
        return renderer.render('pull_request_update.mako', **params)

    def edit(self, pull_request, title, description, description_renderer, user):
        pull_request = self.__get_pull_request(pull_request)
        old_data = pull_request.get_api_data(with_merge_state=False)
        if pull_request.is_closed():
            raise ValueError('This pull request is closed')
        if title:
            pull_request.title = title
        pull_request.description = description
        pull_request.updated_on = datetime.datetime.now()
        pull_request.description_renderer = description_renderer
        Session().add(pull_request)
        self._log_audit_action(
            'repo.pull_request.edit', {'old_data': old_data},
            user, pull_request)

    def update_reviewers(self, pull_request, reviewer_data, user):
        """
        Update the reviewers in the pull request

        :param pull_request: the pr to update
        :param reviewer_data: list of tuples
            [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
        :param user: current user who triggers this action
        """

        pull_request = self.__get_pull_request(pull_request)
        if pull_request.is_closed():
            raise ValueError('This pull request is closed')

        reviewers = {}
        for user_id, reasons, mandatory, role, rules in reviewer_data:
            if isinstance(user_id, (int, str)):
                user_id = self._get_user(user_id).user_id
            reviewers[user_id] = {
                'reasons': reasons, 'mandatory': mandatory, 'role': role}

        reviewers_ids = set(reviewers.keys())
        current_reviewers = PullRequestReviewers.get_pull_request_reviewers(
            pull_request.pull_request_id, role=PullRequestReviewers.ROLE_REVIEWER)

        current_reviewers_ids = set([x.user.user_id for x in current_reviewers])

        ids_to_add = reviewers_ids.difference(current_reviewers_ids)
        ids_to_remove = current_reviewers_ids.difference(reviewers_ids)

        log.debug("Adding %s reviewers", ids_to_add)
        log.debug("Removing %s reviewers", ids_to_remove)
        changed = False
        added_audit_reviewers = []
        removed_audit_reviewers = []

        for uid in ids_to_add:
            changed = True
            _usr = self._get_user(uid)
            reviewer = PullRequestReviewers()
            reviewer.user = _usr
            reviewer.pull_request = pull_request
            reviewer.reasons = reviewers[uid]['reasons']
            # NOTE(marcink): mandatory shouldn't be changed now
            # reviewer.mandatory = reviewers[uid]['reasons']
            # NOTE(marcink): role should be hardcoded, so we won't edit it.
            reviewer.role = PullRequestReviewers.ROLE_REVIEWER
            Session().add(reviewer)
            added_audit_reviewers.append(reviewer.get_dict())

        for uid in ids_to_remove:
            changed = True
            # NOTE(marcink): we fetch ALL reviewer objects using .all();
            # this is an edge case that handles a previous state of having
            # the same reviewer twice, which CAN happen due to the lack of DB checks
            reviewers = PullRequestReviewers.query()\
                .filter(PullRequestReviewers.user_id == uid,
                        PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER,
                        PullRequestReviewers.pull_request == pull_request)\
                .all()

            for obj in reviewers:
                removed_audit_reviewers.append(obj.get_dict())
                Session().delete(obj)

        if changed:
            Session().expire_all()
            pull_request.updated_on = datetime.datetime.now()
            Session().add(pull_request)

        # finally store audit logs
        for user_data in added_audit_reviewers:
            self._log_audit_action(
                'repo.pull_request.reviewer.add', {'data': user_data},
                user, pull_request)
        for user_data in removed_audit_reviewers:
            self._log_audit_action(
                'repo.pull_request.reviewer.delete', {'old_data': user_data},
                user, pull_request)

        self.notify_reviewers(pull_request, ids_to_add, user)
        return ids_to_add, ids_to_remove
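
    # Usage sketch (assuming `model` is an instance of this model class,
    # `pr` a PullRequest and `cur_user` the acting user):
    #   added, removed = model.update_reviewers(
    #       pr, [(user_id, ['added manually'], False,
    #             PullRequestReviewers.ROLE_REVIEWER, [])], cur_user)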
1494
1494
1495 def update_observers(self, pull_request, observer_data, user):
1495 def update_observers(self, pull_request, observer_data, user):
1496 """
1496 """
1497 Update the observers in the pull request
1497 Update the observers in the pull request
1498
1498
1499 :param pull_request: the pr to update
1499 :param pull_request: the pr to update
1500 :param observer_data: list of tuples
1500 :param observer_data: list of tuples
1501 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1501 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1502 :param user: current use who triggers this action
1502 :param user: current use who triggers this action
1503 """
1503 """
1504 pull_request = self.__get_pull_request(pull_request)
1504 pull_request = self.__get_pull_request(pull_request)
1505 if pull_request.is_closed():
1505 if pull_request.is_closed():
1506 raise ValueError('This pull request is closed')
1506 raise ValueError('This pull request is closed')
1507
1507
1508 observers = {}
1508 observers = {}
1509 for user_id, reasons, mandatory, role, rules in observer_data:
1509 for user_id, reasons, mandatory, role, rules in observer_data:
1510 if isinstance(user_id, (int, str)):
1510 if isinstance(user_id, (int, str)):
1511 user_id = self._get_user(user_id).user_id
1511 user_id = self._get_user(user_id).user_id
1512 observers[user_id] = {
1512 observers[user_id] = {
1513 'reasons': reasons, 'observers': mandatory, 'role': role}
1513 'reasons': reasons, 'observers': mandatory, 'role': role}
1514
1514
        observers_ids = set(observers.keys())
        current_observers = PullRequestReviewers.get_pull_request_reviewers(
            pull_request.pull_request_id, role=PullRequestReviewers.ROLE_OBSERVER)

        current_observers_ids = set([x.user.user_id for x in current_observers])

        ids_to_add = observers_ids.difference(current_observers_ids)
        ids_to_remove = current_observers_ids.difference(observers_ids)

        log.debug("Adding %s observers", ids_to_add)
        log.debug("Removing %s observers", ids_to_remove)
        changed = False
        added_audit_observers = []
        removed_audit_observers = []

        for uid in ids_to_add:
            changed = True
            _usr = self._get_user(uid)
            observer = PullRequestReviewers()
            observer.user = _usr
            observer.pull_request = pull_request
            observer.reasons = observers[uid]['reasons']
            # NOTE(marcink): mandatory shouldn't be changed now
            # observer.mandatory = observers[uid]['mandatory']

            # NOTE(marcink): role should be hardcoded, so we won't edit it.
            observer.role = PullRequestReviewers.ROLE_OBSERVER
            Session().add(observer)
            added_audit_observers.append(observer.get_dict())

        for uid in ids_to_remove:
            changed = True
            # NOTE(marcink): we fetch "ALL" matching reviewer objects using
            # .all(). This is an edge case that handles a previous state of
            # having the same reviewer twice; this CAN happen due to the
            # lack of DB checks
            observers_to_delete = PullRequestReviewers.query()\
                .filter(PullRequestReviewers.user_id == uid,
                        PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER,
                        PullRequestReviewers.pull_request == pull_request)\
                .all()

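            # collect the deleted rows so they feed the delete audit log below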
            for obj in observers_to_delete:
                removed_audit_observers.append(obj.get_dict())
                Session().delete(obj)

        if changed:
            Session().expire_all()
            pull_request.updated_on = datetime.datetime.now()
            Session().add(pull_request)

        # finally store audit logs
        for user_data in added_audit_observers:
            self._log_audit_action(
                'repo.pull_request.observer.add', {'data': user_data},
                user, pull_request)
        for user_data in removed_audit_observers:
            self._log_audit_action(
                'repo.pull_request.observer.delete', {'old_data': user_data},
                user, pull_request)

        self.notify_observers(pull_request, ids_to_add, user)
        return ids_to_add, ids_to_remove

    def get_url(self, pull_request, request=None, permalink=False):
        if not request:
            request = get_current_request()

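        # permalinks resolve by pull request id alone, so they keep working
        # even if the target repository is renamed; the default route embeds
        # the target repository name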
        if permalink:
            return request.route_url(
                'pull_requests_global',
                pull_request_id=pull_request.pull_request_id,)
        else:
            return request.route_url('pullrequest_show',
                repo_name=safe_str(pull_request.target_repo.repo_name),
                pull_request_id=pull_request.pull_request_id,)

    def get_shadow_clone_url(self, pull_request, request=None):
        """
        Returns a qualified url pointing to the shadow repository. If this
        pull request is closed there is no shadow repository and ``None``
        will be returned.
        """
        if pull_request.is_closed():
            return None
        else:
            pr_url = urllib.parse.unquote(self.get_url(pull_request, request=request))
            return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))

    def _notify_reviewers(self, pull_request, user_ids, role, user):
        # notification to reviewers/observers
        if not user_ids:
            return

        log.debug('Notify following %s users about pull-request %s', role, user_ids)

        pull_request_obj = pull_request
        # get the current participants of this pull request
        recipients = user_ids
        notification_type = EmailNotificationModel.TYPE_PULL_REQUEST

        pr_source_repo = pull_request_obj.source_repo
        pr_target_repo = pull_request_obj.target_repo

        pr_url = h.route_url('pullrequest_show',
                             repo_name=pr_target_repo.repo_name,
                             pull_request_id=pull_request_obj.pull_request_id,)

        # set some variables for email notification
        pr_target_repo_url = h.route_url(
            'repo_summary', repo_name=pr_target_repo.repo_name)

        pr_source_repo_url = h.route_url(
            'repo_summary', repo_name=pr_source_repo.repo_name)

        # pull request specifics
        pull_request_commits = [
            (x.raw_id, x.message)
            for x in map(pr_source_repo.get_commit, pull_request.revisions)]

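        # template context shared by the in-app notification and the email;
        # thread_ids is presumably what lets mail clients group messages
        # about the same pull request into a single thread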
        current_rhodecode_user = user
        kwargs = {
            'user': current_rhodecode_user,
            'pull_request_author': pull_request.author,
            'pull_request': pull_request_obj,
            'pull_request_commits': pull_request_commits,

            'pull_request_target_repo': pr_target_repo,
            'pull_request_target_repo_url': pr_target_repo_url,

            'pull_request_source_repo': pr_source_repo,
            'pull_request_source_repo_url': pr_source_repo_url,

            'pull_request_url': pr_url,
            'thread_ids': [pr_url],
            'user_role': role
        }

        # create notification objects, and emails
        NotificationModel().create(
            created_by=current_rhodecode_user,
            notification_subject='',  # Filled in based on the notification_type
            notification_body='',  # Filled in based on the notification_type
            notification_type=notification_type,
            recipients=recipients,
            email_kwargs=kwargs,
        )

    def notify_reviewers(self, pull_request, reviewers_ids, user):
        return self._notify_reviewers(pull_request, reviewers_ids,
                                      PullRequestReviewers.ROLE_REVIEWER, user)

    def notify_observers(self, pull_request, observers_ids, user):
        return self._notify_reviewers(pull_request, observers_ids,
                                      PullRequestReviewers.ROLE_OBSERVER, user)

    def notify_users(self, pull_request, updating_user, ancestor_commit_id,
                     commit_changes, file_changes):

        updating_user_id = updating_user.user_id
        reviewers = set([x.user.user_id for x in pull_request.get_pull_request_reviewers()])
        # NOTE(marcink): send notification to all other users except to
        # person who updated the PR
        recipients = reviewers.difference(set([updating_user_id]))

        log.debug('Notify following recipients about pull-request update %s', recipients)

        pull_request_obj = pull_request

        # send email about the update
        changed_files = (
            file_changes.added + file_changes.modified + file_changes.removed)

        pr_source_repo = pull_request_obj.source_repo
        pr_target_repo = pull_request_obj.target_repo

        pr_url = h.route_url('pullrequest_show',
                             repo_name=pr_target_repo.repo_name,
                             pull_request_id=pull_request_obj.pull_request_id,)

        # set some variables for email notification
        pr_target_repo_url = h.route_url(
            'repo_summary', repo_name=pr_target_repo.repo_name)

        pr_source_repo_url = h.route_url(
            'repo_summary', repo_name=pr_source_repo.repo_name)

        email_kwargs = {
            'date': datetime.datetime.now(),
            'updating_user': updating_user,

            'pull_request': pull_request_obj,

            'pull_request_target_repo': pr_target_repo,
            'pull_request_target_repo_url': pr_target_repo_url,

            'pull_request_source_repo': pr_source_repo,
            'pull_request_source_repo_url': pr_source_repo_url,

            'pull_request_url': pr_url,

            'ancestor_commit_id': ancestor_commit_id,
            'added_commits': commit_changes.added,
            'removed_commits': commit_changes.removed,
            'changed_files': changed_files,
            'added_files': file_changes.added,
            'modified_files': file_changes.modified,
            'removed_files': file_changes.removed,
            'thread_ids': [pr_url],
        }

        # create notification objects, and emails
        NotificationModel().create(
            created_by=updating_user,
            notification_subject='',  # Filled in based on the notification_type
            notification_body='',  # Filled in based on the notification_type
            notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE,
            recipients=recipients,
            email_kwargs=email_kwargs,
        )

    def delete(self, pull_request, user=None):
        if not user:
            user = getattr(get_current_rhodecode_user(), 'username', None)

        pull_request = self.__get_pull_request(pull_request)
        old_data = pull_request.get_api_data(with_merge_state=False)
        self._cleanup_merge_workspace(pull_request)
        self._log_audit_action(
            'repo.pull_request.delete', {'old_data': old_data},
            user, pull_request)
        Session().delete(pull_request)

    def close_pull_request(self, pull_request, user):
        pull_request = self.__get_pull_request(pull_request)
        self._cleanup_merge_workspace(pull_request)
        pull_request.status = PullRequest.STATUS_CLOSED
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
        self.trigger_pull_request_hook(pull_request, pull_request.author, 'close')

        pr_data = pull_request.get_api_data(with_merge_state=False)
        self._log_audit_action(
            'repo.pull_request.close', {'data': pr_data}, user, pull_request)

    def close_pull_request_with_comment(
            self, pull_request, user, repo, message=None, auth_user=None):

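        # the closing status is derived from the current review state: an
        # approved pull request closes as approved, anything else closes
        # as rejected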
        pull_request_review_status = pull_request.calculated_review_status()

        if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
            # approved only if we have voting consent
            status = ChangesetStatus.STATUS_APPROVED
        else:
            status = ChangesetStatus.STATUS_REJECTED
        status_lbl = ChangesetStatus.get_status_lbl(status)

        default_message = (
            'Closing with status change {transition_icon} {status}.'
        ).format(transition_icon='>', status=status_lbl)
        text = message or default_message

        # create a comment, and link it to new status
        comment = CommentsModel().create(
            text=text,
            repo=repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            status_change=status_lbl,
            status_change_type=status,
            closing_pr=True,
            auth_user=auth_user,
        )

        # calculate old status before we change it
        old_calculated_status = pull_request.calculated_review_status()
        ChangesetStatusModel().set_status(
            repo.repo_id,
            status,
            user.user_id,
            comment=comment,
            pull_request=pull_request.pull_request_id
        )

        Session().flush()

        self.trigger_pull_request_hook(pull_request, user, 'comment',
                                       data={'comment': comment})

        # we now calculate the status of the pull request again, and based on
        # that calculation trigger a status change. This might happen in cases
        # where a non-reviewer admin closes a pull request: their vote doesn't
        # change the status, while a reviewer's vote might.
        calculated_status = pull_request.calculated_review_status()
        if old_calculated_status != calculated_status:
            self.trigger_pull_request_hook(pull_request, user, 'review_status_change',
                                           data={'status': calculated_status})

        # finally close the PR
        PullRequestModel().close_pull_request(pull_request.pull_request_id, user)

        return comment, status

    def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False):
        _ = translator or get_current_request().translate

        if not self._is_merge_enabled(pull_request):
            return None, False, _('Server-side pull request merging is disabled.')

        if pull_request.is_closed():
            return None, False, _('This pull request is closed.')

        merge_possible, msg = self._check_repo_requirements(
            target=pull_request.target_repo, source=pull_request.source_repo,
            translator=_)
        if not merge_possible:
            return None, merge_possible, msg

        try:
            merge_response = self._try_merge(
                pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh)
            log.debug("Merge response: %s", merge_response)
            return merge_response, merge_response.possible, merge_response.merge_status_message
        except NotImplementedError:
            return None, False, _('Pull request merging is not supported.')

    def _check_repo_requirements(self, target, source, translator):
        """
        Check if `target` and `source` have compatible requirements.

        Currently this is just checking for largefiles.
        """
        _ = translator
        target_has_largefiles = self._has_largefiles(target)
        source_has_largefiles = self._has_largefiles(source)
        merge_possible = True
        message = u''

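        # refuse to merge when only one side has largefiles enabled; the
        # message names whichever side has the support disabled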
        if target_has_largefiles != source_has_largefiles:
            merge_possible = False
            if source_has_largefiles:
                message = _(
                    'Target repository large files support is disabled.')
            else:
                message = _(
                    'Source repository large files support is disabled.')

        return merge_possible, message

    def _has_largefiles(self, repo):
        largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
            'extensions', 'largefiles')
        return largefiles_ui and largefiles_ui[0].active

    def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
        """
        Try to merge the pull request and return the merge status.
        """
        log.debug(
            "Checking if pull request %s can be merged. force_refresh=%s",
            pull_request.pull_request_id, force_shadow_repo_refresh)
        target_vcs = pull_request.target_repo.scm_instance()
        # Refresh the target reference.
        try:
            target_ref = self._refresh_reference(
                pull_request.target_ref_parts, target_vcs)
        except CommitDoesNotExistError:
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF,
                metadata={'target_ref': pull_request.target_ref_parts})
            return merge_state

        target_locked = pull_request.target_repo.locked
        if target_locked and target_locked[0]:
            locked_by = 'user:{}'.format(target_locked[0])
            log.debug("The target repository is locked by %s.", locked_by)
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
                metadata={'locked_by': locked_by})
        elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
                pull_request, target_ref):
            log.debug("Refreshing the merge status of the repository.")
            merge_state = self._refresh_merge_state(
                pull_request, target_vcs, target_ref)
        else:
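            # no refresh needed: reuse the merge state persisted on the
            # pull request from the last dry-run evaluation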
            possible = pull_request.last_merge_status == MergeFailureReason.NONE
            metadata = {
                'unresolved_files': '',
                'target_ref': pull_request.target_ref_parts,
                'source_ref': pull_request.source_ref_parts,
            }
            if pull_request.last_merge_metadata:
                metadata.update(pull_request.last_merge_metadata_parsed)

            if not possible and target_ref.type == 'branch':
                # NOTE(marcink): case for mercurial multiple heads on branch
                heads = target_vcs._heads(target_ref.name)
                if len(heads) != 1:
                    heads = ',\n'.join(heads)
                    metadata.update({
                        'heads': heads
                    })

            merge_state = MergeResponse(
                possible, False, None, pull_request.last_merge_status, metadata=metadata)

        return merge_state

    def _refresh_reference(self, reference, vcs_repository):
        if reference.type in self.UPDATABLE_REF_TYPES:
            name_or_id = reference.name
        else:
            name_or_id = reference.commit_id

        refreshed_commit = vcs_repository.get_commit(name_or_id)
        refreshed_reference = Reference(
            reference.type, reference.name, refreshed_commit.raw_id)
        return refreshed_reference

    def _needs_merge_state_refresh(self, pull_request, target_reference):
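        # a refresh is needed whenever either side moved since the last
        # dry-run merge, i.e. the stored source/target revisions no longer
        # match the current ones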
        return not (
            pull_request.revisions and
            pull_request.revisions[0] == pull_request._last_merge_source_rev and
            target_reference.commit_id == pull_request._last_merge_target_rev)

    def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
        workspace_id = self._workspace_id(pull_request)
        source_vcs = pull_request.source_repo.scm_instance()
        repo_id = pull_request.target_repo.repo_id
        use_rebase = self._use_rebase_for_merging(pull_request)
        close_branch = self._close_branch_before_merging(pull_request)
        merge_state = target_vcs.merge(
            repo_id, workspace_id,
            target_reference, source_vcs, pull_request.source_ref_parts,
            dry_run=True, use_rebase=use_rebase,
            close_branch=close_branch)

        # Do not store the response if there was an unknown error.
        if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
            pull_request._last_merge_source_rev = \
                pull_request.source_ref_parts.commit_id
            pull_request._last_merge_target_rev = target_reference.commit_id
            pull_request.last_merge_status = merge_state.failure_reason
            pull_request.last_merge_metadata = merge_state.metadata

            pull_request.shadow_merge_ref = merge_state.merge_ref
            Session().add(pull_request)
            Session().commit()

        return merge_state

    def _workspace_id(self, pull_request):
        workspace_id = 'pr-%s' % pull_request.pull_request_id
        return workspace_id

    def generate_repo_data(self, repo, commit_id=None, branch=None,
                           bookmark=None, translator=None):
        from rhodecode.model.repo import RepoModel

        all_refs, selected_ref = \
            self._get_repo_pullrequest_sources(
                repo.scm_instance(), commit_id=commit_id,
                branch=branch, bookmark=bookmark, translator=translator)

        refs_select2 = []
        for element in all_refs:
            children = [{'id': x[0], 'text': x[1]} for x in element[0]]
            refs_select2.append({'text': element[1], 'children': children})

        return {
            'user': {
                'user_id': repo.user.user_id,
                'username': repo.user.username,
                'firstname': repo.user.first_name,
                'lastname': repo.user.last_name,
                'gravatar_link': h.gravatar_url(repo.user.email, 14),
            },
            'name': repo.repo_name,
            'link': RepoModel().get_url(repo),
            'description': h.chop_at_smart(repo.description_safe, '\n'),
            'refs': {
                'all_refs': all_refs,
                'selected_ref': selected_ref,
                'select2_refs': refs_select2
            }
        }

    def generate_pullrequest_title(self, source, source_ref, target):
        return u'{source}#{at_ref} to {target}'.format(
            source=source,
            at_ref=source_ref,
            target=target,
        )
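        # e.g. generate_pullrequest_title('repo-a', 'feature-x', 'repo-b')
        # produces u'repo-a#feature-x to repo-b'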

    def _cleanup_merge_workspace(self, pull_request):
        # Merging related cleanup
        repo_id = pull_request.target_repo.repo_id
        target_scm = pull_request.target_repo.scm_instance()
        workspace_id = self._workspace_id(pull_request)

        try:
            target_scm.cleanup_merge_workspace(repo_id, workspace_id)
        except NotImplementedError:
            pass

    def _get_repo_pullrequest_sources(
            self, repo, commit_id=None, branch=None, bookmark=None,
            translator=None):
        """
        Return a structure with repo's interesting commits, suitable for
        the selectors in pullrequest controller

        :param commit_id: a commit that must be in the list somehow
            and selected by default
        :param branch: a branch that must be in the list and selected
            by default - even if closed
        :param bookmark: a bookmark that must be in the list and selected
        """
        _ = translator or get_current_request().translate

        commit_id = safe_str(commit_id) if commit_id else None
        branch = safe_unicode(branch) if branch else None
        bookmark = safe_unicode(bookmark) if bookmark else None

        selected = None

        # order matters: first source that has commit_id in it will be selected
        sources = []
        sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
        sources.append(('branch', repo.branches.items(), _('Branches'), branch))

        if commit_id:
            ref_commit = (h.short_id(commit_id), commit_id)
            sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))

        sources.append(
            ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
        )

        groups = []

        for group_key, ref_list, group_name, match in sources:
            group_refs = []
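            # refs are encoded as '<type>:<name>:<commit_id>' so the selected
            # value can later be split back into its parts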
            for ref_name, ref_id in ref_list:
                ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
                group_refs.append((ref_key, ref_name))

                if not selected:
                    if set([commit_id, match]) & set([ref_id, ref_name]):
                        selected = ref_key

            if group_refs:
                groups.append((group_refs, group_name))

        if not selected:
            ref = commit_id or branch or bookmark
            if ref:
                raise CommitDoesNotExistError(
                    u'No commit refs could be found matching: {}'.format(ref))
            elif repo.DEFAULT_BRANCH_NAME in repo.branches:
                selected = u'branch:{}:{}'.format(
                    safe_unicode(repo.DEFAULT_BRANCH_NAME),
                    safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
                )
            elif repo.commit_ids:
                # make the user select in this case
                selected = None
            else:
                raise EmptyRepositoryError()
        return groups, selected

    def get_diff(self, source_repo, source_ref_id, target_ref_id,
                 hide_whitespace_changes, diff_context):

        return self._get_diff_from_pr_or_version(
            source_repo, source_ref_id, target_ref_id,
            hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)

    def _get_diff_from_pr_or_version(
            self, source_repo, source_ref_id, target_ref_id,
            hide_whitespace_changes, diff_context):

        target_commit = source_repo.get_commit(
            commit_id=safe_str(target_ref_id))
        source_commit = source_repo.get_commit(
            commit_id=safe_str(source_ref_id), maybe_unreachable=True)
        if isinstance(source_repo, Repository):
            vcs_repo = source_repo.scm_instance()
        else:
            vcs_repo = source_repo

        # TODO: johbo: In the context of an update, we cannot reach
        # the old commit anymore with our normal mechanisms. It needs
        # some sort of special support in the vcs layer to avoid this
        # workaround.
        if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
                vcs_repo.alias == 'git'):
            source_commit.raw_id = safe_str(source_ref_id)

        log.debug('calculating diff between '
                  'source_ref:%s and target_ref:%s for repo `%s`',
                  source_ref_id, target_ref_id,
                  safe_unicode(vcs_repo.path))

        vcs_diff = vcs_repo.get_diff(
            commit1=target_commit, commit2=source_commit,
            ignore_whitespace=hide_whitespace_changes, context=diff_context)
        return vcs_diff

    def _is_merge_enabled(self, pull_request):
        return self._get_general_setting(
            pull_request, 'rhodecode_pr_merge_enabled')

    def _use_rebase_for_merging(self, pull_request):
        repo_type = pull_request.target_repo.repo_type
        if repo_type == 'hg':
            return self._get_general_setting(
                pull_request, 'rhodecode_hg_use_rebase_for_merging')
        elif repo_type == 'git':
            return self._get_general_setting(
                pull_request, 'rhodecode_git_use_rebase_for_merging')

        return False

    def _user_name_for_merging(self, pull_request, user):
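        # which user attribute supplies the merge author name can be
        # overridden via the RC_MERGE_USER_NAME_ATTR environment variable;
        # otherwise the user's short contact string is used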
        env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
        if env_user_name_attr and hasattr(user, env_user_name_attr):
            user_name_attr = env_user_name_attr
        else:
            user_name_attr = 'short_contact'

        user_name = getattr(user, user_name_attr)
        return user_name

    def _close_branch_before_merging(self, pull_request):
        repo_type = pull_request.target_repo.repo_type
        if repo_type == 'hg':
            return self._get_general_setting(
                pull_request, 'rhodecode_hg_close_branch_before_merging')
        elif repo_type == 'git':
            return self._get_general_setting(
                pull_request, 'rhodecode_git_close_branch_before_merging')

        return False

    def _get_general_setting(self, pull_request, settings_key, default=False):
        settings_model = VcsSettingsModel(repo=pull_request.target_repo)
        settings = settings_model.get_general_settings()
        return settings.get(settings_key, default)

    def _log_audit_action(self, action, action_data, user, pull_request):
        audit_logger.store(
            action=action,
            action_data=action_data,
            user=user,
            repo=pull_request.target_repo)

    def get_reviewer_functions(self):
        """
        Fetches functions for validation and fetching default reviewers.
        If available we use the EE package, else we fall back to the CE
        package functions.
        """
        try:
            from rc_reviewers.utils import get_default_reviewers_data
            from rc_reviewers.utils import validate_default_reviewers
            from rc_reviewers.utils import validate_observers
        except ImportError:
            from rhodecode.apps.repository.utils import get_default_reviewers_data
            from rhodecode.apps.repository.utils import validate_default_reviewers
            from rhodecode.apps.repository.utils import validate_observers

        return get_default_reviewers_data, validate_default_reviewers, validate_observers


class MergeCheck(object):
    """
    Performs merge checks and returns a check object which stores
    information about merge errors and merge conditions.
    """
    TODO_CHECK = 'todo'
    PERM_CHECK = 'perm'
    REVIEW_CHECK = 'review'
    MERGE_CHECK = 'merge'
    WIP_CHECK = 'wip'

    def __init__(self):
        self.review_status = None
        self.merge_possible = None
        self.merge_msg = ''
        self.merge_response = None
        self.failed = None
        self.errors = []
        self.error_details = OrderedDict()
        self.source_commit = AttributeDict()
        self.target_commit = AttributeDict()
        self.reviewers_count = 0
        self.observers_count = 0

    def __repr__(self):
        return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format(
            self.merge_possible, self.failed, self.errors)

    def push_error(self, error_type, message, error_key, details):
        self.failed = True
        self.errors.append([error_type, message])
        self.error_details[error_key] = dict(
            details=details,
            error_type=error_type,
            message=message
        )

    @classmethod
    def validate(cls, pull_request, auth_user, translator, fail_early=False,
                 force_shadow_repo_refresh=False):
        _ = translator
        merge_check = cls()

        # title has WIP:
        if pull_request.work_in_progress:
            log.debug("MergeCheck: cannot merge, title has wip: marker.")

            msg = _('WIP marker in title prevents an accidental merge.')
            merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
            if fail_early:
                return merge_check

        # permissions to merge
        user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user)
        if not user_allowed_to_merge:
            log.debug("MergeCheck: cannot merge, user is not allowed to merge.")

            msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
            merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
            if fail_early:
                return merge_check

        # permission to merge into the target branch
        target_commit_id = pull_request.target_ref_parts.commit_id
        if pull_request.target_ref_parts.type == 'branch':
            branch_name = pull_request.target_ref_parts.name
        else:
            # for mercurial we can always figure out the branch from the
            # commit in case of a bookmark
            target_commit = pull_request.target_repo.get_commit(target_commit_id)
            branch_name = target_commit.branch

        rule, branch_perm = auth_user.get_rule_and_branch_permission(
            pull_request.target_repo.repo_name, branch_name)
        if branch_perm and branch_perm == 'branch.none':
            msg = _('Target branch `{}` changes rejected by rule {}.').format(
                branch_name, rule)
            merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
            if fail_early:
                return merge_check

        # review status, must always be present
        review_status = pull_request.calculated_review_status()
        merge_check.review_status = review_status
        merge_check.reviewers_count = pull_request.reviewers_count
        merge_check.observers_count = pull_request.observers_count

        status_approved = review_status == ChangesetStatus.STATUS_APPROVED
        if not status_approved and merge_check.reviewers_count:
            log.debug("MergeCheck: cannot merge, approval is pending.")
            msg = _('Pull request reviewer approval is pending.')

            merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)

            if fail_early:
                return merge_check

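        # unresolved TODO comments block the merge with a warning; the
        # message is singular/plural aware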
        # leftover TODOs
        todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
        if todos:
            log.debug(
                "MergeCheck: cannot merge, %s unresolved TODOs left.", len(todos))

            if len(todos) == 1:
                msg = _('Cannot merge, {} TODO still not resolved.').format(
                    len(todos))
            else:
                msg = _('Cannot merge, {} TODOs still not resolved.').format(
                    len(todos))

            merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)

            if fail_early:
                return merge_check

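        # NOTE: the singular/plural message is selected by hand above so that
        # each variant stays an independently translatable string. A hedged
        # alternative, assuming a `_pluralize`-style gettext helper were
        # available here (it is not used in this code):
        #
        #   msg = _pluralize('Cannot merge, {} TODO still not resolved.',
        #                    'Cannot merge, {} TODOs still not resolved.',
        #                    len(todos)).format(len(todos))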
        # merge is possible: run the filesystem simulation against the shadow repo
        merge_response, merge_status, msg = PullRequestModel().merge_status(
            pull_request, translator=translator,
            force_shadow_repo_refresh=force_shadow_repo_refresh)

        merge_check.merge_possible = merge_status
        merge_check.merge_msg = msg
        merge_check.merge_response = merge_response

        source_ref_id = pull_request.source_ref_parts.commit_id
        target_ref_id = pull_request.target_ref_parts.commit_id

        try:
            source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request)
            merge_check.source_commit.changed = source_ref_id != source_commit.raw_id
            merge_check.source_commit.ref_spec = pull_request.source_ref_parts
            merge_check.source_commit.current_raw_id = source_commit.raw_id
            merge_check.source_commit.previous_raw_id = source_ref_id

            merge_check.target_commit.changed = target_ref_id != target_commit.raw_id
            merge_check.target_commit.ref_spec = pull_request.target_ref_parts
            merge_check.target_commit.current_raw_id = target_commit.raw_id
            merge_check.target_commit.previous_raw_id = target_ref_id
        except (SourceRefMissing, TargetRefMissing):
            pass

        if not merge_status:
            log.debug("MergeCheck: cannot merge, pull request merge not possible.")
            merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)

            if fail_early:
                return merge_check

        log.debug('MergeCheck: is failed: %s', merge_check.failed)
        return merge_check

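        # NOTE: `merge_status` performs the actual merge dry-run against the
        # shadow repository, while the `get_flow_commits` block above records
        # ref drift: `changed` is True when a ref now resolves to a different
        # commit than the one stored on the pull request, i.e. the source or
        # target moved since the pull request was last updated.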
    @classmethod
    def get_merge_conditions(cls, pull_request, translator):
        _ = translator
        merge_details = {}

        model = PullRequestModel()
        use_rebase = model._use_rebase_for_merging(pull_request)

        if use_rebase:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: rebase')
            )
        else:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: explicit merge commit')
            )

        close_branch = model._close_branch_before_merging(pull_request)
        if close_branch:
            repo_type = pull_request.target_repo.repo_type
            close_msg = ''
            if repo_type == 'hg':
                close_msg = _('Source branch will be closed before the merge.')
            elif repo_type == 'git':
                close_msg = _('Source branch will be deleted after the merge.')

            merge_details['close_branch'] = dict(
                details={},
                message=close_msg
            )

        return merge_details


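# NOTE: an illustrative sketch, assuming an hg pull request configured for
# rebase and close-branch with an English translator, of the structure that
# `get_merge_conditions` above returns:
#
#   {'merge_strategy': {'details': {}, 'message': 'Merge strategy: rebase'},
#    'close_branch': {'details': {},
#                     'message': 'Source branch will be closed before the merge.'}}
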
ChangeTuple = collections.namedtuple(
    'ChangeTuple', ['added', 'common', 'removed', 'total'])

FileChangeTuple = collections.namedtuple(
    'FileChangeTuple', ['added', 'modified', 'removed'])
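
# NOTE: judging by their fields, these tuples summarize commit-level and
# file-level differences between pull request versions; a hypothetical,
# illustrative use with made-up values:
#
#   changes = ChangeTuple(added=['deadbee'], common=[], removed=[], total=1)
#   file_changes = FileChangeTuple(added=['new.py'], modified=['setup.py'],
#                                  removed=[])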