##// END OF EJS Templates
pull-requests: fixed case for GIT repositories when a merge check failed due to merge conflicts the pull request wrongly reported missing commits....
marcink -
r4299:04e45b92 default
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -0,0 +1,52 b''
1 # -*- coding: utf-8 -*-
2
3 import logging
4 from sqlalchemy import *
5
6 from alembic.migration import MigrationContext
7 from alembic.operations import Operations
8 from sqlalchemy import BigInteger
9
10 from rhodecode.lib.dbmigrate.versions import _reset_base
11 from rhodecode.model import init_model_encryption
12
13
14 log = logging.getLogger(__name__)
15
16
17 def upgrade(migrate_engine):
18 """
19 Upgrade operations go here.
20 Don't create your own engine; bind migrate_engine to your metadata
21 """
22 _reset_base(migrate_engine)
23 from rhodecode.lib.dbmigrate.schema import db_4_18_0_1 as db
24
25 init_model_encryption(db)
26
27 context = MigrationContext.configure(migrate_engine.connect())
28 op = Operations(context)
29
30 pull_requests = db.PullRequest.__table__
31
32 with op.batch_alter_table(pull_requests.name) as batch_op:
33 new_column = Column(
34 'last_merge_metadata',
35 db.JsonType(dialect_map=dict(mysql=UnicodeText(16384))))
36 batch_op.add_column(new_column)
37
38 pull_request_version = db.PullRequestVersion.__table__
39 with op.batch_alter_table(pull_request_version.name) as batch_op:
40 new_column = Column(
41 'last_merge_metadata',
42 db.JsonType(dialect_map=dict(mysql=UnicodeText(16384))))
43 batch_op.add_column(new_column)
44
45
46 def downgrade(migrate_engine):
47 meta = MetaData()
48 meta.bind = migrate_engine
49
50
51 def fixups(models, _SESSION):
52 pass
@@ -1,57 +1,57 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22 import sys
22 import sys
23 import platform
23 import platform
24
24
25 VERSION = tuple(open(os.path.join(
25 VERSION = tuple(open(os.path.join(
26 os.path.dirname(__file__), 'VERSION')).read().split('.'))
26 os.path.dirname(__file__), 'VERSION')).read().split('.'))
27
27
28 BACKENDS = {
28 BACKENDS = {
29 'hg': 'Mercurial repository',
29 'hg': 'Mercurial repository',
30 'git': 'Git repository',
30 'git': 'Git repository',
31 'svn': 'Subversion repository',
31 'svn': 'Subversion repository',
32 }
32 }
33
33
34 CELERY_ENABLED = False
34 CELERY_ENABLED = False
35 CELERY_EAGER = False
35 CELERY_EAGER = False
36
36
37 # link to config for pyramid
37 # link to config for pyramid
38 CONFIG = {}
38 CONFIG = {}
39
39
40 # Populated with the settings dictionary from application init in
40 # Populated with the settings dictionary from application init in
41 # rhodecode.conf.environment.load_pyramid_environment
41 # rhodecode.conf.environment.load_pyramid_environment
42 PYRAMID_SETTINGS = {}
42 PYRAMID_SETTINGS = {}
43
43
44 # Linked module for extensions
44 # Linked module for extensions
45 EXTENSIONS = {}
45 EXTENSIONS = {}
46
46
47 __version__ = ('.'.join((str(each) for each in VERSION[:3])))
47 __version__ = ('.'.join((str(each) for each in VERSION[:3])))
48 __dbversion__ = 103 # defines current db version for migrations
48 __dbversion__ = 104 # defines current db version for migrations
49 __platform__ = platform.system()
49 __platform__ = platform.system()
50 __license__ = 'AGPLv3, and Commercial License'
50 __license__ = 'AGPLv3, and Commercial License'
51 __author__ = 'RhodeCode GmbH'
51 __author__ = 'RhodeCode GmbH'
52 __url__ = 'https://code.rhodecode.com'
52 __url__ = 'https://code.rhodecode.com'
53
53
54 is_windows = __platform__ in ['Windows']
54 is_windows = __platform__ in ['Windows']
55 is_unix = not is_windows
55 is_unix = not is_windows
56 is_test = False
56 is_test = False
57 disable_error_handler = False
57 disable_error_handler = False
@@ -1,1215 +1,1217 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 import mock
20 import mock
21 import pytest
21 import pytest
22
22
23 import rhodecode
23 import rhodecode
24 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
24 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
25 from rhodecode.lib.vcs.nodes import FileNode
25 from rhodecode.lib.vcs.nodes import FileNode
26 from rhodecode.lib import helpers as h
26 from rhodecode.lib import helpers as h
27 from rhodecode.model.changeset_status import ChangesetStatusModel
27 from rhodecode.model.changeset_status import ChangesetStatusModel
28 from rhodecode.model.db import (
28 from rhodecode.model.db import (
29 PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment, Repository)
29 PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment, Repository)
30 from rhodecode.model.meta import Session
30 from rhodecode.model.meta import Session
31 from rhodecode.model.pull_request import PullRequestModel
31 from rhodecode.model.pull_request import PullRequestModel
32 from rhodecode.model.user import UserModel
32 from rhodecode.model.user import UserModel
33 from rhodecode.tests import (
33 from rhodecode.tests import (
34 assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN)
34 assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN)
35
35
36
36
37 def route_path(name, params=None, **kwargs):
37 def route_path(name, params=None, **kwargs):
38 import urllib
38 import urllib
39
39
40 base_url = {
40 base_url = {
41 'repo_changelog': '/{repo_name}/changelog',
41 'repo_changelog': '/{repo_name}/changelog',
42 'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}',
42 'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}',
43 'repo_commits': '/{repo_name}/commits',
43 'repo_commits': '/{repo_name}/commits',
44 'repo_commits_file': '/{repo_name}/commits/{commit_id}/{f_path}',
44 'repo_commits_file': '/{repo_name}/commits/{commit_id}/{f_path}',
45 'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}',
45 'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}',
46 'pullrequest_show_all': '/{repo_name}/pull-request',
46 'pullrequest_show_all': '/{repo_name}/pull-request',
47 'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
47 'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
48 'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
48 'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
49 'pullrequest_repo_targets': '/{repo_name}/pull-request/repo-destinations',
49 'pullrequest_repo_targets': '/{repo_name}/pull-request/repo-destinations',
50 'pullrequest_new': '/{repo_name}/pull-request/new',
50 'pullrequest_new': '/{repo_name}/pull-request/new',
51 'pullrequest_create': '/{repo_name}/pull-request/create',
51 'pullrequest_create': '/{repo_name}/pull-request/create',
52 'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update',
52 'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update',
53 'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge',
53 'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge',
54 'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete',
54 'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete',
55 'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment',
55 'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment',
56 'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete',
56 'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete',
57 }[name].format(**kwargs)
57 }[name].format(**kwargs)
58
58
59 if params:
59 if params:
60 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
60 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
61 return base_url
61 return base_url
62
62
63
63
64 @pytest.mark.usefixtures('app', 'autologin_user')
64 @pytest.mark.usefixtures('app', 'autologin_user')
65 @pytest.mark.backends("git", "hg")
65 @pytest.mark.backends("git", "hg")
66 class TestPullrequestsView(object):
66 class TestPullrequestsView(object):
67
67
68 def test_index(self, backend):
68 def test_index(self, backend):
69 self.app.get(route_path(
69 self.app.get(route_path(
70 'pullrequest_new',
70 'pullrequest_new',
71 repo_name=backend.repo_name))
71 repo_name=backend.repo_name))
72
72
73 def test_option_menu_create_pull_request_exists(self, backend):
73 def test_option_menu_create_pull_request_exists(self, backend):
74 repo_name = backend.repo_name
74 repo_name = backend.repo_name
75 response = self.app.get(h.route_path('repo_summary', repo_name=repo_name))
75 response = self.app.get(h.route_path('repo_summary', repo_name=repo_name))
76
76
77 create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
77 create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
78 'pullrequest_new', repo_name=repo_name)
78 'pullrequest_new', repo_name=repo_name)
79 response.mustcontain(create_pr_link)
79 response.mustcontain(create_pr_link)
80
80
81 def test_create_pr_form_with_raw_commit_id(self, backend):
81 def test_create_pr_form_with_raw_commit_id(self, backend):
82 repo = backend.repo
82 repo = backend.repo
83
83
84 self.app.get(
84 self.app.get(
85 route_path('pullrequest_new', repo_name=repo.repo_name,
85 route_path('pullrequest_new', repo_name=repo.repo_name,
86 commit=repo.get_commit().raw_id),
86 commit=repo.get_commit().raw_id),
87 status=200)
87 status=200)
88
88
89 @pytest.mark.parametrize('pr_merge_enabled', [True, False])
89 @pytest.mark.parametrize('pr_merge_enabled', [True, False])
90 @pytest.mark.parametrize('range_diff', ["0", "1"])
90 @pytest.mark.parametrize('range_diff', ["0", "1"])
91 def test_show(self, pr_util, pr_merge_enabled, range_diff):
91 def test_show(self, pr_util, pr_merge_enabled, range_diff):
92 pull_request = pr_util.create_pull_request(
92 pull_request = pr_util.create_pull_request(
93 mergeable=pr_merge_enabled, enable_notifications=False)
93 mergeable=pr_merge_enabled, enable_notifications=False)
94
94
95 response = self.app.get(route_path(
95 response = self.app.get(route_path(
96 'pullrequest_show',
96 'pullrequest_show',
97 repo_name=pull_request.target_repo.scm_instance().name,
97 repo_name=pull_request.target_repo.scm_instance().name,
98 pull_request_id=pull_request.pull_request_id,
98 pull_request_id=pull_request.pull_request_id,
99 params={'range-diff': range_diff}))
99 params={'range-diff': range_diff}))
100
100
101 for commit_id in pull_request.revisions:
101 for commit_id in pull_request.revisions:
102 response.mustcontain(commit_id)
102 response.mustcontain(commit_id)
103
103
104 response.mustcontain(pull_request.target_ref_parts.type)
104 response.mustcontain(pull_request.target_ref_parts.type)
105 response.mustcontain(pull_request.target_ref_parts.name)
105 response.mustcontain(pull_request.target_ref_parts.name)
106
106
107 response.mustcontain('class="pull-request-merge"')
107 response.mustcontain('class="pull-request-merge"')
108
108
109 if pr_merge_enabled:
109 if pr_merge_enabled:
110 response.mustcontain('Pull request reviewer approval is pending')
110 response.mustcontain('Pull request reviewer approval is pending')
111 else:
111 else:
112 response.mustcontain('Server-side pull request merging is disabled.')
112 response.mustcontain('Server-side pull request merging is disabled.')
113
113
114 if range_diff == "1":
114 if range_diff == "1":
115 response.mustcontain('Turn off: Show the diff as commit range')
115 response.mustcontain('Turn off: Show the diff as commit range')
116
116
117 def test_close_status_visibility(self, pr_util, user_util, csrf_token):
117 def test_close_status_visibility(self, pr_util, user_util, csrf_token):
118 # Logout
118 # Logout
119 response = self.app.post(
119 response = self.app.post(
120 h.route_path('logout'),
120 h.route_path('logout'),
121 params={'csrf_token': csrf_token})
121 params={'csrf_token': csrf_token})
122 # Login as regular user
122 # Login as regular user
123 response = self.app.post(h.route_path('login'),
123 response = self.app.post(h.route_path('login'),
124 {'username': TEST_USER_REGULAR_LOGIN,
124 {'username': TEST_USER_REGULAR_LOGIN,
125 'password': 'test12'})
125 'password': 'test12'})
126
126
127 pull_request = pr_util.create_pull_request(
127 pull_request = pr_util.create_pull_request(
128 author=TEST_USER_REGULAR_LOGIN)
128 author=TEST_USER_REGULAR_LOGIN)
129
129
130 response = self.app.get(route_path(
130 response = self.app.get(route_path(
131 'pullrequest_show',
131 'pullrequest_show',
132 repo_name=pull_request.target_repo.scm_instance().name,
132 repo_name=pull_request.target_repo.scm_instance().name,
133 pull_request_id=pull_request.pull_request_id))
133 pull_request_id=pull_request.pull_request_id))
134
134
135 response.mustcontain('Server-side pull request merging is disabled.')
135 response.mustcontain('Server-side pull request merging is disabled.')
136
136
137 assert_response = response.assert_response()
137 assert_response = response.assert_response()
138 # for regular user without a merge permissions, we don't see it
138 # for regular user without a merge permissions, we don't see it
139 assert_response.no_element_exists('#close-pull-request-action')
139 assert_response.no_element_exists('#close-pull-request-action')
140
140
141 user_util.grant_user_permission_to_repo(
141 user_util.grant_user_permission_to_repo(
142 pull_request.target_repo,
142 pull_request.target_repo,
143 UserModel().get_by_username(TEST_USER_REGULAR_LOGIN),
143 UserModel().get_by_username(TEST_USER_REGULAR_LOGIN),
144 'repository.write')
144 'repository.write')
145 response = self.app.get(route_path(
145 response = self.app.get(route_path(
146 'pullrequest_show',
146 'pullrequest_show',
147 repo_name=pull_request.target_repo.scm_instance().name,
147 repo_name=pull_request.target_repo.scm_instance().name,
148 pull_request_id=pull_request.pull_request_id))
148 pull_request_id=pull_request.pull_request_id))
149
149
150 response.mustcontain('Server-side pull request merging is disabled.')
150 response.mustcontain('Server-side pull request merging is disabled.')
151
151
152 assert_response = response.assert_response()
152 assert_response = response.assert_response()
153 # now regular user has a merge permissions, we have CLOSE button
153 # now regular user has a merge permissions, we have CLOSE button
154 assert_response.one_element_exists('#close-pull-request-action')
154 assert_response.one_element_exists('#close-pull-request-action')
155
155
156 def test_show_invalid_commit_id(self, pr_util):
156 def test_show_invalid_commit_id(self, pr_util):
157 # Simulating invalid revisions which will cause a lookup error
157 # Simulating invalid revisions which will cause a lookup error
158 pull_request = pr_util.create_pull_request()
158 pull_request = pr_util.create_pull_request()
159 pull_request.revisions = ['invalid']
159 pull_request.revisions = ['invalid']
160 Session().add(pull_request)
160 Session().add(pull_request)
161 Session().commit()
161 Session().commit()
162
162
163 response = self.app.get(route_path(
163 response = self.app.get(route_path(
164 'pullrequest_show',
164 'pullrequest_show',
165 repo_name=pull_request.target_repo.scm_instance().name,
165 repo_name=pull_request.target_repo.scm_instance().name,
166 pull_request_id=pull_request.pull_request_id))
166 pull_request_id=pull_request.pull_request_id))
167
167
168 for commit_id in pull_request.revisions:
168 for commit_id in pull_request.revisions:
169 response.mustcontain(commit_id)
169 response.mustcontain(commit_id)
170
170
171 def test_show_invalid_source_reference(self, pr_util):
171 def test_show_invalid_source_reference(self, pr_util):
172 pull_request = pr_util.create_pull_request()
172 pull_request = pr_util.create_pull_request()
173 pull_request.source_ref = 'branch:b:invalid'
173 pull_request.source_ref = 'branch:b:invalid'
174 Session().add(pull_request)
174 Session().add(pull_request)
175 Session().commit()
175 Session().commit()
176
176
177 self.app.get(route_path(
177 self.app.get(route_path(
178 'pullrequest_show',
178 'pullrequest_show',
179 repo_name=pull_request.target_repo.scm_instance().name,
179 repo_name=pull_request.target_repo.scm_instance().name,
180 pull_request_id=pull_request.pull_request_id))
180 pull_request_id=pull_request.pull_request_id))
181
181
182 def test_edit_title_description(self, pr_util, csrf_token):
182 def test_edit_title_description(self, pr_util, csrf_token):
183 pull_request = pr_util.create_pull_request()
183 pull_request = pr_util.create_pull_request()
184 pull_request_id = pull_request.pull_request_id
184 pull_request_id = pull_request.pull_request_id
185
185
186 response = self.app.post(
186 response = self.app.post(
187 route_path('pullrequest_update',
187 route_path('pullrequest_update',
188 repo_name=pull_request.target_repo.repo_name,
188 repo_name=pull_request.target_repo.repo_name,
189 pull_request_id=pull_request_id),
189 pull_request_id=pull_request_id),
190 params={
190 params={
191 'edit_pull_request': 'true',
191 'edit_pull_request': 'true',
192 'title': 'New title',
192 'title': 'New title',
193 'description': 'New description',
193 'description': 'New description',
194 'csrf_token': csrf_token})
194 'csrf_token': csrf_token})
195
195
196 assert_session_flash(
196 assert_session_flash(
197 response, u'Pull request title & description updated.',
197 response, u'Pull request title & description updated.',
198 category='success')
198 category='success')
199
199
200 pull_request = PullRequest.get(pull_request_id)
200 pull_request = PullRequest.get(pull_request_id)
201 assert pull_request.title == 'New title'
201 assert pull_request.title == 'New title'
202 assert pull_request.description == 'New description'
202 assert pull_request.description == 'New description'
203
203
204 def test_edit_title_description_closed(self, pr_util, csrf_token):
204 def test_edit_title_description_closed(self, pr_util, csrf_token):
205 pull_request = pr_util.create_pull_request()
205 pull_request = pr_util.create_pull_request()
206 pull_request_id = pull_request.pull_request_id
206 pull_request_id = pull_request.pull_request_id
207 repo_name = pull_request.target_repo.repo_name
207 repo_name = pull_request.target_repo.repo_name
208 pr_util.close()
208 pr_util.close()
209
209
210 response = self.app.post(
210 response = self.app.post(
211 route_path('pullrequest_update',
211 route_path('pullrequest_update',
212 repo_name=repo_name, pull_request_id=pull_request_id),
212 repo_name=repo_name, pull_request_id=pull_request_id),
213 params={
213 params={
214 'edit_pull_request': 'true',
214 'edit_pull_request': 'true',
215 'title': 'New title',
215 'title': 'New title',
216 'description': 'New description',
216 'description': 'New description',
217 'csrf_token': csrf_token}, status=200)
217 'csrf_token': csrf_token}, status=200)
218 assert_session_flash(
218 assert_session_flash(
219 response, u'Cannot update closed pull requests.',
219 response, u'Cannot update closed pull requests.',
220 category='error')
220 category='error')
221
221
222 def test_update_invalid_source_reference(self, pr_util, csrf_token):
222 def test_update_invalid_source_reference(self, pr_util, csrf_token):
223 from rhodecode.lib.vcs.backends.base import UpdateFailureReason
223 from rhodecode.lib.vcs.backends.base import UpdateFailureReason
224
224
225 pull_request = pr_util.create_pull_request()
225 pull_request = pr_util.create_pull_request()
226 pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id'
226 pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id'
227 Session().add(pull_request)
227 Session().add(pull_request)
228 Session().commit()
228 Session().commit()
229
229
230 pull_request_id = pull_request.pull_request_id
230 pull_request_id = pull_request.pull_request_id
231
231
232 response = self.app.post(
232 response = self.app.post(
233 route_path('pullrequest_update',
233 route_path('pullrequest_update',
234 repo_name=pull_request.target_repo.repo_name,
234 repo_name=pull_request.target_repo.repo_name,
235 pull_request_id=pull_request_id),
235 pull_request_id=pull_request_id),
236 params={'update_commits': 'true', 'csrf_token': csrf_token})
236 params={'update_commits': 'true', 'csrf_token': csrf_token})
237
237
238 expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[
238 expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[
239 UpdateFailureReason.MISSING_SOURCE_REF])
239 UpdateFailureReason.MISSING_SOURCE_REF])
240 assert_session_flash(response, expected_msg, category='error')
240 assert_session_flash(response, expected_msg, category='error')
241
241
242 def test_missing_target_reference(self, pr_util, csrf_token):
242 def test_missing_target_reference(self, pr_util, csrf_token):
243 from rhodecode.lib.vcs.backends.base import MergeFailureReason
243 from rhodecode.lib.vcs.backends.base import MergeFailureReason
244 pull_request = pr_util.create_pull_request(
244 pull_request = pr_util.create_pull_request(
245 approved=True, mergeable=True)
245 approved=True, mergeable=True)
246 unicode_reference = u'branch:invalid-branch:invalid-commit-id'
246 unicode_reference = u'branch:invalid-branch:invalid-commit-id'
247 pull_request.target_ref = unicode_reference
247 pull_request.target_ref = unicode_reference
248 Session().add(pull_request)
248 Session().add(pull_request)
249 Session().commit()
249 Session().commit()
250
250
251 pull_request_id = pull_request.pull_request_id
251 pull_request_id = pull_request.pull_request_id
252 pull_request_url = route_path(
252 pull_request_url = route_path(
253 'pullrequest_show',
253 'pullrequest_show',
254 repo_name=pull_request.target_repo.repo_name,
254 repo_name=pull_request.target_repo.repo_name,
255 pull_request_id=pull_request_id)
255 pull_request_id=pull_request_id)
256
256
257 response = self.app.get(pull_request_url)
257 response = self.app.get(pull_request_url)
258 target_ref_id = 'invalid-branch'
258 target_ref_id = 'invalid-branch'
259 merge_resp = MergeResponse(
259 merge_resp = MergeResponse(
260 True, True, '', MergeFailureReason.MISSING_TARGET_REF,
260 True, True, '', MergeFailureReason.MISSING_TARGET_REF,
261 metadata={'target_ref': PullRequest.unicode_to_reference(unicode_reference)})
261 metadata={'target_ref': PullRequest.unicode_to_reference(unicode_reference)})
262 response.assert_response().element_contains(
262 response.assert_response().element_contains(
263 'div[data-role="merge-message"]', merge_resp.merge_status_message)
263 'div[data-role="merge-message"]', merge_resp.merge_status_message)
264
264
265 def test_comment_and_close_pull_request_custom_message_approved(
265 def test_comment_and_close_pull_request_custom_message_approved(
266 self, pr_util, csrf_token, xhr_header):
266 self, pr_util, csrf_token, xhr_header):
267
267
268 pull_request = pr_util.create_pull_request(approved=True)
268 pull_request = pr_util.create_pull_request(approved=True)
269 pull_request_id = pull_request.pull_request_id
269 pull_request_id = pull_request.pull_request_id
270 author = pull_request.user_id
270 author = pull_request.user_id
271 repo = pull_request.target_repo.repo_id
271 repo = pull_request.target_repo.repo_id
272
272
273 self.app.post(
273 self.app.post(
274 route_path('pullrequest_comment_create',
274 route_path('pullrequest_comment_create',
275 repo_name=pull_request.target_repo.scm_instance().name,
275 repo_name=pull_request.target_repo.scm_instance().name,
276 pull_request_id=pull_request_id),
276 pull_request_id=pull_request_id),
277 params={
277 params={
278 'close_pull_request': '1',
278 'close_pull_request': '1',
279 'text': 'Closing a PR',
279 'text': 'Closing a PR',
280 'csrf_token': csrf_token},
280 'csrf_token': csrf_token},
281 extra_environ=xhr_header,)
281 extra_environ=xhr_header,)
282
282
283 journal = UserLog.query()\
283 journal = UserLog.query()\
284 .filter(UserLog.user_id == author)\
284 .filter(UserLog.user_id == author)\
285 .filter(UserLog.repository_id == repo) \
285 .filter(UserLog.repository_id == repo) \
286 .order_by(UserLog.user_log_id.asc()) \
286 .order_by(UserLog.user_log_id.asc()) \
287 .all()
287 .all()
288 assert journal[-1].action == 'repo.pull_request.close'
288 assert journal[-1].action == 'repo.pull_request.close'
289
289
290 pull_request = PullRequest.get(pull_request_id)
290 pull_request = PullRequest.get(pull_request_id)
291 assert pull_request.is_closed()
291 assert pull_request.is_closed()
292
292
293 status = ChangesetStatusModel().get_status(
293 status = ChangesetStatusModel().get_status(
294 pull_request.source_repo, pull_request=pull_request)
294 pull_request.source_repo, pull_request=pull_request)
295 assert status == ChangesetStatus.STATUS_APPROVED
295 assert status == ChangesetStatus.STATUS_APPROVED
296 comments = ChangesetComment().query() \
296 comments = ChangesetComment().query() \
297 .filter(ChangesetComment.pull_request == pull_request) \
297 .filter(ChangesetComment.pull_request == pull_request) \
298 .order_by(ChangesetComment.comment_id.asc())\
298 .order_by(ChangesetComment.comment_id.asc())\
299 .all()
299 .all()
300 assert comments[-1].text == 'Closing a PR'
300 assert comments[-1].text == 'Closing a PR'
301
301
302 def test_comment_force_close_pull_request_rejected(
302 def test_comment_force_close_pull_request_rejected(
303 self, pr_util, csrf_token, xhr_header):
303 self, pr_util, csrf_token, xhr_header):
304 pull_request = pr_util.create_pull_request()
304 pull_request = pr_util.create_pull_request()
305 pull_request_id = pull_request.pull_request_id
305 pull_request_id = pull_request.pull_request_id
306 PullRequestModel().update_reviewers(
306 PullRequestModel().update_reviewers(
307 pull_request_id, [(1, ['reason'], False, []), (2, ['reason2'], False, [])],
307 pull_request_id, [(1, ['reason'], False, []), (2, ['reason2'], False, [])],
308 pull_request.author)
308 pull_request.author)
309 author = pull_request.user_id
309 author = pull_request.user_id
310 repo = pull_request.target_repo.repo_id
310 repo = pull_request.target_repo.repo_id
311
311
312 self.app.post(
312 self.app.post(
313 route_path('pullrequest_comment_create',
313 route_path('pullrequest_comment_create',
314 repo_name=pull_request.target_repo.scm_instance().name,
314 repo_name=pull_request.target_repo.scm_instance().name,
315 pull_request_id=pull_request_id),
315 pull_request_id=pull_request_id),
316 params={
316 params={
317 'close_pull_request': '1',
317 'close_pull_request': '1',
318 'csrf_token': csrf_token},
318 'csrf_token': csrf_token},
319 extra_environ=xhr_header)
319 extra_environ=xhr_header)
320
320
321 pull_request = PullRequest.get(pull_request_id)
321 pull_request = PullRequest.get(pull_request_id)
322
322
323 journal = UserLog.query()\
323 journal = UserLog.query()\
324 .filter(UserLog.user_id == author, UserLog.repository_id == repo) \
324 .filter(UserLog.user_id == author, UserLog.repository_id == repo) \
325 .order_by(UserLog.user_log_id.asc()) \
325 .order_by(UserLog.user_log_id.asc()) \
326 .all()
326 .all()
327 assert journal[-1].action == 'repo.pull_request.close'
327 assert journal[-1].action == 'repo.pull_request.close'
328
328
329 # check only the latest status, not the review status
329 # check only the latest status, not the review status
330 status = ChangesetStatusModel().get_status(
330 status = ChangesetStatusModel().get_status(
331 pull_request.source_repo, pull_request=pull_request)
331 pull_request.source_repo, pull_request=pull_request)
332 assert status == ChangesetStatus.STATUS_REJECTED
332 assert status == ChangesetStatus.STATUS_REJECTED
333
333
334 def test_comment_and_close_pull_request(
334 def test_comment_and_close_pull_request(
335 self, pr_util, csrf_token, xhr_header):
335 self, pr_util, csrf_token, xhr_header):
336 pull_request = pr_util.create_pull_request()
336 pull_request = pr_util.create_pull_request()
337 pull_request_id = pull_request.pull_request_id
337 pull_request_id = pull_request.pull_request_id
338
338
339 response = self.app.post(
339 response = self.app.post(
340 route_path('pullrequest_comment_create',
340 route_path('pullrequest_comment_create',
341 repo_name=pull_request.target_repo.scm_instance().name,
341 repo_name=pull_request.target_repo.scm_instance().name,
342 pull_request_id=pull_request.pull_request_id),
342 pull_request_id=pull_request.pull_request_id),
343 params={
343 params={
344 'close_pull_request': 'true',
344 'close_pull_request': 'true',
345 'csrf_token': csrf_token},
345 'csrf_token': csrf_token},
346 extra_environ=xhr_header)
346 extra_environ=xhr_header)
347
347
348 assert response.json
348 assert response.json
349
349
350 pull_request = PullRequest.get(pull_request_id)
350 pull_request = PullRequest.get(pull_request_id)
351 assert pull_request.is_closed()
351 assert pull_request.is_closed()
352
352
353 # check only the latest status, not the review status
353 # check only the latest status, not the review status
354 status = ChangesetStatusModel().get_status(
354 status = ChangesetStatusModel().get_status(
355 pull_request.source_repo, pull_request=pull_request)
355 pull_request.source_repo, pull_request=pull_request)
356 assert status == ChangesetStatus.STATUS_REJECTED
356 assert status == ChangesetStatus.STATUS_REJECTED
357
357
358 def test_create_pull_request(self, backend, csrf_token):
358 def test_create_pull_request(self, backend, csrf_token):
359 commits = [
359 commits = [
360 {'message': 'ancestor'},
360 {'message': 'ancestor'},
361 {'message': 'change'},
361 {'message': 'change'},
362 {'message': 'change2'},
362 {'message': 'change2'},
363 ]
363 ]
364 commit_ids = backend.create_master_repo(commits)
364 commit_ids = backend.create_master_repo(commits)
365 target = backend.create_repo(heads=['ancestor'])
365 target = backend.create_repo(heads=['ancestor'])
366 source = backend.create_repo(heads=['change2'])
366 source = backend.create_repo(heads=['change2'])
367
367
368 response = self.app.post(
368 response = self.app.post(
369 route_path('pullrequest_create', repo_name=source.repo_name),
369 route_path('pullrequest_create', repo_name=source.repo_name),
370 [
370 [
371 ('source_repo', source.repo_name),
371 ('source_repo', source.repo_name),
372 ('source_ref', 'branch:default:' + commit_ids['change2']),
372 ('source_ref', 'branch:default:' + commit_ids['change2']),
373 ('target_repo', target.repo_name),
373 ('target_repo', target.repo_name),
374 ('target_ref', 'branch:default:' + commit_ids['ancestor']),
374 ('target_ref', 'branch:default:' + commit_ids['ancestor']),
375 ('common_ancestor', commit_ids['ancestor']),
375 ('common_ancestor', commit_ids['ancestor']),
376 ('pullrequest_title', 'Title'),
376 ('pullrequest_title', 'Title'),
377 ('pullrequest_desc', 'Description'),
377 ('pullrequest_desc', 'Description'),
378 ('description_renderer', 'markdown'),
378 ('description_renderer', 'markdown'),
379 ('__start__', 'review_members:sequence'),
379 ('__start__', 'review_members:sequence'),
380 ('__start__', 'reviewer:mapping'),
380 ('__start__', 'reviewer:mapping'),
381 ('user_id', '1'),
381 ('user_id', '1'),
382 ('__start__', 'reasons:sequence'),
382 ('__start__', 'reasons:sequence'),
383 ('reason', 'Some reason'),
383 ('reason', 'Some reason'),
384 ('__end__', 'reasons:sequence'),
384 ('__end__', 'reasons:sequence'),
385 ('__start__', 'rules:sequence'),
385 ('__start__', 'rules:sequence'),
386 ('__end__', 'rules:sequence'),
386 ('__end__', 'rules:sequence'),
387 ('mandatory', 'False'),
387 ('mandatory', 'False'),
388 ('__end__', 'reviewer:mapping'),
388 ('__end__', 'reviewer:mapping'),
389 ('__end__', 'review_members:sequence'),
389 ('__end__', 'review_members:sequence'),
390 ('__start__', 'revisions:sequence'),
390 ('__start__', 'revisions:sequence'),
391 ('revisions', commit_ids['change']),
391 ('revisions', commit_ids['change']),
392 ('revisions', commit_ids['change2']),
392 ('revisions', commit_ids['change2']),
393 ('__end__', 'revisions:sequence'),
393 ('__end__', 'revisions:sequence'),
394 ('user', ''),
394 ('user', ''),
395 ('csrf_token', csrf_token),
395 ('csrf_token', csrf_token),
396 ],
396 ],
397 status=302)
397 status=302)
398
398
399 location = response.headers['Location']
399 location = response.headers['Location']
400 pull_request_id = location.rsplit('/', 1)[1]
400 pull_request_id = location.rsplit('/', 1)[1]
401 assert pull_request_id != 'new'
401 assert pull_request_id != 'new'
402 pull_request = PullRequest.get(int(pull_request_id))
402 pull_request = PullRequest.get(int(pull_request_id))
403
403
404 # check that we have now both revisions
404 # check that we have now both revisions
405 assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']]
405 assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']]
406 assert pull_request.source_ref == 'branch:default:' + commit_ids['change2']
406 assert pull_request.source_ref == 'branch:default:' + commit_ids['change2']
407 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
407 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
408 assert pull_request.target_ref == expected_target_ref
408 assert pull_request.target_ref == expected_target_ref
409
409
410 def test_reviewer_notifications(self, backend, csrf_token):
410 def test_reviewer_notifications(self, backend, csrf_token):
411 # We have to use the app.post for this test so it will create the
411 # We have to use the app.post for this test so it will create the
412 # notifications properly with the new PR
412 # notifications properly with the new PR
413 commits = [
413 commits = [
414 {'message': 'ancestor',
414 {'message': 'ancestor',
415 'added': [FileNode('file_A', content='content_of_ancestor')]},
415 'added': [FileNode('file_A', content='content_of_ancestor')]},
416 {'message': 'change',
416 {'message': 'change',
417 'added': [FileNode('file_a', content='content_of_change')]},
417 'added': [FileNode('file_a', content='content_of_change')]},
418 {'message': 'change-child'},
418 {'message': 'change-child'},
419 {'message': 'ancestor-child', 'parents': ['ancestor'],
419 {'message': 'ancestor-child', 'parents': ['ancestor'],
420 'added': [
420 'added': [
421 FileNode('file_B', content='content_of_ancestor_child')]},
421 FileNode('file_B', content='content_of_ancestor_child')]},
422 {'message': 'ancestor-child-2'},
422 {'message': 'ancestor-child-2'},
423 ]
423 ]
424 commit_ids = backend.create_master_repo(commits)
424 commit_ids = backend.create_master_repo(commits)
425 target = backend.create_repo(heads=['ancestor-child'])
425 target = backend.create_repo(heads=['ancestor-child'])
426 source = backend.create_repo(heads=['change'])
426 source = backend.create_repo(heads=['change'])
427
427
428 response = self.app.post(
428 response = self.app.post(
429 route_path('pullrequest_create', repo_name=source.repo_name),
429 route_path('pullrequest_create', repo_name=source.repo_name),
430 [
430 [
431 ('source_repo', source.repo_name),
431 ('source_repo', source.repo_name),
432 ('source_ref', 'branch:default:' + commit_ids['change']),
432 ('source_ref', 'branch:default:' + commit_ids['change']),
433 ('target_repo', target.repo_name),
433 ('target_repo', target.repo_name),
434 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
434 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
435 ('common_ancestor', commit_ids['ancestor']),
435 ('common_ancestor', commit_ids['ancestor']),
436 ('pullrequest_title', 'Title'),
436 ('pullrequest_title', 'Title'),
437 ('pullrequest_desc', 'Description'),
437 ('pullrequest_desc', 'Description'),
438 ('description_renderer', 'markdown'),
438 ('description_renderer', 'markdown'),
439 ('__start__', 'review_members:sequence'),
439 ('__start__', 'review_members:sequence'),
440 ('__start__', 'reviewer:mapping'),
440 ('__start__', 'reviewer:mapping'),
441 ('user_id', '2'),
441 ('user_id', '2'),
442 ('__start__', 'reasons:sequence'),
442 ('__start__', 'reasons:sequence'),
443 ('reason', 'Some reason'),
443 ('reason', 'Some reason'),
444 ('__end__', 'reasons:sequence'),
444 ('__end__', 'reasons:sequence'),
445 ('__start__', 'rules:sequence'),
445 ('__start__', 'rules:sequence'),
446 ('__end__', 'rules:sequence'),
446 ('__end__', 'rules:sequence'),
447 ('mandatory', 'False'),
447 ('mandatory', 'False'),
448 ('__end__', 'reviewer:mapping'),
448 ('__end__', 'reviewer:mapping'),
449 ('__end__', 'review_members:sequence'),
449 ('__end__', 'review_members:sequence'),
450 ('__start__', 'revisions:sequence'),
450 ('__start__', 'revisions:sequence'),
451 ('revisions', commit_ids['change']),
451 ('revisions', commit_ids['change']),
452 ('__end__', 'revisions:sequence'),
452 ('__end__', 'revisions:sequence'),
453 ('user', ''),
453 ('user', ''),
454 ('csrf_token', csrf_token),
454 ('csrf_token', csrf_token),
455 ],
455 ],
456 status=302)
456 status=302)
457
457
458 location = response.headers['Location']
458 location = response.headers['Location']
459
459
460 pull_request_id = location.rsplit('/', 1)[1]
460 pull_request_id = location.rsplit('/', 1)[1]
461 assert pull_request_id != 'new'
461 assert pull_request_id != 'new'
462 pull_request = PullRequest.get(int(pull_request_id))
462 pull_request = PullRequest.get(int(pull_request_id))
463
463
464 # Check that a notification was made
464 # Check that a notification was made
465 notifications = Notification.query()\
465 notifications = Notification.query()\
466 .filter(Notification.created_by == pull_request.author.user_id,
466 .filter(Notification.created_by == pull_request.author.user_id,
467 Notification.type_ == Notification.TYPE_PULL_REQUEST,
467 Notification.type_ == Notification.TYPE_PULL_REQUEST,
468 Notification.subject.contains(
468 Notification.subject.contains(
469 "requested a pull request review. !%s" % pull_request_id))
469 "requested a pull request review. !%s" % pull_request_id))
470 assert len(notifications.all()) == 1
470 assert len(notifications.all()) == 1
471
471
472 # Change reviewers and check that a notification was made
472 # Change reviewers and check that a notification was made
473 PullRequestModel().update_reviewers(
473 PullRequestModel().update_reviewers(
474 pull_request.pull_request_id, [(1, [], False, [])],
474 pull_request.pull_request_id, [(1, [], False, [])],
475 pull_request.author)
475 pull_request.author)
476 assert len(notifications.all()) == 2
476 assert len(notifications.all()) == 2
477
477
478 def test_create_pull_request_stores_ancestor_commit_id(self, backend,
478 def test_create_pull_request_stores_ancestor_commit_id(self, backend,
479 csrf_token):
479 csrf_token):
480 commits = [
480 commits = [
481 {'message': 'ancestor',
481 {'message': 'ancestor',
482 'added': [FileNode('file_A', content='content_of_ancestor')]},
482 'added': [FileNode('file_A', content='content_of_ancestor')]},
483 {'message': 'change',
483 {'message': 'change',
484 'added': [FileNode('file_a', content='content_of_change')]},
484 'added': [FileNode('file_a', content='content_of_change')]},
485 {'message': 'change-child'},
485 {'message': 'change-child'},
486 {'message': 'ancestor-child', 'parents': ['ancestor'],
486 {'message': 'ancestor-child', 'parents': ['ancestor'],
487 'added': [
487 'added': [
488 FileNode('file_B', content='content_of_ancestor_child')]},
488 FileNode('file_B', content='content_of_ancestor_child')]},
489 {'message': 'ancestor-child-2'},
489 {'message': 'ancestor-child-2'},
490 ]
490 ]
491 commit_ids = backend.create_master_repo(commits)
491 commit_ids = backend.create_master_repo(commits)
492 target = backend.create_repo(heads=['ancestor-child'])
492 target = backend.create_repo(heads=['ancestor-child'])
493 source = backend.create_repo(heads=['change'])
493 source = backend.create_repo(heads=['change'])
494
494
495 response = self.app.post(
495 response = self.app.post(
496 route_path('pullrequest_create', repo_name=source.repo_name),
496 route_path('pullrequest_create', repo_name=source.repo_name),
497 [
497 [
498 ('source_repo', source.repo_name),
498 ('source_repo', source.repo_name),
499 ('source_ref', 'branch:default:' + commit_ids['change']),
499 ('source_ref', 'branch:default:' + commit_ids['change']),
500 ('target_repo', target.repo_name),
500 ('target_repo', target.repo_name),
501 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
501 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
502 ('common_ancestor', commit_ids['ancestor']),
502 ('common_ancestor', commit_ids['ancestor']),
503 ('pullrequest_title', 'Title'),
503 ('pullrequest_title', 'Title'),
504 ('pullrequest_desc', 'Description'),
504 ('pullrequest_desc', 'Description'),
505 ('description_renderer', 'markdown'),
505 ('description_renderer', 'markdown'),
506 ('__start__', 'review_members:sequence'),
506 ('__start__', 'review_members:sequence'),
507 ('__start__', 'reviewer:mapping'),
507 ('__start__', 'reviewer:mapping'),
508 ('user_id', '1'),
508 ('user_id', '1'),
509 ('__start__', 'reasons:sequence'),
509 ('__start__', 'reasons:sequence'),
510 ('reason', 'Some reason'),
510 ('reason', 'Some reason'),
511 ('__end__', 'reasons:sequence'),
511 ('__end__', 'reasons:sequence'),
512 ('__start__', 'rules:sequence'),
512 ('__start__', 'rules:sequence'),
513 ('__end__', 'rules:sequence'),
513 ('__end__', 'rules:sequence'),
514 ('mandatory', 'False'),
514 ('mandatory', 'False'),
515 ('__end__', 'reviewer:mapping'),
515 ('__end__', 'reviewer:mapping'),
516 ('__end__', 'review_members:sequence'),
516 ('__end__', 'review_members:sequence'),
517 ('__start__', 'revisions:sequence'),
517 ('__start__', 'revisions:sequence'),
518 ('revisions', commit_ids['change']),
518 ('revisions', commit_ids['change']),
519 ('__end__', 'revisions:sequence'),
519 ('__end__', 'revisions:sequence'),
520 ('user', ''),
520 ('user', ''),
521 ('csrf_token', csrf_token),
521 ('csrf_token', csrf_token),
522 ],
522 ],
523 status=302)
523 status=302)
524
524
525 location = response.headers['Location']
525 location = response.headers['Location']
526
526
527 pull_request_id = location.rsplit('/', 1)[1]
527 pull_request_id = location.rsplit('/', 1)[1]
528 assert pull_request_id != 'new'
528 assert pull_request_id != 'new'
529 pull_request = PullRequest.get(int(pull_request_id))
529 pull_request = PullRequest.get(int(pull_request_id))
530
530
531 # target_ref has to point to the ancestor's commit_id in order to
531 # target_ref has to point to the ancestor's commit_id in order to
532 # show the correct diff
532 # show the correct diff
533 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
533 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
534 assert pull_request.target_ref == expected_target_ref
534 assert pull_request.target_ref == expected_target_ref
535
535
536 # Check generated diff contents
536 # Check generated diff contents
537 response = response.follow()
537 response = response.follow()
538 response.mustcontain(no=['content_of_ancestor'])
538 response.mustcontain(no=['content_of_ancestor'])
539 response.mustcontain(no=['content_of_ancestor-child'])
539 response.mustcontain(no=['content_of_ancestor-child'])
540 response.mustcontain('content_of_change')
540 response.mustcontain('content_of_change')
541
541
542 def test_merge_pull_request_enabled(self, pr_util, csrf_token):
542 def test_merge_pull_request_enabled(self, pr_util, csrf_token):
543 # Clear any previous calls to rcextensions
543 # Clear any previous calls to rcextensions
544 rhodecode.EXTENSIONS.calls.clear()
544 rhodecode.EXTENSIONS.calls.clear()
545
545
546 pull_request = pr_util.create_pull_request(
546 pull_request = pr_util.create_pull_request(
547 approved=True, mergeable=True)
547 approved=True, mergeable=True)
548 pull_request_id = pull_request.pull_request_id
548 pull_request_id = pull_request.pull_request_id
549 repo_name = pull_request.target_repo.scm_instance().name,
549 repo_name = pull_request.target_repo.scm_instance().name,
550
550
551 url = route_path('pullrequest_merge',
551 url = route_path('pullrequest_merge',
552 repo_name=str(repo_name[0]),
552 repo_name=str(repo_name[0]),
553 pull_request_id=pull_request_id)
553 pull_request_id=pull_request_id)
554 response = self.app.post(url, params={'csrf_token': csrf_token}).follow()
554 response = self.app.post(url, params={'csrf_token': csrf_token}).follow()
555
555
556 pull_request = PullRequest.get(pull_request_id)
556 pull_request = PullRequest.get(pull_request_id)
557
557
558 assert response.status_int == 200
558 assert response.status_int == 200
559 assert pull_request.is_closed()
559 assert pull_request.is_closed()
560 assert_pull_request_status(
560 assert_pull_request_status(
561 pull_request, ChangesetStatus.STATUS_APPROVED)
561 pull_request, ChangesetStatus.STATUS_APPROVED)
562
562
563 # Check the relevant log entries were added
563 # Check the relevant log entries were added
564 user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(3)
564 user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(3)
565 actions = [log.action for log in user_logs]
565 actions = [log.action for log in user_logs]
566 pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request)
566 pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request)
567 expected_actions = [
567 expected_actions = [
568 u'repo.pull_request.close',
568 u'repo.pull_request.close',
569 u'repo.pull_request.merge',
569 u'repo.pull_request.merge',
570 u'repo.pull_request.comment.create'
570 u'repo.pull_request.comment.create'
571 ]
571 ]
572 assert actions == expected_actions
572 assert actions == expected_actions
573
573
574 user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(4)
574 user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(4)
575 actions = [log for log in user_logs]
575 actions = [log for log in user_logs]
576 assert actions[-1].action == 'user.push'
576 assert actions[-1].action == 'user.push'
577 assert actions[-1].action_data['commit_ids'] == pr_commit_ids
577 assert actions[-1].action_data['commit_ids'] == pr_commit_ids
578
578
579 # Check post_push rcextension was really executed
579 # Check post_push rcextension was really executed
580 push_calls = rhodecode.EXTENSIONS.calls['_push_hook']
580 push_calls = rhodecode.EXTENSIONS.calls['_push_hook']
581 assert len(push_calls) == 1
581 assert len(push_calls) == 1
582 unused_last_call_args, last_call_kwargs = push_calls[0]
582 unused_last_call_args, last_call_kwargs = push_calls[0]
583 assert last_call_kwargs['action'] == 'push'
583 assert last_call_kwargs['action'] == 'push'
584 assert last_call_kwargs['commit_ids'] == pr_commit_ids
584 assert last_call_kwargs['commit_ids'] == pr_commit_ids
585
585
586 def test_merge_pull_request_disabled(self, pr_util, csrf_token):
586 def test_merge_pull_request_disabled(self, pr_util, csrf_token):
587 pull_request = pr_util.create_pull_request(mergeable=False)
587 pull_request = pr_util.create_pull_request(mergeable=False)
588 pull_request_id = pull_request.pull_request_id
588 pull_request_id = pull_request.pull_request_id
589 pull_request = PullRequest.get(pull_request_id)
589 pull_request = PullRequest.get(pull_request_id)
590
590
591 response = self.app.post(
591 response = self.app.post(
592 route_path('pullrequest_merge',
592 route_path('pullrequest_merge',
593 repo_name=pull_request.target_repo.scm_instance().name,
593 repo_name=pull_request.target_repo.scm_instance().name,
594 pull_request_id=pull_request.pull_request_id),
594 pull_request_id=pull_request.pull_request_id),
595 params={'csrf_token': csrf_token}).follow()
595 params={'csrf_token': csrf_token}).follow()
596
596
597 assert response.status_int == 200
597 assert response.status_int == 200
598 response.mustcontain(
598 response.mustcontain(
599 'Merge is not currently possible because of below failed checks.')
599 'Merge is not currently possible because of below failed checks.')
600 response.mustcontain('Server-side pull request merging is disabled.')
600 response.mustcontain('Server-side pull request merging is disabled.')
601
601
602 @pytest.mark.skip_backends('svn')
602 @pytest.mark.skip_backends('svn')
603 def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
603 def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
604 pull_request = pr_util.create_pull_request(mergeable=True)
604 pull_request = pr_util.create_pull_request(mergeable=True)
605 pull_request_id = pull_request.pull_request_id
605 pull_request_id = pull_request.pull_request_id
606 repo_name = pull_request.target_repo.scm_instance().name
606 repo_name = pull_request.target_repo.scm_instance().name
607
607
608 response = self.app.post(
608 response = self.app.post(
609 route_path('pullrequest_merge',
609 route_path('pullrequest_merge',
610 repo_name=repo_name, pull_request_id=pull_request_id),
610 repo_name=repo_name, pull_request_id=pull_request_id),
611 params={'csrf_token': csrf_token}).follow()
611 params={'csrf_token': csrf_token}).follow()
612
612
613 assert response.status_int == 200
613 assert response.status_int == 200
614
614
615 response.mustcontain(
615 response.mustcontain(
616 'Merge is not currently possible because of below failed checks.')
616 'Merge is not currently possible because of below failed checks.')
617 response.mustcontain('Pull request reviewer approval is pending.')
617 response.mustcontain('Pull request reviewer approval is pending.')
618
618
619 def test_merge_pull_request_renders_failure_reason(
619 def test_merge_pull_request_renders_failure_reason(
620 self, user_regular, csrf_token, pr_util):
620 self, user_regular, csrf_token, pr_util):
621 pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
621 pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
622 pull_request_id = pull_request.pull_request_id
622 pull_request_id = pull_request.pull_request_id
623 repo_name = pull_request.target_repo.scm_instance().name
623 repo_name = pull_request.target_repo.scm_instance().name
624
624
625 merge_resp = MergeResponse(True, False, 'STUB_COMMIT_ID',
625 merge_resp = MergeResponse(True, False, 'STUB_COMMIT_ID',
626 MergeFailureReason.PUSH_FAILED,
626 MergeFailureReason.PUSH_FAILED,
627 metadata={'target': 'shadow repo',
627 metadata={'target': 'shadow repo',
628 'merge_commit': 'xxx'})
628 'merge_commit': 'xxx'})
629 model_patcher = mock.patch.multiple(
629 model_patcher = mock.patch.multiple(
630 PullRequestModel,
630 PullRequestModel,
631 merge_repo=mock.Mock(return_value=merge_resp),
631 merge_repo=mock.Mock(return_value=merge_resp),
632 merge_status=mock.Mock(return_value=(True, 'WRONG_MESSAGE')))
632 merge_status=mock.Mock(return_value=(None, True, 'WRONG_MESSAGE')))
633
633
634 with model_patcher:
634 with model_patcher:
635 response = self.app.post(
635 response = self.app.post(
636 route_path('pullrequest_merge',
636 route_path('pullrequest_merge',
637 repo_name=repo_name,
637 repo_name=repo_name,
638 pull_request_id=pull_request_id),
638 pull_request_id=pull_request_id),
639 params={'csrf_token': csrf_token}, status=302)
639 params={'csrf_token': csrf_token}, status=302)
640
640
641 merge_resp = MergeResponse(True, True, '', MergeFailureReason.PUSH_FAILED,
641 merge_resp = MergeResponse(True, True, '', MergeFailureReason.PUSH_FAILED,
642 metadata={'target': 'shadow repo',
642 metadata={'target': 'shadow repo',
643 'merge_commit': 'xxx'})
643 'merge_commit': 'xxx'})
644 assert_session_flash(response, merge_resp.merge_status_message)
644 assert_session_flash(response, merge_resp.merge_status_message)
645
645
646 def test_update_source_revision(self, backend, csrf_token):
646 def test_update_source_revision(self, backend, csrf_token):
647 commits = [
647 commits = [
648 {'message': 'ancestor'},
648 {'message': 'ancestor'},
649 {'message': 'change'},
649 {'message': 'change'},
650 {'message': 'change-2'},
650 {'message': 'change-2'},
651 ]
651 ]
652 commit_ids = backend.create_master_repo(commits)
652 commit_ids = backend.create_master_repo(commits)
653 target = backend.create_repo(heads=['ancestor'])
653 target = backend.create_repo(heads=['ancestor'])
654 source = backend.create_repo(heads=['change'])
654 source = backend.create_repo(heads=['change'])
655
655
656 # create pr from a in source to A in target
656 # create pr from a in source to A in target
657 pull_request = PullRequest()
657 pull_request = PullRequest()
658
658
659 pull_request.source_repo = source
659 pull_request.source_repo = source
660 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
660 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
661 branch=backend.default_branch_name, commit_id=commit_ids['change'])
661 branch=backend.default_branch_name, commit_id=commit_ids['change'])
662
662
663 pull_request.target_repo = target
663 pull_request.target_repo = target
664 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
664 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
665 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
665 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
666
666
667 pull_request.revisions = [commit_ids['change']]
667 pull_request.revisions = [commit_ids['change']]
668 pull_request.title = u"Test"
668 pull_request.title = u"Test"
669 pull_request.description = u"Description"
669 pull_request.description = u"Description"
670 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
670 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
671 pull_request.pull_request_state = PullRequest.STATE_CREATED
671 pull_request.pull_request_state = PullRequest.STATE_CREATED
672 Session().add(pull_request)
672 Session().add(pull_request)
673 Session().commit()
673 Session().commit()
674 pull_request_id = pull_request.pull_request_id
674 pull_request_id = pull_request.pull_request_id
675
675
676 # source has ancestor - change - change-2
676 # source has ancestor - change - change-2
677 backend.pull_heads(source, heads=['change-2'])
677 backend.pull_heads(source, heads=['change-2'])
678
678
679 # update PR
679 # update PR
680 self.app.post(
680 self.app.post(
681 route_path('pullrequest_update',
681 route_path('pullrequest_update',
682 repo_name=target.repo_name, pull_request_id=pull_request_id),
682 repo_name=target.repo_name, pull_request_id=pull_request_id),
683 params={'update_commits': 'true', 'csrf_token': csrf_token})
683 params={'update_commits': 'true', 'csrf_token': csrf_token})
684
684
685 response = self.app.get(
685 response = self.app.get(
686 route_path('pullrequest_show',
686 route_path('pullrequest_show',
687 repo_name=target.repo_name,
687 repo_name=target.repo_name,
688 pull_request_id=pull_request.pull_request_id))
688 pull_request_id=pull_request.pull_request_id))
689
689
690 assert response.status_int == 200
690 assert response.status_int == 200
691 response.mustcontain('Pull request updated to')
691 response.mustcontain('Pull request updated to')
692 response.mustcontain('with 1 added, 0 removed commits.')
692 response.mustcontain('with 1 added, 0 removed commits.')
693
693
694 # check that we have now both revisions
694 # check that we have now both revisions
695 pull_request = PullRequest.get(pull_request_id)
695 pull_request = PullRequest.get(pull_request_id)
696 assert pull_request.revisions == [commit_ids['change-2'], commit_ids['change']]
696 assert pull_request.revisions == [commit_ids['change-2'], commit_ids['change']]
697
697
698 def test_update_target_revision(self, backend, csrf_token):
698 def test_update_target_revision(self, backend, csrf_token):
699 commits = [
699 commits = [
700 {'message': 'ancestor'},
700 {'message': 'ancestor'},
701 {'message': 'change'},
701 {'message': 'change'},
702 {'message': 'ancestor-new', 'parents': ['ancestor']},
702 {'message': 'ancestor-new', 'parents': ['ancestor']},
703 {'message': 'change-rebased'},
703 {'message': 'change-rebased'},
704 ]
704 ]
705 commit_ids = backend.create_master_repo(commits)
705 commit_ids = backend.create_master_repo(commits)
706 target = backend.create_repo(heads=['ancestor'])
706 target = backend.create_repo(heads=['ancestor'])
707 source = backend.create_repo(heads=['change'])
707 source = backend.create_repo(heads=['change'])
708
708
709 # create pr from a in source to A in target
709 # create pr from a in source to A in target
710 pull_request = PullRequest()
710 pull_request = PullRequest()
711
711
712 pull_request.source_repo = source
712 pull_request.source_repo = source
713 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
713 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
714 branch=backend.default_branch_name, commit_id=commit_ids['change'])
714 branch=backend.default_branch_name, commit_id=commit_ids['change'])
715
715
716 pull_request.target_repo = target
716 pull_request.target_repo = target
717 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
717 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
718 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
718 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
719
719
720 pull_request.revisions = [commit_ids['change']]
720 pull_request.revisions = [commit_ids['change']]
721 pull_request.title = u"Test"
721 pull_request.title = u"Test"
722 pull_request.description = u"Description"
722 pull_request.description = u"Description"
723 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
723 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
724 pull_request.pull_request_state = PullRequest.STATE_CREATED
724 pull_request.pull_request_state = PullRequest.STATE_CREATED
725
725
726 Session().add(pull_request)
726 Session().add(pull_request)
727 Session().commit()
727 Session().commit()
728 pull_request_id = pull_request.pull_request_id
728 pull_request_id = pull_request.pull_request_id
729
729
730 # target has ancestor - ancestor-new
730 # target has ancestor - ancestor-new
731 # source has ancestor - ancestor-new - change-rebased
731 # source has ancestor - ancestor-new - change-rebased
732 backend.pull_heads(target, heads=['ancestor-new'])
732 backend.pull_heads(target, heads=['ancestor-new'])
733 backend.pull_heads(source, heads=['change-rebased'])
733 backend.pull_heads(source, heads=['change-rebased'])
734
734
735 # update PR
735 # update PR
736 url = route_path('pullrequest_update',
736 url = route_path('pullrequest_update',
737 repo_name=target.repo_name,
737 repo_name=target.repo_name,
738 pull_request_id=pull_request_id)
738 pull_request_id=pull_request_id)
739 self.app.post(url,
739 self.app.post(url,
740 params={'update_commits': 'true', 'csrf_token': csrf_token},
740 params={'update_commits': 'true', 'csrf_token': csrf_token},
741 status=200)
741 status=200)
742
742
743 # check that we have now both revisions
743 # check that we have now both revisions
744 pull_request = PullRequest.get(pull_request_id)
744 pull_request = PullRequest.get(pull_request_id)
745 assert pull_request.revisions == [commit_ids['change-rebased']]
745 assert pull_request.revisions == [commit_ids['change-rebased']]
746 assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
746 assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
747 branch=backend.default_branch_name, commit_id=commit_ids['ancestor-new'])
747 branch=backend.default_branch_name, commit_id=commit_ids['ancestor-new'])
748
748
749 response = self.app.get(
749 response = self.app.get(
750 route_path('pullrequest_show',
750 route_path('pullrequest_show',
751 repo_name=target.repo_name,
751 repo_name=target.repo_name,
752 pull_request_id=pull_request.pull_request_id))
752 pull_request_id=pull_request.pull_request_id))
753 assert response.status_int == 200
753 assert response.status_int == 200
754 response.mustcontain('Pull request updated to')
754 response.mustcontain('Pull request updated to')
755 response.mustcontain('with 1 added, 1 removed commits.')
755 response.mustcontain('with 1 added, 1 removed commits.')
756
756
757 def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token):
757 def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token):
758 backend = backend_git
758 backend = backend_git
759 commits = [
759 commits = [
760 {'message': 'master-commit-1'},
760 {'message': 'master-commit-1'},
761 {'message': 'master-commit-2-change-1'},
761 {'message': 'master-commit-2-change-1'},
762 {'message': 'master-commit-3-change-2'},
762 {'message': 'master-commit-3-change-2'},
763
763
764 {'message': 'feat-commit-1', 'parents': ['master-commit-1']},
764 {'message': 'feat-commit-1', 'parents': ['master-commit-1']},
765 {'message': 'feat-commit-2'},
765 {'message': 'feat-commit-2'},
766 ]
766 ]
767 commit_ids = backend.create_master_repo(commits)
767 commit_ids = backend.create_master_repo(commits)
768 target = backend.create_repo(heads=['master-commit-3-change-2'])
768 target = backend.create_repo(heads=['master-commit-3-change-2'])
769 source = backend.create_repo(heads=['feat-commit-2'])
769 source = backend.create_repo(heads=['feat-commit-2'])
770
770
771 # create pr from a in source to A in target
771 # create pr from a in source to A in target
772 pull_request = PullRequest()
772 pull_request = PullRequest()
773 pull_request.source_repo = source
773 pull_request.source_repo = source
774
774
775 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
775 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
776 branch=backend.default_branch_name,
776 branch=backend.default_branch_name,
777 commit_id=commit_ids['master-commit-3-change-2'])
777 commit_id=commit_ids['master-commit-3-change-2'])
778
778
779 pull_request.target_repo = target
779 pull_request.target_repo = target
780 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
780 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
781 branch=backend.default_branch_name, commit_id=commit_ids['feat-commit-2'])
781 branch=backend.default_branch_name, commit_id=commit_ids['feat-commit-2'])
782
782
783 pull_request.revisions = [
783 pull_request.revisions = [
784 commit_ids['feat-commit-1'],
784 commit_ids['feat-commit-1'],
785 commit_ids['feat-commit-2']
785 commit_ids['feat-commit-2']
786 ]
786 ]
787 pull_request.title = u"Test"
787 pull_request.title = u"Test"
788 pull_request.description = u"Description"
788 pull_request.description = u"Description"
789 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
789 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
790 pull_request.pull_request_state = PullRequest.STATE_CREATED
790 pull_request.pull_request_state = PullRequest.STATE_CREATED
791 Session().add(pull_request)
791 Session().add(pull_request)
792 Session().commit()
792 Session().commit()
793 pull_request_id = pull_request.pull_request_id
793 pull_request_id = pull_request.pull_request_id
794
794
795 # PR is created, now we simulate a force-push into target,
795 # PR is created, now we simulate a force-push into target,
796 # that drops a 2 last commits
796 # that drops a 2 last commits
797 vcsrepo = target.scm_instance()
797 vcsrepo = target.scm_instance()
798 vcsrepo.config.clear_section('hooks')
798 vcsrepo.config.clear_section('hooks')
799 vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2'])
799 vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2'])
800
800
801 # update PR
801 # update PR
802 url = route_path('pullrequest_update',
802 url = route_path('pullrequest_update',
803 repo_name=target.repo_name,
803 repo_name=target.repo_name,
804 pull_request_id=pull_request_id)
804 pull_request_id=pull_request_id)
805 self.app.post(url,
805 self.app.post(url,
806 params={'update_commits': 'true', 'csrf_token': csrf_token},
806 params={'update_commits': 'true', 'csrf_token': csrf_token},
807 status=200)
807 status=200)
808
808
809 response = self.app.get(route_path('pullrequest_new', repo_name=target.repo_name))
809 response = self.app.get(route_path('pullrequest_new', repo_name=target.repo_name))
810 assert response.status_int == 200
810 assert response.status_int == 200
811 response.mustcontain('Pull request updated to')
811 response.mustcontain('Pull request updated to')
812 response.mustcontain('with 0 added, 0 removed commits.')
812 response.mustcontain('with 0 added, 0 removed commits.')
813
813
814 def test_update_of_ancestor_reference(self, backend, csrf_token):
814 def test_update_of_ancestor_reference(self, backend, csrf_token):
815 commits = [
815 commits = [
816 {'message': 'ancestor'},
816 {'message': 'ancestor'},
817 {'message': 'change'},
817 {'message': 'change'},
818 {'message': 'change-2'},
818 {'message': 'change-2'},
819 {'message': 'ancestor-new', 'parents': ['ancestor']},
819 {'message': 'ancestor-new', 'parents': ['ancestor']},
820 {'message': 'change-rebased'},
820 {'message': 'change-rebased'},
821 ]
821 ]
822 commit_ids = backend.create_master_repo(commits)
822 commit_ids = backend.create_master_repo(commits)
823 target = backend.create_repo(heads=['ancestor'])
823 target = backend.create_repo(heads=['ancestor'])
824 source = backend.create_repo(heads=['change'])
824 source = backend.create_repo(heads=['change'])
825
825
826 # create pr from a in source to A in target
826 # create pr from a in source to A in target
827 pull_request = PullRequest()
827 pull_request = PullRequest()
828 pull_request.source_repo = source
828 pull_request.source_repo = source
829
829
830 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
830 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
831 branch=backend.default_branch_name, commit_id=commit_ids['change'])
831 branch=backend.default_branch_name, commit_id=commit_ids['change'])
832 pull_request.target_repo = target
832 pull_request.target_repo = target
833 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
833 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
834 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
834 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
835 pull_request.revisions = [commit_ids['change']]
835 pull_request.revisions = [commit_ids['change']]
836 pull_request.title = u"Test"
836 pull_request.title = u"Test"
837 pull_request.description = u"Description"
837 pull_request.description = u"Description"
838 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
838 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
839 pull_request.pull_request_state = PullRequest.STATE_CREATED
839 pull_request.pull_request_state = PullRequest.STATE_CREATED
840 Session().add(pull_request)
840 Session().add(pull_request)
841 Session().commit()
841 Session().commit()
842 pull_request_id = pull_request.pull_request_id
842 pull_request_id = pull_request.pull_request_id
843
843
844 # target has ancestor - ancestor-new
844 # target has ancestor - ancestor-new
845 # source has ancestor - ancestor-new - change-rebased
845 # source has ancestor - ancestor-new - change-rebased
846 backend.pull_heads(target, heads=['ancestor-new'])
846 backend.pull_heads(target, heads=['ancestor-new'])
847 backend.pull_heads(source, heads=['change-rebased'])
847 backend.pull_heads(source, heads=['change-rebased'])
848
848
849 # update PR
849 # update PR
850 self.app.post(
850 self.app.post(
851 route_path('pullrequest_update',
851 route_path('pullrequest_update',
852 repo_name=target.repo_name, pull_request_id=pull_request_id),
852 repo_name=target.repo_name, pull_request_id=pull_request_id),
853 params={'update_commits': 'true', 'csrf_token': csrf_token},
853 params={'update_commits': 'true', 'csrf_token': csrf_token},
854 status=200)
854 status=200)
855
855
856 # Expect the target reference to be updated correctly
856 # Expect the target reference to be updated correctly
857 pull_request = PullRequest.get(pull_request_id)
857 pull_request = PullRequest.get(pull_request_id)
858 assert pull_request.revisions == [commit_ids['change-rebased']]
858 assert pull_request.revisions == [commit_ids['change-rebased']]
859 expected_target_ref = 'branch:{branch}:{commit_id}'.format(
859 expected_target_ref = 'branch:{branch}:{commit_id}'.format(
860 branch=backend.default_branch_name,
860 branch=backend.default_branch_name,
861 commit_id=commit_ids['ancestor-new'])
861 commit_id=commit_ids['ancestor-new'])
862 assert pull_request.target_ref == expected_target_ref
862 assert pull_request.target_ref == expected_target_ref
863
863
864 def test_remove_pull_request_branch(self, backend_git, csrf_token):
864 def test_remove_pull_request_branch(self, backend_git, csrf_token):
865 branch_name = 'development'
865 branch_name = 'development'
866 commits = [
866 commits = [
867 {'message': 'initial-commit'},
867 {'message': 'initial-commit'},
868 {'message': 'old-feature'},
868 {'message': 'old-feature'},
869 {'message': 'new-feature', 'branch': branch_name},
869 {'message': 'new-feature', 'branch': branch_name},
870 ]
870 ]
871 repo = backend_git.create_repo(commits)
871 repo = backend_git.create_repo(commits)
872 repo_name = repo.repo_name
872 repo_name = repo.repo_name
873 commit_ids = backend_git.commit_ids
873 commit_ids = backend_git.commit_ids
874
874
875 pull_request = PullRequest()
875 pull_request = PullRequest()
876 pull_request.source_repo = repo
876 pull_request.source_repo = repo
877 pull_request.target_repo = repo
877 pull_request.target_repo = repo
878 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
878 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
879 branch=branch_name, commit_id=commit_ids['new-feature'])
879 branch=branch_name, commit_id=commit_ids['new-feature'])
880 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
880 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
881 branch=backend_git.default_branch_name, commit_id=commit_ids['old-feature'])
881 branch=backend_git.default_branch_name, commit_id=commit_ids['old-feature'])
882 pull_request.revisions = [commit_ids['new-feature']]
882 pull_request.revisions = [commit_ids['new-feature']]
883 pull_request.title = u"Test"
883 pull_request.title = u"Test"
884 pull_request.description = u"Description"
884 pull_request.description = u"Description"
885 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
885 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
886 pull_request.pull_request_state = PullRequest.STATE_CREATED
886 pull_request.pull_request_state = PullRequest.STATE_CREATED
887 Session().add(pull_request)
887 Session().add(pull_request)
888 Session().commit()
888 Session().commit()
889
889
890 pull_request_id = pull_request.pull_request_id
890 pull_request_id = pull_request.pull_request_id
891
891
892 vcs = repo.scm_instance()
892 vcs = repo.scm_instance()
893 vcs.remove_ref('refs/heads/{}'.format(branch_name))
893 vcs.remove_ref('refs/heads/{}'.format(branch_name))
894 # NOTE(marcink): run GC to ensure the commits are gone
895 vcs.run_gc()
894
896
895 response = self.app.get(route_path(
897 response = self.app.get(route_path(
896 'pullrequest_show',
898 'pullrequest_show',
897 repo_name=repo_name,
899 repo_name=repo_name,
898 pull_request_id=pull_request_id))
900 pull_request_id=pull_request_id))
899
901
900 assert response.status_int == 200
902 assert response.status_int == 200
901
903
902 response.assert_response().element_contains(
904 response.assert_response().element_contains(
903 '#changeset_compare_view_content .alert strong',
905 '#changeset_compare_view_content .alert strong',
904 'Missing commits')
906 'Missing commits')
905 response.assert_response().element_contains(
907 response.assert_response().element_contains(
906 '#changeset_compare_view_content .alert',
908 '#changeset_compare_view_content .alert',
907 'This pull request cannot be displayed, because one or more'
909 'This pull request cannot be displayed, because one or more'
908 ' commits no longer exist in the source repository.')
910 ' commits no longer exist in the source repository.')
909
911
910 def test_strip_commits_from_pull_request(
912 def test_strip_commits_from_pull_request(
911 self, backend, pr_util, csrf_token):
913 self, backend, pr_util, csrf_token):
912 commits = [
914 commits = [
913 {'message': 'initial-commit'},
915 {'message': 'initial-commit'},
914 {'message': 'old-feature'},
916 {'message': 'old-feature'},
915 {'message': 'new-feature', 'parents': ['initial-commit']},
917 {'message': 'new-feature', 'parents': ['initial-commit']},
916 ]
918 ]
917 pull_request = pr_util.create_pull_request(
919 pull_request = pr_util.create_pull_request(
918 commits, target_head='initial-commit', source_head='new-feature',
920 commits, target_head='initial-commit', source_head='new-feature',
919 revisions=['new-feature'])
921 revisions=['new-feature'])
920
922
921 vcs = pr_util.source_repository.scm_instance()
923 vcs = pr_util.source_repository.scm_instance()
922 if backend.alias == 'git':
924 if backend.alias == 'git':
923 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
925 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
924 else:
926 else:
925 vcs.strip(pr_util.commit_ids['new-feature'])
927 vcs.strip(pr_util.commit_ids['new-feature'])
926
928
927 response = self.app.get(route_path(
929 response = self.app.get(route_path(
928 'pullrequest_show',
930 'pullrequest_show',
929 repo_name=pr_util.target_repository.repo_name,
931 repo_name=pr_util.target_repository.repo_name,
930 pull_request_id=pull_request.pull_request_id))
932 pull_request_id=pull_request.pull_request_id))
931
933
932 assert response.status_int == 200
934 assert response.status_int == 200
933
935
934 response.assert_response().element_contains(
936 response.assert_response().element_contains(
935 '#changeset_compare_view_content .alert strong',
937 '#changeset_compare_view_content .alert strong',
936 'Missing commits')
938 'Missing commits')
937 response.assert_response().element_contains(
939 response.assert_response().element_contains(
938 '#changeset_compare_view_content .alert',
940 '#changeset_compare_view_content .alert',
939 'This pull request cannot be displayed, because one or more'
941 'This pull request cannot be displayed, because one or more'
940 ' commits no longer exist in the source repository.')
942 ' commits no longer exist in the source repository.')
941 response.assert_response().element_contains(
943 response.assert_response().element_contains(
942 '#update_commits',
944 '#update_commits',
943 'Update commits')
945 'Update commits')
944
946
945 def test_strip_commits_and_update(
947 def test_strip_commits_and_update(
946 self, backend, pr_util, csrf_token):
948 self, backend, pr_util, csrf_token):
947 commits = [
949 commits = [
948 {'message': 'initial-commit'},
950 {'message': 'initial-commit'},
949 {'message': 'old-feature'},
951 {'message': 'old-feature'},
950 {'message': 'new-feature', 'parents': ['old-feature']},
952 {'message': 'new-feature', 'parents': ['old-feature']},
951 ]
953 ]
952 pull_request = pr_util.create_pull_request(
954 pull_request = pr_util.create_pull_request(
953 commits, target_head='old-feature', source_head='new-feature',
955 commits, target_head='old-feature', source_head='new-feature',
954 revisions=['new-feature'], mergeable=True)
956 revisions=['new-feature'], mergeable=True)
955
957
956 vcs = pr_util.source_repository.scm_instance()
958 vcs = pr_util.source_repository.scm_instance()
957 if backend.alias == 'git':
959 if backend.alias == 'git':
958 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
960 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
959 else:
961 else:
960 vcs.strip(pr_util.commit_ids['new-feature'])
962 vcs.strip(pr_util.commit_ids['new-feature'])
961
963
962 url = route_path('pullrequest_update',
964 url = route_path('pullrequest_update',
963 repo_name=pull_request.target_repo.repo_name,
965 repo_name=pull_request.target_repo.repo_name,
964 pull_request_id=pull_request.pull_request_id)
966 pull_request_id=pull_request.pull_request_id)
965 response = self.app.post(url,
967 response = self.app.post(url,
966 params={'update_commits': 'true',
968 params={'update_commits': 'true',
967 'csrf_token': csrf_token})
969 'csrf_token': csrf_token})
968
970
969 assert response.status_int == 200
971 assert response.status_int == 200
970 assert response.body == '{"response": true, "redirect_url": null}'
972 assert response.body == '{"response": true, "redirect_url": null}'
971
973
972 # Make sure that after update, it won't raise 500 errors
974 # Make sure that after update, it won't raise 500 errors
973 response = self.app.get(route_path(
975 response = self.app.get(route_path(
974 'pullrequest_show',
976 'pullrequest_show',
975 repo_name=pr_util.target_repository.repo_name,
977 repo_name=pr_util.target_repository.repo_name,
976 pull_request_id=pull_request.pull_request_id))
978 pull_request_id=pull_request.pull_request_id))
977
979
978 assert response.status_int == 200
980 assert response.status_int == 200
979 response.assert_response().element_contains(
981 response.assert_response().element_contains(
980 '#changeset_compare_view_content .alert strong',
982 '#changeset_compare_view_content .alert strong',
981 'Missing commits')
983 'Missing commits')
982
984
983 def test_branch_is_a_link(self, pr_util):
985 def test_branch_is_a_link(self, pr_util):
984 pull_request = pr_util.create_pull_request()
986 pull_request = pr_util.create_pull_request()
985 pull_request.source_ref = 'branch:origin:1234567890abcdef'
987 pull_request.source_ref = 'branch:origin:1234567890abcdef'
986 pull_request.target_ref = 'branch:target:abcdef1234567890'
988 pull_request.target_ref = 'branch:target:abcdef1234567890'
987 Session().add(pull_request)
989 Session().add(pull_request)
988 Session().commit()
990 Session().commit()
989
991
990 response = self.app.get(route_path(
992 response = self.app.get(route_path(
991 'pullrequest_show',
993 'pullrequest_show',
992 repo_name=pull_request.target_repo.scm_instance().name,
994 repo_name=pull_request.target_repo.scm_instance().name,
993 pull_request_id=pull_request.pull_request_id))
995 pull_request_id=pull_request.pull_request_id))
994 assert response.status_int == 200
996 assert response.status_int == 200
995
997
996 source = response.assert_response().get_element('.pr-source-info')
998 source = response.assert_response().get_element('.pr-source-info')
997 source_parent = source.getparent()
999 source_parent = source.getparent()
998 assert len(source_parent) == 1
1000 assert len(source_parent) == 1
999
1001
1000 target = response.assert_response().get_element('.pr-target-info')
1002 target = response.assert_response().get_element('.pr-target-info')
1001 target_parent = target.getparent()
1003 target_parent = target.getparent()
1002 assert len(target_parent) == 1
1004 assert len(target_parent) == 1
1003
1005
1004 expected_origin_link = route_path(
1006 expected_origin_link = route_path(
1005 'repo_commits',
1007 'repo_commits',
1006 repo_name=pull_request.source_repo.scm_instance().name,
1008 repo_name=pull_request.source_repo.scm_instance().name,
1007 params=dict(branch='origin'))
1009 params=dict(branch='origin'))
1008 expected_target_link = route_path(
1010 expected_target_link = route_path(
1009 'repo_commits',
1011 'repo_commits',
1010 repo_name=pull_request.target_repo.scm_instance().name,
1012 repo_name=pull_request.target_repo.scm_instance().name,
1011 params=dict(branch='target'))
1013 params=dict(branch='target'))
1012 assert source_parent.attrib['href'] == expected_origin_link
1014 assert source_parent.attrib['href'] == expected_origin_link
1013 assert target_parent.attrib['href'] == expected_target_link
1015 assert target_parent.attrib['href'] == expected_target_link
1014
1016
1015 def test_bookmark_is_not_a_link(self, pr_util):
1017 def test_bookmark_is_not_a_link(self, pr_util):
1016 pull_request = pr_util.create_pull_request()
1018 pull_request = pr_util.create_pull_request()
1017 pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
1019 pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
1018 pull_request.target_ref = 'bookmark:target:abcdef1234567890'
1020 pull_request.target_ref = 'bookmark:target:abcdef1234567890'
1019 Session().add(pull_request)
1021 Session().add(pull_request)
1020 Session().commit()
1022 Session().commit()
1021
1023
1022 response = self.app.get(route_path(
1024 response = self.app.get(route_path(
1023 'pullrequest_show',
1025 'pullrequest_show',
1024 repo_name=pull_request.target_repo.scm_instance().name,
1026 repo_name=pull_request.target_repo.scm_instance().name,
1025 pull_request_id=pull_request.pull_request_id))
1027 pull_request_id=pull_request.pull_request_id))
1026 assert response.status_int == 200
1028 assert response.status_int == 200
1027
1029
1028 source = response.assert_response().get_element('.pr-source-info')
1030 source = response.assert_response().get_element('.pr-source-info')
1029 assert source.text.strip() == 'bookmark:origin'
1031 assert source.text.strip() == 'bookmark:origin'
1030 assert source.getparent().attrib.get('href') is None
1032 assert source.getparent().attrib.get('href') is None
1031
1033
1032 target = response.assert_response().get_element('.pr-target-info')
1034 target = response.assert_response().get_element('.pr-target-info')
1033 assert target.text.strip() == 'bookmark:target'
1035 assert target.text.strip() == 'bookmark:target'
1034 assert target.getparent().attrib.get('href') is None
1036 assert target.getparent().attrib.get('href') is None
1035
1037
1036 def test_tag_is_not_a_link(self, pr_util):
1038 def test_tag_is_not_a_link(self, pr_util):
1037 pull_request = pr_util.create_pull_request()
1039 pull_request = pr_util.create_pull_request()
1038 pull_request.source_ref = 'tag:origin:1234567890abcdef'
1040 pull_request.source_ref = 'tag:origin:1234567890abcdef'
1039 pull_request.target_ref = 'tag:target:abcdef1234567890'
1041 pull_request.target_ref = 'tag:target:abcdef1234567890'
1040 Session().add(pull_request)
1042 Session().add(pull_request)
1041 Session().commit()
1043 Session().commit()
1042
1044
1043 response = self.app.get(route_path(
1045 response = self.app.get(route_path(
1044 'pullrequest_show',
1046 'pullrequest_show',
1045 repo_name=pull_request.target_repo.scm_instance().name,
1047 repo_name=pull_request.target_repo.scm_instance().name,
1046 pull_request_id=pull_request.pull_request_id))
1048 pull_request_id=pull_request.pull_request_id))
1047 assert response.status_int == 200
1049 assert response.status_int == 200
1048
1050
1049 source = response.assert_response().get_element('.pr-source-info')
1051 source = response.assert_response().get_element('.pr-source-info')
1050 assert source.text.strip() == 'tag:origin'
1052 assert source.text.strip() == 'tag:origin'
1051 assert source.getparent().attrib.get('href') is None
1053 assert source.getparent().attrib.get('href') is None
1052
1054
1053 target = response.assert_response().get_element('.pr-target-info')
1055 target = response.assert_response().get_element('.pr-target-info')
1054 assert target.text.strip() == 'tag:target'
1056 assert target.text.strip() == 'tag:target'
1055 assert target.getparent().attrib.get('href') is None
1057 assert target.getparent().attrib.get('href') is None
1056
1058
1057 @pytest.mark.parametrize('mergeable', [True, False])
1059 @pytest.mark.parametrize('mergeable', [True, False])
1058 def test_shadow_repository_link(
1060 def test_shadow_repository_link(
1059 self, mergeable, pr_util, http_host_only_stub):
1061 self, mergeable, pr_util, http_host_only_stub):
1060 """
1062 """
1061 Check that the pull request summary page displays a link to the shadow
1063 Check that the pull request summary page displays a link to the shadow
1062 repository if the pull request is mergeable. If it is not mergeable
1064 repository if the pull request is mergeable. If it is not mergeable
1063 the link should not be displayed.
1065 the link should not be displayed.
1064 """
1066 """
1065 pull_request = pr_util.create_pull_request(
1067 pull_request = pr_util.create_pull_request(
1066 mergeable=mergeable, enable_notifications=False)
1068 mergeable=mergeable, enable_notifications=False)
1067 target_repo = pull_request.target_repo.scm_instance()
1069 target_repo = pull_request.target_repo.scm_instance()
1068 pr_id = pull_request.pull_request_id
1070 pr_id = pull_request.pull_request_id
1069 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1071 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1070 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1072 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1071
1073
1072 response = self.app.get(route_path(
1074 response = self.app.get(route_path(
1073 'pullrequest_show',
1075 'pullrequest_show',
1074 repo_name=target_repo.name,
1076 repo_name=target_repo.name,
1075 pull_request_id=pr_id))
1077 pull_request_id=pr_id))
1076
1078
1077 if mergeable:
1079 if mergeable:
1078 response.assert_response().element_value_contains(
1080 response.assert_response().element_value_contains(
1079 'input.pr-mergeinfo', shadow_url)
1081 'input.pr-mergeinfo', shadow_url)
1080 response.assert_response().element_value_contains(
1082 response.assert_response().element_value_contains(
1081 'input.pr-mergeinfo ', 'pr-merge')
1083 'input.pr-mergeinfo ', 'pr-merge')
1082 else:
1084 else:
1083 response.assert_response().no_element_exists('.pr-mergeinfo')
1085 response.assert_response().no_element_exists('.pr-mergeinfo')
1084
1086
1085
1087
1086 @pytest.mark.usefixtures('app')
1088 @pytest.mark.usefixtures('app')
1087 @pytest.mark.backends("git", "hg")
1089 @pytest.mark.backends("git", "hg")
1088 class TestPullrequestsControllerDelete(object):
1090 class TestPullrequestsControllerDelete(object):
1089 def test_pull_request_delete_button_permissions_admin(
1091 def test_pull_request_delete_button_permissions_admin(
1090 self, autologin_user, user_admin, pr_util):
1092 self, autologin_user, user_admin, pr_util):
1091 pull_request = pr_util.create_pull_request(
1093 pull_request = pr_util.create_pull_request(
1092 author=user_admin.username, enable_notifications=False)
1094 author=user_admin.username, enable_notifications=False)
1093
1095
1094 response = self.app.get(route_path(
1096 response = self.app.get(route_path(
1095 'pullrequest_show',
1097 'pullrequest_show',
1096 repo_name=pull_request.target_repo.scm_instance().name,
1098 repo_name=pull_request.target_repo.scm_instance().name,
1097 pull_request_id=pull_request.pull_request_id))
1099 pull_request_id=pull_request.pull_request_id))
1098
1100
1099 response.mustcontain('id="delete_pullrequest"')
1101 response.mustcontain('id="delete_pullrequest"')
1100 response.mustcontain('Confirm to delete this pull request')
1102 response.mustcontain('Confirm to delete this pull request')
1101
1103
1102 def test_pull_request_delete_button_permissions_owner(
1104 def test_pull_request_delete_button_permissions_owner(
1103 self, autologin_regular_user, user_regular, pr_util):
1105 self, autologin_regular_user, user_regular, pr_util):
1104 pull_request = pr_util.create_pull_request(
1106 pull_request = pr_util.create_pull_request(
1105 author=user_regular.username, enable_notifications=False)
1107 author=user_regular.username, enable_notifications=False)
1106
1108
1107 response = self.app.get(route_path(
1109 response = self.app.get(route_path(
1108 'pullrequest_show',
1110 'pullrequest_show',
1109 repo_name=pull_request.target_repo.scm_instance().name,
1111 repo_name=pull_request.target_repo.scm_instance().name,
1110 pull_request_id=pull_request.pull_request_id))
1112 pull_request_id=pull_request.pull_request_id))
1111
1113
1112 response.mustcontain('id="delete_pullrequest"')
1114 response.mustcontain('id="delete_pullrequest"')
1113 response.mustcontain('Confirm to delete this pull request')
1115 response.mustcontain('Confirm to delete this pull request')
1114
1116
1115 def test_pull_request_delete_button_permissions_forbidden(
1117 def test_pull_request_delete_button_permissions_forbidden(
1116 self, autologin_regular_user, user_regular, user_admin, pr_util):
1118 self, autologin_regular_user, user_regular, user_admin, pr_util):
1117 pull_request = pr_util.create_pull_request(
1119 pull_request = pr_util.create_pull_request(
1118 author=user_admin.username, enable_notifications=False)
1120 author=user_admin.username, enable_notifications=False)
1119
1121
1120 response = self.app.get(route_path(
1122 response = self.app.get(route_path(
1121 'pullrequest_show',
1123 'pullrequest_show',
1122 repo_name=pull_request.target_repo.scm_instance().name,
1124 repo_name=pull_request.target_repo.scm_instance().name,
1123 pull_request_id=pull_request.pull_request_id))
1125 pull_request_id=pull_request.pull_request_id))
1124 response.mustcontain(no=['id="delete_pullrequest"'])
1126 response.mustcontain(no=['id="delete_pullrequest"'])
1125 response.mustcontain(no=['Confirm to delete this pull request'])
1127 response.mustcontain(no=['Confirm to delete this pull request'])
1126
1128
1127 def test_pull_request_delete_button_permissions_can_update_cannot_delete(
1129 def test_pull_request_delete_button_permissions_can_update_cannot_delete(
1128 self, autologin_regular_user, user_regular, user_admin, pr_util,
1130 self, autologin_regular_user, user_regular, user_admin, pr_util,
1129 user_util):
1131 user_util):
1130
1132
1131 pull_request = pr_util.create_pull_request(
1133 pull_request = pr_util.create_pull_request(
1132 author=user_admin.username, enable_notifications=False)
1134 author=user_admin.username, enable_notifications=False)
1133
1135
1134 user_util.grant_user_permission_to_repo(
1136 user_util.grant_user_permission_to_repo(
1135 pull_request.target_repo, user_regular,
1137 pull_request.target_repo, user_regular,
1136 'repository.write')
1138 'repository.write')
1137
1139
1138 response = self.app.get(route_path(
1140 response = self.app.get(route_path(
1139 'pullrequest_show',
1141 'pullrequest_show',
1140 repo_name=pull_request.target_repo.scm_instance().name,
1142 repo_name=pull_request.target_repo.scm_instance().name,
1141 pull_request_id=pull_request.pull_request_id))
1143 pull_request_id=pull_request.pull_request_id))
1142
1144
1143 response.mustcontain('id="open_edit_pullrequest"')
1145 response.mustcontain('id="open_edit_pullrequest"')
1144 response.mustcontain('id="delete_pullrequest"')
1146 response.mustcontain('id="delete_pullrequest"')
1145 response.mustcontain(no=['Confirm to delete this pull request'])
1147 response.mustcontain(no=['Confirm to delete this pull request'])
1146
1148
1147 def test_delete_comment_returns_404_if_comment_does_not_exist(
1149 def test_delete_comment_returns_404_if_comment_does_not_exist(
1148 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1150 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1149
1151
1150 pull_request = pr_util.create_pull_request(
1152 pull_request = pr_util.create_pull_request(
1151 author=user_admin.username, enable_notifications=False)
1153 author=user_admin.username, enable_notifications=False)
1152
1154
1153 self.app.post(
1155 self.app.post(
1154 route_path(
1156 route_path(
1155 'pullrequest_comment_delete',
1157 'pullrequest_comment_delete',
1156 repo_name=pull_request.target_repo.scm_instance().name,
1158 repo_name=pull_request.target_repo.scm_instance().name,
1157 pull_request_id=pull_request.pull_request_id,
1159 pull_request_id=pull_request.pull_request_id,
1158 comment_id=1024404),
1160 comment_id=1024404),
1159 extra_environ=xhr_header,
1161 extra_environ=xhr_header,
1160 params={'csrf_token': csrf_token},
1162 params={'csrf_token': csrf_token},
1161 status=404
1163 status=404
1162 )
1164 )
1163
1165
1164 def test_delete_comment(
1166 def test_delete_comment(
1165 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1167 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1166
1168
1167 pull_request = pr_util.create_pull_request(
1169 pull_request = pr_util.create_pull_request(
1168 author=user_admin.username, enable_notifications=False)
1170 author=user_admin.username, enable_notifications=False)
1169 comment = pr_util.create_comment()
1171 comment = pr_util.create_comment()
1170 comment_id = comment.comment_id
1172 comment_id = comment.comment_id
1171
1173
1172 response = self.app.post(
1174 response = self.app.post(
1173 route_path(
1175 route_path(
1174 'pullrequest_comment_delete',
1176 'pullrequest_comment_delete',
1175 repo_name=pull_request.target_repo.scm_instance().name,
1177 repo_name=pull_request.target_repo.scm_instance().name,
1176 pull_request_id=pull_request.pull_request_id,
1178 pull_request_id=pull_request.pull_request_id,
1177 comment_id=comment_id),
1179 comment_id=comment_id),
1178 extra_environ=xhr_header,
1180 extra_environ=xhr_header,
1179 params={'csrf_token': csrf_token},
1181 params={'csrf_token': csrf_token},
1180 status=200
1182 status=200
1181 )
1183 )
1182 assert response.body == 'true'
1184 assert response.body == 'true'
1183
1185
1184 @pytest.mark.parametrize('url_type', [
1186 @pytest.mark.parametrize('url_type', [
1185 'pullrequest_new',
1187 'pullrequest_new',
1186 'pullrequest_create',
1188 'pullrequest_create',
1187 'pullrequest_update',
1189 'pullrequest_update',
1188 'pullrequest_merge',
1190 'pullrequest_merge',
1189 ])
1191 ])
1190 def test_pull_request_is_forbidden_on_archived_repo(
1192 def test_pull_request_is_forbidden_on_archived_repo(
1191 self, autologin_user, backend, xhr_header, user_util, url_type):
1193 self, autologin_user, backend, xhr_header, user_util, url_type):
1192
1194
1193 # create a temporary repo
1195 # create a temporary repo
1194 source = user_util.create_repo(repo_type=backend.alias)
1196 source = user_util.create_repo(repo_type=backend.alias)
1195 repo_name = source.repo_name
1197 repo_name = source.repo_name
1196 repo = Repository.get_by_repo_name(repo_name)
1198 repo = Repository.get_by_repo_name(repo_name)
1197 repo.archived = True
1199 repo.archived = True
1198 Session().commit()
1200 Session().commit()
1199
1201
1200 response = self.app.get(
1202 response = self.app.get(
1201 route_path(url_type, repo_name=repo_name, pull_request_id=1), status=302)
1203 route_path(url_type, repo_name=repo_name, pull_request_id=1), status=302)
1202
1204
1203 msg = 'Action not supported for archived repository.'
1205 msg = 'Action not supported for archived repository.'
1204 assert_session_flash(response, msg)
1206 assert_session_flash(response, msg)
1205
1207
1206
1208
1207 def assert_pull_request_status(pull_request, expected_status):
1209 def assert_pull_request_status(pull_request, expected_status):
1208 status = ChangesetStatusModel().calculated_review_status(pull_request=pull_request)
1210 status = ChangesetStatusModel().calculated_review_status(pull_request=pull_request)
1209 assert status == expected_status
1211 assert status == expected_status
1210
1212
1211
1213
1212 @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
1214 @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
1213 @pytest.mark.usefixtures("autologin_user")
1215 @pytest.mark.usefixtures("autologin_user")
1214 def test_forbidde_to_repo_summary_for_svn_repositories(backend_svn, app, route):
1216 def test_forbidde_to_repo_summary_for_svn_repositories(backend_svn, app, route):
1215 app.get(route_path(route, repo_name=backend_svn.repo_name), status=404)
1217 app.get(route_path(route, repo_name=backend_svn.repo_name), status=404)
@@ -1,1493 +1,1506 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2019 RhodeCode GmbH
3 # Copyright (C) 2011-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22 import collections
22 import collections
23
23
24 import formencode
24 import formencode
25 import formencode.htmlfill
25 import formencode.htmlfill
26 import peppercorn
26 import peppercorn
27 from pyramid.httpexceptions import (
27 from pyramid.httpexceptions import (
28 HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest)
28 HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest)
29 from pyramid.view import view_config
29 from pyramid.view import view_config
30 from pyramid.renderers import render
30 from pyramid.renderers import render
31
31
32 from rhodecode.apps._base import RepoAppView, DataGridAppView
32 from rhodecode.apps._base import RepoAppView, DataGridAppView
33
33
34 from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream
34 from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream
35 from rhodecode.lib.base import vcs_operation_context
35 from rhodecode.lib.base import vcs_operation_context
36 from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist
36 from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist
37 from rhodecode.lib.ext_json import json
37 from rhodecode.lib.ext_json import json
38 from rhodecode.lib.auth import (
38 from rhodecode.lib.auth import (
39 LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator,
39 LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator,
40 NotAnonymous, CSRFRequired)
40 NotAnonymous, CSRFRequired)
41 from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode
41 from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode
42 from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason
42 from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason
43 from rhodecode.lib.vcs.exceptions import (CommitDoesNotExistError,
43 from rhodecode.lib.vcs.exceptions import (CommitDoesNotExistError,
44 RepositoryRequirementError, EmptyRepositoryError)
44 RepositoryRequirementError, EmptyRepositoryError)
45 from rhodecode.model.changeset_status import ChangesetStatusModel
45 from rhodecode.model.changeset_status import ChangesetStatusModel
46 from rhodecode.model.comment import CommentsModel
46 from rhodecode.model.comment import CommentsModel
47 from rhodecode.model.db import (func, or_, PullRequest, PullRequestVersion,
47 from rhodecode.model.db import (func, or_, PullRequest, PullRequestVersion,
48 ChangesetComment, ChangesetStatus, Repository)
48 ChangesetComment, ChangesetStatus, Repository)
49 from rhodecode.model.forms import PullRequestForm
49 from rhodecode.model.forms import PullRequestForm
50 from rhodecode.model.meta import Session
50 from rhodecode.model.meta import Session
51 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
51 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
52 from rhodecode.model.scm import ScmModel
52 from rhodecode.model.scm import ScmModel
53
53
54 log = logging.getLogger(__name__)
54 log = logging.getLogger(__name__)
55
55
56
56
57 class RepoPullRequestsView(RepoAppView, DataGridAppView):
57 class RepoPullRequestsView(RepoAppView, DataGridAppView):
58
58
59 def load_default_context(self):
59 def load_default_context(self):
60 c = self._get_local_tmpl_context(include_app_defaults=True)
60 c = self._get_local_tmpl_context(include_app_defaults=True)
61 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
61 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
62 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
62 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
63 # backward compat., we use for OLD PRs a plain renderer
63 # backward compat., we use for OLD PRs a plain renderer
64 c.renderer = 'plain'
64 c.renderer = 'plain'
65 return c
65 return c
66
66
67 def _get_pull_requests_list(
67 def _get_pull_requests_list(
68 self, repo_name, source, filter_type, opened_by, statuses):
68 self, repo_name, source, filter_type, opened_by, statuses):
69
69
70 draw, start, limit = self._extract_chunk(self.request)
70 draw, start, limit = self._extract_chunk(self.request)
71 search_q, order_by, order_dir = self._extract_ordering(self.request)
71 search_q, order_by, order_dir = self._extract_ordering(self.request)
72 _render = self.request.get_partial_renderer(
72 _render = self.request.get_partial_renderer(
73 'rhodecode:templates/data_table/_dt_elements.mako')
73 'rhodecode:templates/data_table/_dt_elements.mako')
74
74
75 # pagination
75 # pagination
76
76
77 if filter_type == 'awaiting_review':
77 if filter_type == 'awaiting_review':
78 pull_requests = PullRequestModel().get_awaiting_review(
78 pull_requests = PullRequestModel().get_awaiting_review(
79 repo_name, search_q=search_q, source=source, opened_by=opened_by,
79 repo_name, search_q=search_q, source=source, opened_by=opened_by,
80 statuses=statuses, offset=start, length=limit,
80 statuses=statuses, offset=start, length=limit,
81 order_by=order_by, order_dir=order_dir)
81 order_by=order_by, order_dir=order_dir)
82 pull_requests_total_count = PullRequestModel().count_awaiting_review(
82 pull_requests_total_count = PullRequestModel().count_awaiting_review(
83 repo_name, search_q=search_q, source=source, statuses=statuses,
83 repo_name, search_q=search_q, source=source, statuses=statuses,
84 opened_by=opened_by)
84 opened_by=opened_by)
85 elif filter_type == 'awaiting_my_review':
85 elif filter_type == 'awaiting_my_review':
86 pull_requests = PullRequestModel().get_awaiting_my_review(
86 pull_requests = PullRequestModel().get_awaiting_my_review(
87 repo_name, search_q=search_q, source=source, opened_by=opened_by,
87 repo_name, search_q=search_q, source=source, opened_by=opened_by,
88 user_id=self._rhodecode_user.user_id, statuses=statuses,
88 user_id=self._rhodecode_user.user_id, statuses=statuses,
89 offset=start, length=limit, order_by=order_by,
89 offset=start, length=limit, order_by=order_by,
90 order_dir=order_dir)
90 order_dir=order_dir)
91 pull_requests_total_count = PullRequestModel().count_awaiting_my_review(
91 pull_requests_total_count = PullRequestModel().count_awaiting_my_review(
92 repo_name, search_q=search_q, source=source, user_id=self._rhodecode_user.user_id,
92 repo_name, search_q=search_q, source=source, user_id=self._rhodecode_user.user_id,
93 statuses=statuses, opened_by=opened_by)
93 statuses=statuses, opened_by=opened_by)
94 else:
94 else:
95 pull_requests = PullRequestModel().get_all(
95 pull_requests = PullRequestModel().get_all(
96 repo_name, search_q=search_q, source=source, opened_by=opened_by,
96 repo_name, search_q=search_q, source=source, opened_by=opened_by,
97 statuses=statuses, offset=start, length=limit,
97 statuses=statuses, offset=start, length=limit,
98 order_by=order_by, order_dir=order_dir)
98 order_by=order_by, order_dir=order_dir)
99 pull_requests_total_count = PullRequestModel().count_all(
99 pull_requests_total_count = PullRequestModel().count_all(
100 repo_name, search_q=search_q, source=source, statuses=statuses,
100 repo_name, search_q=search_q, source=source, statuses=statuses,
101 opened_by=opened_by)
101 opened_by=opened_by)
102
102
103 data = []
103 data = []
104 comments_model = CommentsModel()
104 comments_model = CommentsModel()
105 for pr in pull_requests:
105 for pr in pull_requests:
106 comments = comments_model.get_all_comments(
106 comments = comments_model.get_all_comments(
107 self.db_repo.repo_id, pull_request=pr)
107 self.db_repo.repo_id, pull_request=pr)
108
108
109 data.append({
109 data.append({
110 'name': _render('pullrequest_name',
110 'name': _render('pullrequest_name',
111 pr.pull_request_id, pr.pull_request_state,
111 pr.pull_request_id, pr.pull_request_state,
112 pr.work_in_progress, pr.target_repo.repo_name),
112 pr.work_in_progress, pr.target_repo.repo_name),
113 'name_raw': pr.pull_request_id,
113 'name_raw': pr.pull_request_id,
114 'status': _render('pullrequest_status',
114 'status': _render('pullrequest_status',
115 pr.calculated_review_status()),
115 pr.calculated_review_status()),
116 'title': _render('pullrequest_title', pr.title, pr.description),
116 'title': _render('pullrequest_title', pr.title, pr.description),
117 'description': h.escape(pr.description),
117 'description': h.escape(pr.description),
118 'updated_on': _render('pullrequest_updated_on',
118 'updated_on': _render('pullrequest_updated_on',
119 h.datetime_to_time(pr.updated_on)),
119 h.datetime_to_time(pr.updated_on)),
120 'updated_on_raw': h.datetime_to_time(pr.updated_on),
120 'updated_on_raw': h.datetime_to_time(pr.updated_on),
121 'created_on': _render('pullrequest_updated_on',
121 'created_on': _render('pullrequest_updated_on',
122 h.datetime_to_time(pr.created_on)),
122 h.datetime_to_time(pr.created_on)),
123 'created_on_raw': h.datetime_to_time(pr.created_on),
123 'created_on_raw': h.datetime_to_time(pr.created_on),
124 'state': pr.pull_request_state,
124 'state': pr.pull_request_state,
125 'author': _render('pullrequest_author',
125 'author': _render('pullrequest_author',
126 pr.author.full_contact, ),
126 pr.author.full_contact, ),
127 'author_raw': pr.author.full_name,
127 'author_raw': pr.author.full_name,
128 'comments': _render('pullrequest_comments', len(comments)),
128 'comments': _render('pullrequest_comments', len(comments)),
129 'comments_raw': len(comments),
129 'comments_raw': len(comments),
130 'closed': pr.is_closed(),
130 'closed': pr.is_closed(),
131 })
131 })
132
132
133 data = ({
133 data = ({
134 'draw': draw,
134 'draw': draw,
135 'data': data,
135 'data': data,
136 'recordsTotal': pull_requests_total_count,
136 'recordsTotal': pull_requests_total_count,
137 'recordsFiltered': pull_requests_total_count,
137 'recordsFiltered': pull_requests_total_count,
138 })
138 })
139 return data
139 return data
140
140
141 @LoginRequired()
141 @LoginRequired()
142 @HasRepoPermissionAnyDecorator(
142 @HasRepoPermissionAnyDecorator(
143 'repository.read', 'repository.write', 'repository.admin')
143 'repository.read', 'repository.write', 'repository.admin')
144 @view_config(
144 @view_config(
145 route_name='pullrequest_show_all', request_method='GET',
145 route_name='pullrequest_show_all', request_method='GET',
146 renderer='rhodecode:templates/pullrequests/pullrequests.mako')
146 renderer='rhodecode:templates/pullrequests/pullrequests.mako')
147 def pull_request_list(self):
147 def pull_request_list(self):
148 c = self.load_default_context()
148 c = self.load_default_context()
149
149
150 req_get = self.request.GET
150 req_get = self.request.GET
151 c.source = str2bool(req_get.get('source'))
151 c.source = str2bool(req_get.get('source'))
152 c.closed = str2bool(req_get.get('closed'))
152 c.closed = str2bool(req_get.get('closed'))
153 c.my = str2bool(req_get.get('my'))
153 c.my = str2bool(req_get.get('my'))
154 c.awaiting_review = str2bool(req_get.get('awaiting_review'))
154 c.awaiting_review = str2bool(req_get.get('awaiting_review'))
155 c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
155 c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
156
156
157 c.active = 'open'
157 c.active = 'open'
158 if c.my:
158 if c.my:
159 c.active = 'my'
159 c.active = 'my'
160 if c.closed:
160 if c.closed:
161 c.active = 'closed'
161 c.active = 'closed'
162 if c.awaiting_review and not c.source:
162 if c.awaiting_review and not c.source:
163 c.active = 'awaiting'
163 c.active = 'awaiting'
164 if c.source and not c.awaiting_review:
164 if c.source and not c.awaiting_review:
165 c.active = 'source'
165 c.active = 'source'
166 if c.awaiting_my_review:
166 if c.awaiting_my_review:
167 c.active = 'awaiting_my'
167 c.active = 'awaiting_my'
168
168
169 return self._get_template_context(c)
169 return self._get_template_context(c)
170
170
171 @LoginRequired()
171 @LoginRequired()
172 @HasRepoPermissionAnyDecorator(
172 @HasRepoPermissionAnyDecorator(
173 'repository.read', 'repository.write', 'repository.admin')
173 'repository.read', 'repository.write', 'repository.admin')
174 @view_config(
174 @view_config(
175 route_name='pullrequest_show_all_data', request_method='GET',
175 route_name='pullrequest_show_all_data', request_method='GET',
176 renderer='json_ext', xhr=True)
176 renderer='json_ext', xhr=True)
177 def pull_request_list_data(self):
177 def pull_request_list_data(self):
178 self.load_default_context()
178 self.load_default_context()
179
179
180 # additional filters
180 # additional filters
181 req_get = self.request.GET
181 req_get = self.request.GET
182 source = str2bool(req_get.get('source'))
182 source = str2bool(req_get.get('source'))
183 closed = str2bool(req_get.get('closed'))
183 closed = str2bool(req_get.get('closed'))
184 my = str2bool(req_get.get('my'))
184 my = str2bool(req_get.get('my'))
185 awaiting_review = str2bool(req_get.get('awaiting_review'))
185 awaiting_review = str2bool(req_get.get('awaiting_review'))
186 awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
186 awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
187
187
188 filter_type = 'awaiting_review' if awaiting_review \
188 filter_type = 'awaiting_review' if awaiting_review \
189 else 'awaiting_my_review' if awaiting_my_review \
189 else 'awaiting_my_review' if awaiting_my_review \
190 else None
190 else None
191
191
192 opened_by = None
192 opened_by = None
193 if my:
193 if my:
194 opened_by = [self._rhodecode_user.user_id]
194 opened_by = [self._rhodecode_user.user_id]
195
195
196 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
196 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
197 if closed:
197 if closed:
198 statuses = [PullRequest.STATUS_CLOSED]
198 statuses = [PullRequest.STATUS_CLOSED]
199
199
200 data = self._get_pull_requests_list(
200 data = self._get_pull_requests_list(
201 repo_name=self.db_repo_name, source=source,
201 repo_name=self.db_repo_name, source=source,
202 filter_type=filter_type, opened_by=opened_by, statuses=statuses)
202 filter_type=filter_type, opened_by=opened_by, statuses=statuses)
203
203
204 return data
204 return data
205
205
206 def _is_diff_cache_enabled(self, target_repo):
206 def _is_diff_cache_enabled(self, target_repo):
207 caching_enabled = self._get_general_setting(
207 caching_enabled = self._get_general_setting(
208 target_repo, 'rhodecode_diff_cache')
208 target_repo, 'rhodecode_diff_cache')
209 log.debug('Diff caching enabled: %s', caching_enabled)
209 log.debug('Diff caching enabled: %s', caching_enabled)
210 return caching_enabled
210 return caching_enabled
211
211
212 def _get_diffset(self, source_repo_name, source_repo,
212 def _get_diffset(self, source_repo_name, source_repo,
213 source_ref_id, target_ref_id,
213 source_ref_id, target_ref_id,
214 target_commit, source_commit, diff_limit, file_limit,
214 target_commit, source_commit, diff_limit, file_limit,
215 fulldiff, hide_whitespace_changes, diff_context):
215 fulldiff, hide_whitespace_changes, diff_context):
216
216
217 vcs_diff = PullRequestModel().get_diff(
217 vcs_diff = PullRequestModel().get_diff(
218 source_repo, source_ref_id, target_ref_id,
218 source_repo, source_ref_id, target_ref_id,
219 hide_whitespace_changes, diff_context)
219 hide_whitespace_changes, diff_context)
220
220
221 diff_processor = diffs.DiffProcessor(
221 diff_processor = diffs.DiffProcessor(
222 vcs_diff, format='newdiff', diff_limit=diff_limit,
222 vcs_diff, format='newdiff', diff_limit=diff_limit,
223 file_limit=file_limit, show_full_diff=fulldiff)
223 file_limit=file_limit, show_full_diff=fulldiff)
224
224
225 _parsed = diff_processor.prepare()
225 _parsed = diff_processor.prepare()
226
226
227 diffset = codeblocks.DiffSet(
227 diffset = codeblocks.DiffSet(
228 repo_name=self.db_repo_name,
228 repo_name=self.db_repo_name,
229 source_repo_name=source_repo_name,
229 source_repo_name=source_repo_name,
230 source_node_getter=codeblocks.diffset_node_getter(target_commit),
230 source_node_getter=codeblocks.diffset_node_getter(target_commit),
231 target_node_getter=codeblocks.diffset_node_getter(source_commit),
231 target_node_getter=codeblocks.diffset_node_getter(source_commit),
232 )
232 )
233 diffset = self.path_filter.render_patchset_filtered(
233 diffset = self.path_filter.render_patchset_filtered(
234 diffset, _parsed, target_commit.raw_id, source_commit.raw_id)
234 diffset, _parsed, target_commit.raw_id, source_commit.raw_id)
235
235
236 return diffset
236 return diffset
237
237
238 def _get_range_diffset(self, source_scm, source_repo,
238 def _get_range_diffset(self, source_scm, source_repo,
239 commit1, commit2, diff_limit, file_limit,
239 commit1, commit2, diff_limit, file_limit,
240 fulldiff, hide_whitespace_changes, diff_context):
240 fulldiff, hide_whitespace_changes, diff_context):
241 vcs_diff = source_scm.get_diff(
241 vcs_diff = source_scm.get_diff(
242 commit1, commit2,
242 commit1, commit2,
243 ignore_whitespace=hide_whitespace_changes,
243 ignore_whitespace=hide_whitespace_changes,
244 context=diff_context)
244 context=diff_context)
245
245
246 diff_processor = diffs.DiffProcessor(
246 diff_processor = diffs.DiffProcessor(
247 vcs_diff, format='newdiff', diff_limit=diff_limit,
247 vcs_diff, format='newdiff', diff_limit=diff_limit,
248 file_limit=file_limit, show_full_diff=fulldiff)
248 file_limit=file_limit, show_full_diff=fulldiff)
249
249
250 _parsed = diff_processor.prepare()
250 _parsed = diff_processor.prepare()
251
251
252 diffset = codeblocks.DiffSet(
252 diffset = codeblocks.DiffSet(
253 repo_name=source_repo.repo_name,
253 repo_name=source_repo.repo_name,
254 source_node_getter=codeblocks.diffset_node_getter(commit1),
254 source_node_getter=codeblocks.diffset_node_getter(commit1),
255 target_node_getter=codeblocks.diffset_node_getter(commit2))
255 target_node_getter=codeblocks.diffset_node_getter(commit2))
256
256
257 diffset = self.path_filter.render_patchset_filtered(
257 diffset = self.path_filter.render_patchset_filtered(
258 diffset, _parsed, commit1.raw_id, commit2.raw_id)
258 diffset, _parsed, commit1.raw_id, commit2.raw_id)
259
259
260 return diffset
260 return diffset
261
261
262 @LoginRequired()
262 @LoginRequired()
263 @HasRepoPermissionAnyDecorator(
263 @HasRepoPermissionAnyDecorator(
264 'repository.read', 'repository.write', 'repository.admin')
264 'repository.read', 'repository.write', 'repository.admin')
265 @view_config(
265 @view_config(
266 route_name='pullrequest_show', request_method='GET',
266 route_name='pullrequest_show', request_method='GET',
267 renderer='rhodecode:templates/pullrequests/pullrequest_show.mako')
267 renderer='rhodecode:templates/pullrequests/pullrequest_show.mako')
268 def pull_request_show(self):
268 def pull_request_show(self):
269 _ = self.request.translate
269 _ = self.request.translate
270 c = self.load_default_context()
270 c = self.load_default_context()
271
271
272 pull_request = PullRequest.get_or_404(
272 pull_request = PullRequest.get_or_404(
273 self.request.matchdict['pull_request_id'])
273 self.request.matchdict['pull_request_id'])
274 pull_request_id = pull_request.pull_request_id
274 pull_request_id = pull_request.pull_request_id
275
275
276 c.state_progressing = pull_request.is_state_changing()
276 c.state_progressing = pull_request.is_state_changing()
277
277
278 _new_state = {
278 _new_state = {
279 'created': PullRequest.STATE_CREATED,
279 'created': PullRequest.STATE_CREATED,
280 }.get(self.request.GET.get('force_state'))
280 }.get(self.request.GET.get('force_state'))
281 if c.is_super_admin and _new_state:
281 if c.is_super_admin and _new_state:
282 with pull_request.set_state(PullRequest.STATE_UPDATING, final_state=_new_state):
282 with pull_request.set_state(PullRequest.STATE_UPDATING, final_state=_new_state):
283 h.flash(
283 h.flash(
284 _('Pull Request state was force changed to `{}`').format(_new_state),
284 _('Pull Request state was force changed to `{}`').format(_new_state),
285 category='success')
285 category='success')
286 Session().commit()
286 Session().commit()
287
287
288 raise HTTPFound(h.route_path(
288 raise HTTPFound(h.route_path(
289 'pullrequest_show', repo_name=self.db_repo_name,
289 'pullrequest_show', repo_name=self.db_repo_name,
290 pull_request_id=pull_request_id))
290 pull_request_id=pull_request_id))
291
291
292 version = self.request.GET.get('version')
292 version = self.request.GET.get('version')
293 from_version = self.request.GET.get('from_version') or version
293 from_version = self.request.GET.get('from_version') or version
294 merge_checks = self.request.GET.get('merge_checks')
294 merge_checks = self.request.GET.get('merge_checks')
295 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
295 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
296
296
297 # fetch global flags of ignore ws or context lines
297 # fetch global flags of ignore ws or context lines
298 diff_context = diffs.get_diff_context(self.request)
298 diff_context = diffs.get_diff_context(self.request)
299 hide_whitespace_changes = diffs.get_diff_whitespace_flag(self.request)
299 hide_whitespace_changes = diffs.get_diff_whitespace_flag(self.request)
300
300
301 force_refresh = str2bool(self.request.GET.get('force_refresh'))
301 force_refresh = str2bool(self.request.GET.get('force_refresh'))
302
302
303 (pull_request_latest,
303 (pull_request_latest,
304 pull_request_at_ver,
304 pull_request_at_ver,
305 pull_request_display_obj,
305 pull_request_display_obj,
306 at_version) = PullRequestModel().get_pr_version(
306 at_version) = PullRequestModel().get_pr_version(
307 pull_request_id, version=version)
307 pull_request_id, version=version)
308 pr_closed = pull_request_latest.is_closed()
308 pr_closed = pull_request_latest.is_closed()
309
309
310 if pr_closed and (version or from_version):
310 if pr_closed and (version or from_version):
311 # not allow to browse versions
311 # not allow to browse versions
312 raise HTTPFound(h.route_path(
312 raise HTTPFound(h.route_path(
313 'pullrequest_show', repo_name=self.db_repo_name,
313 'pullrequest_show', repo_name=self.db_repo_name,
314 pull_request_id=pull_request_id))
314 pull_request_id=pull_request_id))
315
315
316 versions = pull_request_display_obj.versions()
316 versions = pull_request_display_obj.versions()
317 # used to store per-commit range diffs
317 # used to store per-commit range diffs
318 c.changes = collections.OrderedDict()
318 c.changes = collections.OrderedDict()
319 c.range_diff_on = self.request.GET.get('range-diff') == "1"
319 c.range_diff_on = self.request.GET.get('range-diff') == "1"
320
320
321 c.at_version = at_version
321 c.at_version = at_version
322 c.at_version_num = (at_version
322 c.at_version_num = (at_version
323 if at_version and at_version != 'latest'
323 if at_version and at_version != 'latest'
324 else None)
324 else None)
325 c.at_version_pos = ChangesetComment.get_index_from_version(
325 c.at_version_pos = ChangesetComment.get_index_from_version(
326 c.at_version_num, versions)
326 c.at_version_num, versions)
327
327
328 (prev_pull_request_latest,
328 (prev_pull_request_latest,
329 prev_pull_request_at_ver,
329 prev_pull_request_at_ver,
330 prev_pull_request_display_obj,
330 prev_pull_request_display_obj,
331 prev_at_version) = PullRequestModel().get_pr_version(
331 prev_at_version) = PullRequestModel().get_pr_version(
332 pull_request_id, version=from_version)
332 pull_request_id, version=from_version)
333
333
334 c.from_version = prev_at_version
334 c.from_version = prev_at_version
335 c.from_version_num = (prev_at_version
335 c.from_version_num = (prev_at_version
336 if prev_at_version and prev_at_version != 'latest'
336 if prev_at_version and prev_at_version != 'latest'
337 else None)
337 else None)
338 c.from_version_pos = ChangesetComment.get_index_from_version(
338 c.from_version_pos = ChangesetComment.get_index_from_version(
339 c.from_version_num, versions)
339 c.from_version_num, versions)
340
340
341 # define if we're in COMPARE mode or VIEW at version mode
341 # define if we're in COMPARE mode or VIEW at version mode
342 compare = at_version != prev_at_version
342 compare = at_version != prev_at_version
343
343
344 # pull_requests repo_name we opened it against
344 # pull_requests repo_name we opened it against
345 # ie. target_repo must match
345 # ie. target_repo must match
346 if self.db_repo_name != pull_request_at_ver.target_repo.repo_name:
346 if self.db_repo_name != pull_request_at_ver.target_repo.repo_name:
347 raise HTTPNotFound()
347 raise HTTPNotFound()
348
348
349 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(
349 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(
350 pull_request_at_ver)
350 pull_request_at_ver)
351
351
352 c.pull_request = pull_request_display_obj
352 c.pull_request = pull_request_display_obj
353 c.renderer = pull_request_at_ver.description_renderer or c.renderer
353 c.renderer = pull_request_at_ver.description_renderer or c.renderer
354 c.pull_request_latest = pull_request_latest
354 c.pull_request_latest = pull_request_latest
355
355
356 if compare or (at_version and not at_version == 'latest'):
356 if compare or (at_version and not at_version == 'latest'):
357 c.allowed_to_change_status = False
357 c.allowed_to_change_status = False
358 c.allowed_to_update = False
358 c.allowed_to_update = False
359 c.allowed_to_merge = False
359 c.allowed_to_merge = False
360 c.allowed_to_delete = False
360 c.allowed_to_delete = False
361 c.allowed_to_comment = False
361 c.allowed_to_comment = False
362 c.allowed_to_close = False
362 c.allowed_to_close = False
363 else:
363 else:
364 can_change_status = PullRequestModel().check_user_change_status(
364 can_change_status = PullRequestModel().check_user_change_status(
365 pull_request_at_ver, self._rhodecode_user)
365 pull_request_at_ver, self._rhodecode_user)
366 c.allowed_to_change_status = can_change_status and not pr_closed
366 c.allowed_to_change_status = can_change_status and not pr_closed
367
367
368 c.allowed_to_update = PullRequestModel().check_user_update(
368 c.allowed_to_update = PullRequestModel().check_user_update(
369 pull_request_latest, self._rhodecode_user) and not pr_closed
369 pull_request_latest, self._rhodecode_user) and not pr_closed
370 c.allowed_to_merge = PullRequestModel().check_user_merge(
370 c.allowed_to_merge = PullRequestModel().check_user_merge(
371 pull_request_latest, self._rhodecode_user) and not pr_closed
371 pull_request_latest, self._rhodecode_user) and not pr_closed
372 c.allowed_to_delete = PullRequestModel().check_user_delete(
372 c.allowed_to_delete = PullRequestModel().check_user_delete(
373 pull_request_latest, self._rhodecode_user) and not pr_closed
373 pull_request_latest, self._rhodecode_user) and not pr_closed
374 c.allowed_to_comment = not pr_closed
374 c.allowed_to_comment = not pr_closed
375 c.allowed_to_close = c.allowed_to_merge and not pr_closed
375 c.allowed_to_close = c.allowed_to_merge and not pr_closed
376
376
377 c.forbid_adding_reviewers = False
377 c.forbid_adding_reviewers = False
378 c.forbid_author_to_review = False
378 c.forbid_author_to_review = False
379 c.forbid_commit_author_to_review = False
379 c.forbid_commit_author_to_review = False
380
380
381 if pull_request_latest.reviewer_data and \
381 if pull_request_latest.reviewer_data and \
382 'rules' in pull_request_latest.reviewer_data:
382 'rules' in pull_request_latest.reviewer_data:
383 rules = pull_request_latest.reviewer_data['rules'] or {}
383 rules = pull_request_latest.reviewer_data['rules'] or {}
384 try:
384 try:
385 c.forbid_adding_reviewers = rules.get(
385 c.forbid_adding_reviewers = rules.get(
386 'forbid_adding_reviewers')
386 'forbid_adding_reviewers')
387 c.forbid_author_to_review = rules.get(
387 c.forbid_author_to_review = rules.get(
388 'forbid_author_to_review')
388 'forbid_author_to_review')
389 c.forbid_commit_author_to_review = rules.get(
389 c.forbid_commit_author_to_review = rules.get(
390 'forbid_commit_author_to_review')
390 'forbid_commit_author_to_review')
391 except Exception:
391 except Exception:
392 pass
392 pass
393
393
394 # check merge capabilities
394 # check merge capabilities
395 _merge_check = MergeCheck.validate(
395 _merge_check = MergeCheck.validate(
396 pull_request_latest, auth_user=self._rhodecode_user,
396 pull_request_latest, auth_user=self._rhodecode_user,
397 translator=self.request.translate,
397 translator=self.request.translate,
398 force_shadow_repo_refresh=force_refresh)
398 force_shadow_repo_refresh=force_refresh)
399
399 c.pr_merge_errors = _merge_check.error_details
400 c.pr_merge_errors = _merge_check.error_details
400 c.pr_merge_possible = not _merge_check.failed
401 c.pr_merge_possible = not _merge_check.failed
401 c.pr_merge_message = _merge_check.merge_msg
402 c.pr_merge_message = _merge_check.merge_msg
402
403
403 c.pr_merge_info = MergeCheck.get_merge_conditions(
404 c.pr_merge_info = MergeCheck.get_merge_conditions(
404 pull_request_latest, translator=self.request.translate)
405 pull_request_latest, translator=self.request.translate)
405
406
406 c.pull_request_review_status = _merge_check.review_status
407 c.pull_request_review_status = _merge_check.review_status
407 if merge_checks:
408 if merge_checks:
408 self.request.override_renderer = \
409 self.request.override_renderer = \
409 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako'
410 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako'
410 return self._get_template_context(c)
411 return self._get_template_context(c)
411
412
412 comments_model = CommentsModel()
413 comments_model = CommentsModel()
413
414
414 # reviewers and statuses
415 # reviewers and statuses
415 c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses()
416 c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses()
416 allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers]
417 allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers]
417
418
418 # GENERAL COMMENTS with versions #
419 # GENERAL COMMENTS with versions #
419 q = comments_model._all_general_comments_of_pull_request(pull_request_latest)
420 q = comments_model._all_general_comments_of_pull_request(pull_request_latest)
420 q = q.order_by(ChangesetComment.comment_id.asc())
421 q = q.order_by(ChangesetComment.comment_id.asc())
421 general_comments = q
422 general_comments = q
422
423
423 # pick comments we want to render at current version
424 # pick comments we want to render at current version
424 c.comment_versions = comments_model.aggregate_comments(
425 c.comment_versions = comments_model.aggregate_comments(
425 general_comments, versions, c.at_version_num)
426 general_comments, versions, c.at_version_num)
426 c.comments = c.comment_versions[c.at_version_num]['until']
427 c.comments = c.comment_versions[c.at_version_num]['until']
427
428
428 # INLINE COMMENTS with versions #
429 # INLINE COMMENTS with versions #
429 q = comments_model._all_inline_comments_of_pull_request(pull_request_latest)
430 q = comments_model._all_inline_comments_of_pull_request(pull_request_latest)
430 q = q.order_by(ChangesetComment.comment_id.asc())
431 q = q.order_by(ChangesetComment.comment_id.asc())
431 inline_comments = q
432 inline_comments = q
432
433
433 c.inline_versions = comments_model.aggregate_comments(
434 c.inline_versions = comments_model.aggregate_comments(
434 inline_comments, versions, c.at_version_num, inline=True)
435 inline_comments, versions, c.at_version_num, inline=True)
435
436
436 # TODOs
437 # TODOs
437 c.unresolved_comments = CommentsModel() \
438 c.unresolved_comments = CommentsModel() \
438 .get_pull_request_unresolved_todos(pull_request)
439 .get_pull_request_unresolved_todos(pull_request)
439 c.resolved_comments = CommentsModel() \
440 c.resolved_comments = CommentsModel() \
440 .get_pull_request_resolved_todos(pull_request)
441 .get_pull_request_resolved_todos(pull_request)
441
442
442 # inject latest version
443 # inject latest version
443 latest_ver = PullRequest.get_pr_display_object(
444 latest_ver = PullRequest.get_pr_display_object(
444 pull_request_latest, pull_request_latest)
445 pull_request_latest, pull_request_latest)
445
446
446 c.versions = versions + [latest_ver]
447 c.versions = versions + [latest_ver]
447
448
448 # if we use version, then do not show later comments
449 # if we use version, then do not show later comments
449 # than current version
450 # than current version
450 display_inline_comments = collections.defaultdict(
451 display_inline_comments = collections.defaultdict(
451 lambda: collections.defaultdict(list))
452 lambda: collections.defaultdict(list))
452 for co in inline_comments:
453 for co in inline_comments:
453 if c.at_version_num:
454 if c.at_version_num:
454 # pick comments that are at least UPTO given version, so we
455 # pick comments that are at least UPTO given version, so we
455 # don't render comments for higher version
456 # don't render comments for higher version
456 should_render = co.pull_request_version_id and \
457 should_render = co.pull_request_version_id and \
457 co.pull_request_version_id <= c.at_version_num
458 co.pull_request_version_id <= c.at_version_num
458 else:
459 else:
459 # showing all, for 'latest'
460 # showing all, for 'latest'
460 should_render = True
461 should_render = True
461
462
462 if should_render:
463 if should_render:
463 display_inline_comments[co.f_path][co.line_no].append(co)
464 display_inline_comments[co.f_path][co.line_no].append(co)
464
465
465 # load diff data into template context, if we use compare mode then
466 # load diff data into template context, if we use compare mode then
466 # diff is calculated based on changes between versions of PR
467 # diff is calculated based on changes between versions of PR
467
468
468 source_repo = pull_request_at_ver.source_repo
469 source_repo = pull_request_at_ver.source_repo
469 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
470 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
470
471
471 target_repo = pull_request_at_ver.target_repo
472 target_repo = pull_request_at_ver.target_repo
472 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
473 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
473
474
474 if compare:
475 if compare:
475 # in compare switch the diff base to latest commit from prev version
476 # in compare switch the diff base to latest commit from prev version
476 target_ref_id = prev_pull_request_display_obj.revisions[0]
477 target_ref_id = prev_pull_request_display_obj.revisions[0]
477
478
478 # despite opening commits for bookmarks/branches/tags, we always
479 # despite opening commits for bookmarks/branches/tags, we always
479 # convert this to rev to prevent changes after bookmark or branch change
480 # convert this to rev to prevent changes after bookmark or branch change
480 c.source_ref_type = 'rev'
481 c.source_ref_type = 'rev'
481 c.source_ref = source_ref_id
482 c.source_ref = source_ref_id
482
483
483 c.target_ref_type = 'rev'
484 c.target_ref_type = 'rev'
484 c.target_ref = target_ref_id
485 c.target_ref = target_ref_id
485
486
486 c.source_repo = source_repo
487 c.source_repo = source_repo
487 c.target_repo = target_repo
488 c.target_repo = target_repo
488
489
489 c.commit_ranges = []
490 c.commit_ranges = []
490 source_commit = EmptyCommit()
491 source_commit = EmptyCommit()
491 target_commit = EmptyCommit()
492 target_commit = EmptyCommit()
492 c.missing_requirements = False
493 c.missing_requirements = False
493
494
494 source_scm = source_repo.scm_instance()
495 source_scm = source_repo.scm_instance()
495 target_scm = target_repo.scm_instance()
496 target_scm = target_repo.scm_instance()
496
497
497 shadow_scm = None
498 shadow_scm = None
498 try:
499 try:
499 shadow_scm = pull_request_latest.get_shadow_repo()
500 shadow_scm = pull_request_latest.get_shadow_repo()
500 except Exception:
501 except Exception:
501 log.debug('Failed to get shadow repo', exc_info=True)
502 log.debug('Failed to get shadow repo', exc_info=True)
502 # try first the existing source_repo, and then shadow
503 # try first the existing source_repo, and then shadow
503 # repo if we can obtain one
504 # repo if we can obtain one
504 commits_source_repo = source_scm
505 commits_source_repo = source_scm
505 if shadow_scm:
506 if shadow_scm:
506 commits_source_repo = shadow_scm
507 commits_source_repo = shadow_scm
507
508
508 c.commits_source_repo = commits_source_repo
509 c.commits_source_repo = commits_source_repo
509 c.ancestor = None # set it to None, to hide it from PR view
510 c.ancestor = None # set it to None, to hide it from PR view
510
511
511 # empty version means latest, so we keep this to prevent
512 # empty version means latest, so we keep this to prevent
512 # double caching
513 # double caching
513 version_normalized = version or 'latest'
514 version_normalized = version or 'latest'
514 from_version_normalized = from_version or 'latest'
515 from_version_normalized = from_version or 'latest'
515
516
516 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(target_repo)
517 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(target_repo)
517 cache_file_path = diff_cache_exist(
518 cache_file_path = diff_cache_exist(
518 cache_path, 'pull_request', pull_request_id, version_normalized,
519 cache_path, 'pull_request', pull_request_id, version_normalized,
519 from_version_normalized, source_ref_id, target_ref_id,
520 from_version_normalized, source_ref_id, target_ref_id,
520 hide_whitespace_changes, diff_context, c.fulldiff)
521 hide_whitespace_changes, diff_context, c.fulldiff)
521
522
522 caching_enabled = self._is_diff_cache_enabled(c.target_repo)
523 caching_enabled = self._is_diff_cache_enabled(c.target_repo)
523 force_recache = self.get_recache_flag()
524 force_recache = self.get_recache_flag()
524
525
525 cached_diff = None
526 cached_diff = None
526 if caching_enabled:
527 if caching_enabled:
527 cached_diff = load_cached_diff(cache_file_path)
528 cached_diff = load_cached_diff(cache_file_path)
528
529
529 has_proper_commit_cache = (
530 has_proper_commit_cache = (
530 cached_diff and cached_diff.get('commits')
531 cached_diff and cached_diff.get('commits')
531 and len(cached_diff.get('commits', [])) == 5
532 and len(cached_diff.get('commits', [])) == 5
532 and cached_diff.get('commits')[0]
533 and cached_diff.get('commits')[0]
533 and cached_diff.get('commits')[3])
534 and cached_diff.get('commits')[3])
534
535
535 if not force_recache and not c.range_diff_on and has_proper_commit_cache:
536 if not force_recache and not c.range_diff_on and has_proper_commit_cache:
536 diff_commit_cache = \
537 diff_commit_cache = \
537 (ancestor_commit, commit_cache, missing_requirements,
538 (ancestor_commit, commit_cache, missing_requirements,
538 source_commit, target_commit) = cached_diff['commits']
539 source_commit, target_commit) = cached_diff['commits']
539 else:
540 else:
541 # NOTE(marcink): we reach potentially unreachable errors when a PR has
542 # merge errors resulting in potentially hidden commits in the shadow repo.
543 maybe_unreachable = _merge_check.MERGE_CHECK in _merge_check.error_details \
544 and _merge_check.merge_response
545 maybe_unreachable = maybe_unreachable \
546 and _merge_check.merge_response.metadata.get('unresolved_files')
547 log.debug("Using unreachable commits due to MERGE_CHECK in merge simulation")
540 diff_commit_cache = \
548 diff_commit_cache = \
541 (ancestor_commit, commit_cache, missing_requirements,
549 (ancestor_commit, commit_cache, missing_requirements,
542 source_commit, target_commit) = self.get_commits(
550 source_commit, target_commit) = self.get_commits(
543 commits_source_repo,
551 commits_source_repo,
544 pull_request_at_ver,
552 pull_request_at_ver,
545 source_commit,
553 source_commit,
546 source_ref_id,
554 source_ref_id,
547 source_scm,
555 source_scm,
548 target_commit,
556 target_commit,
549 target_ref_id,
557 target_ref_id,
550 target_scm)
558 target_scm, maybe_unreachable=maybe_unreachable)
551
559
552 # register our commit range
560 # register our commit range
553 for comm in commit_cache.values():
561 for comm in commit_cache.values():
554 c.commit_ranges.append(comm)
562 c.commit_ranges.append(comm)
555
563
556 c.missing_requirements = missing_requirements
564 c.missing_requirements = missing_requirements
557 c.ancestor_commit = ancestor_commit
565 c.ancestor_commit = ancestor_commit
558 c.statuses = source_repo.statuses(
566 c.statuses = source_repo.statuses(
559 [x.raw_id for x in c.commit_ranges])
567 [x.raw_id for x in c.commit_ranges])
560
568
561 # auto collapse if we have more than limit
569 # auto collapse if we have more than limit
562 collapse_limit = diffs.DiffProcessor._collapse_commits_over
570 collapse_limit = diffs.DiffProcessor._collapse_commits_over
563 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
571 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
564 c.compare_mode = compare
572 c.compare_mode = compare
565
573
566 # diff_limit is the old behavior, will cut off the whole diff
574 # diff_limit is the old behavior, will cut off the whole diff
567 # if the limit is applied otherwise will just hide the
575 # if the limit is applied otherwise will just hide the
568 # big files from the front-end
576 # big files from the front-end
569 diff_limit = c.visual.cut_off_limit_diff
577 diff_limit = c.visual.cut_off_limit_diff
570 file_limit = c.visual.cut_off_limit_file
578 file_limit = c.visual.cut_off_limit_file
571
579
572 c.missing_commits = False
580 c.missing_commits = False
573 if (c.missing_requirements
581 if (c.missing_requirements
574 or isinstance(source_commit, EmptyCommit)
582 or isinstance(source_commit, EmptyCommit)
575 or source_commit == target_commit):
583 or source_commit == target_commit):
576
584
577 c.missing_commits = True
585 c.missing_commits = True
578 else:
586 else:
579 c.inline_comments = display_inline_comments
587 c.inline_comments = display_inline_comments
580
588
581 has_proper_diff_cache = cached_diff and cached_diff.get('commits')
589 has_proper_diff_cache = cached_diff and cached_diff.get('commits')
582 if not force_recache and has_proper_diff_cache:
590 if not force_recache and has_proper_diff_cache:
583 c.diffset = cached_diff['diff']
591 c.diffset = cached_diff['diff']
584 (ancestor_commit, commit_cache, missing_requirements,
592 (ancestor_commit, commit_cache, missing_requirements,
585 source_commit, target_commit) = cached_diff['commits']
593 source_commit, target_commit) = cached_diff['commits']
586 else:
594 else:
587 c.diffset = self._get_diffset(
595 c.diffset = self._get_diffset(
588 c.source_repo.repo_name, commits_source_repo,
596 c.source_repo.repo_name, commits_source_repo,
589 source_ref_id, target_ref_id,
597 source_ref_id, target_ref_id,
590 target_commit, source_commit,
598 target_commit, source_commit,
591 diff_limit, file_limit, c.fulldiff,
599 diff_limit, file_limit, c.fulldiff,
592 hide_whitespace_changes, diff_context)
600 hide_whitespace_changes, diff_context)
593
601
594 # save cached diff
602 # save cached diff
595 if caching_enabled:
603 if caching_enabled:
596 cache_diff(cache_file_path, c.diffset, diff_commit_cache)
604 cache_diff(cache_file_path, c.diffset, diff_commit_cache)
597
605
598 c.limited_diff = c.diffset.limited_diff
606 c.limited_diff = c.diffset.limited_diff
599
607
600 # calculate removed files that are bound to comments
608 # calculate removed files that are bound to comments
601 comment_deleted_files = [
609 comment_deleted_files = [
602 fname for fname in display_inline_comments
610 fname for fname in display_inline_comments
603 if fname not in c.diffset.file_stats]
611 if fname not in c.diffset.file_stats]
604
612
605 c.deleted_files_comments = collections.defaultdict(dict)
613 c.deleted_files_comments = collections.defaultdict(dict)
606 for fname, per_line_comments in display_inline_comments.items():
614 for fname, per_line_comments in display_inline_comments.items():
607 if fname in comment_deleted_files:
615 if fname in comment_deleted_files:
608 c.deleted_files_comments[fname]['stats'] = 0
616 c.deleted_files_comments[fname]['stats'] = 0
609 c.deleted_files_comments[fname]['comments'] = list()
617 c.deleted_files_comments[fname]['comments'] = list()
610 for lno, comments in per_line_comments.items():
618 for lno, comments in per_line_comments.items():
611 c.deleted_files_comments[fname]['comments'].extend(comments)
619 c.deleted_files_comments[fname]['comments'].extend(comments)
612
620
613 # maybe calculate the range diff
621 # maybe calculate the range diff
614 if c.range_diff_on:
622 if c.range_diff_on:
615 # TODO(marcink): set whitespace/context
623 # TODO(marcink): set whitespace/context
616 context_lcl = 3
624 context_lcl = 3
617 ign_whitespace_lcl = False
625 ign_whitespace_lcl = False
618
626
619 for commit in c.commit_ranges:
627 for commit in c.commit_ranges:
620 commit2 = commit
628 commit2 = commit
621 commit1 = commit.first_parent
629 commit1 = commit.first_parent
622
630
623 range_diff_cache_file_path = diff_cache_exist(
631 range_diff_cache_file_path = diff_cache_exist(
624 cache_path, 'diff', commit.raw_id,
632 cache_path, 'diff', commit.raw_id,
625 ign_whitespace_lcl, context_lcl, c.fulldiff)
633 ign_whitespace_lcl, context_lcl, c.fulldiff)
626
634
627 cached_diff = None
635 cached_diff = None
628 if caching_enabled:
636 if caching_enabled:
629 cached_diff = load_cached_diff(range_diff_cache_file_path)
637 cached_diff = load_cached_diff(range_diff_cache_file_path)
630
638
631 has_proper_diff_cache = cached_diff and cached_diff.get('diff')
639 has_proper_diff_cache = cached_diff and cached_diff.get('diff')
632 if not force_recache and has_proper_diff_cache:
640 if not force_recache and has_proper_diff_cache:
633 diffset = cached_diff['diff']
641 diffset = cached_diff['diff']
634 else:
642 else:
635 diffset = self._get_range_diffset(
643 diffset = self._get_range_diffset(
636 commits_source_repo, source_repo,
644 commits_source_repo, source_repo,
637 commit1, commit2, diff_limit, file_limit,
645 commit1, commit2, diff_limit, file_limit,
638 c.fulldiff, ign_whitespace_lcl, context_lcl
646 c.fulldiff, ign_whitespace_lcl, context_lcl
639 )
647 )
640
648
641 # save cached diff
649 # save cached diff
642 if caching_enabled:
650 if caching_enabled:
643 cache_diff(range_diff_cache_file_path, diffset, None)
651 cache_diff(range_diff_cache_file_path, diffset, None)
644
652
645 c.changes[commit.raw_id] = diffset
653 c.changes[commit.raw_id] = diffset
646
654
647 # this is a hack to properly display links, when creating PR, the
655 # this is a hack to properly display links, when creating PR, the
648 # compare view and others uses different notation, and
656 # compare view and others uses different notation, and
649 # compare_commits.mako renders links based on the target_repo.
657 # compare_commits.mako renders links based on the target_repo.
650 # We need to swap that here to generate it properly on the html side
658 # We need to swap that here to generate it properly on the html side
651 c.target_repo = c.source_repo
659 c.target_repo = c.source_repo
652
660
653 c.commit_statuses = ChangesetStatus.STATUSES
661 c.commit_statuses = ChangesetStatus.STATUSES
654
662
655 c.show_version_changes = not pr_closed
663 c.show_version_changes = not pr_closed
656 if c.show_version_changes:
664 if c.show_version_changes:
657 cur_obj = pull_request_at_ver
665 cur_obj = pull_request_at_ver
658 prev_obj = prev_pull_request_at_ver
666 prev_obj = prev_pull_request_at_ver
659
667
660 old_commit_ids = prev_obj.revisions
668 old_commit_ids = prev_obj.revisions
661 new_commit_ids = cur_obj.revisions
669 new_commit_ids = cur_obj.revisions
662 commit_changes = PullRequestModel()._calculate_commit_id_changes(
670 commit_changes = PullRequestModel()._calculate_commit_id_changes(
663 old_commit_ids, new_commit_ids)
671 old_commit_ids, new_commit_ids)
664 c.commit_changes_summary = commit_changes
672 c.commit_changes_summary = commit_changes
665
673
666 # calculate the diff for commits between versions
674 # calculate the diff for commits between versions
667 c.commit_changes = []
675 c.commit_changes = []
668 mark = lambda cs, fw: list(
676 mark = lambda cs, fw: list(
669 h.itertools.izip_longest([], cs, fillvalue=fw))
677 h.itertools.izip_longest([], cs, fillvalue=fw))
670 for c_type, raw_id in mark(commit_changes.added, 'a') \
678 for c_type, raw_id in mark(commit_changes.added, 'a') \
671 + mark(commit_changes.removed, 'r') \
679 + mark(commit_changes.removed, 'r') \
672 + mark(commit_changes.common, 'c'):
680 + mark(commit_changes.common, 'c'):
673
681
674 if raw_id in commit_cache:
682 if raw_id in commit_cache:
675 commit = commit_cache[raw_id]
683 commit = commit_cache[raw_id]
676 else:
684 else:
677 try:
685 try:
678 commit = commits_source_repo.get_commit(raw_id)
686 commit = commits_source_repo.get_commit(raw_id)
679 except CommitDoesNotExistError:
687 except CommitDoesNotExistError:
680 # in case we fail extracting still use "dummy" commit
688 # in case we fail extracting still use "dummy" commit
681 # for display in commit diff
689 # for display in commit diff
682 commit = h.AttributeDict(
690 commit = h.AttributeDict(
683 {'raw_id': raw_id,
691 {'raw_id': raw_id,
684 'message': 'EMPTY or MISSING COMMIT'})
692 'message': 'EMPTY or MISSING COMMIT'})
685 c.commit_changes.append([c_type, commit])
693 c.commit_changes.append([c_type, commit])
686
694
687 # current user review statuses for each version
695 # current user review statuses for each version
688 c.review_versions = {}
696 c.review_versions = {}
689 if self._rhodecode_user.user_id in allowed_reviewers:
697 if self._rhodecode_user.user_id in allowed_reviewers:
690 for co in general_comments:
698 for co in general_comments:
691 if co.author.user_id == self._rhodecode_user.user_id:
699 if co.author.user_id == self._rhodecode_user.user_id:
692 status = co.status_change
700 status = co.status_change
693 if status:
701 if status:
694 _ver_pr = status[0].comment.pull_request_version_id
702 _ver_pr = status[0].comment.pull_request_version_id
695 c.review_versions[_ver_pr] = status[0]
703 c.review_versions[_ver_pr] = status[0]
696
704
697 return self._get_template_context(c)
705 return self._get_template_context(c)
698
706
699 def get_commits(
707 def get_commits(
700 self, commits_source_repo, pull_request_at_ver, source_commit,
708 self, commits_source_repo, pull_request_at_ver, source_commit,
701 source_ref_id, source_scm, target_commit, target_ref_id, target_scm):
709 source_ref_id, source_scm, target_commit, target_ref_id, target_scm,
710 maybe_unreachable=False):
711
702 commit_cache = collections.OrderedDict()
712 commit_cache = collections.OrderedDict()
703 missing_requirements = False
713 missing_requirements = False
714
704 try:
715 try:
705 pre_load = ["author", "date", "message", "branch", "parents"]
716 pre_load = ["author", "date", "message", "branch", "parents"]
706 show_revs = pull_request_at_ver.revisions
717
707 for rev in show_revs:
718 pull_request_commits = pull_request_at_ver.revisions
708 comm = commits_source_repo.get_commit(
719 log.debug('Loading %s commits from %s',
709 commit_id=rev, pre_load=pre_load)
720 len(pull_request_commits), commits_source_repo)
721
722 for rev in pull_request_commits:
723 comm = commits_source_repo.get_commit(commit_id=rev, pre_load=pre_load,
724 maybe_unreachable=maybe_unreachable)
710 commit_cache[comm.raw_id] = comm
725 commit_cache[comm.raw_id] = comm
711
726
712 # Order here matters, we first need to get target, and then
727 # Order here matters, we first need to get target, and then
713 # the source
728 # the source
714 target_commit = commits_source_repo.get_commit(
729 target_commit = commits_source_repo.get_commit(
715 commit_id=safe_str(target_ref_id))
730 commit_id=safe_str(target_ref_id))
716
731
717 source_commit = commits_source_repo.get_commit(
732 source_commit = commits_source_repo.get_commit(
718 commit_id=safe_str(source_ref_id))
733 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
719 except CommitDoesNotExistError:
734 except CommitDoesNotExistError:
720 log.warning(
735 log.warning('Failed to get commit from `{}` repo'.format(
721 'Failed to get commit from `{}` repo'.format(
736 commits_source_repo), exc_info=True)
722 commits_source_repo), exc_info=True)
723 except RepositoryRequirementError:
737 except RepositoryRequirementError:
724 log.warning(
738 log.warning('Failed to get all required data from repo', exc_info=True)
725 'Failed to get all required data from repo', exc_info=True)
726 missing_requirements = True
739 missing_requirements = True
727 ancestor_commit = None
740 ancestor_commit = None
728 try:
741 try:
729 ancestor_id = source_scm.get_common_ancestor(
742 ancestor_id = source_scm.get_common_ancestor(
730 source_commit.raw_id, target_commit.raw_id, target_scm)
743 source_commit.raw_id, target_commit.raw_id, target_scm)
731 ancestor_commit = source_scm.get_commit(ancestor_id)
744 ancestor_commit = source_scm.get_commit(ancestor_id)
732 except Exception:
745 except Exception:
733 ancestor_commit = None
746 ancestor_commit = None
734 return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit
747 return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit
735
748
736 def assure_not_empty_repo(self):
749 def assure_not_empty_repo(self):
737 _ = self.request.translate
750 _ = self.request.translate
738
751
739 try:
752 try:
740 self.db_repo.scm_instance().get_commit()
753 self.db_repo.scm_instance().get_commit()
741 except EmptyRepositoryError:
754 except EmptyRepositoryError:
742 h.flash(h.literal(_('There are no commits yet')),
755 h.flash(h.literal(_('There are no commits yet')),
743 category='warning')
756 category='warning')
744 raise HTTPFound(
757 raise HTTPFound(
745 h.route_path('repo_summary', repo_name=self.db_repo.repo_name))
758 h.route_path('repo_summary', repo_name=self.db_repo.repo_name))
746
759
747 @LoginRequired()
760 @LoginRequired()
748 @NotAnonymous()
761 @NotAnonymous()
749 @HasRepoPermissionAnyDecorator(
762 @HasRepoPermissionAnyDecorator(
750 'repository.read', 'repository.write', 'repository.admin')
763 'repository.read', 'repository.write', 'repository.admin')
751 @view_config(
764 @view_config(
752 route_name='pullrequest_new', request_method='GET',
765 route_name='pullrequest_new', request_method='GET',
753 renderer='rhodecode:templates/pullrequests/pullrequest.mako')
766 renderer='rhodecode:templates/pullrequests/pullrequest.mako')
754 def pull_request_new(self):
767 def pull_request_new(self):
755 _ = self.request.translate
768 _ = self.request.translate
756 c = self.load_default_context()
769 c = self.load_default_context()
757
770
758 self.assure_not_empty_repo()
771 self.assure_not_empty_repo()
759 source_repo = self.db_repo
772 source_repo = self.db_repo
760
773
761 commit_id = self.request.GET.get('commit')
774 commit_id = self.request.GET.get('commit')
762 branch_ref = self.request.GET.get('branch')
775 branch_ref = self.request.GET.get('branch')
763 bookmark_ref = self.request.GET.get('bookmark')
776 bookmark_ref = self.request.GET.get('bookmark')
764
777
765 try:
778 try:
766 source_repo_data = PullRequestModel().generate_repo_data(
779 source_repo_data = PullRequestModel().generate_repo_data(
767 source_repo, commit_id=commit_id,
780 source_repo, commit_id=commit_id,
768 branch=branch_ref, bookmark=bookmark_ref,
781 branch=branch_ref, bookmark=bookmark_ref,
769 translator=self.request.translate)
782 translator=self.request.translate)
770 except CommitDoesNotExistError as e:
783 except CommitDoesNotExistError as e:
771 log.exception(e)
784 log.exception(e)
772 h.flash(_('Commit does not exist'), 'error')
785 h.flash(_('Commit does not exist'), 'error')
773 raise HTTPFound(
786 raise HTTPFound(
774 h.route_path('pullrequest_new', repo_name=source_repo.repo_name))
787 h.route_path('pullrequest_new', repo_name=source_repo.repo_name))
775
788
776 default_target_repo = source_repo
789 default_target_repo = source_repo
777
790
778 if source_repo.parent and c.has_origin_repo_read_perm:
791 if source_repo.parent and c.has_origin_repo_read_perm:
779 parent_vcs_obj = source_repo.parent.scm_instance()
792 parent_vcs_obj = source_repo.parent.scm_instance()
780 if parent_vcs_obj and not parent_vcs_obj.is_empty():
793 if parent_vcs_obj and not parent_vcs_obj.is_empty():
781 # change default if we have a parent repo
794 # change default if we have a parent repo
782 default_target_repo = source_repo.parent
795 default_target_repo = source_repo.parent
783
796
784 target_repo_data = PullRequestModel().generate_repo_data(
797 target_repo_data = PullRequestModel().generate_repo_data(
785 default_target_repo, translator=self.request.translate)
798 default_target_repo, translator=self.request.translate)
786
799
787 selected_source_ref = source_repo_data['refs']['selected_ref']
800 selected_source_ref = source_repo_data['refs']['selected_ref']
788 title_source_ref = ''
801 title_source_ref = ''
789 if selected_source_ref:
802 if selected_source_ref:
790 title_source_ref = selected_source_ref.split(':', 2)[1]
803 title_source_ref = selected_source_ref.split(':', 2)[1]
791 c.default_title = PullRequestModel().generate_pullrequest_title(
804 c.default_title = PullRequestModel().generate_pullrequest_title(
792 source=source_repo.repo_name,
805 source=source_repo.repo_name,
793 source_ref=title_source_ref,
806 source_ref=title_source_ref,
794 target=default_target_repo.repo_name
807 target=default_target_repo.repo_name
795 )
808 )
796
809
797 c.default_repo_data = {
810 c.default_repo_data = {
798 'source_repo_name': source_repo.repo_name,
811 'source_repo_name': source_repo.repo_name,
799 'source_refs_json': json.dumps(source_repo_data),
812 'source_refs_json': json.dumps(source_repo_data),
800 'target_repo_name': default_target_repo.repo_name,
813 'target_repo_name': default_target_repo.repo_name,
801 'target_refs_json': json.dumps(target_repo_data),
814 'target_refs_json': json.dumps(target_repo_data),
802 }
815 }
803 c.default_source_ref = selected_source_ref
816 c.default_source_ref = selected_source_ref
804
817
805 return self._get_template_context(c)
818 return self._get_template_context(c)
806
819
807 @LoginRequired()
820 @LoginRequired()
808 @NotAnonymous()
821 @NotAnonymous()
809 @HasRepoPermissionAnyDecorator(
822 @HasRepoPermissionAnyDecorator(
810 'repository.read', 'repository.write', 'repository.admin')
823 'repository.read', 'repository.write', 'repository.admin')
811 @view_config(
824 @view_config(
812 route_name='pullrequest_repo_refs', request_method='GET',
825 route_name='pullrequest_repo_refs', request_method='GET',
813 renderer='json_ext', xhr=True)
826 renderer='json_ext', xhr=True)
814 def pull_request_repo_refs(self):
827 def pull_request_repo_refs(self):
815 self.load_default_context()
828 self.load_default_context()
816 target_repo_name = self.request.matchdict['target_repo_name']
829 target_repo_name = self.request.matchdict['target_repo_name']
817 repo = Repository.get_by_repo_name(target_repo_name)
830 repo = Repository.get_by_repo_name(target_repo_name)
818 if not repo:
831 if not repo:
819 raise HTTPNotFound()
832 raise HTTPNotFound()
820
833
821 target_perm = HasRepoPermissionAny(
834 target_perm = HasRepoPermissionAny(
822 'repository.read', 'repository.write', 'repository.admin')(
835 'repository.read', 'repository.write', 'repository.admin')(
823 target_repo_name)
836 target_repo_name)
824 if not target_perm:
837 if not target_perm:
825 raise HTTPNotFound()
838 raise HTTPNotFound()
826
839
827 return PullRequestModel().generate_repo_data(
840 return PullRequestModel().generate_repo_data(
828 repo, translator=self.request.translate)
841 repo, translator=self.request.translate)
829
842
830 @LoginRequired()
843 @LoginRequired()
831 @NotAnonymous()
844 @NotAnonymous()
832 @HasRepoPermissionAnyDecorator(
845 @HasRepoPermissionAnyDecorator(
833 'repository.read', 'repository.write', 'repository.admin')
846 'repository.read', 'repository.write', 'repository.admin')
834 @view_config(
847 @view_config(
835 route_name='pullrequest_repo_targets', request_method='GET',
848 route_name='pullrequest_repo_targets', request_method='GET',
836 renderer='json_ext', xhr=True)
849 renderer='json_ext', xhr=True)
837 def pullrequest_repo_targets(self):
850 def pullrequest_repo_targets(self):
838 _ = self.request.translate
851 _ = self.request.translate
839 filter_query = self.request.GET.get('query')
852 filter_query = self.request.GET.get('query')
840
853
841 # get the parents
854 # get the parents
842 parent_target_repos = []
855 parent_target_repos = []
843 if self.db_repo.parent:
856 if self.db_repo.parent:
844 parents_query = Repository.query() \
857 parents_query = Repository.query() \
845 .order_by(func.length(Repository.repo_name)) \
858 .order_by(func.length(Repository.repo_name)) \
846 .filter(Repository.fork_id == self.db_repo.parent.repo_id)
859 .filter(Repository.fork_id == self.db_repo.parent.repo_id)
847
860
848 if filter_query:
861 if filter_query:
849 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
862 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
850 parents_query = parents_query.filter(
863 parents_query = parents_query.filter(
851 Repository.repo_name.ilike(ilike_expression))
864 Repository.repo_name.ilike(ilike_expression))
852 parents = parents_query.limit(20).all()
865 parents = parents_query.limit(20).all()
853
866
854 for parent in parents:
867 for parent in parents:
855 parent_vcs_obj = parent.scm_instance()
868 parent_vcs_obj = parent.scm_instance()
856 if parent_vcs_obj and not parent_vcs_obj.is_empty():
869 if parent_vcs_obj and not parent_vcs_obj.is_empty():
857 parent_target_repos.append(parent)
870 parent_target_repos.append(parent)
858
871
859 # get other forks, and repo itself
872 # get other forks, and repo itself
860 query = Repository.query() \
873 query = Repository.query() \
861 .order_by(func.length(Repository.repo_name)) \
874 .order_by(func.length(Repository.repo_name)) \
862 .filter(
875 .filter(
863 or_(Repository.repo_id == self.db_repo.repo_id, # repo itself
876 or_(Repository.repo_id == self.db_repo.repo_id, # repo itself
864 Repository.fork_id == self.db_repo.repo_id) # forks of this repo
877 Repository.fork_id == self.db_repo.repo_id) # forks of this repo
865 ) \
878 ) \
866 .filter(~Repository.repo_id.in_([x.repo_id for x in parent_target_repos]))
879 .filter(~Repository.repo_id.in_([x.repo_id for x in parent_target_repos]))
867
880
868 if filter_query:
881 if filter_query:
869 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
882 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
870 query = query.filter(Repository.repo_name.ilike(ilike_expression))
883 query = query.filter(Repository.repo_name.ilike(ilike_expression))
871
884
872 limit = max(20 - len(parent_target_repos), 5) # not less then 5
885 limit = max(20 - len(parent_target_repos), 5) # not less then 5
873 target_repos = query.limit(limit).all()
886 target_repos = query.limit(limit).all()
874
887
875 all_target_repos = target_repos + parent_target_repos
888 all_target_repos = target_repos + parent_target_repos
876
889
877 repos = []
890 repos = []
878 # This checks permissions to the repositories
891 # This checks permissions to the repositories
879 for obj in ScmModel().get_repos(all_target_repos):
892 for obj in ScmModel().get_repos(all_target_repos):
880 repos.append({
893 repos.append({
881 'id': obj['name'],
894 'id': obj['name'],
882 'text': obj['name'],
895 'text': obj['name'],
883 'type': 'repo',
896 'type': 'repo',
884 'repo_id': obj['dbrepo']['repo_id'],
897 'repo_id': obj['dbrepo']['repo_id'],
885 'repo_type': obj['dbrepo']['repo_type'],
898 'repo_type': obj['dbrepo']['repo_type'],
886 'private': obj['dbrepo']['private'],
899 'private': obj['dbrepo']['private'],
887
900
888 })
901 })
889
902
890 data = {
903 data = {
891 'more': False,
904 'more': False,
892 'results': [{
905 'results': [{
893 'text': _('Repositories'),
906 'text': _('Repositories'),
894 'children': repos
907 'children': repos
895 }] if repos else []
908 }] if repos else []
896 }
909 }
897 return data
910 return data
898
911
899 @LoginRequired()
912 @LoginRequired()
900 @NotAnonymous()
913 @NotAnonymous()
901 @HasRepoPermissionAnyDecorator(
914 @HasRepoPermissionAnyDecorator(
902 'repository.read', 'repository.write', 'repository.admin')
915 'repository.read', 'repository.write', 'repository.admin')
903 @CSRFRequired()
916 @CSRFRequired()
904 @view_config(
917 @view_config(
905 route_name='pullrequest_create', request_method='POST',
918 route_name='pullrequest_create', request_method='POST',
906 renderer=None)
919 renderer=None)
907 def pull_request_create(self):
920 def pull_request_create(self):
908 _ = self.request.translate
921 _ = self.request.translate
909 self.assure_not_empty_repo()
922 self.assure_not_empty_repo()
910 self.load_default_context()
923 self.load_default_context()
911
924
912 controls = peppercorn.parse(self.request.POST.items())
925 controls = peppercorn.parse(self.request.POST.items())
913
926
914 try:
927 try:
915 form = PullRequestForm(
928 form = PullRequestForm(
916 self.request.translate, self.db_repo.repo_id)()
929 self.request.translate, self.db_repo.repo_id)()
917 _form = form.to_python(controls)
930 _form = form.to_python(controls)
918 except formencode.Invalid as errors:
931 except formencode.Invalid as errors:
919 if errors.error_dict.get('revisions'):
932 if errors.error_dict.get('revisions'):
920 msg = 'Revisions: %s' % errors.error_dict['revisions']
933 msg = 'Revisions: %s' % errors.error_dict['revisions']
921 elif errors.error_dict.get('pullrequest_title'):
934 elif errors.error_dict.get('pullrequest_title'):
922 msg = errors.error_dict.get('pullrequest_title')
935 msg = errors.error_dict.get('pullrequest_title')
923 else:
936 else:
924 msg = _('Error creating pull request: {}').format(errors)
937 msg = _('Error creating pull request: {}').format(errors)
925 log.exception(msg)
938 log.exception(msg)
926 h.flash(msg, 'error')
939 h.flash(msg, 'error')
927
940
928 # would rather just go back to form ...
941 # would rather just go back to form ...
929 raise HTTPFound(
942 raise HTTPFound(
930 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
943 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
931
944
932 source_repo = _form['source_repo']
945 source_repo = _form['source_repo']
933 source_ref = _form['source_ref']
946 source_ref = _form['source_ref']
934 target_repo = _form['target_repo']
947 target_repo = _form['target_repo']
935 target_ref = _form['target_ref']
948 target_ref = _form['target_ref']
936 commit_ids = _form['revisions'][::-1]
949 commit_ids = _form['revisions'][::-1]
937
950
938 # find the ancestor for this pr
951 # find the ancestor for this pr
939 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
952 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
940 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
953 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
941
954
942 if not (source_db_repo or target_db_repo):
955 if not (source_db_repo or target_db_repo):
943 h.flash(_('source_repo or target repo not found'), category='error')
956 h.flash(_('source_repo or target repo not found'), category='error')
944 raise HTTPFound(
957 raise HTTPFound(
945 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
958 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
946
959
947 # re-check permissions again here
960 # re-check permissions again here
948 # source_repo we must have read permissions
961 # source_repo we must have read permissions
949
962
950 source_perm = HasRepoPermissionAny(
963 source_perm = HasRepoPermissionAny(
951 'repository.read', 'repository.write', 'repository.admin')(
964 'repository.read', 'repository.write', 'repository.admin')(
952 source_db_repo.repo_name)
965 source_db_repo.repo_name)
953 if not source_perm:
966 if not source_perm:
954 msg = _('Not Enough permissions to source repo `{}`.'.format(
967 msg = _('Not Enough permissions to source repo `{}`.'.format(
955 source_db_repo.repo_name))
968 source_db_repo.repo_name))
956 h.flash(msg, category='error')
969 h.flash(msg, category='error')
957 # copy the args back to redirect
970 # copy the args back to redirect
958 org_query = self.request.GET.mixed()
971 org_query = self.request.GET.mixed()
959 raise HTTPFound(
972 raise HTTPFound(
960 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
973 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
961 _query=org_query))
974 _query=org_query))
962
975
963 # target repo we must have read permissions, and also later on
976 # target repo we must have read permissions, and also later on
964 # we want to check branch permissions here
977 # we want to check branch permissions here
965 target_perm = HasRepoPermissionAny(
978 target_perm = HasRepoPermissionAny(
966 'repository.read', 'repository.write', 'repository.admin')(
979 'repository.read', 'repository.write', 'repository.admin')(
967 target_db_repo.repo_name)
980 target_db_repo.repo_name)
968 if not target_perm:
981 if not target_perm:
969 msg = _('Not Enough permissions to target repo `{}`.'.format(
982 msg = _('Not Enough permissions to target repo `{}`.'.format(
970 target_db_repo.repo_name))
983 target_db_repo.repo_name))
971 h.flash(msg, category='error')
984 h.flash(msg, category='error')
972 # copy the args back to redirect
985 # copy the args back to redirect
973 org_query = self.request.GET.mixed()
986 org_query = self.request.GET.mixed()
974 raise HTTPFound(
987 raise HTTPFound(
975 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
988 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
976 _query=org_query))
989 _query=org_query))
977
990
978 source_scm = source_db_repo.scm_instance()
991 source_scm = source_db_repo.scm_instance()
979 target_scm = target_db_repo.scm_instance()
992 target_scm = target_db_repo.scm_instance()
980
993
981 source_commit = source_scm.get_commit(source_ref.split(':')[-1])
994 source_commit = source_scm.get_commit(source_ref.split(':')[-1])
982 target_commit = target_scm.get_commit(target_ref.split(':')[-1])
995 target_commit = target_scm.get_commit(target_ref.split(':')[-1])
983
996
984 ancestor = source_scm.get_common_ancestor(
997 ancestor = source_scm.get_common_ancestor(
985 source_commit.raw_id, target_commit.raw_id, target_scm)
998 source_commit.raw_id, target_commit.raw_id, target_scm)
986
999
987 # recalculate target ref based on ancestor
1000 # recalculate target ref based on ancestor
988 target_ref_type, target_ref_name, __ = _form['target_ref'].split(':')
1001 target_ref_type, target_ref_name, __ = _form['target_ref'].split(':')
989 target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
1002 target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
990
1003
991 get_default_reviewers_data, validate_default_reviewers = \
1004 get_default_reviewers_data, validate_default_reviewers = \
992 PullRequestModel().get_reviewer_functions()
1005 PullRequestModel().get_reviewer_functions()
993
1006
994 # recalculate reviewers logic, to make sure we can validate this
1007 # recalculate reviewers logic, to make sure we can validate this
995 reviewer_rules = get_default_reviewers_data(
1008 reviewer_rules = get_default_reviewers_data(
996 self._rhodecode_db_user, source_db_repo,
1009 self._rhodecode_db_user, source_db_repo,
997 source_commit, target_db_repo, target_commit)
1010 source_commit, target_db_repo, target_commit)
998
1011
999 given_reviewers = _form['review_members']
1012 given_reviewers = _form['review_members']
1000 reviewers = validate_default_reviewers(
1013 reviewers = validate_default_reviewers(
1001 given_reviewers, reviewer_rules)
1014 given_reviewers, reviewer_rules)
1002
1015
1003 pullrequest_title = _form['pullrequest_title']
1016 pullrequest_title = _form['pullrequest_title']
1004 title_source_ref = source_ref.split(':', 2)[1]
1017 title_source_ref = source_ref.split(':', 2)[1]
1005 if not pullrequest_title:
1018 if not pullrequest_title:
1006 pullrequest_title = PullRequestModel().generate_pullrequest_title(
1019 pullrequest_title = PullRequestModel().generate_pullrequest_title(
1007 source=source_repo,
1020 source=source_repo,
1008 source_ref=title_source_ref,
1021 source_ref=title_source_ref,
1009 target=target_repo
1022 target=target_repo
1010 )
1023 )
1011
1024
1012 description = _form['pullrequest_desc']
1025 description = _form['pullrequest_desc']
1013 description_renderer = _form['description_renderer']
1026 description_renderer = _form['description_renderer']
1014
1027
1015 try:
1028 try:
1016 pull_request = PullRequestModel().create(
1029 pull_request = PullRequestModel().create(
1017 created_by=self._rhodecode_user.user_id,
1030 created_by=self._rhodecode_user.user_id,
1018 source_repo=source_repo,
1031 source_repo=source_repo,
1019 source_ref=source_ref,
1032 source_ref=source_ref,
1020 target_repo=target_repo,
1033 target_repo=target_repo,
1021 target_ref=target_ref,
1034 target_ref=target_ref,
1022 revisions=commit_ids,
1035 revisions=commit_ids,
1023 reviewers=reviewers,
1036 reviewers=reviewers,
1024 title=pullrequest_title,
1037 title=pullrequest_title,
1025 description=description,
1038 description=description,
1026 description_renderer=description_renderer,
1039 description_renderer=description_renderer,
1027 reviewer_data=reviewer_rules,
1040 reviewer_data=reviewer_rules,
1028 auth_user=self._rhodecode_user
1041 auth_user=self._rhodecode_user
1029 )
1042 )
1030 Session().commit()
1043 Session().commit()
1031
1044
1032 h.flash(_('Successfully opened new pull request'),
1045 h.flash(_('Successfully opened new pull request'),
1033 category='success')
1046 category='success')
1034 except Exception:
1047 except Exception:
1035 msg = _('Error occurred during creation of this pull request.')
1048 msg = _('Error occurred during creation of this pull request.')
1036 log.exception(msg)
1049 log.exception(msg)
1037 h.flash(msg, category='error')
1050 h.flash(msg, category='error')
1038
1051
1039 # copy the args back to redirect
1052 # copy the args back to redirect
1040 org_query = self.request.GET.mixed()
1053 org_query = self.request.GET.mixed()
1041 raise HTTPFound(
1054 raise HTTPFound(
1042 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1055 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1043 _query=org_query))
1056 _query=org_query))
1044
1057
1045 raise HTTPFound(
1058 raise HTTPFound(
1046 h.route_path('pullrequest_show', repo_name=target_repo,
1059 h.route_path('pullrequest_show', repo_name=target_repo,
1047 pull_request_id=pull_request.pull_request_id))
1060 pull_request_id=pull_request.pull_request_id))
1048
1061
1049 @LoginRequired()
1062 @LoginRequired()
1050 @NotAnonymous()
1063 @NotAnonymous()
1051 @HasRepoPermissionAnyDecorator(
1064 @HasRepoPermissionAnyDecorator(
1052 'repository.read', 'repository.write', 'repository.admin')
1065 'repository.read', 'repository.write', 'repository.admin')
1053 @CSRFRequired()
1066 @CSRFRequired()
1054 @view_config(
1067 @view_config(
1055 route_name='pullrequest_update', request_method='POST',
1068 route_name='pullrequest_update', request_method='POST',
1056 renderer='json_ext')
1069 renderer='json_ext')
1057 def pull_request_update(self):
1070 def pull_request_update(self):
1058 pull_request = PullRequest.get_or_404(
1071 pull_request = PullRequest.get_or_404(
1059 self.request.matchdict['pull_request_id'])
1072 self.request.matchdict['pull_request_id'])
1060 _ = self.request.translate
1073 _ = self.request.translate
1061
1074
1062 self.load_default_context()
1075 self.load_default_context()
1063 redirect_url = None
1076 redirect_url = None
1064
1077
1065 if pull_request.is_closed():
1078 if pull_request.is_closed():
1066 log.debug('update: forbidden because pull request is closed')
1079 log.debug('update: forbidden because pull request is closed')
1067 msg = _(u'Cannot update closed pull requests.')
1080 msg = _(u'Cannot update closed pull requests.')
1068 h.flash(msg, category='error')
1081 h.flash(msg, category='error')
1069 return {'response': True,
1082 return {'response': True,
1070 'redirect_url': redirect_url}
1083 'redirect_url': redirect_url}
1071
1084
1072 is_state_changing = pull_request.is_state_changing()
1085 is_state_changing = pull_request.is_state_changing()
1073
1086
1074 # only owner or admin can update it
1087 # only owner or admin can update it
1075 allowed_to_update = PullRequestModel().check_user_update(
1088 allowed_to_update = PullRequestModel().check_user_update(
1076 pull_request, self._rhodecode_user)
1089 pull_request, self._rhodecode_user)
1077 if allowed_to_update:
1090 if allowed_to_update:
1078 controls = peppercorn.parse(self.request.POST.items())
1091 controls = peppercorn.parse(self.request.POST.items())
1079 force_refresh = str2bool(self.request.POST.get('force_refresh'))
1092 force_refresh = str2bool(self.request.POST.get('force_refresh'))
1080
1093
1081 if 'review_members' in controls:
1094 if 'review_members' in controls:
1082 self._update_reviewers(
1095 self._update_reviewers(
1083 pull_request, controls['review_members'],
1096 pull_request, controls['review_members'],
1084 pull_request.reviewer_data)
1097 pull_request.reviewer_data)
1085 elif str2bool(self.request.POST.get('update_commits', 'false')):
1098 elif str2bool(self.request.POST.get('update_commits', 'false')):
1086 if is_state_changing:
1099 if is_state_changing:
1087 log.debug('commits update: forbidden because pull request is in state %s',
1100 log.debug('commits update: forbidden because pull request is in state %s',
1088 pull_request.pull_request_state)
1101 pull_request.pull_request_state)
1089 msg = _(u'Cannot update pull requests commits in state other than `{}`. '
1102 msg = _(u'Cannot update pull requests commits in state other than `{}`. '
1090 u'Current state is: `{}`').format(
1103 u'Current state is: `{}`').format(
1091 PullRequest.STATE_CREATED, pull_request.pull_request_state)
1104 PullRequest.STATE_CREATED, pull_request.pull_request_state)
1092 h.flash(msg, category='error')
1105 h.flash(msg, category='error')
1093 return {'response': True,
1106 return {'response': True,
1094 'redirect_url': redirect_url}
1107 'redirect_url': redirect_url}
1095
1108
1096 self._update_commits(pull_request)
1109 self._update_commits(pull_request)
1097 if force_refresh:
1110 if force_refresh:
1098 redirect_url = h.route_path(
1111 redirect_url = h.route_path(
1099 'pullrequest_show', repo_name=self.db_repo_name,
1112 'pullrequest_show', repo_name=self.db_repo_name,
1100 pull_request_id=pull_request.pull_request_id,
1113 pull_request_id=pull_request.pull_request_id,
1101 _query={"force_refresh": 1})
1114 _query={"force_refresh": 1})
1102 elif str2bool(self.request.POST.get('edit_pull_request', 'false')):
1115 elif str2bool(self.request.POST.get('edit_pull_request', 'false')):
1103 self._edit_pull_request(pull_request)
1116 self._edit_pull_request(pull_request)
1104 else:
1117 else:
1105 raise HTTPBadRequest()
1118 raise HTTPBadRequest()
1106
1119
1107 return {'response': True,
1120 return {'response': True,
1108 'redirect_url': redirect_url}
1121 'redirect_url': redirect_url}
1109 raise HTTPForbidden()
1122 raise HTTPForbidden()
1110
1123
1111 def _edit_pull_request(self, pull_request):
1124 def _edit_pull_request(self, pull_request):
1112 _ = self.request.translate
1125 _ = self.request.translate
1113
1126
1114 try:
1127 try:
1115 PullRequestModel().edit(
1128 PullRequestModel().edit(
1116 pull_request,
1129 pull_request,
1117 self.request.POST.get('title'),
1130 self.request.POST.get('title'),
1118 self.request.POST.get('description'),
1131 self.request.POST.get('description'),
1119 self.request.POST.get('description_renderer'),
1132 self.request.POST.get('description_renderer'),
1120 self._rhodecode_user)
1133 self._rhodecode_user)
1121 except ValueError:
1134 except ValueError:
1122 msg = _(u'Cannot update closed pull requests.')
1135 msg = _(u'Cannot update closed pull requests.')
1123 h.flash(msg, category='error')
1136 h.flash(msg, category='error')
1124 return
1137 return
1125 else:
1138 else:
1126 Session().commit()
1139 Session().commit()
1127
1140
1128 msg = _(u'Pull request title & description updated.')
1141 msg = _(u'Pull request title & description updated.')
1129 h.flash(msg, category='success')
1142 h.flash(msg, category='success')
1130 return
1143 return
1131
1144
1132 def _update_commits(self, pull_request):
1145 def _update_commits(self, pull_request):
1133 _ = self.request.translate
1146 _ = self.request.translate
1134
1147
1135 with pull_request.set_state(PullRequest.STATE_UPDATING):
1148 with pull_request.set_state(PullRequest.STATE_UPDATING):
1136 resp = PullRequestModel().update_commits(
1149 resp = PullRequestModel().update_commits(
1137 pull_request, self._rhodecode_db_user)
1150 pull_request, self._rhodecode_db_user)
1138
1151
1139 if resp.executed:
1152 if resp.executed:
1140
1153
1141 if resp.target_changed and resp.source_changed:
1154 if resp.target_changed and resp.source_changed:
1142 changed = 'target and source repositories'
1155 changed = 'target and source repositories'
1143 elif resp.target_changed and not resp.source_changed:
1156 elif resp.target_changed and not resp.source_changed:
1144 changed = 'target repository'
1157 changed = 'target repository'
1145 elif not resp.target_changed and resp.source_changed:
1158 elif not resp.target_changed and resp.source_changed:
1146 changed = 'source repository'
1159 changed = 'source repository'
1147 else:
1160 else:
1148 changed = 'nothing'
1161 changed = 'nothing'
1149
1162
1150 msg = _(u'Pull request updated to "{source_commit_id}" with '
1163 msg = _(u'Pull request updated to "{source_commit_id}" with '
1151 u'{count_added} added, {count_removed} removed commits. '
1164 u'{count_added} added, {count_removed} removed commits. '
1152 u'Source of changes: {change_source}')
1165 u'Source of changes: {change_source}')
1153 msg = msg.format(
1166 msg = msg.format(
1154 source_commit_id=pull_request.source_ref_parts.commit_id,
1167 source_commit_id=pull_request.source_ref_parts.commit_id,
1155 count_added=len(resp.changes.added),
1168 count_added=len(resp.changes.added),
1156 count_removed=len(resp.changes.removed),
1169 count_removed=len(resp.changes.removed),
1157 change_source=changed)
1170 change_source=changed)
1158 h.flash(msg, category='success')
1171 h.flash(msg, category='success')
1159
1172
1160 channel = '/repo${}$/pr/{}'.format(
1173 channel = '/repo${}$/pr/{}'.format(
1161 pull_request.target_repo.repo_name, pull_request.pull_request_id)
1174 pull_request.target_repo.repo_name, pull_request.pull_request_id)
1162 message = msg + (
1175 message = msg + (
1163 ' - <a onclick="window.location.reload()">'
1176 ' - <a onclick="window.location.reload()">'
1164 '<strong>{}</strong></a>'.format(_('Reload page')))
1177 '<strong>{}</strong></a>'.format(_('Reload page')))
1165 channelstream.post_message(
1178 channelstream.post_message(
1166 channel, message, self._rhodecode_user.username,
1179 channel, message, self._rhodecode_user.username,
1167 registry=self.request.registry)
1180 registry=self.request.registry)
1168 else:
1181 else:
1169 msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
1182 msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
1170 warning_reasons = [
1183 warning_reasons = [
1171 UpdateFailureReason.NO_CHANGE,
1184 UpdateFailureReason.NO_CHANGE,
1172 UpdateFailureReason.WRONG_REF_TYPE,
1185 UpdateFailureReason.WRONG_REF_TYPE,
1173 ]
1186 ]
1174 category = 'warning' if resp.reason in warning_reasons else 'error'
1187 category = 'warning' if resp.reason in warning_reasons else 'error'
1175 h.flash(msg, category=category)
1188 h.flash(msg, category=category)
1176
1189
1177 @LoginRequired()
1190 @LoginRequired()
1178 @NotAnonymous()
1191 @NotAnonymous()
1179 @HasRepoPermissionAnyDecorator(
1192 @HasRepoPermissionAnyDecorator(
1180 'repository.read', 'repository.write', 'repository.admin')
1193 'repository.read', 'repository.write', 'repository.admin')
1181 @CSRFRequired()
1194 @CSRFRequired()
1182 @view_config(
1195 @view_config(
1183 route_name='pullrequest_merge', request_method='POST',
1196 route_name='pullrequest_merge', request_method='POST',
1184 renderer='json_ext')
1197 renderer='json_ext')
1185 def pull_request_merge(self):
1198 def pull_request_merge(self):
1186 """
1199 """
1187 Merge will perform a server-side merge of the specified
1200 Merge will perform a server-side merge of the specified
1188 pull request, if the pull request is approved and mergeable.
1201 pull request, if the pull request is approved and mergeable.
1189 After successful merging, the pull request is automatically
1202 After successful merging, the pull request is automatically
1190 closed, with a relevant comment.
1203 closed, with a relevant comment.
1191 """
1204 """
1192 pull_request = PullRequest.get_or_404(
1205 pull_request = PullRequest.get_or_404(
1193 self.request.matchdict['pull_request_id'])
1206 self.request.matchdict['pull_request_id'])
1194 _ = self.request.translate
1207 _ = self.request.translate
1195
1208
1196 if pull_request.is_state_changing():
1209 if pull_request.is_state_changing():
1197 log.debug('show: forbidden because pull request is in state %s',
1210 log.debug('show: forbidden because pull request is in state %s',
1198 pull_request.pull_request_state)
1211 pull_request.pull_request_state)
1199 msg = _(u'Cannot merge pull requests in state other than `{}`. '
1212 msg = _(u'Cannot merge pull requests in state other than `{}`. '
1200 u'Current state is: `{}`').format(PullRequest.STATE_CREATED,
1213 u'Current state is: `{}`').format(PullRequest.STATE_CREATED,
1201 pull_request.pull_request_state)
1214 pull_request.pull_request_state)
1202 h.flash(msg, category='error')
1215 h.flash(msg, category='error')
1203 raise HTTPFound(
1216 raise HTTPFound(
1204 h.route_path('pullrequest_show',
1217 h.route_path('pullrequest_show',
1205 repo_name=pull_request.target_repo.repo_name,
1218 repo_name=pull_request.target_repo.repo_name,
1206 pull_request_id=pull_request.pull_request_id))
1219 pull_request_id=pull_request.pull_request_id))
1207
1220
1208 self.load_default_context()
1221 self.load_default_context()
1209
1222
1210 with pull_request.set_state(PullRequest.STATE_UPDATING):
1223 with pull_request.set_state(PullRequest.STATE_UPDATING):
1211 check = MergeCheck.validate(
1224 check = MergeCheck.validate(
1212 pull_request, auth_user=self._rhodecode_user,
1225 pull_request, auth_user=self._rhodecode_user,
1213 translator=self.request.translate)
1226 translator=self.request.translate)
1214 merge_possible = not check.failed
1227 merge_possible = not check.failed
1215
1228
1216 for err_type, error_msg in check.errors:
1229 for err_type, error_msg in check.errors:
1217 h.flash(error_msg, category=err_type)
1230 h.flash(error_msg, category=err_type)
1218
1231
1219 if merge_possible:
1232 if merge_possible:
1220 log.debug("Pre-conditions checked, trying to merge.")
1233 log.debug("Pre-conditions checked, trying to merge.")
1221 extras = vcs_operation_context(
1234 extras = vcs_operation_context(
1222 self.request.environ, repo_name=pull_request.target_repo.repo_name,
1235 self.request.environ, repo_name=pull_request.target_repo.repo_name,
1223 username=self._rhodecode_db_user.username, action='push',
1236 username=self._rhodecode_db_user.username, action='push',
1224 scm=pull_request.target_repo.repo_type)
1237 scm=pull_request.target_repo.repo_type)
1225 with pull_request.set_state(PullRequest.STATE_UPDATING):
1238 with pull_request.set_state(PullRequest.STATE_UPDATING):
1226 self._merge_pull_request(
1239 self._merge_pull_request(
1227 pull_request, self._rhodecode_db_user, extras)
1240 pull_request, self._rhodecode_db_user, extras)
1228 else:
1241 else:
1229 log.debug("Pre-conditions failed, NOT merging.")
1242 log.debug("Pre-conditions failed, NOT merging.")
1230
1243
1231 raise HTTPFound(
1244 raise HTTPFound(
1232 h.route_path('pullrequest_show',
1245 h.route_path('pullrequest_show',
1233 repo_name=pull_request.target_repo.repo_name,
1246 repo_name=pull_request.target_repo.repo_name,
1234 pull_request_id=pull_request.pull_request_id))
1247 pull_request_id=pull_request.pull_request_id))
1235
1248
1236 def _merge_pull_request(self, pull_request, user, extras):
1249 def _merge_pull_request(self, pull_request, user, extras):
1237 _ = self.request.translate
1250 _ = self.request.translate
1238 merge_resp = PullRequestModel().merge_repo(pull_request, user, extras=extras)
1251 merge_resp = PullRequestModel().merge_repo(pull_request, user, extras=extras)
1239
1252
1240 if merge_resp.executed:
1253 if merge_resp.executed:
1241 log.debug("The merge was successful, closing the pull request.")
1254 log.debug("The merge was successful, closing the pull request.")
1242 PullRequestModel().close_pull_request(
1255 PullRequestModel().close_pull_request(
1243 pull_request.pull_request_id, user)
1256 pull_request.pull_request_id, user)
1244 Session().commit()
1257 Session().commit()
1245 msg = _('Pull request was successfully merged and closed.')
1258 msg = _('Pull request was successfully merged and closed.')
1246 h.flash(msg, category='success')
1259 h.flash(msg, category='success')
1247 else:
1260 else:
1248 log.debug(
1261 log.debug(
1249 "The merge was not successful. Merge response: %s", merge_resp)
1262 "The merge was not successful. Merge response: %s", merge_resp)
1250 msg = merge_resp.merge_status_message
1263 msg = merge_resp.merge_status_message
1251 h.flash(msg, category='error')
1264 h.flash(msg, category='error')
1252
1265
1253 def _update_reviewers(self, pull_request, review_members, reviewer_rules):
1266 def _update_reviewers(self, pull_request, review_members, reviewer_rules):
1254 _ = self.request.translate
1267 _ = self.request.translate
1255
1268
1256 get_default_reviewers_data, validate_default_reviewers = \
1269 get_default_reviewers_data, validate_default_reviewers = \
1257 PullRequestModel().get_reviewer_functions()
1270 PullRequestModel().get_reviewer_functions()
1258
1271
1259 try:
1272 try:
1260 reviewers = validate_default_reviewers(review_members, reviewer_rules)
1273 reviewers = validate_default_reviewers(review_members, reviewer_rules)
1261 except ValueError as e:
1274 except ValueError as e:
1262 log.error('Reviewers Validation: {}'.format(e))
1275 log.error('Reviewers Validation: {}'.format(e))
1263 h.flash(e, category='error')
1276 h.flash(e, category='error')
1264 return
1277 return
1265
1278
1266 old_calculated_status = pull_request.calculated_review_status()
1279 old_calculated_status = pull_request.calculated_review_status()
1267 PullRequestModel().update_reviewers(
1280 PullRequestModel().update_reviewers(
1268 pull_request, reviewers, self._rhodecode_user)
1281 pull_request, reviewers, self._rhodecode_user)
1269 h.flash(_('Pull request reviewers updated.'), category='success')
1282 h.flash(_('Pull request reviewers updated.'), category='success')
1270 Session().commit()
1283 Session().commit()
1271
1284
1272 # trigger status changed if change in reviewers changes the status
1285 # trigger status changed if change in reviewers changes the status
1273 calculated_status = pull_request.calculated_review_status()
1286 calculated_status = pull_request.calculated_review_status()
1274 if old_calculated_status != calculated_status:
1287 if old_calculated_status != calculated_status:
1275 PullRequestModel().trigger_pull_request_hook(
1288 PullRequestModel().trigger_pull_request_hook(
1276 pull_request, self._rhodecode_user, 'review_status_change',
1289 pull_request, self._rhodecode_user, 'review_status_change',
1277 data={'status': calculated_status})
1290 data={'status': calculated_status})
1278
1291
1279 @LoginRequired()
1292 @LoginRequired()
1280 @NotAnonymous()
1293 @NotAnonymous()
1281 @HasRepoPermissionAnyDecorator(
1294 @HasRepoPermissionAnyDecorator(
1282 'repository.read', 'repository.write', 'repository.admin')
1295 'repository.read', 'repository.write', 'repository.admin')
1283 @CSRFRequired()
1296 @CSRFRequired()
1284 @view_config(
1297 @view_config(
1285 route_name='pullrequest_delete', request_method='POST',
1298 route_name='pullrequest_delete', request_method='POST',
1286 renderer='json_ext')
1299 renderer='json_ext')
1287 def pull_request_delete(self):
1300 def pull_request_delete(self):
1288 _ = self.request.translate
1301 _ = self.request.translate
1289
1302
1290 pull_request = PullRequest.get_or_404(
1303 pull_request = PullRequest.get_or_404(
1291 self.request.matchdict['pull_request_id'])
1304 self.request.matchdict['pull_request_id'])
1292 self.load_default_context()
1305 self.load_default_context()
1293
1306
1294 pr_closed = pull_request.is_closed()
1307 pr_closed = pull_request.is_closed()
1295 allowed_to_delete = PullRequestModel().check_user_delete(
1308 allowed_to_delete = PullRequestModel().check_user_delete(
1296 pull_request, self._rhodecode_user) and not pr_closed
1309 pull_request, self._rhodecode_user) and not pr_closed
1297
1310
1298 # only owner can delete it !
1311 # only owner can delete it !
1299 if allowed_to_delete:
1312 if allowed_to_delete:
1300 PullRequestModel().delete(pull_request, self._rhodecode_user)
1313 PullRequestModel().delete(pull_request, self._rhodecode_user)
1301 Session().commit()
1314 Session().commit()
1302 h.flash(_('Successfully deleted pull request'),
1315 h.flash(_('Successfully deleted pull request'),
1303 category='success')
1316 category='success')
1304 raise HTTPFound(h.route_path('pullrequest_show_all',
1317 raise HTTPFound(h.route_path('pullrequest_show_all',
1305 repo_name=self.db_repo_name))
1318 repo_name=self.db_repo_name))
1306
1319
1307 log.warning('user %s tried to delete pull request without access',
1320 log.warning('user %s tried to delete pull request without access',
1308 self._rhodecode_user)
1321 self._rhodecode_user)
1309 raise HTTPNotFound()
1322 raise HTTPNotFound()
1310
1323
1311 @LoginRequired()
1324 @LoginRequired()
1312 @NotAnonymous()
1325 @NotAnonymous()
1313 @HasRepoPermissionAnyDecorator(
1326 @HasRepoPermissionAnyDecorator(
1314 'repository.read', 'repository.write', 'repository.admin')
1327 'repository.read', 'repository.write', 'repository.admin')
1315 @CSRFRequired()
1328 @CSRFRequired()
1316 @view_config(
1329 @view_config(
1317 route_name='pullrequest_comment_create', request_method='POST',
1330 route_name='pullrequest_comment_create', request_method='POST',
1318 renderer='json_ext')
1331 renderer='json_ext')
1319 def pull_request_comment_create(self):
1332 def pull_request_comment_create(self):
1320 _ = self.request.translate
1333 _ = self.request.translate
1321
1334
1322 pull_request = PullRequest.get_or_404(
1335 pull_request = PullRequest.get_or_404(
1323 self.request.matchdict['pull_request_id'])
1336 self.request.matchdict['pull_request_id'])
1324 pull_request_id = pull_request.pull_request_id
1337 pull_request_id = pull_request.pull_request_id
1325
1338
1326 if pull_request.is_closed():
1339 if pull_request.is_closed():
1327 log.debug('comment: forbidden because pull request is closed')
1340 log.debug('comment: forbidden because pull request is closed')
1328 raise HTTPForbidden()
1341 raise HTTPForbidden()
1329
1342
1330 allowed_to_comment = PullRequestModel().check_user_comment(
1343 allowed_to_comment = PullRequestModel().check_user_comment(
1331 pull_request, self._rhodecode_user)
1344 pull_request, self._rhodecode_user)
1332 if not allowed_to_comment:
1345 if not allowed_to_comment:
1333 log.debug(
1346 log.debug(
1334 'comment: forbidden because pull request is from forbidden repo')
1347 'comment: forbidden because pull request is from forbidden repo')
1335 raise HTTPForbidden()
1348 raise HTTPForbidden()
1336
1349
1337 c = self.load_default_context()
1350 c = self.load_default_context()
1338
1351
1339 status = self.request.POST.get('changeset_status', None)
1352 status = self.request.POST.get('changeset_status', None)
1340 text = self.request.POST.get('text')
1353 text = self.request.POST.get('text')
1341 comment_type = self.request.POST.get('comment_type')
1354 comment_type = self.request.POST.get('comment_type')
1342 resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
1355 resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
1343 close_pull_request = self.request.POST.get('close_pull_request')
1356 close_pull_request = self.request.POST.get('close_pull_request')
1344
1357
1345 # the logic here should work like following, if we submit close
1358 # the logic here should work like following, if we submit close
1346 # pr comment, use `close_pull_request_with_comment` function
1359 # pr comment, use `close_pull_request_with_comment` function
1347 # else handle regular comment logic
1360 # else handle regular comment logic
1348
1361
1349 if close_pull_request:
1362 if close_pull_request:
1350 # only owner or admin or person with write permissions
1363 # only owner or admin or person with write permissions
1351 allowed_to_close = PullRequestModel().check_user_update(
1364 allowed_to_close = PullRequestModel().check_user_update(
1352 pull_request, self._rhodecode_user)
1365 pull_request, self._rhodecode_user)
1353 if not allowed_to_close:
1366 if not allowed_to_close:
1354 log.debug('comment: forbidden because not allowed to close '
1367 log.debug('comment: forbidden because not allowed to close '
1355 'pull request %s', pull_request_id)
1368 'pull request %s', pull_request_id)
1356 raise HTTPForbidden()
1369 raise HTTPForbidden()
1357
1370
1358 # This also triggers `review_status_change`
1371 # This also triggers `review_status_change`
1359 comment, status = PullRequestModel().close_pull_request_with_comment(
1372 comment, status = PullRequestModel().close_pull_request_with_comment(
1360 pull_request, self._rhodecode_user, self.db_repo, message=text,
1373 pull_request, self._rhodecode_user, self.db_repo, message=text,
1361 auth_user=self._rhodecode_user)
1374 auth_user=self._rhodecode_user)
1362 Session().flush()
1375 Session().flush()
1363
1376
1364 PullRequestModel().trigger_pull_request_hook(
1377 PullRequestModel().trigger_pull_request_hook(
1365 pull_request, self._rhodecode_user, 'comment',
1378 pull_request, self._rhodecode_user, 'comment',
1366 data={'comment': comment})
1379 data={'comment': comment})
1367
1380
1368 else:
1381 else:
1369 # regular comment case, could be inline, or one with status.
1382 # regular comment case, could be inline, or one with status.
1370 # for that one we check also permissions
1383 # for that one we check also permissions
1371
1384
1372 allowed_to_change_status = PullRequestModel().check_user_change_status(
1385 allowed_to_change_status = PullRequestModel().check_user_change_status(
1373 pull_request, self._rhodecode_user)
1386 pull_request, self._rhodecode_user)
1374
1387
1375 if status and allowed_to_change_status:
1388 if status and allowed_to_change_status:
1376 message = (_('Status change %(transition_icon)s %(status)s')
1389 message = (_('Status change %(transition_icon)s %(status)s')
1377 % {'transition_icon': '>',
1390 % {'transition_icon': '>',
1378 'status': ChangesetStatus.get_status_lbl(status)})
1391 'status': ChangesetStatus.get_status_lbl(status)})
1379 text = text or message
1392 text = text or message
1380
1393
1381 comment = CommentsModel().create(
1394 comment = CommentsModel().create(
1382 text=text,
1395 text=text,
1383 repo=self.db_repo.repo_id,
1396 repo=self.db_repo.repo_id,
1384 user=self._rhodecode_user.user_id,
1397 user=self._rhodecode_user.user_id,
1385 pull_request=pull_request,
1398 pull_request=pull_request,
1386 f_path=self.request.POST.get('f_path'),
1399 f_path=self.request.POST.get('f_path'),
1387 line_no=self.request.POST.get('line'),
1400 line_no=self.request.POST.get('line'),
1388 status_change=(ChangesetStatus.get_status_lbl(status)
1401 status_change=(ChangesetStatus.get_status_lbl(status)
1389 if status and allowed_to_change_status else None),
1402 if status and allowed_to_change_status else None),
1390 status_change_type=(status
1403 status_change_type=(status
1391 if status and allowed_to_change_status else None),
1404 if status and allowed_to_change_status else None),
1392 comment_type=comment_type,
1405 comment_type=comment_type,
1393 resolves_comment_id=resolves_comment_id,
1406 resolves_comment_id=resolves_comment_id,
1394 auth_user=self._rhodecode_user
1407 auth_user=self._rhodecode_user
1395 )
1408 )
1396
1409
1397 if allowed_to_change_status:
1410 if allowed_to_change_status:
1398 # calculate old status before we change it
1411 # calculate old status before we change it
1399 old_calculated_status = pull_request.calculated_review_status()
1412 old_calculated_status = pull_request.calculated_review_status()
1400
1413
1401 # get status if set !
1414 # get status if set !
1402 if status:
1415 if status:
1403 ChangesetStatusModel().set_status(
1416 ChangesetStatusModel().set_status(
1404 self.db_repo.repo_id,
1417 self.db_repo.repo_id,
1405 status,
1418 status,
1406 self._rhodecode_user.user_id,
1419 self._rhodecode_user.user_id,
1407 comment,
1420 comment,
1408 pull_request=pull_request
1421 pull_request=pull_request
1409 )
1422 )
1410
1423
1411 Session().flush()
1424 Session().flush()
1412 # this is somehow required to get access to some relationship
1425 # this is somehow required to get access to some relationship
1413 # loaded on comment
1426 # loaded on comment
1414 Session().refresh(comment)
1427 Session().refresh(comment)
1415
1428
1416 PullRequestModel().trigger_pull_request_hook(
1429 PullRequestModel().trigger_pull_request_hook(
1417 pull_request, self._rhodecode_user, 'comment',
1430 pull_request, self._rhodecode_user, 'comment',
1418 data={'comment': comment})
1431 data={'comment': comment})
1419
1432
1420 # we now calculate the status of pull request, and based on that
1433 # we now calculate the status of pull request, and based on that
1421 # calculation we set the commits status
1434 # calculation we set the commits status
1422 calculated_status = pull_request.calculated_review_status()
1435 calculated_status = pull_request.calculated_review_status()
1423 if old_calculated_status != calculated_status:
1436 if old_calculated_status != calculated_status:
1424 PullRequestModel().trigger_pull_request_hook(
1437 PullRequestModel().trigger_pull_request_hook(
1425 pull_request, self._rhodecode_user, 'review_status_change',
1438 pull_request, self._rhodecode_user, 'review_status_change',
1426 data={'status': calculated_status})
1439 data={'status': calculated_status})
1427
1440
1428 Session().commit()
1441 Session().commit()
1429
1442
1430 data = {
1443 data = {
1431 'target_id': h.safeid(h.safe_unicode(
1444 'target_id': h.safeid(h.safe_unicode(
1432 self.request.POST.get('f_path'))),
1445 self.request.POST.get('f_path'))),
1433 }
1446 }
1434 if comment:
1447 if comment:
1435 c.co = comment
1448 c.co = comment
1436 rendered_comment = render(
1449 rendered_comment = render(
1437 'rhodecode:templates/changeset/changeset_comment_block.mako',
1450 'rhodecode:templates/changeset/changeset_comment_block.mako',
1438 self._get_template_context(c), self.request)
1451 self._get_template_context(c), self.request)
1439
1452
1440 data.update(comment.get_dict())
1453 data.update(comment.get_dict())
1441 data.update({'rendered_text': rendered_comment})
1454 data.update({'rendered_text': rendered_comment})
1442
1455
1443 return data
1456 return data
1444
1457
1445 @LoginRequired()
1458 @LoginRequired()
1446 @NotAnonymous()
1459 @NotAnonymous()
1447 @HasRepoPermissionAnyDecorator(
1460 @HasRepoPermissionAnyDecorator(
1448 'repository.read', 'repository.write', 'repository.admin')
1461 'repository.read', 'repository.write', 'repository.admin')
1449 @CSRFRequired()
1462 @CSRFRequired()
1450 @view_config(
1463 @view_config(
1451 route_name='pullrequest_comment_delete', request_method='POST',
1464 route_name='pullrequest_comment_delete', request_method='POST',
1452 renderer='json_ext')
1465 renderer='json_ext')
1453 def pull_request_comment_delete(self):
1466 def pull_request_comment_delete(self):
1454 pull_request = PullRequest.get_or_404(
1467 pull_request = PullRequest.get_or_404(
1455 self.request.matchdict['pull_request_id'])
1468 self.request.matchdict['pull_request_id'])
1456
1469
1457 comment = ChangesetComment.get_or_404(
1470 comment = ChangesetComment.get_or_404(
1458 self.request.matchdict['comment_id'])
1471 self.request.matchdict['comment_id'])
1459 comment_id = comment.comment_id
1472 comment_id = comment.comment_id
1460
1473
1461 if pull_request.is_closed():
1474 if pull_request.is_closed():
1462 log.debug('comment: forbidden because pull request is closed')
1475 log.debug('comment: forbidden because pull request is closed')
1463 raise HTTPForbidden()
1476 raise HTTPForbidden()
1464
1477
1465 if not comment:
1478 if not comment:
1466 log.debug('Comment with id:%s not found, skipping', comment_id)
1479 log.debug('Comment with id:%s not found, skipping', comment_id)
1467 # comment already deleted in another call probably
1480 # comment already deleted in another call probably
1468 return True
1481 return True
1469
1482
1470 if comment.pull_request.is_closed():
1483 if comment.pull_request.is_closed():
1471 # don't allow deleting comments on closed pull request
1484 # don't allow deleting comments on closed pull request
1472 raise HTTPForbidden()
1485 raise HTTPForbidden()
1473
1486
1474 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1487 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1475 super_admin = h.HasPermissionAny('hg.admin')()
1488 super_admin = h.HasPermissionAny('hg.admin')()
1476 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1489 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1477 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1490 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1478 comment_repo_admin = is_repo_admin and is_repo_comment
1491 comment_repo_admin = is_repo_admin and is_repo_comment
1479
1492
1480 if super_admin or comment_owner or comment_repo_admin:
1493 if super_admin or comment_owner or comment_repo_admin:
1481 old_calculated_status = comment.pull_request.calculated_review_status()
1494 old_calculated_status = comment.pull_request.calculated_review_status()
1482 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
1495 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
1483 Session().commit()
1496 Session().commit()
1484 calculated_status = comment.pull_request.calculated_review_status()
1497 calculated_status = comment.pull_request.calculated_review_status()
1485 if old_calculated_status != calculated_status:
1498 if old_calculated_status != calculated_status:
1486 PullRequestModel().trigger_pull_request_hook(
1499 PullRequestModel().trigger_pull_request_hook(
1487 comment.pull_request, self._rhodecode_user, 'review_status_change',
1500 comment.pull_request, self._rhodecode_user, 'review_status_change',
1488 data={'status': calculated_status})
1501 data={'status': calculated_status})
1489 return True
1502 return True
1490 else:
1503 else:
1491 log.warning('No permissions for user %s to delete comment_id: %s',
1504 log.warning('No permissions for user %s to delete comment_id: %s',
1492 self._rhodecode_db_user, comment_id)
1505 self._rhodecode_db_user, comment_id)
1493 raise HTTPNotFound()
1506 raise HTTPNotFound()
@@ -1,1100 +1,1104 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2019 RhodeCode GmbH
3 # Copyright (C) 2011-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 Some simple helper functions
23 Some simple helper functions
24 """
24 """
25
25
26 import collections
26 import collections
27 import datetime
27 import datetime
28 import dateutil.relativedelta
28 import dateutil.relativedelta
29 import hashlib
29 import hashlib
30 import logging
30 import logging
31 import re
31 import re
32 import sys
32 import sys
33 import time
33 import time
34 import urllib
34 import urllib
35 import urlobject
35 import urlobject
36 import uuid
36 import uuid
37 import getpass
37 import getpass
38 from functools import update_wrapper, partial
38 from functools import update_wrapper, partial
39
39
40 import pygments.lexers
40 import pygments.lexers
41 import sqlalchemy
41 import sqlalchemy
42 import sqlalchemy.engine.url
42 import sqlalchemy.engine.url
43 import sqlalchemy.exc
43 import sqlalchemy.exc
44 import sqlalchemy.sql
44 import sqlalchemy.sql
45 import webob
45 import webob
46 import pyramid.threadlocal
46 import pyramid.threadlocal
47 from pyramid import compat
47 from pyramid import compat
48 from pyramid.settings import asbool
48 from pyramid.settings import asbool
49
49
50 import rhodecode
50 import rhodecode
51 from rhodecode.translation import _, _pluralize
51 from rhodecode.translation import _, _pluralize
52
52
53
53
54 def md5(s):
54 def md5(s):
55 return hashlib.md5(s).hexdigest()
55 return hashlib.md5(s).hexdigest()
56
56
57
57
58 def md5_safe(s):
58 def md5_safe(s):
59 return md5(safe_str(s))
59 return md5(safe_str(s))
60
60
61
61
62 def sha1(s):
62 def sha1(s):
63 return hashlib.sha1(s).hexdigest()
63 return hashlib.sha1(s).hexdigest()
64
64
65
65
66 def sha1_safe(s):
66 def sha1_safe(s):
67 return sha1(safe_str(s))
67 return sha1(safe_str(s))
68
68
69
69
70 def __get_lem(extra_mapping=None):
70 def __get_lem(extra_mapping=None):
71 """
71 """
72 Get language extension map based on what's inside pygments lexers
72 Get language extension map based on what's inside pygments lexers
73 """
73 """
74 d = collections.defaultdict(lambda: [])
74 d = collections.defaultdict(lambda: [])
75
75
76 def __clean(s):
76 def __clean(s):
77 s = s.lstrip('*')
77 s = s.lstrip('*')
78 s = s.lstrip('.')
78 s = s.lstrip('.')
79
79
80 if s.find('[') != -1:
80 if s.find('[') != -1:
81 exts = []
81 exts = []
82 start, stop = s.find('['), s.find(']')
82 start, stop = s.find('['), s.find(']')
83
83
84 for suffix in s[start + 1:stop]:
84 for suffix in s[start + 1:stop]:
85 exts.append(s[:s.find('[')] + suffix)
85 exts.append(s[:s.find('[')] + suffix)
86 return [e.lower() for e in exts]
86 return [e.lower() for e in exts]
87 else:
87 else:
88 return [s.lower()]
88 return [s.lower()]
89
89
90 for lx, t in sorted(pygments.lexers.LEXERS.items()):
90 for lx, t in sorted(pygments.lexers.LEXERS.items()):
91 m = map(__clean, t[-2])
91 m = map(__clean, t[-2])
92 if m:
92 if m:
93 m = reduce(lambda x, y: x + y, m)
93 m = reduce(lambda x, y: x + y, m)
94 for ext in m:
94 for ext in m:
95 desc = lx.replace('Lexer', '')
95 desc = lx.replace('Lexer', '')
96 d[ext].append(desc)
96 d[ext].append(desc)
97
97
98 data = dict(d)
98 data = dict(d)
99
99
100 extra_mapping = extra_mapping or {}
100 extra_mapping = extra_mapping or {}
101 if extra_mapping:
101 if extra_mapping:
102 for k, v in extra_mapping.items():
102 for k, v in extra_mapping.items():
103 if k not in data:
103 if k not in data:
104 # register new mapping2lexer
104 # register new mapping2lexer
105 data[k] = [v]
105 data[k] = [v]
106
106
107 return data
107 return data
108
108
109
109
110 def str2bool(_str):
110 def str2bool(_str):
111 """
111 """
112 returns True/False value from given string, it tries to translate the
112 returns True/False value from given string, it tries to translate the
113 string into boolean
113 string into boolean
114
114
115 :param _str: string value to translate into boolean
115 :param _str: string value to translate into boolean
116 :rtype: boolean
116 :rtype: boolean
117 :returns: boolean from given string
117 :returns: boolean from given string
118 """
118 """
119 if _str is None:
119 if _str is None:
120 return False
120 return False
121 if _str in (True, False):
121 if _str in (True, False):
122 return _str
122 return _str
123 _str = str(_str).strip().lower()
123 _str = str(_str).strip().lower()
124 return _str in ('t', 'true', 'y', 'yes', 'on', '1')
124 return _str in ('t', 'true', 'y', 'yes', 'on', '1')
125
125
126
126
127 def aslist(obj, sep=None, strip=True):
127 def aslist(obj, sep=None, strip=True):
128 """
128 """
129 Returns given string separated by sep as list
129 Returns given string separated by sep as list
130
130
131 :param obj:
131 :param obj:
132 :param sep:
132 :param sep:
133 :param strip:
133 :param strip:
134 """
134 """
135 if isinstance(obj, (basestring,)):
135 if isinstance(obj, (basestring,)):
136 lst = obj.split(sep)
136 lst = obj.split(sep)
137 if strip:
137 if strip:
138 lst = [v.strip() for v in lst]
138 lst = [v.strip() for v in lst]
139 return lst
139 return lst
140 elif isinstance(obj, (list, tuple)):
140 elif isinstance(obj, (list, tuple)):
141 return obj
141 return obj
142 elif obj is None:
142 elif obj is None:
143 return []
143 return []
144 else:
144 else:
145 return [obj]
145 return [obj]
146
146
147
147
148 def convert_line_endings(line, mode):
148 def convert_line_endings(line, mode):
149 """
149 """
150 Converts a given line "line end" accordingly to given mode
150 Converts a given line "line end" accordingly to given mode
151
151
152 Available modes are::
152 Available modes are::
153 0 - Unix
153 0 - Unix
154 1 - Mac
154 1 - Mac
155 2 - DOS
155 2 - DOS
156
156
157 :param line: given line to convert
157 :param line: given line to convert
158 :param mode: mode to convert to
158 :param mode: mode to convert to
159 :rtype: str
159 :rtype: str
160 :return: converted line according to mode
160 :return: converted line according to mode
161 """
161 """
162 if mode == 0:
162 if mode == 0:
163 line = line.replace('\r\n', '\n')
163 line = line.replace('\r\n', '\n')
164 line = line.replace('\r', '\n')
164 line = line.replace('\r', '\n')
165 elif mode == 1:
165 elif mode == 1:
166 line = line.replace('\r\n', '\r')
166 line = line.replace('\r\n', '\r')
167 line = line.replace('\n', '\r')
167 line = line.replace('\n', '\r')
168 elif mode == 2:
168 elif mode == 2:
169 line = re.sub('\r(?!\n)|(?<!\r)\n', '\r\n', line)
169 line = re.sub('\r(?!\n)|(?<!\r)\n', '\r\n', line)
170 return line
170 return line
171
171
172
172
173 def detect_mode(line, default):
173 def detect_mode(line, default):
174 """
174 """
175 Detects line break for given line, if line break couldn't be found
175 Detects line break for given line, if line break couldn't be found
176 given default value is returned
176 given default value is returned
177
177
178 :param line: str line
178 :param line: str line
179 :param default: default
179 :param default: default
180 :rtype: int
180 :rtype: int
181 :return: value of line end on of 0 - Unix, 1 - Mac, 2 - DOS
181 :return: value of line end on of 0 - Unix, 1 - Mac, 2 - DOS
182 """
182 """
183 if line.endswith('\r\n'):
183 if line.endswith('\r\n'):
184 return 2
184 return 2
185 elif line.endswith('\n'):
185 elif line.endswith('\n'):
186 return 0
186 return 0
187 elif line.endswith('\r'):
187 elif line.endswith('\r'):
188 return 1
188 return 1
189 else:
189 else:
190 return default
190 return default
191
191
192
192
193 def safe_int(val, default=None):
193 def safe_int(val, default=None):
194 """
194 """
195 Returns int() of val if val is not convertable to int use default
195 Returns int() of val if val is not convertable to int use default
196 instead
196 instead
197
197
198 :param val:
198 :param val:
199 :param default:
199 :param default:
200 """
200 """
201
201
202 try:
202 try:
203 val = int(val)
203 val = int(val)
204 except (ValueError, TypeError):
204 except (ValueError, TypeError):
205 val = default
205 val = default
206
206
207 return val
207 return val
208
208
209
209
210 def safe_unicode(str_, from_encoding=None, use_chardet=False):
210 def safe_unicode(str_, from_encoding=None, use_chardet=False):
211 """
211 """
212 safe unicode function. Does few trick to turn str_ into unicode
212 safe unicode function. Does few trick to turn str_ into unicode
213
213
214 In case of UnicodeDecode error, we try to return it with encoding detected
214 In case of UnicodeDecode error, we try to return it with encoding detected
215 by chardet library if it fails fallback to unicode with errors replaced
215 by chardet library if it fails fallback to unicode with errors replaced
216
216
217 :param str_: string to decode
217 :param str_: string to decode
218 :rtype: unicode
218 :rtype: unicode
219 :returns: unicode object
219 :returns: unicode object
220 """
220 """
221 if isinstance(str_, unicode):
221 if isinstance(str_, unicode):
222 return str_
222 return str_
223
223
224 if not from_encoding:
224 if not from_encoding:
225 DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding',
225 DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding',
226 'utf8'), sep=',')
226 'utf8'), sep=',')
227 from_encoding = DEFAULT_ENCODINGS
227 from_encoding = DEFAULT_ENCODINGS
228
228
229 if not isinstance(from_encoding, (list, tuple)):
229 if not isinstance(from_encoding, (list, tuple)):
230 from_encoding = [from_encoding]
230 from_encoding = [from_encoding]
231
231
232 try:
232 try:
233 return unicode(str_)
233 return unicode(str_)
234 except UnicodeDecodeError:
234 except UnicodeDecodeError:
235 pass
235 pass
236
236
237 for enc in from_encoding:
237 for enc in from_encoding:
238 try:
238 try:
239 return unicode(str_, enc)
239 return unicode(str_, enc)
240 except UnicodeDecodeError:
240 except UnicodeDecodeError:
241 pass
241 pass
242
242
243 if use_chardet:
243 if use_chardet:
244 try:
244 try:
245 import chardet
245 import chardet
246 encoding = chardet.detect(str_)['encoding']
246 encoding = chardet.detect(str_)['encoding']
247 if encoding is None:
247 if encoding is None:
248 raise Exception()
248 raise Exception()
249 return str_.decode(encoding)
249 return str_.decode(encoding)
250 except (ImportError, UnicodeDecodeError, Exception):
250 except (ImportError, UnicodeDecodeError, Exception):
251 return unicode(str_, from_encoding[0], 'replace')
251 return unicode(str_, from_encoding[0], 'replace')
252 else:
252 else:
253 return unicode(str_, from_encoding[0], 'replace')
253 return unicode(str_, from_encoding[0], 'replace')
254
254
255 def safe_str(unicode_, to_encoding=None, use_chardet=False):
255 def safe_str(unicode_, to_encoding=None, use_chardet=False):
256 """
256 """
257 safe str function. Does few trick to turn unicode_ into string
257 safe str function. Does few trick to turn unicode_ into string
258
258
259 In case of UnicodeEncodeError, we try to return it with encoding detected
259 In case of UnicodeEncodeError, we try to return it with encoding detected
260 by chardet library if it fails fallback to string with errors replaced
260 by chardet library if it fails fallback to string with errors replaced
261
261
262 :param unicode_: unicode to encode
262 :param unicode_: unicode to encode
263 :rtype: str
263 :rtype: str
264 :returns: str object
264 :returns: str object
265 """
265 """
266
266
267 # if it's not basestr cast to str
267 # if it's not basestr cast to str
268 if not isinstance(unicode_, compat.string_types):
268 if not isinstance(unicode_, compat.string_types):
269 return str(unicode_)
269 return str(unicode_)
270
270
271 if isinstance(unicode_, str):
271 if isinstance(unicode_, str):
272 return unicode_
272 return unicode_
273
273
274 if not to_encoding:
274 if not to_encoding:
275 DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding',
275 DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding',
276 'utf8'), sep=',')
276 'utf8'), sep=',')
277 to_encoding = DEFAULT_ENCODINGS
277 to_encoding = DEFAULT_ENCODINGS
278
278
279 if not isinstance(to_encoding, (list, tuple)):
279 if not isinstance(to_encoding, (list, tuple)):
280 to_encoding = [to_encoding]
280 to_encoding = [to_encoding]
281
281
282 for enc in to_encoding:
282 for enc in to_encoding:
283 try:
283 try:
284 return unicode_.encode(enc)
284 return unicode_.encode(enc)
285 except UnicodeEncodeError:
285 except UnicodeEncodeError:
286 pass
286 pass
287
287
288 if use_chardet:
288 if use_chardet:
289 try:
289 try:
290 import chardet
290 import chardet
291 encoding = chardet.detect(unicode_)['encoding']
291 encoding = chardet.detect(unicode_)['encoding']
292 if encoding is None:
292 if encoding is None:
293 raise UnicodeEncodeError()
293 raise UnicodeEncodeError()
294
294
295 return unicode_.encode(encoding)
295 return unicode_.encode(encoding)
296 except (ImportError, UnicodeEncodeError):
296 except (ImportError, UnicodeEncodeError):
297 return unicode_.encode(to_encoding[0], 'replace')
297 return unicode_.encode(to_encoding[0], 'replace')
298 else:
298 else:
299 return unicode_.encode(to_encoding[0], 'replace')
299 return unicode_.encode(to_encoding[0], 'replace')
300
300
301
301
302 def remove_suffix(s, suffix):
302 def remove_suffix(s, suffix):
303 if s.endswith(suffix):
303 if s.endswith(suffix):
304 s = s[:-1 * len(suffix)]
304 s = s[:-1 * len(suffix)]
305 return s
305 return s
306
306
307
307
308 def remove_prefix(s, prefix):
308 def remove_prefix(s, prefix):
309 if s.startswith(prefix):
309 if s.startswith(prefix):
310 s = s[len(prefix):]
310 s = s[len(prefix):]
311 return s
311 return s
312
312
313
313
314 def find_calling_context(ignore_modules=None):
314 def find_calling_context(ignore_modules=None):
315 """
315 """
316 Look through the calling stack and return the frame which called
316 Look through the calling stack and return the frame which called
317 this function and is part of core module ( ie. rhodecode.* )
317 this function and is part of core module ( ie. rhodecode.* )
318
318
319 :param ignore_modules: list of modules to ignore eg. ['rhodecode.lib']
319 :param ignore_modules: list of modules to ignore eg. ['rhodecode.lib']
320 """
320 """
321
321
322 ignore_modules = ignore_modules or []
322 ignore_modules = ignore_modules or []
323
323
324 f = sys._getframe(2)
324 f = sys._getframe(2)
325 while f.f_back is not None:
325 while f.f_back is not None:
326 name = f.f_globals.get('__name__')
326 name = f.f_globals.get('__name__')
327 if name and name.startswith(__name__.split('.')[0]):
327 if name and name.startswith(__name__.split('.')[0]):
328 if name not in ignore_modules:
328 if name not in ignore_modules:
329 return f
329 return f
330 f = f.f_back
330 f = f.f_back
331 return None
331 return None
332
332
333
333
def ping_connection(connection, branch):
    """
    SQLAlchemy ``engine_connect`` listener implementing pessimistic
    disconnect handling: probe the pooled connection with ``SELECT 1``
    before it is handed out, and transparently reconnect when the probe
    reveals the connection went stale.
    """
    if branch:
        # "branch" refers to a sub-connection of a connection,
        # we don't want to bother pinging on these.
        return

    # Disable "close with result" while we probe. The flag only matters
    # for "connectionless" execution and is False otherwise.
    previous_flag = connection.should_close_with_result
    connection.should_close_with_result = False

    try:
        # Issue a cheap SELECT 1 through a core select() so the scalar
        # statement is rendered appropriately for the active backend.
        connection.scalar(sqlalchemy.sql.select([1]))
    except sqlalchemy.exc.DBAPIError as err:
        # DBAPIError wraps the driver exception and exposes
        # .connection_invalidated, which the dialect sets when it
        # classified the failure as a disconnect condition.
        if err.connection_invalidated:
            # Retry the same SELECT once: this re-validates the connection
            # and establishes a new one. The disconnect detection also
            # invalidates the whole pool, discarding all stale connections.
            connection.scalar(sqlalchemy.sql.select([1]))
        else:
            raise
    finally:
        # Restore the caller's "close with result" setting.
        connection.should_close_with_result = previous_flag
367
367
368
368
def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
    """Custom engine_from_config functions."""
    log = logging.getLogger('sqlalchemy.engine')

    # Pop our private keys first so SQLAlchemy never sees them.
    use_ping_connection = asbool(configuration.pop('sqlalchemy.db1.ping_connection', None))
    debug = asbool(configuration.pop('sqlalchemy.db1.debug_query', None))

    engine = sqlalchemy.engine_from_config(configuration, prefix, **kwargs)

    def color_sql(sql):
        yellow = '\033[1;33m'  # This is yellow: code 33
        reset = '\x1b[0m'
        return ''.join([yellow, sql, reset])

    if use_ping_connection:
        log.debug('Adding ping_connection on the engine config.')
        sqlalchemy.event.listen(engine, "engine_connect", ping_connection)

    if debug:
        # attach events only for debug configuration
        def before_cursor_execute(conn, cursor, statement,
                                  parameters, context, executemany):
            # remember when the query started so a profiler can measure it
            setattr(conn, 'query_start_time', time.time())
            log.info(color_sql(">>>>> STARTING QUERY >>>>>"))
            calling_context = find_calling_context(ignore_modules=[
                'rhodecode.lib.caching_query',
                'rhodecode.model.settings',
            ])
            if calling_context:
                log.info(color_sql('call context %s:%s' % (
                    calling_context.f_code.co_filename,
                    calling_context.f_lineno,
                )))

        def after_cursor_execute(conn, cursor, statement,
                                 parameters, context, executemany):
            # drop the start marker set by before_cursor_execute
            delattr(conn, 'query_start_time')

        sqlalchemy.event.listen(engine, "before_cursor_execute", before_cursor_execute)
        sqlalchemy.event.listen(engine, "after_cursor_execute", after_cursor_execute)

    return engine
410
410
411
411
def get_encryption_key(config):
    """
    Return the secret used for encrypted values, falling back to the
    mandatory beaker session secret when no dedicated secret is set.
    """
    fallback = config['beaker.session.secret']
    return config.get('rhodecode.encrypted_values.secret') or fallback
416
416
417
417
def age(prevdate, now=None, show_short_version=False, show_suffix=True,
        short_format=False):
    """
    Turns a datetime into an age string.
    If show_short_version is True, this generates a shorter string with
    an approximate age; ex. '1 day ago', rather than '1 day and 23 hours ago'.

    * IMPORTANT*
    Code of this function is written in special way so it's easier to
    backport it to javascript. If you mean to update it, please also update
    `jquery.timeago-extension.js` file

    :param prevdate: datetime object
    :param now: get current time, if not define we use
        `datetime.datetime.now()`
    :param show_short_version: if it should approximate the date and
        return a shorter string
    :param show_suffix:
    :param short_format: show short format, eg 2D instead of 2 days
    :rtype: unicode
    :returns: unicode words describing age
    """

    def _get_relative_delta(now, prevdate):
        # dateutil computes a calendar-aware delta; expose it as a plain
        # dict keyed like `order` below so the logic stays easy to mirror
        # in the javascript port.
        base = dateutil.relativedelta.relativedelta(now, prevdate)
        return {
            'year': base.years,
            'month': base.months,
            'day': base.days,
            'hour': base.hours,
            'minute': base.minutes,
            'second': base.seconds,
        }

    def _is_leap_year(year):
        return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)

    def get_month(prevdate):
        return prevdate.month

    def get_year(prevdate):
        return prevdate.year

    now = now or datetime.datetime.now()
    # parts ordered largest-to-smallest; index positions are relied on by
    # the carry/borrow fix-ups below
    order = ['year', 'month', 'day', 'hour', 'minute', 'second']
    deltas = {}
    future = False

    # For dates in the future, swap operands so the delta math always runs
    # on (later, earlier); remember to phrase output as "in ..." later.
    if prevdate > now:
        now_old = now
        now = prevdate
        prevdate = now_old
        future = True
    if future:
        # drop sub-second noise so a future date is not rounded down a unit
        prevdate = prevdate.replace(microsecond=0)
    # Get date parts deltas
    for part in order:
        rel_delta = _get_relative_delta(now, prevdate)
        deltas[part] = rel_delta[part]

    # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00,
    # not 1 hour, -59 minutes and -59 seconds)
    offsets = [[5, 60], [4, 60], [3, 24]]
    for element in offsets:  # seconds, minutes, hours
        num = element[0]
        length = element[1]

        part = order[num]
        carry_part = order[num - 1]

        if deltas[part] < 0:
            # borrow one unit from the next-larger part
            deltas[part] += length
            deltas[carry_part] -= 1

    # Same thing for days except that the increment depends on the (variable)
    # number of days in the month
    month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
    if deltas['day'] < 0:
        if get_month(prevdate) == 2 and _is_leap_year(get_year(prevdate)):
            deltas['day'] += 29
        else:
            deltas['day'] += month_lengths[get_month(prevdate) - 1]

        deltas['month'] -= 1

    if deltas['month'] < 0:
        deltas['month'] += 12
        deltas['year'] -= 1

    # Format the result
    if short_format:
        fmt_funcs = {
            'year': lambda d: u'%dy' % d,
            'month': lambda d: u'%dm' % d,
            'day': lambda d: u'%dd' % d,
            'hour': lambda d: u'%dh' % d,
            'minute': lambda d: u'%dmin' % d,
            'second': lambda d: u'%dsec' % d,
        }
    else:
        fmt_funcs = {
            'year': lambda d: _pluralize(u'${num} year', u'${num} years', d, mapping={'num': d}).interpolate(),
            'month': lambda d: _pluralize(u'${num} month', u'${num} months', d, mapping={'num': d}).interpolate(),
            'day': lambda d: _pluralize(u'${num} day', u'${num} days', d, mapping={'num': d}).interpolate(),
            'hour': lambda d: _pluralize(u'${num} hour', u'${num} hours', d, mapping={'num': d}).interpolate(),
            'minute': lambda d: _pluralize(u'${num} minute', u'${num} minutes', d, mapping={'num': d}).interpolate(),
            'second': lambda d: _pluralize(u'${num} second', u'${num} seconds', d, mapping={'num': d}).interpolate(),
        }

    # Walk parts largest-to-smallest; the first non-zero part (optionally
    # plus the next-smaller part as "detail") determines the output.
    i = 0
    for part in order:
        value = deltas[part]
        if value != 0:

            if i < 5:
                sub_part = order[i + 1]
                sub_value = deltas[sub_part]
            else:
                sub_value = 0

            # single-unit result: detail part is zero, or caller asked
            # for the approximate (short) version
            if sub_value == 0 or show_short_version:
                _val = fmt_funcs[part](value)
                if future:
                    if show_suffix:
                        return _(u'in ${ago}', mapping={'ago': _val})
                    else:
                        return _(_val)

                else:
                    if show_suffix:
                        return _(u'${ago} ago', mapping={'ago': _val})
                    else:
                        return _(_val)

            # two-unit result, e.g. "1 day and 23 hours ago"
            val = fmt_funcs[part](value)
            val_detail = fmt_funcs[sub_part](sub_value)
            mapping = {'val': val, 'detail': val_detail}

            if short_format:
                datetime_tmpl = _(u'${val}, ${detail}', mapping=mapping)
                if show_suffix:
                    datetime_tmpl = _(u'${val}, ${detail} ago', mapping=mapping)
                    if future:
                        datetime_tmpl = _(u'in ${val}, ${detail}', mapping=mapping)
            else:
                datetime_tmpl = _(u'${val} and ${detail}', mapping=mapping)
                if show_suffix:
                    datetime_tmpl = _(u'${val} and ${detail} ago', mapping=mapping)
                    if future:
                        datetime_tmpl = _(u'in ${val} and ${detail}', mapping=mapping)

            return datetime_tmpl
        i += 1
    # no part differed at all
    return _(u'just now')
572
572
573
573
def age_from_seconds(seconds):
    """Render a duration given in *seconds* as a short age string."""
    offset = safe_int(seconds) or 0
    reference = time_to_datetime(time.time() + offset)
    return age(reference, show_suffix=False, show_short_version=True)
578
578
579
579
def cleaned_uri(uri):
    """
    Quotes '[' and ']' from uri if there is only one of them.
    according to RFC3986 we cannot use such chars in uri
    :param uri:
    :return: uri without this chars
    """
    # percent-encode everything except the characters a clone URI needs
    safe_chars = '@$:/'
    return urllib.quote(uri, safe=safe_chars)
588
588
589
589
def uri_filter(uri):
    """
    Removes user:password from given url string

    :param uri:
    :rtype: unicode
    :returns: filtered list of strings
    """
    if not uri:
        return ''

    proto = ''

    # peel off a known scheme, remembering it for the result
    for scheme in ('https://', 'http://'):
        if uri.startswith(scheme):
            proto = scheme
            uri = uri[len(scheme):]
            break

    # drop credentials: everything up to and including '@'
    uri = uri[uri.find('@') + 1:]

    # split off the port if one is present
    colon = uri.find(':')
    if colon == -1:
        host, port = uri, None
    else:
        host, port = uri[:colon], uri[colon + 1:]

    # drop empty pieces (missing proto/port)
    return filter(None, [proto, host, port])
620
620
621
621
def credentials_filter(uri):
    """
    Returns a url with removed credentials

    :param uri:
    """
    parts = uri_filter(uri)
    # re-attach the ':' separator if a port survived the filtering
    if len(parts) > 2 and parts[2]:
        parts[2] = ':' + parts[2]

    return ''.join(parts)
635
635
636
636
def get_host_info(request):
    """
    Generate host info, to obtain full url e.g https://server.com
    use this
    `{scheme}://{netloc}`
    """
    if not request:
        return {}

    home_url = request.route_url('home')
    parsed = urlobject.URLObject(home_url)
    # proxy-prefix path, un-escaped, without a trailing slash
    prefix = safe_unicode(urllib.unquote(parsed.path.rstrip('/')))

    return {
        'scheme': parsed.scheme,
        'netloc': parsed.netloc + prefix,
        'hostname': parsed.hostname,
    }
655
655
656
656
def get_clone_url(request, uri_tmpl, repo_name, repo_id, repo_type, **override):
    """
    Expand the ``{placeholder}`` tokens of *uri_tmpl* into a full clone
    URL for the given repository, honouring any *override* values.
    """
    home_url = request.route_url('home')
    parsed = urlobject.URLObject(home_url)
    prefix = safe_unicode(urllib.unquote(parsed.path.rstrip('/')))

    args = {
        'scheme': parsed.scheme,
        'user': '',
        'sys_user': getpass.getuser(),
        # path if we use proxy-prefix
        'netloc': parsed.netloc + prefix,
        'hostname': parsed.hostname,
        'prefix': prefix,
        'repo': repo_name,
        'repoid': str(repo_id),
        'repo_type': repo_type
    }
    args.update(override)
    # user must be URL-safe inside the netloc
    args['user'] = urllib.quote(safe_str(args['user']))

    for key, value in args.items():
        uri_tmpl = uri_tmpl.replace('{%s}' % key, value)

    # special case for SVN clone url
    if repo_type == 'svn':
        uri_tmpl = uri_tmpl.replace('ssh://', 'svn+ssh://')

    # remove leading @ sign if it's present. Case of empty user
    url_obj = urlobject.URLObject(uri_tmpl)
    url = url_obj.with_netloc(url_obj.netloc.lstrip('@'))

    return safe_unicode(url)
689
689
690
690
def get_commit_safe(repo, commit_id=None, commit_idx=None, pre_load=None,
                    maybe_unreachable=False):
    """
    Safe version of get_commit if this commit doesn't exists for a
    repository it returns a Dummy one instead

    :param repo: repository instance
    :param commit_id: commit id as str
    :param commit_idx: numeric commit index
    :param pre_load: optional list of commit attributes to load
    :param maybe_unreachable: translate unreachable commits on git repos
    """
    # TODO(skreft): remove these circular imports
    from rhodecode.lib.vcs.backends.base import BaseRepository, EmptyCommit
    from rhodecode.lib.vcs.exceptions import RepositoryError

    if not isinstance(repo, BaseRepository):
        raise Exception('You must pass an Repository '
                        'object as first argument got %s', type(repo))

    try:
        return repo.get_commit(
            commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load,
            maybe_unreachable=maybe_unreachable)
    except (RepositoryError, LookupError):
        # missing/unresolvable commit: hand back a harmless placeholder
        return EmptyCommit()
713
717
714
718
def datetime_to_time(dt):
    """Convert a datetime to a Unix timestamp; falsy input yields None."""
    if not dt:
        return None
    return time.mktime(dt.timetuple())
718
722
719
723
def time_to_datetime(tm):
    """
    Convert a timestamp (number, or a numeric string) to a local
    datetime; falsy or unparsable input yields None.
    """
    if not tm:
        return None
    if isinstance(tm, compat.string_types):
        try:
            tm = float(tm)
        except ValueError:
            # non-numeric string: give up silently, as callers expect
            return None
    return datetime.datetime.fromtimestamp(tm)
728
732
729
733
def time_to_utcdatetime(tm):
    """
    Convert a timestamp (number, or a numeric string) to a naive UTC
    datetime; falsy or unparsable input yields None.
    """
    if not tm:
        return None
    if isinstance(tm, compat.string_types):
        try:
            tm = float(tm)
        except ValueError:
            # non-numeric string: give up silently, as callers expect
            return None
    return datetime.datetime.utcfromtimestamp(tm)
738
742
739
743
MENTIONS_REGEX = re.compile(
    # ^@ or @ without any special chars in front
    r'(?:^@|[^a-zA-Z0-9\-\_\.]@)'
    # main body starts with letter, then can be . - _
    r'([a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+)',
    re.VERBOSE | re.MULTILINE)


def extract_mentioned_users(s):
    """
    Returns unique usernames from given string s that have @mention

    :param s: string to get mentions
    """
    # de-duplicate via a set, then sort case-insensitively
    found = {username for username in MENTIONS_REGEX.findall(s)}
    return sorted(found, key=lambda name: name.lower())
759
763
760
764
class AttributeDictBase(dict):
    """
    Dict that routes attribute writes/deletes to item access and pickles
    through its instance ``__dict__``.
    """
    def __getstate__(self):
        # pickle the attribute dictionary as-is
        return self.__dict__

    def __setstate__(self, state):
        self.__dict__ = state

    # attribute assignment/deletion behaves like item assignment/deletion
    __setattr__ = dict.__setitem__
    __delattr__ = dict.__delitem__
771
775
772
776
class StrictAttributeDict(AttributeDictBase):
    """
    Strict Version of Attribute dict which raises an Attribute error when
    requested attribute is not set
    """
    def __getattr__(self, key):
        try:
            return self[key]
        except KeyError:
            raise AttributeError('%s object has no attribute %s' % (
                self.__class__, key))
785
789
class AttributeDict(AttributeDictBase):
    """Dict whose missing attributes read as None instead of raising."""
    def __getattr__(self, key):
        return self.get(key, None)
789
793
790
794
791
795
792 class OrderedDefaultDict(collections.OrderedDict, collections.defaultdict):
796 class OrderedDefaultDict(collections.OrderedDict, collections.defaultdict):
793 def __init__(self, default_factory=None, *args, **kwargs):
797 def __init__(self, default_factory=None, *args, **kwargs):
794 # in python3 you can omit the args to super
798 # in python3 you can omit the args to super
795 super(OrderedDefaultDict, self).__init__(*args, **kwargs)
799 super(OrderedDefaultDict, self).__init__(*args, **kwargs)
796 self.default_factory = default_factory
800 self.default_factory = default_factory
797
801
798
802
def fix_PATH(os_=None):
    """
    Get current active python path, and append it to PATH variable to fix
    issues of subprocess calls and different python versions
    """
    if os_ is None:
        import os
    else:
        # allow injecting a fake os module (testing)
        os = os_

    python_dir = os.path.split(sys.executable)[0]
    if not os.environ['PATH'].startswith(python_dir):
        # prepend the interpreter's directory so child processes pick it up
        os.environ['PATH'] = '%s:%s' % (python_dir, os.environ['PATH'])
812
816
813
817
def obfuscate_url_pw(engine):
    """Return *engine* as a URL string with any password masked."""
    url = engine or ''
    try:
        url = sqlalchemy.engine.url.make_url(engine)
        if url.password:
            url.password = 'XXXXX'
    except Exception:
        # best effort: keep the raw value when it cannot be parsed
        pass
    return unicode(url)
823
827
824
828
def get_server_url(environ):
    """Build the externally visible server URL from a WSGI environ."""
    request = webob.Request(environ)
    return request.host_url + request.script_name
828
832
829
833
def unique_id(hexlen=32):
    """
    Return a random identifier of up to *hexlen* characters drawn from an
    unambiguous alphabet (no 0/O, 1/l/I confusion).
    """
    alphabet = "23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghjklmnpqrstuvwxyz"
    return suuid(truncate_to=hexlen, alphabet=alphabet)
833
837
834
838
def suuid(url=None, truncate_to=22, alphabet=None):
    """
    Generate and return a short URL safe UUID.

    If the url parameter is provided, set the namespace to the provided
    URL and generate a UUID.

    :param url to get the uuid for
    :truncate_to: truncate the basic 22 UUID to shorter version

    The IDs won't be universally unique any longer, but the probability of
    a collision will still be very low.
    """
    # Define our alphabet.
    _ALPHABET = alphabet or "23456789ABCDEFGHJKLMNPQRSTUVWXYZ"

    # If no URL is given, generate a random UUID; otherwise derive a
    # deterministic UUID3 from the URL namespace.
    if url is None:
        unique_id = uuid.uuid4().int
    else:
        unique_id = uuid.uuid3(uuid.NAMESPACE_URL, url).int

    # Base-N encode the 128-bit integer into the alphabet.
    alphabet_length = len(_ALPHABET)
    output = []
    while unique_id > 0:
        digit = unique_id % alphabet_length
        output.append(_ALPHABET[digit])
        # BUGFIX: use floor division. `int(unique_id / n)` goes through
        # float true-division on Python 3, which silently loses precision
        # on 128-bit UUID integers and corrupts the encoded id. `//` is
        # exact and behaves identically on Python 2.
        unique_id = unique_id // alphabet_length
    return "".join(output)[:truncate_to]
864
868
865
869
def get_current_rhodecode_user(request=None):
    """
    Gets rhodecode user from request
    """
    pyramid_request = request or pyramid.threadlocal.get_current_request()

    if pyramid_request and hasattr(pyramid_request, 'user'):
        # web case
        return pyramid_request.user

    if pyramid_request and hasattr(pyramid_request, 'rpc_user'):
        # api case
        return pyramid_request.rpc_user

    return None
881
885
882
886
def action_logger_generic(action, namespace=''):
    """
    A generic logger for actions useful to the system overview, tries to find
    an acting user for the context of the call otherwise reports unknown user

    :param action: logging message eg 'comment 5 deleted'
    :param type: string

    :param namespace: namespace of the logging message eg. 'repo.comments'
    :param type: string

    """
    logger_name = 'rhodecode.actions'
    if namespace:
        logger_name = '%s.%s' % (logger_name, namespace)

    log = logging.getLogger(logger_name)

    # try to attribute the action to the current user
    acting_user = get_current_rhodecode_user()
    if acting_user:
        logfunc = log.info
    else:
        # no user context: still log, but louder
        acting_user = '<unknown user>'
        logfunc = log.warning

    logfunc('Logging action by {}: {}'.format(acting_user, action))
913
917
914
918
915 def escape_split(text, sep=',', maxsplit=-1):
919 def escape_split(text, sep=',', maxsplit=-1):
916 r"""
920 r"""
917 Allows for escaping of the separator: e.g. arg='foo\, bar'
921 Allows for escaping of the separator: e.g. arg='foo\, bar'
918
922
919 It should be noted that the way bash et. al. do command line parsing, those
923 It should be noted that the way bash et. al. do command line parsing, those
920 single quotes are required.
924 single quotes are required.
921 """
925 """
922 escaped_sep = r'\%s' % sep
926 escaped_sep = r'\%s' % sep
923
927
924 if escaped_sep not in text:
928 if escaped_sep not in text:
925 return text.split(sep, maxsplit)
929 return text.split(sep, maxsplit)
926
930
927 before, _mid, after = text.partition(escaped_sep)
931 before, _mid, after = text.partition(escaped_sep)
928 startlist = before.split(sep, maxsplit) # a regular split is fine here
932 startlist = before.split(sep, maxsplit) # a regular split is fine here
929 unfinished = startlist[-1]
933 unfinished = startlist[-1]
930 startlist = startlist[:-1]
934 startlist = startlist[:-1]
931
935
932 # recurse because there may be more escaped separators
936 # recurse because there may be more escaped separators
933 endlist = escape_split(after, sep, maxsplit)
937 endlist = escape_split(after, sep, maxsplit)
934
938
935 # finish building the escaped value. we use endlist[0] becaue the first
939 # finish building the escaped value. we use endlist[0] becaue the first
936 # part of the string sent in recursion is the rest of the escaped value.
940 # part of the string sent in recursion is the rest of the escaped value.
937 unfinished += sep + endlist[0]
941 unfinished += sep + endlist[0]
938
942
939 return startlist + [unfinished] + endlist[1:] # put together all the parts
943 return startlist + [unfinished] + endlist[1:] # put together all the parts
940
944
941
945
942 class OptionalAttr(object):
946 class OptionalAttr(object):
943 """
947 """
944 Special Optional Option that defines other attribute. Example::
948 Special Optional Option that defines other attribute. Example::
945
949
946 def test(apiuser, userid=Optional(OAttr('apiuser')):
950 def test(apiuser, userid=Optional(OAttr('apiuser')):
947 user = Optional.extract(userid)
951 user = Optional.extract(userid)
948 # calls
952 # calls
949
953
950 """
954 """
951
955
952 def __init__(self, attr_name):
956 def __init__(self, attr_name):
953 self.attr_name = attr_name
957 self.attr_name = attr_name
954
958
955 def __repr__(self):
959 def __repr__(self):
956 return '<OptionalAttr:%s>' % self.attr_name
960 return '<OptionalAttr:%s>' % self.attr_name
957
961
958 def __call__(self):
962 def __call__(self):
959 return self
963 return self
960
964
961
965
962 # alias
966 # alias
963 OAttr = OptionalAttr
967 OAttr = OptionalAttr
964
968
965
969
966 class Optional(object):
970 class Optional(object):
967 """
971 """
968 Defines an optional parameter::
972 Defines an optional parameter::
969
973
970 param = param.getval() if isinstance(param, Optional) else param
974 param = param.getval() if isinstance(param, Optional) else param
971 param = param() if isinstance(param, Optional) else param
975 param = param() if isinstance(param, Optional) else param
972
976
973 is equivalent of::
977 is equivalent of::
974
978
975 param = Optional.extract(param)
979 param = Optional.extract(param)
976
980
977 """
981 """
978
982
979 def __init__(self, type_):
983 def __init__(self, type_):
980 self.type_ = type_
984 self.type_ = type_
981
985
982 def __repr__(self):
986 def __repr__(self):
983 return '<Optional:%s>' % self.type_.__repr__()
987 return '<Optional:%s>' % self.type_.__repr__()
984
988
985 def __call__(self):
989 def __call__(self):
986 return self.getval()
990 return self.getval()
987
991
988 def getval(self):
992 def getval(self):
989 """
993 """
990 returns value from this Optional instance
994 returns value from this Optional instance
991 """
995 """
992 if isinstance(self.type_, OAttr):
996 if isinstance(self.type_, OAttr):
993 # use params name
997 # use params name
994 return self.type_.attr_name
998 return self.type_.attr_name
995 return self.type_
999 return self.type_
996
1000
997 @classmethod
1001 @classmethod
998 def extract(cls, val):
1002 def extract(cls, val):
999 """
1003 """
1000 Extracts value from Optional() instance
1004 Extracts value from Optional() instance
1001
1005
1002 :param val:
1006 :param val:
1003 :return: original value if it's not Optional instance else
1007 :return: original value if it's not Optional instance else
1004 value of instance
1008 value of instance
1005 """
1009 """
1006 if isinstance(val, cls):
1010 if isinstance(val, cls):
1007 return val.getval()
1011 return val.getval()
1008 return val
1012 return val
1009
1013
1010
1014
1011 def glob2re(pat):
1015 def glob2re(pat):
1012 """
1016 """
1013 Translate a shell PATTERN to a regular expression.
1017 Translate a shell PATTERN to a regular expression.
1014
1018
1015 There is no way to quote meta-characters.
1019 There is no way to quote meta-characters.
1016 """
1020 """
1017
1021
1018 i, n = 0, len(pat)
1022 i, n = 0, len(pat)
1019 res = ''
1023 res = ''
1020 while i < n:
1024 while i < n:
1021 c = pat[i]
1025 c = pat[i]
1022 i = i+1
1026 i = i+1
1023 if c == '*':
1027 if c == '*':
1024 #res = res + '.*'
1028 #res = res + '.*'
1025 res = res + '[^/]*'
1029 res = res + '[^/]*'
1026 elif c == '?':
1030 elif c == '?':
1027 #res = res + '.'
1031 #res = res + '.'
1028 res = res + '[^/]'
1032 res = res + '[^/]'
1029 elif c == '[':
1033 elif c == '[':
1030 j = i
1034 j = i
1031 if j < n and pat[j] == '!':
1035 if j < n and pat[j] == '!':
1032 j = j+1
1036 j = j+1
1033 if j < n and pat[j] == ']':
1037 if j < n and pat[j] == ']':
1034 j = j+1
1038 j = j+1
1035 while j < n and pat[j] != ']':
1039 while j < n and pat[j] != ']':
1036 j = j+1
1040 j = j+1
1037 if j >= n:
1041 if j >= n:
1038 res = res + '\\['
1042 res = res + '\\['
1039 else:
1043 else:
1040 stuff = pat[i:j].replace('\\','\\\\')
1044 stuff = pat[i:j].replace('\\','\\\\')
1041 i = j+1
1045 i = j+1
1042 if stuff[0] == '!':
1046 if stuff[0] == '!':
1043 stuff = '^' + stuff[1:]
1047 stuff = '^' + stuff[1:]
1044 elif stuff[0] == '^':
1048 elif stuff[0] == '^':
1045 stuff = '\\' + stuff
1049 stuff = '\\' + stuff
1046 res = '%s[%s]' % (res, stuff)
1050 res = '%s[%s]' % (res, stuff)
1047 else:
1051 else:
1048 res = res + re.escape(c)
1052 res = res + re.escape(c)
1049 return res + '\Z(?ms)'
1053 return res + '\Z(?ms)'
1050
1054
1051
1055
1052 def parse_byte_string(size_str):
1056 def parse_byte_string(size_str):
1053 match = re.match(r'(\d+)(MB|KB)', size_str, re.IGNORECASE)
1057 match = re.match(r'(\d+)(MB|KB)', size_str, re.IGNORECASE)
1054 if not match:
1058 if not match:
1055 raise ValueError('Given size:%s is invalid, please make sure '
1059 raise ValueError('Given size:%s is invalid, please make sure '
1056 'to use format of <num>(MB|KB)' % size_str)
1060 'to use format of <num>(MB|KB)' % size_str)
1057
1061
1058 _parts = match.groups()
1062 _parts = match.groups()
1059 num, type_ = _parts
1063 num, type_ = _parts
1060 return long(num) * {'mb': 1024*1024, 'kb': 1024}[type_.lower()]
1064 return long(num) * {'mb': 1024*1024, 'kb': 1024}[type_.lower()]
1061
1065
1062
1066
1063 class CachedProperty(object):
1067 class CachedProperty(object):
1064 """
1068 """
1065 Lazy Attributes. With option to invalidate the cache by running a method
1069 Lazy Attributes. With option to invalidate the cache by running a method
1066
1070
1067 class Foo():
1071 class Foo():
1068
1072
1069 @CachedProperty
1073 @CachedProperty
1070 def heavy_func():
1074 def heavy_func():
1071 return 'super-calculation'
1075 return 'super-calculation'
1072
1076
1073 foo = Foo()
1077 foo = Foo()
1074 foo.heavy_func() # first computions
1078 foo.heavy_func() # first computions
1075 foo.heavy_func() # fetch from cache
1079 foo.heavy_func() # fetch from cache
1076 foo._invalidate_prop_cache('heavy_func')
1080 foo._invalidate_prop_cache('heavy_func')
1077 # at this point calling foo.heavy_func() will be re-computed
1081 # at this point calling foo.heavy_func() will be re-computed
1078 """
1082 """
1079
1083
1080 def __init__(self, func, func_name=None):
1084 def __init__(self, func, func_name=None):
1081
1085
1082 if func_name is None:
1086 if func_name is None:
1083 func_name = func.__name__
1087 func_name = func.__name__
1084 self.data = (func, func_name)
1088 self.data = (func, func_name)
1085 update_wrapper(self, func)
1089 update_wrapper(self, func)
1086
1090
1087 def __get__(self, inst, class_):
1091 def __get__(self, inst, class_):
1088 if inst is None:
1092 if inst is None:
1089 return self
1093 return self
1090
1094
1091 func, func_name = self.data
1095 func, func_name = self.data
1092 value = func(inst)
1096 value = func(inst)
1093 inst.__dict__[func_name] = value
1097 inst.__dict__[func_name] = value
1094 if '_invalidate_prop_cache' not in inst.__dict__:
1098 if '_invalidate_prop_cache' not in inst.__dict__:
1095 inst.__dict__['_invalidate_prop_cache'] = partial(
1099 inst.__dict__['_invalidate_prop_cache'] = partial(
1096 self._invalidate_prop_cache, inst)
1100 self._invalidate_prop_cache, inst)
1097 return value
1101 return value
1098
1102
1099 def _invalidate_prop_cache(self, inst, name):
1103 def _invalidate_prop_cache(self, inst, name):
1100 inst.__dict__.pop(name, None)
1104 inst.__dict__.pop(name, None)
@@ -1,1899 +1,1901 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Base module for all VCS systems
22 Base module for all VCS systems
23 """
23 """
24 import os
24 import os
25 import re
25 import re
26 import time
26 import time
27 import shutil
27 import shutil
28 import datetime
28 import datetime
29 import fnmatch
29 import fnmatch
30 import itertools
30 import itertools
31 import logging
31 import logging
32 import collections
32 import collections
33 import warnings
33 import warnings
34
34
35 from zope.cachedescriptors.property import Lazy as LazyProperty
35 from zope.cachedescriptors.property import Lazy as LazyProperty
36
36
37 from pyramid import compat
37 from pyramid import compat
38
38
39 import rhodecode
39 import rhodecode
40 from rhodecode.translation import lazy_ugettext
40 from rhodecode.translation import lazy_ugettext
41 from rhodecode.lib.utils2 import safe_str, safe_unicode, CachedProperty
41 from rhodecode.lib.utils2 import safe_str, safe_unicode, CachedProperty
42 from rhodecode.lib.vcs import connection
42 from rhodecode.lib.vcs import connection
43 from rhodecode.lib.vcs.utils import author_name, author_email
43 from rhodecode.lib.vcs.utils import author_name, author_email
44 from rhodecode.lib.vcs.conf import settings
44 from rhodecode.lib.vcs.conf import settings
45 from rhodecode.lib.vcs.exceptions import (
45 from rhodecode.lib.vcs.exceptions import (
46 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
46 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
47 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
47 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
48 NodeDoesNotExistError, NodeNotChangedError, VCSError,
48 NodeDoesNotExistError, NodeNotChangedError, VCSError,
49 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
49 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
50 RepositoryError)
50 RepositoryError)
51
51
52
52
53 log = logging.getLogger(__name__)
53 log = logging.getLogger(__name__)
54
54
55
55
56 FILEMODE_DEFAULT = 0o100644
56 FILEMODE_DEFAULT = 0o100644
57 FILEMODE_EXECUTABLE = 0o100755
57 FILEMODE_EXECUTABLE = 0o100755
58 EMPTY_COMMIT_ID = '0' * 40
58 EMPTY_COMMIT_ID = '0' * 40
59
59
60 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
60 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
61
61
62
62
63 class MergeFailureReason(object):
63 class MergeFailureReason(object):
64 """
64 """
65 Enumeration with all the reasons why the server side merge could fail.
65 Enumeration with all the reasons why the server side merge could fail.
66
66
67 DO NOT change the number of the reasons, as they may be stored in the
67 DO NOT change the number of the reasons, as they may be stored in the
68 database.
68 database.
69
69
70 Changing the name of a reason is acceptable and encouraged to deprecate old
70 Changing the name of a reason is acceptable and encouraged to deprecate old
71 reasons.
71 reasons.
72 """
72 """
73
73
74 # Everything went well.
74 # Everything went well.
75 NONE = 0
75 NONE = 0
76
76
77 # An unexpected exception was raised. Check the logs for more details.
77 # An unexpected exception was raised. Check the logs for more details.
78 UNKNOWN = 1
78 UNKNOWN = 1
79
79
80 # The merge was not successful, there are conflicts.
80 # The merge was not successful, there are conflicts.
81 MERGE_FAILED = 2
81 MERGE_FAILED = 2
82
82
83 # The merge succeeded but we could not push it to the target repository.
83 # The merge succeeded but we could not push it to the target repository.
84 PUSH_FAILED = 3
84 PUSH_FAILED = 3
85
85
86 # The specified target is not a head in the target repository.
86 # The specified target is not a head in the target repository.
87 TARGET_IS_NOT_HEAD = 4
87 TARGET_IS_NOT_HEAD = 4
88
88
89 # The source repository contains more branches than the target. Pushing
89 # The source repository contains more branches than the target. Pushing
90 # the merge will create additional branches in the target.
90 # the merge will create additional branches in the target.
91 HG_SOURCE_HAS_MORE_BRANCHES = 5
91 HG_SOURCE_HAS_MORE_BRANCHES = 5
92
92
93 # The target reference has multiple heads. That does not allow to correctly
93 # The target reference has multiple heads. That does not allow to correctly
94 # identify the target location. This could only happen for mercurial
94 # identify the target location. This could only happen for mercurial
95 # branches.
95 # branches.
96 HG_TARGET_HAS_MULTIPLE_HEADS = 6
96 HG_TARGET_HAS_MULTIPLE_HEADS = 6
97
97
98 # The target repository is locked
98 # The target repository is locked
99 TARGET_IS_LOCKED = 7
99 TARGET_IS_LOCKED = 7
100
100
101 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
101 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
102 # A involved commit could not be found.
102 # A involved commit could not be found.
103 _DEPRECATED_MISSING_COMMIT = 8
103 _DEPRECATED_MISSING_COMMIT = 8
104
104
105 # The target repo reference is missing.
105 # The target repo reference is missing.
106 MISSING_TARGET_REF = 9
106 MISSING_TARGET_REF = 9
107
107
108 # The source repo reference is missing.
108 # The source repo reference is missing.
109 MISSING_SOURCE_REF = 10
109 MISSING_SOURCE_REF = 10
110
110
111 # The merge was not successful, there are conflicts related to sub
111 # The merge was not successful, there are conflicts related to sub
112 # repositories.
112 # repositories.
113 SUBREPO_MERGE_FAILED = 11
113 SUBREPO_MERGE_FAILED = 11
114
114
115
115
116 class UpdateFailureReason(object):
116 class UpdateFailureReason(object):
117 """
117 """
118 Enumeration with all the reasons why the pull request update could fail.
118 Enumeration with all the reasons why the pull request update could fail.
119
119
120 DO NOT change the number of the reasons, as they may be stored in the
120 DO NOT change the number of the reasons, as they may be stored in the
121 database.
121 database.
122
122
123 Changing the name of a reason is acceptable and encouraged to deprecate old
123 Changing the name of a reason is acceptable and encouraged to deprecate old
124 reasons.
124 reasons.
125 """
125 """
126
126
127 # Everything went well.
127 # Everything went well.
128 NONE = 0
128 NONE = 0
129
129
130 # An unexpected exception was raised. Check the logs for more details.
130 # An unexpected exception was raised. Check the logs for more details.
131 UNKNOWN = 1
131 UNKNOWN = 1
132
132
133 # The pull request is up to date.
133 # The pull request is up to date.
134 NO_CHANGE = 2
134 NO_CHANGE = 2
135
135
136 # The pull request has a reference type that is not supported for update.
136 # The pull request has a reference type that is not supported for update.
137 WRONG_REF_TYPE = 3
137 WRONG_REF_TYPE = 3
138
138
139 # Update failed because the target reference is missing.
139 # Update failed because the target reference is missing.
140 MISSING_TARGET_REF = 4
140 MISSING_TARGET_REF = 4
141
141
142 # Update failed because the source reference is missing.
142 # Update failed because the source reference is missing.
143 MISSING_SOURCE_REF = 5
143 MISSING_SOURCE_REF = 5
144
144
145
145
146 class MergeResponse(object):
146 class MergeResponse(object):
147
147
148 # uses .format(**metadata) for variables
148 # uses .format(**metadata) for variables
149 MERGE_STATUS_MESSAGES = {
149 MERGE_STATUS_MESSAGES = {
150 MergeFailureReason.NONE: lazy_ugettext(
150 MergeFailureReason.NONE: lazy_ugettext(
151 u'This pull request can be automatically merged.'),
151 u'This pull request can be automatically merged.'),
152 MergeFailureReason.UNKNOWN: lazy_ugettext(
152 MergeFailureReason.UNKNOWN: lazy_ugettext(
153 u'This pull request cannot be merged because of an unhandled exception. '
153 u'This pull request cannot be merged because of an unhandled exception. '
154 u'{exception}'),
154 u'{exception}'),
155 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
155 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
156 u'This pull request cannot be merged because of merge conflicts. {unresolved_files}'),
156 u'This pull request cannot be merged because of merge conflicts. {unresolved_files}'),
157 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
157 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
158 u'This pull request could not be merged because push to '
158 u'This pull request could not be merged because push to '
159 u'target:`{target}@{merge_commit}` failed.'),
159 u'target:`{target}@{merge_commit}` failed.'),
160 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
160 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
161 u'This pull request cannot be merged because the target '
161 u'This pull request cannot be merged because the target '
162 u'`{target_ref.name}` is not a head.'),
162 u'`{target_ref.name}` is not a head.'),
163 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
163 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
164 u'This pull request cannot be merged because the source contains '
164 u'This pull request cannot be merged because the source contains '
165 u'more branches than the target.'),
165 u'more branches than the target.'),
166 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
166 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
167 u'This pull request cannot be merged because the target `{target_ref.name}` '
167 u'This pull request cannot be merged because the target `{target_ref.name}` '
168 u'has multiple heads: `{heads}`.'),
168 u'has multiple heads: `{heads}`.'),
169 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
169 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
170 u'This pull request cannot be merged because the target repository is '
170 u'This pull request cannot be merged because the target repository is '
171 u'locked by {locked_by}.'),
171 u'locked by {locked_by}.'),
172
172
173 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
173 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
174 u'This pull request cannot be merged because the target '
174 u'This pull request cannot be merged because the target '
175 u'reference `{target_ref.name}` is missing.'),
175 u'reference `{target_ref.name}` is missing.'),
176 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
176 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
177 u'This pull request cannot be merged because the source '
177 u'This pull request cannot be merged because the source '
178 u'reference `{source_ref.name}` is missing.'),
178 u'reference `{source_ref.name}` is missing.'),
179 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
179 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
180 u'This pull request cannot be merged because of conflicts related '
180 u'This pull request cannot be merged because of conflicts related '
181 u'to sub repositories.'),
181 u'to sub repositories.'),
182
182
183 # Deprecations
183 # Deprecations
184 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
184 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
185 u'This pull request cannot be merged because the target or the '
185 u'This pull request cannot be merged because the target or the '
186 u'source reference is missing.'),
186 u'source reference is missing.'),
187
187
188 }
188 }
189
189
190 def __init__(self, possible, executed, merge_ref, failure_reason, metadata=None):
190 def __init__(self, possible, executed, merge_ref, failure_reason, metadata=None):
191 self.possible = possible
191 self.possible = possible
192 self.executed = executed
192 self.executed = executed
193 self.merge_ref = merge_ref
193 self.merge_ref = merge_ref
194 self.failure_reason = failure_reason
194 self.failure_reason = failure_reason
195 self.metadata = metadata or {}
195 self.metadata = metadata or {}
196
196
197 def __repr__(self):
197 def __repr__(self):
198 return '<MergeResponse:{} {}>'.format(self.label, self.failure_reason)
198 return '<MergeResponse:{} {}>'.format(self.label, self.failure_reason)
199
199
200 def __eq__(self, other):
200 def __eq__(self, other):
201 same_instance = isinstance(other, self.__class__)
201 same_instance = isinstance(other, self.__class__)
202 return same_instance \
202 return same_instance \
203 and self.possible == other.possible \
203 and self.possible == other.possible \
204 and self.executed == other.executed \
204 and self.executed == other.executed \
205 and self.failure_reason == other.failure_reason
205 and self.failure_reason == other.failure_reason
206
206
207 @property
207 @property
208 def label(self):
208 def label(self):
209 label_dict = dict((v, k) for k, v in MergeFailureReason.__dict__.items() if
209 label_dict = dict((v, k) for k, v in MergeFailureReason.__dict__.items() if
210 not k.startswith('_'))
210 not k.startswith('_'))
211 return label_dict.get(self.failure_reason)
211 return label_dict.get(self.failure_reason)
212
212
213 @property
213 @property
214 def merge_status_message(self):
214 def merge_status_message(self):
215 """
215 """
216 Return a human friendly error message for the given merge status code.
216 Return a human friendly error message for the given merge status code.
217 """
217 """
218 msg = safe_unicode(self.MERGE_STATUS_MESSAGES[self.failure_reason])
218 msg = safe_unicode(self.MERGE_STATUS_MESSAGES[self.failure_reason])
219
219 try:
220 try:
220 return msg.format(**self.metadata)
221 return msg.format(**self.metadata)
221 except Exception:
222 except Exception:
222 log.exception('Failed to format %s message', self)
223 log.exception('Failed to format %s message', self)
223 return msg
224 return msg
224
225
225 def asdict(self):
226 def asdict(self):
226 data = {}
227 data = {}
227 for k in ['possible', 'executed', 'merge_ref', 'failure_reason',
228 for k in ['possible', 'executed', 'merge_ref', 'failure_reason',
228 'merge_status_message']:
229 'merge_status_message']:
229 data[k] = getattr(self, k)
230 data[k] = getattr(self, k)
230 return data
231 return data
231
232
232
233
233 class BaseRepository(object):
234 class BaseRepository(object):
234 """
235 """
235 Base Repository for final backends
236 Base Repository for final backends
236
237
237 .. attribute:: DEFAULT_BRANCH_NAME
238 .. attribute:: DEFAULT_BRANCH_NAME
238
239
239 name of default branch (i.e. "trunk" for svn, "master" for git etc.
240 name of default branch (i.e. "trunk" for svn, "master" for git etc.
240
241
241 .. attribute:: commit_ids
242 .. attribute:: commit_ids
242
243
243 list of all available commit ids, in ascending order
244 list of all available commit ids, in ascending order
244
245
245 .. attribute:: path
246 .. attribute:: path
246
247
247 absolute path to the repository
248 absolute path to the repository
248
249
249 .. attribute:: bookmarks
250 .. attribute:: bookmarks
250
251
251 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
252 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
252 there are no bookmarks or the backend implementation does not support
253 there are no bookmarks or the backend implementation does not support
253 bookmarks.
254 bookmarks.
254
255
255 .. attribute:: tags
256 .. attribute:: tags
256
257
257 Mapping from name to :term:`Commit ID` of the tag.
258 Mapping from name to :term:`Commit ID` of the tag.
258
259
259 """
260 """
260
261
261 DEFAULT_BRANCH_NAME = None
262 DEFAULT_BRANCH_NAME = None
262 DEFAULT_CONTACT = u"Unknown"
263 DEFAULT_CONTACT = u"Unknown"
263 DEFAULT_DESCRIPTION = u"unknown"
264 DEFAULT_DESCRIPTION = u"unknown"
264 EMPTY_COMMIT_ID = '0' * 40
265 EMPTY_COMMIT_ID = '0' * 40
265
266
266 path = None
267 path = None
267
268
268 _is_empty = None
269 _is_empty = None
269 _commit_ids = {}
270 _commit_ids = {}
270
271
271 def __init__(self, repo_path, config=None, create=False, **kwargs):
272 def __init__(self, repo_path, config=None, create=False, **kwargs):
272 """
273 """
273 Initializes repository. Raises RepositoryError if repository could
274 Initializes repository. Raises RepositoryError if repository could
274 not be find at the given ``repo_path`` or directory at ``repo_path``
275 not be find at the given ``repo_path`` or directory at ``repo_path``
275 exists and ``create`` is set to True.
276 exists and ``create`` is set to True.
276
277
277 :param repo_path: local path of the repository
278 :param repo_path: local path of the repository
278 :param config: repository configuration
279 :param config: repository configuration
279 :param create=False: if set to True, would try to create repository.
280 :param create=False: if set to True, would try to create repository.
280 :param src_url=None: if set, should be proper url from which repository
281 :param src_url=None: if set, should be proper url from which repository
281 would be cloned; requires ``create`` parameter to be set to True -
282 would be cloned; requires ``create`` parameter to be set to True -
282 raises RepositoryError if src_url is set and create evaluates to
283 raises RepositoryError if src_url is set and create evaluates to
283 False
284 False
284 """
285 """
285 raise NotImplementedError
286 raise NotImplementedError
286
287
287 def __repr__(self):
288 def __repr__(self):
288 return '<%s at %s>' % (self.__class__.__name__, self.path)
289 return '<%s at %s>' % (self.__class__.__name__, self.path)
289
290
290 def __len__(self):
291 def __len__(self):
291 return self.count()
292 return self.count()
292
293
293 def __eq__(self, other):
294 def __eq__(self, other):
294 same_instance = isinstance(other, self.__class__)
295 same_instance = isinstance(other, self.__class__)
295 return same_instance and other.path == self.path
296 return same_instance and other.path == self.path
296
297
297 def __ne__(self, other):
298 def __ne__(self, other):
298 return not self.__eq__(other)
299 return not self.__eq__(other)
299
300
300 def get_create_shadow_cache_pr_path(self, db_repo):
301 def get_create_shadow_cache_pr_path(self, db_repo):
301 path = db_repo.cached_diffs_dir
302 path = db_repo.cached_diffs_dir
302 if not os.path.exists(path):
303 if not os.path.exists(path):
303 os.makedirs(path, 0o755)
304 os.makedirs(path, 0o755)
304 return path
305 return path
305
306
306 @classmethod
307 @classmethod
307 def get_default_config(cls, default=None):
308 def get_default_config(cls, default=None):
308 config = Config()
309 config = Config()
309 if default and isinstance(default, list):
310 if default and isinstance(default, list):
310 for section, key, val in default:
311 for section, key, val in default:
311 config.set(section, key, val)
312 config.set(section, key, val)
312 return config
313 return config
313
314
314 @LazyProperty
315 @LazyProperty
315 def _remote(self):
316 def _remote(self):
316 raise NotImplementedError
317 raise NotImplementedError
317
318
318 def _heads(self, branch=None):
319 def _heads(self, branch=None):
319 return []
320 return []
320
321
321 @LazyProperty
322 @LazyProperty
322 def EMPTY_COMMIT(self):
323 def EMPTY_COMMIT(self):
323 return EmptyCommit(self.EMPTY_COMMIT_ID)
324 return EmptyCommit(self.EMPTY_COMMIT_ID)
324
325
325 @LazyProperty
326 @LazyProperty
326 def alias(self):
327 def alias(self):
327 for k, v in settings.BACKENDS.items():
328 for k, v in settings.BACKENDS.items():
328 if v.split('.')[-1] == str(self.__class__.__name__):
329 if v.split('.')[-1] == str(self.__class__.__name__):
329 return k
330 return k
330
331
331 @LazyProperty
332 @LazyProperty
332 def name(self):
333 def name(self):
333 return safe_unicode(os.path.basename(self.path))
334 return safe_unicode(os.path.basename(self.path))
334
335
335 @LazyProperty
336 @LazyProperty
336 def description(self):
337 def description(self):
337 raise NotImplementedError
338 raise NotImplementedError
338
339
339 def refs(self):
340 def refs(self):
340 """
341 """
341 returns a `dict` with branches, bookmarks, tags, and closed_branches
342 returns a `dict` with branches, bookmarks, tags, and closed_branches
342 for this repository
343 for this repository
343 """
344 """
344 return dict(
345 return dict(
345 branches=self.branches,
346 branches=self.branches,
346 branches_closed=self.branches_closed,
347 branches_closed=self.branches_closed,
347 tags=self.tags,
348 tags=self.tags,
348 bookmarks=self.bookmarks
349 bookmarks=self.bookmarks
349 )
350 )
350
351
351 @LazyProperty
352 @LazyProperty
352 def branches(self):
353 def branches(self):
353 """
354 """
354 A `dict` which maps branch names to commit ids.
355 A `dict` which maps branch names to commit ids.
355 """
356 """
356 raise NotImplementedError
357 raise NotImplementedError
357
358
358 @LazyProperty
359 @LazyProperty
359 def branches_closed(self):
360 def branches_closed(self):
360 """
361 """
361 A `dict` which maps tags names to commit ids.
362 A `dict` which maps tags names to commit ids.
362 """
363 """
363 raise NotImplementedError
364 raise NotImplementedError
364
365
365 @LazyProperty
366 @LazyProperty
366 def bookmarks(self):
367 def bookmarks(self):
367 """
368 """
368 A `dict` which maps tags names to commit ids.
369 A `dict` which maps tags names to commit ids.
369 """
370 """
370 raise NotImplementedError
371 raise NotImplementedError
371
372
372 @LazyProperty
373 @LazyProperty
373 def tags(self):
374 def tags(self):
374 """
375 """
375 A `dict` which maps tags names to commit ids.
376 A `dict` which maps tags names to commit ids.
376 """
377 """
377 raise NotImplementedError
378 raise NotImplementedError
378
379
379 @LazyProperty
380 @LazyProperty
380 def size(self):
381 def size(self):
381 """
382 """
382 Returns combined size in bytes for all repository files
383 Returns combined size in bytes for all repository files
383 """
384 """
384 tip = self.get_commit()
385 tip = self.get_commit()
385 return tip.size
386 return tip.size
386
387
387 def size_at_commit(self, commit_id):
388 def size_at_commit(self, commit_id):
388 commit = self.get_commit(commit_id)
389 commit = self.get_commit(commit_id)
389 return commit.size
390 return commit.size
390
391
391 def _check_for_empty(self):
392 def _check_for_empty(self):
392 no_commits = len(self._commit_ids) == 0
393 no_commits = len(self._commit_ids) == 0
393 if no_commits:
394 if no_commits:
394 # check on remote to be sure
395 # check on remote to be sure
395 return self._remote.is_empty()
396 return self._remote.is_empty()
396 else:
397 else:
397 return False
398 return False
398
399
399 def is_empty(self):
400 def is_empty(self):
400 if rhodecode.is_test:
401 if rhodecode.is_test:
401 return self._check_for_empty()
402 return self._check_for_empty()
402
403
403 if self._is_empty is None:
404 if self._is_empty is None:
404 # cache empty for production, but not tests
405 # cache empty for production, but not tests
405 self._is_empty = self._check_for_empty()
406 self._is_empty = self._check_for_empty()
406
407
407 return self._is_empty
408 return self._is_empty
408
409
409 @staticmethod
410 @staticmethod
410 def check_url(url, config):
411 def check_url(url, config):
411 """
412 """
412 Function will check given url and try to verify if it's a valid
413 Function will check given url and try to verify if it's a valid
413 link.
414 link.
414 """
415 """
415 raise NotImplementedError
416 raise NotImplementedError
416
417
417 @staticmethod
418 @staticmethod
418 def is_valid_repository(path):
419 def is_valid_repository(path):
419 """
420 """
420 Check if given `path` contains a valid repository of this backend
421 Check if given `path` contains a valid repository of this backend
421 """
422 """
422 raise NotImplementedError
423 raise NotImplementedError
423
424
424 # ==========================================================================
425 # ==========================================================================
425 # COMMITS
426 # COMMITS
426 # ==========================================================================
427 # ==========================================================================
427
428
428 @CachedProperty
429 @CachedProperty
429 def commit_ids(self):
430 def commit_ids(self):
430 raise NotImplementedError
431 raise NotImplementedError
431
432
432 def append_commit_id(self, commit_id):
433 def append_commit_id(self, commit_id):
433 if commit_id not in self.commit_ids:
434 if commit_id not in self.commit_ids:
434 self._rebuild_cache(self.commit_ids + [commit_id])
435 self._rebuild_cache(self.commit_ids + [commit_id])
435
436
436 # clear cache
437 # clear cache
437 self._invalidate_prop_cache('commit_ids')
438 self._invalidate_prop_cache('commit_ids')
438 self._is_empty = False
439 self._is_empty = False
439
440
440 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
441 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
442 translate_tag=None, maybe_unreachable=False):
441 """
443 """
442 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
444 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
443 are both None, most recent commit is returned.
445 are both None, most recent commit is returned.
444
446
445 :param pre_load: Optional. List of commit attributes to load.
447 :param pre_load: Optional. List of commit attributes to load.
446
448
447 :raises ``EmptyRepositoryError``: if there are no commits
449 :raises ``EmptyRepositoryError``: if there are no commits
448 """
450 """
449 raise NotImplementedError
451 raise NotImplementedError
450
452
451 def __iter__(self):
453 def __iter__(self):
452 for commit_id in self.commit_ids:
454 for commit_id in self.commit_ids:
453 yield self.get_commit(commit_id=commit_id)
455 yield self.get_commit(commit_id=commit_id)
454
456
455 def get_commits(
457 def get_commits(
456 self, start_id=None, end_id=None, start_date=None, end_date=None,
458 self, start_id=None, end_id=None, start_date=None, end_date=None,
457 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
459 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
458 """
460 """
459 Returns iterator of `BaseCommit` objects from start to end
461 Returns iterator of `BaseCommit` objects from start to end
460 not inclusive. This should behave just like a list, ie. end is not
462 not inclusive. This should behave just like a list, ie. end is not
461 inclusive.
463 inclusive.
462
464
463 :param start_id: None or str, must be a valid commit id
465 :param start_id: None or str, must be a valid commit id
464 :param end_id: None or str, must be a valid commit id
466 :param end_id: None or str, must be a valid commit id
465 :param start_date:
467 :param start_date:
466 :param end_date:
468 :param end_date:
467 :param branch_name:
469 :param branch_name:
468 :param show_hidden:
470 :param show_hidden:
469 :param pre_load:
471 :param pre_load:
470 :param translate_tags:
472 :param translate_tags:
471 """
473 """
472 raise NotImplementedError
474 raise NotImplementedError
473
475
474 def __getitem__(self, key):
476 def __getitem__(self, key):
475 """
477 """
476 Allows index based access to the commit objects of this repository.
478 Allows index based access to the commit objects of this repository.
477 """
479 """
478 pre_load = ["author", "branch", "date", "message", "parents"]
480 pre_load = ["author", "branch", "date", "message", "parents"]
479 if isinstance(key, slice):
481 if isinstance(key, slice):
480 return self._get_range(key, pre_load)
482 return self._get_range(key, pre_load)
481 return self.get_commit(commit_idx=key, pre_load=pre_load)
483 return self.get_commit(commit_idx=key, pre_load=pre_load)
482
484
483 def _get_range(self, slice_obj, pre_load):
485 def _get_range(self, slice_obj, pre_load):
484 for commit_id in self.commit_ids.__getitem__(slice_obj):
486 for commit_id in self.commit_ids.__getitem__(slice_obj):
485 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
487 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
486
488
487 def count(self):
489 def count(self):
488 return len(self.commit_ids)
490 return len(self.commit_ids)
489
491
490 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
492 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
491 """
493 """
492 Creates and returns a tag for the given ``commit_id``.
494 Creates and returns a tag for the given ``commit_id``.
493
495
494 :param name: name for new tag
496 :param name: name for new tag
495 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
497 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
496 :param commit_id: commit id for which new tag would be created
498 :param commit_id: commit id for which new tag would be created
497 :param message: message of the tag's commit
499 :param message: message of the tag's commit
498 :param date: date of tag's commit
500 :param date: date of tag's commit
499
501
500 :raises TagAlreadyExistError: if tag with same name already exists
502 :raises TagAlreadyExistError: if tag with same name already exists
501 """
503 """
502 raise NotImplementedError
504 raise NotImplementedError
503
505
504 def remove_tag(self, name, user, message=None, date=None):
506 def remove_tag(self, name, user, message=None, date=None):
505 """
507 """
506 Removes tag with the given ``name``.
508 Removes tag with the given ``name``.
507
509
508 :param name: name of the tag to be removed
510 :param name: name of the tag to be removed
509 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
511 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
510 :param message: message of the tag's removal commit
512 :param message: message of the tag's removal commit
511 :param date: date of tag's removal commit
513 :param date: date of tag's removal commit
512
514
513 :raises TagDoesNotExistError: if tag with given name does not exists
515 :raises TagDoesNotExistError: if tag with given name does not exists
514 """
516 """
515 raise NotImplementedError
517 raise NotImplementedError
516
518
517 def get_diff(
519 def get_diff(
518 self, commit1, commit2, path=None, ignore_whitespace=False,
520 self, commit1, commit2, path=None, ignore_whitespace=False,
519 context=3, path1=None):
521 context=3, path1=None):
520 """
522 """
521 Returns (git like) *diff*, as plain text. Shows changes introduced by
523 Returns (git like) *diff*, as plain text. Shows changes introduced by
522 `commit2` since `commit1`.
524 `commit2` since `commit1`.
523
525
524 :param commit1: Entry point from which diff is shown. Can be
526 :param commit1: Entry point from which diff is shown. Can be
525 ``self.EMPTY_COMMIT`` - in this case, patch showing all
527 ``self.EMPTY_COMMIT`` - in this case, patch showing all
526 the changes since empty state of the repository until `commit2`
528 the changes since empty state of the repository until `commit2`
527 :param commit2: Until which commit changes should be shown.
529 :param commit2: Until which commit changes should be shown.
528 :param path: Can be set to a path of a file to create a diff of that
530 :param path: Can be set to a path of a file to create a diff of that
529 file. If `path1` is also set, this value is only associated to
531 file. If `path1` is also set, this value is only associated to
530 `commit2`.
532 `commit2`.
531 :param ignore_whitespace: If set to ``True``, would not show whitespace
533 :param ignore_whitespace: If set to ``True``, would not show whitespace
532 changes. Defaults to ``False``.
534 changes. Defaults to ``False``.
533 :param context: How many lines before/after changed lines should be
535 :param context: How many lines before/after changed lines should be
534 shown. Defaults to ``3``.
536 shown. Defaults to ``3``.
535 :param path1: Can be set to a path to associate with `commit1`. This
537 :param path1: Can be set to a path to associate with `commit1`. This
536 parameter works only for backends which support diff generation for
538 parameter works only for backends which support diff generation for
537 different paths. Other backends will raise a `ValueError` if `path1`
539 different paths. Other backends will raise a `ValueError` if `path1`
538 is set and has a different value than `path`.
540 is set and has a different value than `path`.
539 :param file_path: filter this diff by given path pattern
541 :param file_path: filter this diff by given path pattern
540 """
542 """
541 raise NotImplementedError
543 raise NotImplementedError
542
544
543 def strip(self, commit_id, branch=None):
545 def strip(self, commit_id, branch=None):
544 """
546 """
545 Strip given commit_id from the repository
547 Strip given commit_id from the repository
546 """
548 """
547 raise NotImplementedError
549 raise NotImplementedError
548
550
549 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
551 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
550 """
552 """
551 Return a latest common ancestor commit if one exists for this repo
553 Return a latest common ancestor commit if one exists for this repo
552 `commit_id1` vs `commit_id2` from `repo2`.
554 `commit_id1` vs `commit_id2` from `repo2`.
553
555
554 :param commit_id1: Commit it from this repository to use as a
556 :param commit_id1: Commit it from this repository to use as a
555 target for the comparison.
557 target for the comparison.
556 :param commit_id2: Source commit id to use for comparison.
558 :param commit_id2: Source commit id to use for comparison.
557 :param repo2: Source repository to use for comparison.
559 :param repo2: Source repository to use for comparison.
558 """
560 """
559 raise NotImplementedError
561 raise NotImplementedError
560
562
561 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
563 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
562 """
564 """
563 Compare this repository's revision `commit_id1` with `commit_id2`.
565 Compare this repository's revision `commit_id1` with `commit_id2`.
564
566
565 Returns a tuple(commits, ancestor) that would be merged from
567 Returns a tuple(commits, ancestor) that would be merged from
566 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
568 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
567 will be returned as ancestor.
569 will be returned as ancestor.
568
570
569 :param commit_id1: Commit it from this repository to use as a
571 :param commit_id1: Commit it from this repository to use as a
570 target for the comparison.
572 target for the comparison.
571 :param commit_id2: Source commit id to use for comparison.
573 :param commit_id2: Source commit id to use for comparison.
572 :param repo2: Source repository to use for comparison.
574 :param repo2: Source repository to use for comparison.
573 :param merge: If set to ``True`` will do a merge compare which also
575 :param merge: If set to ``True`` will do a merge compare which also
574 returns the common ancestor.
576 returns the common ancestor.
575 :param pre_load: Optional. List of commit attributes to load.
577 :param pre_load: Optional. List of commit attributes to load.
576 """
578 """
577 raise NotImplementedError
579 raise NotImplementedError
578
580
579 def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
581 def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
580 user_name='', user_email='', message='', dry_run=False,
582 user_name='', user_email='', message='', dry_run=False,
581 use_rebase=False, close_branch=False):
583 use_rebase=False, close_branch=False):
582 """
584 """
583 Merge the revisions specified in `source_ref` from `source_repo`
585 Merge the revisions specified in `source_ref` from `source_repo`
584 onto the `target_ref` of this repository.
586 onto the `target_ref` of this repository.
585
587
586 `source_ref` and `target_ref` are named tupls with the following
588 `source_ref` and `target_ref` are named tupls with the following
587 fields `type`, `name` and `commit_id`.
589 fields `type`, `name` and `commit_id`.
588
590
589 Returns a MergeResponse named tuple with the following fields
591 Returns a MergeResponse named tuple with the following fields
590 'possible', 'executed', 'source_commit', 'target_commit',
592 'possible', 'executed', 'source_commit', 'target_commit',
591 'merge_commit'.
593 'merge_commit'.
592
594
593 :param repo_id: `repo_id` target repo id.
595 :param repo_id: `repo_id` target repo id.
594 :param workspace_id: `workspace_id` unique identifier.
596 :param workspace_id: `workspace_id` unique identifier.
595 :param target_ref: `target_ref` points to the commit on top of which
597 :param target_ref: `target_ref` points to the commit on top of which
596 the `source_ref` should be merged.
598 the `source_ref` should be merged.
597 :param source_repo: The repository that contains the commits to be
599 :param source_repo: The repository that contains the commits to be
598 merged.
600 merged.
599 :param source_ref: `source_ref` points to the topmost commit from
601 :param source_ref: `source_ref` points to the topmost commit from
600 the `source_repo` which should be merged.
602 the `source_repo` which should be merged.
601 :param user_name: Merge commit `user_name`.
603 :param user_name: Merge commit `user_name`.
602 :param user_email: Merge commit `user_email`.
604 :param user_email: Merge commit `user_email`.
603 :param message: Merge commit `message`.
605 :param message: Merge commit `message`.
604 :param dry_run: If `True` the merge will not take place.
606 :param dry_run: If `True` the merge will not take place.
605 :param use_rebase: If `True` commits from the source will be rebased
607 :param use_rebase: If `True` commits from the source will be rebased
606 on top of the target instead of being merged.
608 on top of the target instead of being merged.
607 :param close_branch: If `True` branch will be close before merging it
609 :param close_branch: If `True` branch will be close before merging it
608 """
610 """
609 if dry_run:
611 if dry_run:
610 message = message or settings.MERGE_DRY_RUN_MESSAGE
612 message = message or settings.MERGE_DRY_RUN_MESSAGE
611 user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
613 user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
612 user_name = user_name or settings.MERGE_DRY_RUN_USER
614 user_name = user_name or settings.MERGE_DRY_RUN_USER
613 else:
615 else:
614 if not user_name:
616 if not user_name:
615 raise ValueError('user_name cannot be empty')
617 raise ValueError('user_name cannot be empty')
616 if not user_email:
618 if not user_email:
617 raise ValueError('user_email cannot be empty')
619 raise ValueError('user_email cannot be empty')
618 if not message:
620 if not message:
619 raise ValueError('message cannot be empty')
621 raise ValueError('message cannot be empty')
620
622
621 try:
623 try:
622 return self._merge_repo(
624 return self._merge_repo(
623 repo_id, workspace_id, target_ref, source_repo,
625 repo_id, workspace_id, target_ref, source_repo,
624 source_ref, message, user_name, user_email, dry_run=dry_run,
626 source_ref, message, user_name, user_email, dry_run=dry_run,
625 use_rebase=use_rebase, close_branch=close_branch)
627 use_rebase=use_rebase, close_branch=close_branch)
626 except RepositoryError as exc:
628 except RepositoryError as exc:
627 log.exception('Unexpected failure when running merge, dry-run=%s', dry_run)
629 log.exception('Unexpected failure when running merge, dry-run=%s', dry_run)
628 return MergeResponse(
630 return MergeResponse(
629 False, False, None, MergeFailureReason.UNKNOWN,
631 False, False, None, MergeFailureReason.UNKNOWN,
630 metadata={'exception': str(exc)})
632 metadata={'exception': str(exc)})
631
633
632 def _merge_repo(self, repo_id, workspace_id, target_ref,
634 def _merge_repo(self, repo_id, workspace_id, target_ref,
633 source_repo, source_ref, merge_message,
635 source_repo, source_ref, merge_message,
634 merger_name, merger_email, dry_run=False,
636 merger_name, merger_email, dry_run=False,
635 use_rebase=False, close_branch=False):
637 use_rebase=False, close_branch=False):
636 """Internal implementation of merge."""
638 """Internal implementation of merge."""
637 raise NotImplementedError
639 raise NotImplementedError
638
640
639 def _maybe_prepare_merge_workspace(
641 def _maybe_prepare_merge_workspace(
640 self, repo_id, workspace_id, target_ref, source_ref):
642 self, repo_id, workspace_id, target_ref, source_ref):
641 """
643 """
642 Create the merge workspace.
644 Create the merge workspace.
643
645
644 :param workspace_id: `workspace_id` unique identifier.
646 :param workspace_id: `workspace_id` unique identifier.
645 """
647 """
646 raise NotImplementedError
648 raise NotImplementedError
647
649
648 @classmethod
650 @classmethod
649 def _get_legacy_shadow_repository_path(cls, repo_path, workspace_id):
651 def _get_legacy_shadow_repository_path(cls, repo_path, workspace_id):
650 """
652 """
651 Legacy version that was used before. We still need it for
653 Legacy version that was used before. We still need it for
652 backward compat
654 backward compat
653 """
655 """
654 return os.path.join(
656 return os.path.join(
655 os.path.dirname(repo_path),
657 os.path.dirname(repo_path),
656 '.__shadow_%s_%s' % (os.path.basename(repo_path), workspace_id))
658 '.__shadow_%s_%s' % (os.path.basename(repo_path), workspace_id))
657
659
658 @classmethod
660 @classmethod
659 def _get_shadow_repository_path(cls, repo_path, repo_id, workspace_id):
661 def _get_shadow_repository_path(cls, repo_path, repo_id, workspace_id):
660 # The name of the shadow repository must start with '.', so it is
662 # The name of the shadow repository must start with '.', so it is
661 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
663 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
662 legacy_repository_path = cls._get_legacy_shadow_repository_path(repo_path, workspace_id)
664 legacy_repository_path = cls._get_legacy_shadow_repository_path(repo_path, workspace_id)
663 if os.path.exists(legacy_repository_path):
665 if os.path.exists(legacy_repository_path):
664 return legacy_repository_path
666 return legacy_repository_path
665 else:
667 else:
666 return os.path.join(
668 return os.path.join(
667 os.path.dirname(repo_path),
669 os.path.dirname(repo_path),
668 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
670 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
669
671
670 def cleanup_merge_workspace(self, repo_id, workspace_id):
672 def cleanup_merge_workspace(self, repo_id, workspace_id):
671 """
673 """
672 Remove merge workspace.
674 Remove merge workspace.
673
675
674 This function MUST not fail in case there is no workspace associated to
676 This function MUST not fail in case there is no workspace associated to
675 the given `workspace_id`.
677 the given `workspace_id`.
676
678
677 :param workspace_id: `workspace_id` unique identifier.
679 :param workspace_id: `workspace_id` unique identifier.
678 """
680 """
679 shadow_repository_path = self._get_shadow_repository_path(
681 shadow_repository_path = self._get_shadow_repository_path(
680 self.path, repo_id, workspace_id)
682 self.path, repo_id, workspace_id)
681 shadow_repository_path_del = '{}.{}.delete'.format(
683 shadow_repository_path_del = '{}.{}.delete'.format(
682 shadow_repository_path, time.time())
684 shadow_repository_path, time.time())
683
685
684 # move the shadow repo, so it never conflicts with the one used.
686 # move the shadow repo, so it never conflicts with the one used.
685 # we use this method because shutil.rmtree had some edge case problems
687 # we use this method because shutil.rmtree had some edge case problems
686 # removing symlinked repositories
688 # removing symlinked repositories
687 if not os.path.isdir(shadow_repository_path):
689 if not os.path.isdir(shadow_repository_path):
688 return
690 return
689
691
690 shutil.move(shadow_repository_path, shadow_repository_path_del)
692 shutil.move(shadow_repository_path, shadow_repository_path_del)
691 try:
693 try:
692 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
694 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
693 except Exception:
695 except Exception:
694 log.exception('Failed to gracefully remove shadow repo under %s',
696 log.exception('Failed to gracefully remove shadow repo under %s',
695 shadow_repository_path_del)
697 shadow_repository_path_del)
696 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
698 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
697
699
698 # ========== #
700 # ========== #
699 # COMMIT API #
701 # COMMIT API #
700 # ========== #
702 # ========== #
701
703
702 @LazyProperty
704 @LazyProperty
703 def in_memory_commit(self):
705 def in_memory_commit(self):
704 """
706 """
705 Returns :class:`InMemoryCommit` object for this repository.
707 Returns :class:`InMemoryCommit` object for this repository.
706 """
708 """
707 raise NotImplementedError
709 raise NotImplementedError
708
710
709 # ======================== #
711 # ======================== #
710 # UTILITIES FOR SUBCLASSES #
712 # UTILITIES FOR SUBCLASSES #
711 # ======================== #
713 # ======================== #
712
714
713 def _validate_diff_commits(self, commit1, commit2):
715 def _validate_diff_commits(self, commit1, commit2):
714 """
716 """
715 Validates that the given commits are related to this repository.
717 Validates that the given commits are related to this repository.
716
718
717 Intended as a utility for sub classes to have a consistent validation
719 Intended as a utility for sub classes to have a consistent validation
718 of input parameters in methods like :meth:`get_diff`.
720 of input parameters in methods like :meth:`get_diff`.
719 """
721 """
720 self._validate_commit(commit1)
722 self._validate_commit(commit1)
721 self._validate_commit(commit2)
723 self._validate_commit(commit2)
722 if (isinstance(commit1, EmptyCommit) and
724 if (isinstance(commit1, EmptyCommit) and
723 isinstance(commit2, EmptyCommit)):
725 isinstance(commit2, EmptyCommit)):
724 raise ValueError("Cannot compare two empty commits")
726 raise ValueError("Cannot compare two empty commits")
725
727
726 def _validate_commit(self, commit):
728 def _validate_commit(self, commit):
727 if not isinstance(commit, BaseCommit):
729 if not isinstance(commit, BaseCommit):
728 raise TypeError(
730 raise TypeError(
729 "%s is not of type BaseCommit" % repr(commit))
731 "%s is not of type BaseCommit" % repr(commit))
730 if commit.repository != self and not isinstance(commit, EmptyCommit):
732 if commit.repository != self and not isinstance(commit, EmptyCommit):
731 raise ValueError(
733 raise ValueError(
732 "Commit %s must be a valid commit from this repository %s, "
734 "Commit %s must be a valid commit from this repository %s, "
733 "related to this repository instead %s." %
735 "related to this repository instead %s." %
734 (commit, self, commit.repository))
736 (commit, self, commit.repository))
735
737
736 def _validate_commit_id(self, commit_id):
738 def _validate_commit_id(self, commit_id):
737 if not isinstance(commit_id, compat.string_types):
739 if not isinstance(commit_id, compat.string_types):
738 raise TypeError("commit_id must be a string value got {} instead".format(type(commit_id)))
740 raise TypeError("commit_id must be a string value got {} instead".format(type(commit_id)))
739
741
740 def _validate_commit_idx(self, commit_idx):
742 def _validate_commit_idx(self, commit_idx):
741 if not isinstance(commit_idx, (int, long)):
743 if not isinstance(commit_idx, (int, long)):
742 raise TypeError("commit_idx must be a numeric value")
744 raise TypeError("commit_idx must be a numeric value")
743
745
744 def _validate_branch_name(self, branch_name):
746 def _validate_branch_name(self, branch_name):
745 if branch_name and branch_name not in self.branches_all:
747 if branch_name and branch_name not in self.branches_all:
746 msg = ("Branch %s not found in %s" % (branch_name, self))
748 msg = ("Branch %s not found in %s" % (branch_name, self))
747 raise BranchDoesNotExistError(msg)
749 raise BranchDoesNotExistError(msg)
748
750
749 #
751 #
750 # Supporting deprecated API parts
752 # Supporting deprecated API parts
751 # TODO: johbo: consider to move this into a mixin
753 # TODO: johbo: consider to move this into a mixin
752 #
754 #
753
755
754 @property
756 @property
755 def EMPTY_CHANGESET(self):
757 def EMPTY_CHANGESET(self):
756 warnings.warn(
758 warnings.warn(
757 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
759 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
758 return self.EMPTY_COMMIT_ID
760 return self.EMPTY_COMMIT_ID
759
761
760 @property
762 @property
761 def revisions(self):
763 def revisions(self):
762 warnings.warn("Use commits attribute instead", DeprecationWarning)
764 warnings.warn("Use commits attribute instead", DeprecationWarning)
763 return self.commit_ids
765 return self.commit_ids
764
766
765 @revisions.setter
767 @revisions.setter
766 def revisions(self, value):
768 def revisions(self, value):
767 warnings.warn("Use commits attribute instead", DeprecationWarning)
769 warnings.warn("Use commits attribute instead", DeprecationWarning)
768 self.commit_ids = value
770 self.commit_ids = value
769
771
770 def get_changeset(self, revision=None, pre_load=None):
772 def get_changeset(self, revision=None, pre_load=None):
771 warnings.warn("Use get_commit instead", DeprecationWarning)
773 warnings.warn("Use get_commit instead", DeprecationWarning)
772 commit_id = None
774 commit_id = None
773 commit_idx = None
775 commit_idx = None
774 if isinstance(revision, compat.string_types):
776 if isinstance(revision, compat.string_types):
775 commit_id = revision
777 commit_id = revision
776 else:
778 else:
777 commit_idx = revision
779 commit_idx = revision
778 return self.get_commit(
780 return self.get_commit(
779 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
781 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
780
782
781 def get_changesets(
783 def get_changesets(
782 self, start=None, end=None, start_date=None, end_date=None,
784 self, start=None, end=None, start_date=None, end_date=None,
783 branch_name=None, pre_load=None):
785 branch_name=None, pre_load=None):
784 warnings.warn("Use get_commits instead", DeprecationWarning)
786 warnings.warn("Use get_commits instead", DeprecationWarning)
785 start_id = self._revision_to_commit(start)
787 start_id = self._revision_to_commit(start)
786 end_id = self._revision_to_commit(end)
788 end_id = self._revision_to_commit(end)
787 return self.get_commits(
789 return self.get_commits(
788 start_id=start_id, end_id=end_id, start_date=start_date,
790 start_id=start_id, end_id=end_id, start_date=start_date,
789 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
791 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
790
792
791 def _revision_to_commit(self, revision):
793 def _revision_to_commit(self, revision):
792 """
794 """
793 Translates a revision to a commit_id
795 Translates a revision to a commit_id
794
796
795 Helps to support the old changeset based API which allows to use
797 Helps to support the old changeset based API which allows to use
796 commit ids and commit indices interchangeable.
798 commit ids and commit indices interchangeable.
797 """
799 """
798 if revision is None:
800 if revision is None:
799 return revision
801 return revision
800
802
801 if isinstance(revision, compat.string_types):
803 if isinstance(revision, compat.string_types):
802 commit_id = revision
804 commit_id = revision
803 else:
805 else:
804 commit_id = self.commit_ids[revision]
806 commit_id = self.commit_ids[revision]
805 return commit_id
807 return commit_id
806
808
@property
def in_memory_changeset(self):
    """Deprecated alias for ``in_memory_commit``."""
    warnings.warn("Use in_memory_commit instead", DeprecationWarning)
    commit = self.in_memory_commit
    return commit
811
813
def get_path_permissions(self, username):
    """
    Hook for backends that support per-path permission checks.

    The base implementation has no path-level rules, so ``None`` is
    returned regardless of *username*; subclasses may instead return
    an instance of ``BasePathPermissionChecker``.

    :param username: session user name
    :return: an instance of BasePathPermissionChecker or None
    """
    return None
820
822
def install_hooks(self, force=False):
    """
    Delegate repository hook installation to the vcsserver remote.

    :param force: when ``True``, existing hooks are overwritten.
    """
    remote = self._remote
    return remote.install_hooks(force)
823
825
def get_hooks_info(self):
    """Fetch details about installed hooks from the vcsserver remote."""
    remote = self._remote
    return remote.get_hooks_info()
826
828
827
829
class BaseCommit(object):
    """
    Each backend should implement it's commit representation.

    **Attributes**

    ``repository``
        repository object within which commit exists

    ``id``
        The commit id, may be ``raw_id`` or i.e. for mercurial's tip
        just ``tip``.

    ``raw_id``
        raw commit representation (i.e. full 40 length sha for git
        backend)

    ``short_id``
        shortened (if apply) version of ``raw_id``; it would be simple
        shortcut for ``raw_id[:12]`` for git/mercurial backends or same
        as ``raw_id`` for subversion

    ``idx``
        commit index

    ``files``
        list of ``FileNode`` (``Node`` with NodeKind.FILE) objects

    ``dirs``
        list of ``DirNode`` (``Node`` with NodeKind.DIR) objects

    ``nodes``
        combined list of ``Node`` objects

    ``author``
        author of the commit, as unicode

    ``message``
        message of the commit, as unicode

    ``parents``
        list of parent commits

    """

    branch = None
    """
    Depending on the backend this should be set to the branch name of the
    commit. Backends not supporting branches on commits should leave this
    value as ``None``.
    """

    _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
    """
    This template is used to generate a default prefix for repository archives
    if no prefix has been specified.
    """

    def __str__(self):
        return '<%s at %s:%s>' % (
            self.__class__.__name__, self.idx, self.short_id)

    def __repr__(self):
        return self.__str__()

    def __unicode__(self):
        return u'%s:%s' % (self.idx, self.short_id)

    def __eq__(self, other):
        same_instance = isinstance(other, self.__class__)
        return same_instance and self.raw_id == other.raw_id

    def __ne__(self, other):
        # Python 2 does not derive ``!=`` from ``__eq__``; define it
        # explicitly so inequality stays consistent with equality.
        return not self.__eq__(other)

    def __json__(self):
        """Return a plain-dict representation used by JSON serializers."""
        parents = []
        try:
            for parent in self.parents:
                parents.append({'raw_id': parent.raw_id})
        except NotImplementedError:
            # empty commit doesn't have parents implemented
            pass

        return {
            'short_id': self.short_id,
            'raw_id': self.raw_id,
            'revision': self.idx,
            'message': self.message,
            'date': self.date,
            'author': self.author,
            'parents': parents,
            'branch': self.branch
        }

    def __getstate__(self):
        # Drop unpicklable/heavy references before pickling.
        d = self.__dict__.copy()
        d.pop('_remote', None)
        d.pop('repository', None)
        return d

    def _get_refs(self):
        """Return refs (branches/bookmarks/tags) pointing at this commit."""
        return {
            'branches': [self.branch] if self.branch else [],
            'bookmarks': getattr(self, 'bookmarks', []),
            'tags': self.tags
        }

    @LazyProperty
    def last(self):
        """
        ``True`` if this is last commit in repository, ``False``
        otherwise; trying to access this attribute while there is no
        commits would raise `EmptyRepositoryError`
        """
        if self.repository is None:
            raise CommitError("Cannot check if it's most recent commit")
        return self.raw_id == self.repository.commit_ids[-1]

    @LazyProperty
    def parents(self):
        """
        Returns list of parent commits.
        """
        raise NotImplementedError

    @LazyProperty
    def first_parent(self):
        """
        Returns the first parent commit, or an ``EmptyCommit`` for
        root commits without parents.
        """
        return self.parents[0] if self.parents else EmptyCommit()

    @property
    def merge(self):
        """
        Returns boolean if commit is a merge.
        """
        return len(self.parents) > 1

    @LazyProperty
    def children(self):
        """
        Returns list of child commits.
        """
        raise NotImplementedError

    @LazyProperty
    def id(self):
        """
        Returns string identifying this commit.
        """
        raise NotImplementedError

    @LazyProperty
    def raw_id(self):
        """
        Returns raw string identifying this commit.
        """
        raise NotImplementedError

    @LazyProperty
    def short_id(self):
        """
        Returns shortened version of ``raw_id`` attribute, as string,
        identifying this commit, useful for presentation to users.
        """
        raise NotImplementedError

    @LazyProperty
    def idx(self):
        """
        Returns integer identifying this commit.
        """
        raise NotImplementedError

    @LazyProperty
    def committer(self):
        """
        Returns committer for this commit
        """
        raise NotImplementedError

    @LazyProperty
    def committer_name(self):
        """
        Returns committer name for this commit
        """
        return author_name(self.committer)

    @LazyProperty
    def committer_email(self):
        """
        Returns committer email address for this commit
        """
        return author_email(self.committer)

    @LazyProperty
    def author(self):
        """
        Returns author for this commit
        """
        raise NotImplementedError

    @LazyProperty
    def author_name(self):
        """
        Returns author name for this commit
        """
        return author_name(self.author)

    @LazyProperty
    def author_email(self):
        """
        Returns author email address for this commit
        """
        return author_email(self.author)

    def get_file_mode(self, path):
        """
        Returns stat mode of the file at `path`.
        """
        raise NotImplementedError

    def is_link(self, path):
        """
        Returns ``True`` if given `path` is a symlink
        """
        raise NotImplementedError

    def is_node_binary(self, path):
        """
        Returns ``True`` is given path is a binary file
        """
        raise NotImplementedError

    def get_file_content(self, path):
        """
        Returns content of the file at the given `path`.
        """
        raise NotImplementedError

    def get_file_content_streamed(self, path):
        """
        returns a streaming response from vcsserver with file content
        """
        raise NotImplementedError

    def get_file_size(self, path):
        """
        Returns size of the file at the given `path`.
        """
        raise NotImplementedError

    def get_path_commit(self, path, pre_load=None):
        """
        Returns last commit of the file at the given `path`.

        :param pre_load: Optional. List of commit attributes to load.
        :raises RepositoryError: if no history exists for `path`.
        """
        commits = self.get_path_history(path, limit=1, pre_load=pre_load)
        if not commits:
            raise RepositoryError(
                'Failed to fetch history for path {}. '
                'Please check if such path exists in your repository'.format(
                    path))
        return commits[0]

    def get_path_history(self, path, limit=None, pre_load=None):
        """
        Returns history of file as reversed list of :class:`BaseCommit`
        objects for which file at given `path` has been modified.

        :param limit: Optional. Allows to limit the size of the returned
           history. This is intended as a hint to the underlying backend, so
           that it can apply optimizations depending on the limit.
        :param pre_load: Optional. List of commit attributes to load.
        """
        raise NotImplementedError

    def get_file_annotate(self, path, pre_load=None):
        """
        Returns a generator of four element tuples with
        lineno, sha, commit lazy loader and line

        :param pre_load: Optional. List of commit attributes to load.
        """
        raise NotImplementedError

    def get_nodes(self, path):
        """
        Returns combined ``DirNode`` and ``FileNode`` objects list representing
        state of commit at the given ``path``.

        :raises ``CommitError``: if node at the given ``path`` is not
           instance of ``DirNode``
        """
        raise NotImplementedError

    def get_node(self, path):
        """
        Returns ``Node`` object from the given ``path``.

        :raises ``NodeDoesNotExistError``: if there is no node at the given
           ``path``
        """
        raise NotImplementedError

    def get_largefile_node(self, path):
        """
        Returns the path to largefile from Mercurial/Git-lfs storage.
        or None if it's not a largefile node
        """
        return None

    def archive_repo(self, archive_dest_path, kind='tgz', subrepos=None,
                     prefix=None, write_metadata=False, mtime=None, archive_at_path='/'):
        """
        Creates an archive containing the contents of the repository.

        :param archive_dest_path: path to the file which to create the archive.
        :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
        :param prefix: name of root directory in archive.
            Default is repository name and commit's short_id joined with dash:
            ``"{repo_name}-{short_id}"``.
        :param write_metadata: write a metadata file into archive.
        :param mtime: custom modification time for archive creation, defaults
            to time.time() if not given.
        :param archive_at_path: pack files at this path (default '/')

        :raise VCSError: If prefix has a problem.
        """
        allowed_kinds = [x[0] for x in settings.ARCHIVE_SPECS]
        if kind not in allowed_kinds:
            raise ImproperArchiveTypeError(
                'Archive kind (%s) not supported use one of %s' %
                (kind, allowed_kinds))

        prefix = self._validate_archive_prefix(prefix)

        # BUGFIX: previously `mtime = mtime is not None or time.mktime(...)`,
        # which assigned the boolean True (not the timestamp) whenever a
        # caller supplied a custom mtime.
        mtime = mtime if mtime is not None else time.mktime(self.date.timetuple())

        file_info = []
        cur_rev = self.repository.get_commit(commit_id=self.raw_id)
        for _r, _d, files in cur_rev.walk(archive_at_path):
            for f in files:
                f_path = os.path.join(prefix, f.path)
                file_info.append(
                    (f_path, f.mode, f.is_link(), f.raw_bytes))

        if write_metadata:
            metadata = [
                ('repo_name', self.repository.name),
                ('commit_id', self.raw_id),
                ('mtime', mtime),
                ('branch', self.branch),
                ('tags', ','.join(self.tags)),
            ]
            meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
            file_info.append(('.archival.txt', 0o644, False, '\n'.join(meta)))

        connection.Hg.archive_repo(archive_dest_path, mtime, file_info, kind)

    def _validate_archive_prefix(self, prefix):
        """Return a usable archive prefix, deriving the default if missing."""
        if prefix is None:
            prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
                repo_name=safe_str(self.repository.name),
                short_id=self.short_id)
        elif not isinstance(prefix, str):
            raise ValueError("prefix not a bytes object: %s" % repr(prefix))
        elif prefix.startswith('/'):
            raise VCSError("Prefix cannot start with leading slash")
        elif prefix.strip() == '':
            raise VCSError("Prefix cannot be empty")
        return prefix

    @LazyProperty
    def root(self):
        """
        Returns ``RootNode`` object for this commit.
        """
        return self.get_node('')

    def next(self, branch=None):
        """
        Returns next commit from current, if branch is gives it will return
        next commit belonging to this branch

        :param branch: show commits within the given named branch
        """
        indexes = xrange(self.idx + 1, self.repository.count())
        return self._find_next(indexes, branch)

    def prev(self, branch=None):
        """
        Returns previous commit from current, if branch is gives it will
        return previous commit belonging to this branch

        :param branch: show commit within the given named branch
        """
        indexes = xrange(self.idx - 1, -1, -1)
        return self._find_next(indexes, branch)

    def _find_next(self, indexes, branch=None):
        """Return the first commit from `indexes` (optionally on `branch`)."""
        if branch and self.branch != branch:
            raise VCSError('Branch option used on commit not belonging '
                           'to that branch')

        for next_idx in indexes:
            commit = self.repository.get_commit(commit_idx=next_idx)
            if branch and branch != commit.branch:
                continue
            return commit
        raise CommitDoesNotExistError

    def diff(self, ignore_whitespace=True, context=3):
        """
        Returns a `Diff` object representing the change made by this commit.
        """
        parent = self.first_parent
        diff = self.repository.get_diff(
            parent, self,
            ignore_whitespace=ignore_whitespace,
            context=context)
        return diff

    @LazyProperty
    def added(self):
        """
        Returns list of added ``FileNode`` objects.
        """
        raise NotImplementedError

    @LazyProperty
    def changed(self):
        """
        Returns list of modified ``FileNode`` objects.
        """
        raise NotImplementedError

    @LazyProperty
    def removed(self):
        """
        Returns list of removed ``FileNode`` objects.
        """
        raise NotImplementedError

    @LazyProperty
    def size(self):
        """
        Returns total number of bytes from contents of all filenodes.
        """
        return sum((node.size for node in self.get_filenodes_generator()))

    def walk(self, topurl=''):
        """
        Similar to os.walk method. Insted of filesystem it walks through
        commit starting at given ``topurl``. Returns generator of tuples
        (topnode, dirnodes, filenodes).
        """
        topnode = self.get_node(topurl)
        if not topnode.is_dir():
            return
        yield (topnode, topnode.dirs, topnode.files)
        for dirnode in topnode.dirs:
            for tup in self.walk(dirnode.path):
                yield tup

    def get_filenodes_generator(self):
        """
        Returns generator that yields *all* file nodes.
        """
        for topnode, dirs, files in self.walk():
            for node in files:
                yield node

    #
    # Utilities for sub classes to support consistent behavior
    #

    def no_node_at_path(self, path):
        """Build (not raise) the standard missing-node error for `path`."""
        return NodeDoesNotExistError(
            u"There is no file nor directory at the given path: "
            u"`%s` at commit %s" % (safe_unicode(path), self.short_id))

    def _fix_path(self, path):
        """
        Paths are stored without trailing slash so we need to get rid off it if
        needed.
        """
        return path.rstrip('/')

    #
    # Deprecated API based on changesets
    #

    @property
    def revision(self):
        warnings.warn("Use idx instead", DeprecationWarning)
        return self.idx

    @revision.setter
    def revision(self, value):
        warnings.warn("Use idx instead", DeprecationWarning)
        self.idx = value

    def get_file_changeset(self, path):
        warnings.warn("Use get_path_commit instead", DeprecationWarning)
        return self.get_path_commit(path)
1339
1341
1340
1342
class BaseChangesetClass(type):
    """Metaclass so ``isinstance(x, BaseChangeset)`` accepts commits."""

    def __instancecheck__(self, instance):
        # Any BaseCommit counts as a (deprecated) changeset.
        return isinstance(instance, BaseCommit)
1345
1347
1346
1348
class BaseChangeset(BaseCommit):
    """Deprecated alias of ``BaseCommit``; warns on instantiation."""

    __metaclass__ = BaseChangesetClass

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
        return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1355
1357
1356
1358
class BaseInMemoryCommit(object):
    """
    Represents differences between repository's state (most recent head) and
    changes made *in place*.

    **Attributes**

    ``repository``
        repository object for this in-memory-commit

    ``added``
        list of ``FileNode`` objects marked as *added*

    ``changed``
        list of ``FileNode`` objects marked as *changed*

    ``removed``
        list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
        *removed*

    ``parents``
        list of :class:`BaseCommit` instances representing parents of
        in-memory commit. Should always be 2-element sequence.

    """

    def __init__(self, repository):
        self.repository = repository
        self.added = []
        self.changed = []
        self.removed = []
        self.parents = []

    def add(self, *filenodes):
        """
        Marks given ``FileNode`` objects as *to be committed*.

        :raises ``NodeAlreadyExistsError``: if node with same path exists at
            latest commit
        :raises ``NodeAlreadyAddedError``: if node with same path is already
            marked as *added*
        """
        # Validate every node first so a failure leaves ``added`` untouched.
        for node in filenodes:
            if node.path in (n.path for n in self.added):
                raise NodeAlreadyAddedError(
                    "Such FileNode %s is already marked for addition"
                    % node.path)
        for node in filenodes:
            self.added.append(node)

    def change(self, *filenodes):
        """
        Marks given ``FileNode`` objects to be *changed* in next commit.

        :raises ``EmptyRepositoryError``: if there are no commits yet
        :raises ``NodeAlreadyExistsError``: if node with same path is already
            marked to be *changed*
        :raises ``NodeAlreadyRemovedError``: if node with same path is already
            marked to be *removed*
        :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
            commit
        :raises ``NodeNotChangedError``: if node hasn't really be changed
        """
        for node in filenodes:
            if node.path in (n.path for n in self.removed):
                raise NodeAlreadyRemovedError(
                    "Node at %s is already marked as removed" % node.path)
            # Changing a node only makes sense against an existing commit.
            try:
                self.repository.get_commit()
            except EmptyRepositoryError:
                raise EmptyRepositoryError(
                    "Nothing to change - try to *add* new nodes rather than "
                    "changing them")
        for node in filenodes:
            if node.path in (n.path for n in self.changed):
                raise NodeAlreadyChangedError(
                    "Node at '%s' is already marked as changed" % node.path)
            self.changed.append(node)

    def remove(self, *filenodes):
        """
        Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
        *removed* in next commit.

        :raises ``NodeAlreadyRemovedError``: if node has been already marked to
            be *removed*
        :raises ``NodeAlreadyChangedError``: if node has been already marked to
            be *changed*
        """
        for node in filenodes:
            if node.path in (n.path for n in self.removed):
                # BUG FIX: message previously read "marked to for removal"
                raise NodeAlreadyRemovedError(
                    "Node is already marked for removal at %s" % node.path)
            if node.path in (n.path for n in self.changed):
                raise NodeAlreadyChangedError(
                    "Node is already marked to be changed at %s" % node.path)
            # We only mark node as *removed* - real removal is done by
            # commit method
            self.removed.append(node)

    def reset(self):
        """
        Resets this instance to initial state (cleans ``added``, ``changed``
        and ``removed`` lists).
        """
        self.added = []
        self.changed = []
        self.removed = []
        self.parents = []

    def get_ipaths(self):
        """
        Returns generator of paths from nodes marked as added, changed or
        removed.
        """
        for node in itertools.chain(self.added, self.changed, self.removed):
            yield node.path

    def get_paths(self):
        """
        Returns list of paths from nodes marked as added, changed or removed.
        """
        return list(self.get_ipaths())

    def check_integrity(self, parents=None):
        """
        Checks in-memory commit's integrity. Also, sets parents if not
        already set.

        :raises CommitError: if any error occurs (i.e.
            ``NodeDoesNotExistError``).
        """
        if not self.parents:
            parents = parents or []
            if len(parents) == 0:
                try:
                    parents = [self.repository.get_commit(), None]
                except EmptyRepositoryError:
                    parents = [None, None]
            elif len(parents) == 1:
                parents += [None]
            self.parents = parents

        # Local parents, only if not None
        parents = [p for p in self.parents if p]

        # Check nodes marked as added
        for p in parents:
            for node in self.added:
                try:
                    p.get_node(node.path)
                except NodeDoesNotExistError:
                    pass
                else:
                    raise NodeAlreadyExistsError(
                        "Node `%s` already exists at %s" % (node.path, p))

        # Check nodes marked as changed
        missing = set(self.changed)
        not_changed = set(self.changed)
        if self.changed and not parents:
            raise NodeDoesNotExistError(str(self.changed[0].path))
        for p in parents:
            for node in self.changed:
                try:
                    old = p.get_node(node.path)
                    missing.remove(node)
                    # if content actually changed, remove node from not_changed
                    if old.content != node.content:
                        not_changed.remove(node)
                except NodeDoesNotExistError:
                    pass
        if self.changed and missing:
            # BUG FIX: this previously reported the loop variable ``node``
            # (the *last* iterated node, which may have been found) instead
            # of a node that is actually missing.
            missing_node = next(iter(missing))
            raise NodeDoesNotExistError(
                "Node `%s` marked as modified but missing in parents: %s"
                % (missing_node.path, parents))

        if self.changed and not_changed:
            raise NodeNotChangedError(
                "Node `%s` wasn't actually changed (parents: %s)"
                % (not_changed.pop().path, parents))

        # Check nodes marked as removed
        if self.removed and not parents:
            raise NodeDoesNotExistError(
                "Cannot remove node at %s as there "
                "were no parents specified" % self.removed[0].path)
        really_removed = set()
        for p in parents:
            for node in self.removed:
                try:
                    p.get_node(node.path)
                    really_removed.add(node)
                except CommitError:
                    pass
        not_removed = set(self.removed) - really_removed
        if not_removed:
            # TODO: johbo: This code branch does not seem to be covered
            raise NodeDoesNotExistError(
                "Cannot remove node at %s from "
                "following parents: %s" % (not_removed, parents))

    def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
        """
        Performs in-memory commit (doesn't check workdir in any way) and
        returns newly created :class:`BaseCommit`. Updates repository's
        attribute `commits`.

        .. note::

            While overriding this method each backend's should call
            ``self.check_integrity(parents)`` in the first place.

        :param message: message of the commit
        :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
        :param parents: single parent or sequence of parents from which commit
            would be derived
        :param date: ``datetime.datetime`` instance. Defaults to
            ``datetime.datetime.now()``.
        :param branch: branch name, as string. If none given, default backend's
            branch would be used.

        :raises ``CommitError``: if any error occurs while committing
        """
        raise NotImplementedError
class BaseInMemoryChangesetClass(type):
    """Metaclass whose ``isinstance`` check accepts any
    :class:`BaseInMemoryCommit` instance for the class using it."""

    def __instancecheck__(cls, instance):
        # Anything that is a BaseInMemoryCommit counts as an instance.
        return isinstance(instance, BaseInMemoryCommit)
class BaseInMemoryChangeset(BaseInMemoryCommit):
    """
    Deprecated alias of :class:`BaseInMemoryCommit`, kept for backward
    compatibility. Instantiating it emits a ``DeprecationWarning``.
    """

    __metaclass__ = BaseInMemoryChangesetClass

    def __new__(cls, *args, **kwargs):
        # BUG FIX: the warning previously named the wrong class pair
        # ("Use BaseCommit instead of BaseInMemoryCommit"); it now matches
        # the pattern used by the other deprecated aliases in this module.
        warnings.warn(
            "Use BaseInMemoryCommit instead of BaseInMemoryChangeset",
            DeprecationWarning)
        return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
class EmptyCommit(BaseCommit):
    """
    A dummy empty commit. It's possible to pass a hash when creating
    an EmptyCommit.
    """

    def __init__(
            self, commit_id=EMPTY_COMMIT_ID, repo=None, alias=None, idx=-1,
            message='', author='', date=None):
        self._empty_commit_id = commit_id
        # TODO: johbo: Solve idx parameter, default value does not make
        # too much sense
        self.idx = idx
        self.message = message
        self.author = author
        # epoch start is the canonical "no date" for an empty commit
        self.date = date or datetime.datetime.fromtimestamp(0)
        self.repository = repo
        self.alias = alias

    @LazyProperty
    def raw_id(self):
        """
        Returns raw string identifying this commit, useful for web
        representation.
        """

        return self._empty_commit_id

    @LazyProperty
    def branch(self):
        # Only resolvable when the backend alias is known; returns the
        # backend's default branch name, otherwise None.
        if self.alias:
            from rhodecode.lib.vcs.backends import get_backend
            return get_backend(self.alias).DEFAULT_BRANCH_NAME

    @LazyProperty
    def short_id(self):
        return self.raw_id[:12]

    @LazyProperty
    def id(self):
        return self.raw_id

    def get_path_commit(self, path):
        # There is no history; this commit is the last to touch any path.
        return self

    def get_file_content(self, path):
        return u''

    def get_file_content_streamed(self, path):
        # BUG FIX: ``path`` was previously dropped, making this raise
        # TypeError (get_file_content requires a path argument).
        yield self.get_file_content(path)

    def get_file_size(self, path):
        return 0
class EmptyChangesetClass(type):
    """Metaclass whose ``isinstance`` check accepts any
    :class:`EmptyCommit` instance for the class using it."""

    def __instancecheck__(cls, instance):
        # Any EmptyCommit counts as an instance of the deprecated alias.
        return isinstance(instance, EmptyCommit)
class EmptyChangeset(EmptyCommit):
    """
    Deprecated alias of :class:`EmptyCommit`; instantiating it emits a
    ``DeprecationWarning``. The legacy ``revision`` property maps onto
    the newer ``idx`` attribute.
    """

    __metaclass__ = EmptyChangesetClass

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
        # NOTE(review): passes EmptyCommit (not EmptyChangeset) to super() —
        # preserved as-is; confirm whether skipping one MRO level is intended.
        return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)

    def __init__(self, cs=EMPTY_COMMIT_ID, repo=None, requested_revision=None,
                 alias=None, revision=-1, message='', author='', date=None):
        if requested_revision is not None:
            warnings.warn(
                "Parameter requested_revision not supported anymore",
                DeprecationWarning)
        # Translate the legacy keyword names onto EmptyCommit's interface.
        super(EmptyChangeset, self).__init__(
            commit_id=cs, repo=repo, alias=alias, idx=revision,
            message=message, author=author, date=date)

    @property
    def revision(self):
        """Deprecated accessor for :attr:`idx`."""
        warnings.warn("Use idx instead", DeprecationWarning)
        return self.idx

    @revision.setter
    def revision(self, new_idx):
        warnings.warn("Use idx instead", DeprecationWarning)
        self.idx = new_idx
class EmptyRepository(BaseRepository):
    """Null-object repository: ignores all construction arguments and
    produces empty diffs."""

    def __init__(self, repo_path=None, config=None, create=False, **kwargs):
        # Intentionally a no-op; there is no backing store to set up.
        pass

    def get_diff(self, *args, **kwargs):
        """Return an empty GIT diff regardless of the requested range."""
        from rhodecode.lib.vcs.backends.git.diff import GitDiff
        return GitDiff('')
class CollectionGenerator(object):
    """
    Lazy sequence of commits backed by a list of commit ids; commit
    objects are instantiated one at a time during iteration.
    """

    def __init__(self, repo, commit_ids, collection_size=None, pre_load=None, translate_tag=None):
        self.repo = repo
        self.commit_ids = commit_ids
        # TODO: (oliver) this isn't currently hooked up
        self.collection_size = None
        self.pre_load = pre_load
        self.translate_tag = translate_tag

    def __len__(self):
        # An explicitly set size takes precedence over the id list length.
        if self.collection_size is not None:
            return self.collection_size
        return len(self.commit_ids)

    def __iter__(self):
        for commit_id in self.commit_ids:
            # TODO: johbo: Mercurial passes in commit indices or commit ids
            yield self._commit_factory(commit_id)

    def _commit_factory(self, commit_id):
        """
        Allows backends to override the way commits are generated.
        """
        return self.repo.get_commit(
            commit_id=commit_id, pre_load=self.pre_load,
            translate_tag=self.translate_tag)

    def __getslice__(self, i, j):
        """
        Returns an iterator of sliced repository
        """
        sliced_ids = self.commit_ids[i:j]
        return self.__class__(
            self.repo, sliced_ids, pre_load=self.pre_load,
            translate_tag=self.translate_tag)

    def __repr__(self):
        return '<CollectionGenerator[len:%s]>' % (self.__len__())
class Config(object):
    """
    Represents the configuration for a repository.

    The API is inspired by :class:`ConfigParser.ConfigParser` from the
    standard library. It implements only the needed subset.
    """

    def __init__(self):
        # section name -> {option name -> value}
        self._values = {}

    def copy(self):
        """Return a copy whose section dicts are independent of this one."""
        clone = Config()
        clone._values = dict(
            (section, options.copy())
            for section, options in self._values.items())
        return clone

    def __repr__(self):
        return '<Config(%s sections) at %s>' % (
            len(self._values), hex(id(self)))

    def items(self, section):
        """Iterate ``(option, value)`` pairs of *section* (empty if absent)."""
        return self._values.get(section, {}).iteritems()

    def get(self, section, option):
        """Return the value for *option* in *section*, or None."""
        section_values = self._values.get(section, {})
        return section_values.get(option)

    def set(self, section, option, value):
        """Store *value*, creating the section on first use."""
        self._values.setdefault(section, {})[option] = value

    def clear_section(self, section):
        """Drop every option of *section* (the section itself remains)."""
        self._values[section] = {}

    def serialize(self):
        """
        Creates a list of three tuples (section, key, value) representing
        this config object.
        """
        return [
            (safe_str(section), safe_str(option), safe_str(value))
            for section in self._values
            for option, value in self._values[section].items()]
class Diff(object):
    """
    Represents a diff result from a repository backend.

    Subclasses have to provide a backend specific value for
    :attr:`_header_re` and :attr:`_meta_re`.
    """
    _meta_re = None
    _header_re = None

    def __init__(self, raw_diff):
        self.raw = raw_diff

    def chunks(self):
        """
        Split the diff into chunks, one per ``diff --git a/file b/file``
        section. To keep the chunks consistent we prepend a newline before
        splitting, and flag the last chunk since it needs special handling
        (its trailing newline must not be re-appended).

        :return: generator of :class:`DiffChunk` objects
        """
        diff_parts = ('\n' + self.raw).split('\ndiff --git')

        # NOTE(review): the original code matched ``_meta_re`` against the
        # leading header (diff_parts[0]) but discarded the result; that
        # dead assignment was removed as it had no effect.

        chunks = diff_parts[1:]
        total_chunks = len(chunks)

        return (
            DiffChunk(chunk, self, cur_chunk == total_chunks)
            for cur_chunk, chunk in enumerate(chunks, start=1))
class DiffChunk(object):
    """A single ``diff --git`` section of a :class:`Diff`, split into a
    parsed header dict and the remaining diff text."""

    def __init__(self, chunk, diff, last_chunk):
        self._diff = diff

        # Splitting the parent diff on '\ndiff --git' swallowed the trailing
        # newline of every chunk except the final one; restore it here.
        if not last_chunk:
            chunk = chunk + '\n'

        matched = self._diff._header_re.match(chunk)
        self.header = matched.groupdict()
        self.diff = chunk[matched.end():]
        self.raw = chunk
class BasePathPermissionChecker(object):
    """
    Base interface for path-based permission checks.

    Use :meth:`create_from_patterns` to obtain the cheapest concrete
    checker for a given include/exclude pattern set.
    """

    @staticmethod
    def create_from_patterns(includes, excludes):
        """Build the simplest checker that satisfies the given patterns."""
        if includes and '*' in includes and not excludes:
            return AllPathPermissionChecker()
        elif excludes and '*' in excludes:
            return NonePathPermissionChecker()
        else:
            return PatternPathPermissionChecker(includes, excludes)

    @property
    def has_full_access(self):
        # BUG FIX: was ``raise NotImplemented()`` — NotImplemented is a
        # constant, not callable, so that raised TypeError instead of
        # signalling an abstract member.
        raise NotImplementedError()

    def has_access(self, path):
        """Return True if *path* is accessible; abstract in the base class."""
        raise NotImplementedError()
class AllPathPermissionChecker(BasePathPermissionChecker):
    """Permission checker granting access to every path."""

    @property
    def has_full_access(self):
        # By definition: everything is accessible.
        return True

    def has_access(self, path):
        # Any path whatsoever is allowed.
        return True
class NonePathPermissionChecker(BasePathPermissionChecker):
    """Permission checker denying access to every path."""

    @property
    def has_full_access(self):
        # By definition: nothing is accessible.
        return False

    def has_access(self, path):
        # No path is ever allowed.
        return False
class PatternPathPermissionChecker(BasePathPermissionChecker):
    """Glob-pattern based permission checker; exclude patterns always take
    precedence over include patterns, and the default is to deny."""

    def __init__(self, includes, excludes):
        self.includes = includes
        self.excludes = excludes
        # Pre-compile each glob pattern to a regex once, up front.
        self.includes_re = [
            re.compile(fnmatch.translate(pattern))
            for pattern in (includes or [])]
        self.excludes_re = [
            re.compile(fnmatch.translate(pattern))
            for pattern in (excludes or [])]

    @property
    def has_full_access(self):
        return '*' in self.includes and not self.excludes

    def has_access(self, path):
        # Any matching exclude denies access outright.
        for regex in self.excludes_re:
            if regex.match(path):
                return False
        # Otherwise access requires at least one matching include.
        return any(regex.match(path) for regex in self.includes_re)
@@ -1,1017 +1,1029 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 GIT repository module
22 GIT repository module
23 """
23 """
24
24
25 import logging
25 import logging
26 import os
26 import os
27 import re
27 import re
28
28
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from rhodecode.lib.compat import OrderedDict
31 from rhodecode.lib.compat import OrderedDict
32 from rhodecode.lib.datelib import (
32 from rhodecode.lib.datelib import (
33 utcdate_fromtimestamp, makedate, date_astimestamp)
33 utcdate_fromtimestamp, makedate, date_astimestamp)
34 from rhodecode.lib.utils import safe_unicode, safe_str
34 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.utils2 import CachedProperty
35 from rhodecode.lib.utils2 import CachedProperty
36 from rhodecode.lib.vcs import connection, path as vcspath
36 from rhodecode.lib.vcs import connection, path as vcspath
37 from rhodecode.lib.vcs.backends.base import (
37 from rhodecode.lib.vcs.backends.base import (
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 MergeFailureReason, Reference)
39 MergeFailureReason, Reference)
40 from rhodecode.lib.vcs.backends.git.commit import GitCommit
40 from rhodecode.lib.vcs.backends.git.commit import GitCommit
41 from rhodecode.lib.vcs.backends.git.diff import GitDiff
41 from rhodecode.lib.vcs.backends.git.diff import GitDiff
42 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
42 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
43 from rhodecode.lib.vcs.exceptions import (
43 from rhodecode.lib.vcs.exceptions import (
44 CommitDoesNotExistError, EmptyRepositoryError,
44 CommitDoesNotExistError, EmptyRepositoryError,
45 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError, UnresolvedFilesInRepo)
45 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError, UnresolvedFilesInRepo)
46
46
47
47
# Matches a full (40 hex chars) or abbreviated (12 hex chars) commit sha.
# NOTE: the previous pattern ``^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$`` used
# square brackets (character classes) instead of a group, so it accepted
# any string that merely *started* with 12 hex-like characters; the
# grouped form anchors the whole string to exactly 12 or 40 hex digits.
SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')

log = logging.getLogger(__name__)
51
51
52
52
53 class GitRepository(BaseRepository):
53 class GitRepository(BaseRepository):
54 """
54 """
55 Git repository backend.
55 Git repository backend.
56 """
56 """
57 DEFAULT_BRANCH_NAME = 'master'
57 DEFAULT_BRANCH_NAME = 'master'
58
58
59 contact = BaseRepository.DEFAULT_CONTACT
59 contact = BaseRepository.DEFAULT_CONTACT
60
60
61 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 def __init__(self, repo_path, config=None, create=False, src_url=None,
62 do_workspace_checkout=False, with_wire=None, bare=False):
62 do_workspace_checkout=False, with_wire=None, bare=False):
63
63
64 self.path = safe_str(os.path.abspath(repo_path))
64 self.path = safe_str(os.path.abspath(repo_path))
65 self.config = config if config else self.get_default_config()
65 self.config = config if config else self.get_default_config()
66 self.with_wire = with_wire or {"cache": False} # default should not use cache
66 self.with_wire = with_wire or {"cache": False} # default should not use cache
67
67
68 self._init_repo(create, src_url, do_workspace_checkout, bare)
68 self._init_repo(create, src_url, do_workspace_checkout, bare)
69
69
70 # caches
70 # caches
71 self._commit_ids = {}
71 self._commit_ids = {}
72
72
73 @LazyProperty
73 @LazyProperty
74 def _remote(self):
74 def _remote(self):
75 repo_id = self.path
75 repo_id = self.path
76 return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)
76 return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)
77
77
78 @LazyProperty
78 @LazyProperty
79 def bare(self):
79 def bare(self):
80 return self._remote.bare()
80 return self._remote.bare()
81
81
82 @LazyProperty
82 @LazyProperty
83 def head(self):
83 def head(self):
84 return self._remote.head()
84 return self._remote.head()
85
85
86 @CachedProperty
86 @CachedProperty
87 def commit_ids(self):
87 def commit_ids(self):
88 """
88 """
89 Returns list of commit ids, in ascending order. Being lazy
89 Returns list of commit ids, in ascending order. Being lazy
90 attribute allows external tools to inject commit ids from cache.
90 attribute allows external tools to inject commit ids from cache.
91 """
91 """
92 commit_ids = self._get_all_commit_ids()
92 commit_ids = self._get_all_commit_ids()
93 self._rebuild_cache(commit_ids)
93 self._rebuild_cache(commit_ids)
94 return commit_ids
94 return commit_ids
95
95
96 def _rebuild_cache(self, commit_ids):
96 def _rebuild_cache(self, commit_ids):
97 self._commit_ids = dict((commit_id, index)
97 self._commit_ids = dict((commit_id, index)
98 for index, commit_id in enumerate(commit_ids))
98 for index, commit_id in enumerate(commit_ids))
99
99
100 def run_git_command(self, cmd, **opts):
100 def run_git_command(self, cmd, **opts):
101 """
101 """
102 Runs given ``cmd`` as git command and returns tuple
102 Runs given ``cmd`` as git command and returns tuple
103 (stdout, stderr).
103 (stdout, stderr).
104
104
105 :param cmd: git command to be executed
105 :param cmd: git command to be executed
106 :param opts: env options to pass into Subprocess command
106 :param opts: env options to pass into Subprocess command
107 """
107 """
108 if not isinstance(cmd, list):
108 if not isinstance(cmd, list):
109 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
109 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
110
110
111 skip_stderr_log = opts.pop('skip_stderr_log', False)
111 skip_stderr_log = opts.pop('skip_stderr_log', False)
112 out, err = self._remote.run_git_command(cmd, **opts)
112 out, err = self._remote.run_git_command(cmd, **opts)
113 if err and not skip_stderr_log:
113 if err and not skip_stderr_log:
114 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
114 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
115 return out, err
115 return out, err
116
116
117 @staticmethod
117 @staticmethod
118 def check_url(url, config):
118 def check_url(url, config):
119 """
119 """
120 Function will check given url and try to verify if it's a valid
120 Function will check given url and try to verify if it's a valid
121 link. Sometimes it may happened that git will issue basic
121 link. Sometimes it may happened that git will issue basic
122 auth request that can cause whole API to hang when used from python
122 auth request that can cause whole API to hang when used from python
123 or other external calls.
123 or other external calls.
124
124
125 On failures it'll raise urllib2.HTTPError, exception is also thrown
125 On failures it'll raise urllib2.HTTPError, exception is also thrown
126 when the return code is non 200
126 when the return code is non 200
127 """
127 """
128 # check first if it's not an url
128 # check first if it's not an url
129 if os.path.isdir(url) or url.startswith('file:'):
129 if os.path.isdir(url) or url.startswith('file:'):
130 return True
130 return True
131
131
132 if '+' in url.split('://', 1)[0]:
132 if '+' in url.split('://', 1)[0]:
133 url = url.split('+', 1)[1]
133 url = url.split('+', 1)[1]
134
134
135 # Request the _remote to verify the url
135 # Request the _remote to verify the url
136 return connection.Git.check_url(url, config.serialize())
136 return connection.Git.check_url(url, config.serialize())
137
137
138 @staticmethod
138 @staticmethod
139 def is_valid_repository(path):
139 def is_valid_repository(path):
140 if os.path.isdir(os.path.join(path, '.git')):
140 if os.path.isdir(os.path.join(path, '.git')):
141 return True
141 return True
142 # check case of bare repository
142 # check case of bare repository
143 try:
143 try:
144 GitRepository(path)
144 GitRepository(path)
145 return True
145 return True
146 except VCSError:
146 except VCSError:
147 pass
147 pass
148 return False
148 return False
149
149
150 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
150 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
151 bare=False):
151 bare=False):
152 if create and os.path.exists(self.path):
152 if create and os.path.exists(self.path):
153 raise RepositoryError(
153 raise RepositoryError(
154 "Cannot create repository at %s, location already exist"
154 "Cannot create repository at %s, location already exist"
155 % self.path)
155 % self.path)
156
156
157 if bare and do_workspace_checkout:
157 if bare and do_workspace_checkout:
158 raise RepositoryError("Cannot update a bare repository")
158 raise RepositoryError("Cannot update a bare repository")
159 try:
159 try:
160
160
161 if src_url:
161 if src_url:
162 # check URL before any actions
162 # check URL before any actions
163 GitRepository.check_url(src_url, self.config)
163 GitRepository.check_url(src_url, self.config)
164
164
165 if create:
165 if create:
166 os.makedirs(self.path, mode=0o755)
166 os.makedirs(self.path, mode=0o755)
167
167
168 if bare:
168 if bare:
169 self._remote.init_bare()
169 self._remote.init_bare()
170 else:
170 else:
171 self._remote.init()
171 self._remote.init()
172
172
173 if src_url and bare:
173 if src_url and bare:
174 # bare repository only allows a fetch and checkout is not allowed
174 # bare repository only allows a fetch and checkout is not allowed
175 self.fetch(src_url, commit_ids=None)
175 self.fetch(src_url, commit_ids=None)
176 elif src_url:
176 elif src_url:
177 self.pull(src_url, commit_ids=None,
177 self.pull(src_url, commit_ids=None,
178 update_after=do_workspace_checkout)
178 update_after=do_workspace_checkout)
179
179
180 else:
180 else:
181 if not self._remote.assert_correct_path():
181 if not self._remote.assert_correct_path():
182 raise RepositoryError(
182 raise RepositoryError(
183 'Path "%s" does not contain a Git repository' %
183 'Path "%s" does not contain a Git repository' %
184 (self.path,))
184 (self.path,))
185
185
186 # TODO: johbo: check if we have to translate the OSError here
186 # TODO: johbo: check if we have to translate the OSError here
187 except OSError as err:
187 except OSError as err:
188 raise RepositoryError(err)
188 raise RepositoryError(err)
189
189
190 def _get_all_commit_ids(self):
190 def _get_all_commit_ids(self):
191 return self._remote.get_all_commit_ids()
191 return self._remote.get_all_commit_ids()
192
192
193 def _get_commit_ids(self, filters=None):
193 def _get_commit_ids(self, filters=None):
194 # we must check if this repo is not empty, since later command
194 # we must check if this repo is not empty, since later command
195 # fails if it is. And it's cheaper to ask than throw the subprocess
195 # fails if it is. And it's cheaper to ask than throw the subprocess
196 # errors
196 # errors
197
197
198 head = self._remote.head(show_exc=False)
198 head = self._remote.head(show_exc=False)
199
199
200 if not head:
200 if not head:
201 return []
201 return []
202
202
203 rev_filter = ['--branches', '--tags']
203 rev_filter = ['--branches', '--tags']
204 extra_filter = []
204 extra_filter = []
205
205
206 if filters:
206 if filters:
207 if filters.get('since'):
207 if filters.get('since'):
208 extra_filter.append('--since=%s' % (filters['since']))
208 extra_filter.append('--since=%s' % (filters['since']))
209 if filters.get('until'):
209 if filters.get('until'):
210 extra_filter.append('--until=%s' % (filters['until']))
210 extra_filter.append('--until=%s' % (filters['until']))
211 if filters.get('branch_name'):
211 if filters.get('branch_name'):
212 rev_filter = []
212 rev_filter = []
213 extra_filter.append(filters['branch_name'])
213 extra_filter.append(filters['branch_name'])
214 rev_filter.extend(extra_filter)
214 rev_filter.extend(extra_filter)
215
215
216 # if filters.get('start') or filters.get('end'):
216 # if filters.get('start') or filters.get('end'):
217 # # skip is offset, max-count is limit
217 # # skip is offset, max-count is limit
218 # if filters.get('start'):
218 # if filters.get('start'):
219 # extra_filter += ' --skip=%s' % filters['start']
219 # extra_filter += ' --skip=%s' % filters['start']
220 # if filters.get('end'):
220 # if filters.get('end'):
221 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
221 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
222
222
223 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
223 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
224 try:
224 try:
225 output, __ = self.run_git_command(cmd)
225 output, __ = self.run_git_command(cmd)
226 except RepositoryError:
226 except RepositoryError:
227 # Can be raised for empty repositories
227 # Can be raised for empty repositories
228 return []
228 return []
229 return output.splitlines()
229 return output.splitlines()
230
230
231 def _lookup_commit(self, commit_id_or_idx, translate_tag=True):
231 def _lookup_commit(self, commit_id_or_idx, translate_tag=True, maybe_unreachable=False):
232 def is_null(value):
232 def is_null(value):
233 return len(value) == commit_id_or_idx.count('0')
233 return len(value) == commit_id_or_idx.count('0')
234
234
235 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
235 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
236 return self.commit_ids[-1]
236 return self.commit_ids[-1]
237
237 commit_missing_err = "Commit {} does not exist for `{}`".format(
238 commit_missing_err = "Commit {} does not exist for `{}`".format(
238 *map(safe_str, [commit_id_or_idx, self.name]))
239 *map(safe_str, [commit_id_or_idx, self.name]))
239
240
240 is_bstr = isinstance(commit_id_or_idx, (str, unicode))
241 is_bstr = isinstance(commit_id_or_idx, (str, unicode))
241 if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
242 if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
242 or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
243 or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
243 try:
244 try:
244 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
245 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
245 except Exception:
246 except Exception:
246 raise CommitDoesNotExistError(commit_missing_err)
247 raise CommitDoesNotExistError(commit_missing_err)
247
248
248 elif is_bstr:
249 elif is_bstr:
249 # Need to call remote to translate id for tagging scenario
250 # Need to call remote to translate id for tagging scenario
250 try:
251 try:
251 remote_data = self._remote.get_object(commit_id_or_idx)
252 remote_data = self._remote.get_object(commit_id_or_idx,
253 maybe_unreachable=maybe_unreachable)
252 commit_id_or_idx = remote_data["commit_id"]
254 commit_id_or_idx = remote_data["commit_id"]
253 except (CommitDoesNotExistError,):
255 except (CommitDoesNotExistError,):
254 raise CommitDoesNotExistError(commit_missing_err)
256 raise CommitDoesNotExistError(commit_missing_err)
255
257
256 # Ensure we return full id
258 # Ensure we return full id
257 if not SHA_PATTERN.match(str(commit_id_or_idx)):
259 if not SHA_PATTERN.match(str(commit_id_or_idx)):
258 raise CommitDoesNotExistError(
260 raise CommitDoesNotExistError(
259 "Given commit id %s not recognized" % commit_id_or_idx)
261 "Given commit id %s not recognized" % commit_id_or_idx)
260 return commit_id_or_idx
262 return commit_id_or_idx
261
263
262 def get_hook_location(self):
264 def get_hook_location(self):
263 """
265 """
264 returns absolute path to location where hooks are stored
266 returns absolute path to location where hooks are stored
265 """
267 """
266 loc = os.path.join(self.path, 'hooks')
268 loc = os.path.join(self.path, 'hooks')
267 if not self.bare:
269 if not self.bare:
268 loc = os.path.join(self.path, '.git', 'hooks')
270 loc = os.path.join(self.path, '.git', 'hooks')
269 return loc
271 return loc
270
272
271 @LazyProperty
273 @LazyProperty
272 def last_change(self):
274 def last_change(self):
273 """
275 """
274 Returns last change made on this repository as
276 Returns last change made on this repository as
275 `datetime.datetime` object.
277 `datetime.datetime` object.
276 """
278 """
277 try:
279 try:
278 return self.get_commit().date
280 return self.get_commit().date
279 except RepositoryError:
281 except RepositoryError:
280 tzoffset = makedate()[1]
282 tzoffset = makedate()[1]
281 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
283 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
282
284
283 def _get_fs_mtime(self):
285 def _get_fs_mtime(self):
284 idx_loc = '' if self.bare else '.git'
286 idx_loc = '' if self.bare else '.git'
285 # fallback to filesystem
287 # fallback to filesystem
286 in_path = os.path.join(self.path, idx_loc, "index")
288 in_path = os.path.join(self.path, idx_loc, "index")
287 he_path = os.path.join(self.path, idx_loc, "HEAD")
289 he_path = os.path.join(self.path, idx_loc, "HEAD")
288 if os.path.exists(in_path):
290 if os.path.exists(in_path):
289 return os.stat(in_path).st_mtime
291 return os.stat(in_path).st_mtime
290 else:
292 else:
291 return os.stat(he_path).st_mtime
293 return os.stat(he_path).st_mtime
292
294
293 @LazyProperty
295 @LazyProperty
294 def description(self):
296 def description(self):
295 description = self._remote.get_description()
297 description = self._remote.get_description()
296 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
298 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
297
299
298 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
300 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
299 if self.is_empty():
301 if self.is_empty():
300 return OrderedDict()
302 return OrderedDict()
301
303
302 result = []
304 result = []
303 for ref, sha in self._refs.iteritems():
305 for ref, sha in self._refs.iteritems():
304 if ref.startswith(prefix):
306 if ref.startswith(prefix):
305 ref_name = ref
307 ref_name = ref
306 if strip_prefix:
308 if strip_prefix:
307 ref_name = ref[len(prefix):]
309 ref_name = ref[len(prefix):]
308 result.append((safe_unicode(ref_name), sha))
310 result.append((safe_unicode(ref_name), sha))
309
311
310 def get_name(entry):
312 def get_name(entry):
311 return entry[0]
313 return entry[0]
312
314
313 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
315 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
314
316
315 def _get_branches(self):
317 def _get_branches(self):
316 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
318 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
317
319
318 @CachedProperty
320 @CachedProperty
319 def branches(self):
321 def branches(self):
320 return self._get_branches()
322 return self._get_branches()
321
323
322 @CachedProperty
324 @CachedProperty
323 def branches_closed(self):
325 def branches_closed(self):
324 return {}
326 return {}
325
327
326 @CachedProperty
328 @CachedProperty
327 def bookmarks(self):
329 def bookmarks(self):
328 return {}
330 return {}
329
331
330 @CachedProperty
332 @CachedProperty
331 def branches_all(self):
333 def branches_all(self):
332 all_branches = {}
334 all_branches = {}
333 all_branches.update(self.branches)
335 all_branches.update(self.branches)
334 all_branches.update(self.branches_closed)
336 all_branches.update(self.branches_closed)
335 return all_branches
337 return all_branches
336
338
337 @CachedProperty
339 @CachedProperty
338 def tags(self):
340 def tags(self):
339 return self._get_tags()
341 return self._get_tags()
340
342
341 def _get_tags(self):
343 def _get_tags(self):
342 return self._get_refs_entries(prefix='refs/tags/', strip_prefix=True, reverse=True)
344 return self._get_refs_entries(prefix='refs/tags/', strip_prefix=True, reverse=True)
343
345
344 def tag(self, name, user, commit_id=None, message=None, date=None,
346 def tag(self, name, user, commit_id=None, message=None, date=None,
345 **kwargs):
347 **kwargs):
346 # TODO: fix this method to apply annotated tags correct with message
348 # TODO: fix this method to apply annotated tags correct with message
347 """
349 """
348 Creates and returns a tag for the given ``commit_id``.
350 Creates and returns a tag for the given ``commit_id``.
349
351
350 :param name: name for new tag
352 :param name: name for new tag
351 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
353 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
352 :param commit_id: commit id for which new tag would be created
354 :param commit_id: commit id for which new tag would be created
353 :param message: message of the tag's commit
355 :param message: message of the tag's commit
354 :param date: date of tag's commit
356 :param date: date of tag's commit
355
357
356 :raises TagAlreadyExistError: if tag with same name already exists
358 :raises TagAlreadyExistError: if tag with same name already exists
357 """
359 """
358 if name in self.tags:
360 if name in self.tags:
359 raise TagAlreadyExistError("Tag %s already exists" % name)
361 raise TagAlreadyExistError("Tag %s already exists" % name)
360 commit = self.get_commit(commit_id=commit_id)
362 commit = self.get_commit(commit_id=commit_id)
361 message = message or "Added tag %s for commit %s" % (name, commit.raw_id)
363 message = message or "Added tag %s for commit %s" % (name, commit.raw_id)
362
364
363 self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)
365 self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)
364
366
365 self._invalidate_prop_cache('tags')
367 self._invalidate_prop_cache('tags')
366 self._invalidate_prop_cache('_refs')
368 self._invalidate_prop_cache('_refs')
367
369
368 return commit
370 return commit
369
371
370 def remove_tag(self, name, user, message=None, date=None):
372 def remove_tag(self, name, user, message=None, date=None):
371 """
373 """
372 Removes tag with the given ``name``.
374 Removes tag with the given ``name``.
373
375
374 :param name: name of the tag to be removed
376 :param name: name of the tag to be removed
375 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
377 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
376 :param message: message of the tag's removal commit
378 :param message: message of the tag's removal commit
377 :param date: date of tag's removal commit
379 :param date: date of tag's removal commit
378
380
379 :raises TagDoesNotExistError: if tag with given name does not exists
381 :raises TagDoesNotExistError: if tag with given name does not exists
380 """
382 """
381 if name not in self.tags:
383 if name not in self.tags:
382 raise TagDoesNotExistError("Tag %s does not exist" % name)
384 raise TagDoesNotExistError("Tag %s does not exist" % name)
383
385
384 self._remote.tag_remove(name)
386 self._remote.tag_remove(name)
385 self._invalidate_prop_cache('tags')
387 self._invalidate_prop_cache('tags')
386 self._invalidate_prop_cache('_refs')
388 self._invalidate_prop_cache('_refs')
387
389
388 def _get_refs(self):
390 def _get_refs(self):
389 return self._remote.get_refs()
391 return self._remote.get_refs()
390
392
391 @CachedProperty
393 @CachedProperty
392 def _refs(self):
394 def _refs(self):
393 return self._get_refs()
395 return self._get_refs()
394
396
395 @property
397 @property
396 def _ref_tree(self):
398 def _ref_tree(self):
397 node = tree = {}
399 node = tree = {}
398 for ref, sha in self._refs.iteritems():
400 for ref, sha in self._refs.iteritems():
399 path = ref.split('/')
401 path = ref.split('/')
400 for bit in path[:-1]:
402 for bit in path[:-1]:
401 node = node.setdefault(bit, {})
403 node = node.setdefault(bit, {})
402 node[path[-1]] = sha
404 node[path[-1]] = sha
403 node = tree
405 node = tree
404 return tree
406 return tree
405
407
406 def get_remote_ref(self, ref_name):
408 def get_remote_ref(self, ref_name):
407 ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
409 ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
408 try:
410 try:
409 return self._refs[ref_key]
411 return self._refs[ref_key]
410 except Exception:
412 except Exception:
411 return
413 return
412
414
413 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=True):
415 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
416 translate_tag=True, maybe_unreachable=False):
414 """
417 """
415 Returns `GitCommit` object representing commit from git repository
418 Returns `GitCommit` object representing commit from git repository
416 at the given `commit_id` or head (most recent commit) if None given.
419 at the given `commit_id` or head (most recent commit) if None given.
417 """
420 """
418 if self.is_empty():
421 if self.is_empty():
419 raise EmptyRepositoryError("There are no commits yet")
422 raise EmptyRepositoryError("There are no commits yet")
420
423
421 if commit_id is not None:
424 if commit_id is not None:
422 self._validate_commit_id(commit_id)
425 self._validate_commit_id(commit_id)
423 try:
426 try:
424 # we have cached idx, use it without contacting the remote
427 # we have cached idx, use it without contacting the remote
425 idx = self._commit_ids[commit_id]
428 idx = self._commit_ids[commit_id]
426 return GitCommit(self, commit_id, idx, pre_load=pre_load)
429 return GitCommit(self, commit_id, idx, pre_load=pre_load)
427 except KeyError:
430 except KeyError:
428 pass
431 pass
429
432
430 elif commit_idx is not None:
433 elif commit_idx is not None:
431 self._validate_commit_idx(commit_idx)
434 self._validate_commit_idx(commit_idx)
432 try:
435 try:
433 _commit_id = self.commit_ids[commit_idx]
436 _commit_id = self.commit_ids[commit_idx]
434 if commit_idx < 0:
437 if commit_idx < 0:
435 commit_idx = self.commit_ids.index(_commit_id)
438 commit_idx = self.commit_ids.index(_commit_id)
436 return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
439 return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
437 except IndexError:
440 except IndexError:
438 commit_id = commit_idx
441 commit_id = commit_idx
439 else:
442 else:
440 commit_id = "tip"
443 commit_id = "tip"
441
444
442 if translate_tag:
445 if translate_tag:
443 commit_id = self._lookup_commit(commit_id)
446 commit_id = self._lookup_commit(commit_id, maybe_unreachable=maybe_unreachable)
444
447
445 try:
448 try:
446 idx = self._commit_ids[commit_id]
449 idx = self._commit_ids[commit_id]
447 except KeyError:
450 except KeyError:
448 idx = -1
451 idx = -1
449
452
450 return GitCommit(self, commit_id, idx, pre_load=pre_load)
453 return GitCommit(self, commit_id, idx, pre_load=pre_load)
451
454
452 def get_commits(
455 def get_commits(
453 self, start_id=None, end_id=None, start_date=None, end_date=None,
456 self, start_id=None, end_id=None, start_date=None, end_date=None,
454 branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
457 branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
455 """
458 """
456 Returns generator of `GitCommit` objects from start to end (both
459 Returns generator of `GitCommit` objects from start to end (both
457 are inclusive), in ascending date order.
460 are inclusive), in ascending date order.
458
461
459 :param start_id: None, str(commit_id)
462 :param start_id: None, str(commit_id)
460 :param end_id: None, str(commit_id)
463 :param end_id: None, str(commit_id)
461 :param start_date: if specified, commits with commit date less than
464 :param start_date: if specified, commits with commit date less than
462 ``start_date`` would be filtered out from returned set
465 ``start_date`` would be filtered out from returned set
463 :param end_date: if specified, commits with commit date greater than
466 :param end_date: if specified, commits with commit date greater than
464 ``end_date`` would be filtered out from returned set
467 ``end_date`` would be filtered out from returned set
465 :param branch_name: if specified, commits not reachable from given
468 :param branch_name: if specified, commits not reachable from given
466 branch would be filtered out from returned set
469 branch would be filtered out from returned set
467 :param show_hidden: Show hidden commits such as obsolete or hidden from
470 :param show_hidden: Show hidden commits such as obsolete or hidden from
468 Mercurial evolve
471 Mercurial evolve
469 :raise BranchDoesNotExistError: If given `branch_name` does not
472 :raise BranchDoesNotExistError: If given `branch_name` does not
470 exist.
473 exist.
471 :raise CommitDoesNotExistError: If commits for given `start` or
474 :raise CommitDoesNotExistError: If commits for given `start` or
472 `end` could not be found.
475 `end` could not be found.
473
476
474 """
477 """
475 if self.is_empty():
478 if self.is_empty():
476 raise EmptyRepositoryError("There are no commits yet")
479 raise EmptyRepositoryError("There are no commits yet")
477
480
478 self._validate_branch_name(branch_name)
481 self._validate_branch_name(branch_name)
479
482
480 if start_id is not None:
483 if start_id is not None:
481 self._validate_commit_id(start_id)
484 self._validate_commit_id(start_id)
482 if end_id is not None:
485 if end_id is not None:
483 self._validate_commit_id(end_id)
486 self._validate_commit_id(end_id)
484
487
485 start_raw_id = self._lookup_commit(start_id)
488 start_raw_id = self._lookup_commit(start_id)
486 start_pos = self._commit_ids[start_raw_id] if start_id else None
489 start_pos = self._commit_ids[start_raw_id] if start_id else None
487 end_raw_id = self._lookup_commit(end_id)
490 end_raw_id = self._lookup_commit(end_id)
488 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
491 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
489
492
490 if None not in [start_id, end_id] and start_pos > end_pos:
493 if None not in [start_id, end_id] and start_pos > end_pos:
491 raise RepositoryError(
494 raise RepositoryError(
492 "Start commit '%s' cannot be after end commit '%s'" %
495 "Start commit '%s' cannot be after end commit '%s'" %
493 (start_id, end_id))
496 (start_id, end_id))
494
497
495 if end_pos is not None:
498 if end_pos is not None:
496 end_pos += 1
499 end_pos += 1
497
500
498 filter_ = []
501 filter_ = []
499 if branch_name:
502 if branch_name:
500 filter_.append({'branch_name': branch_name})
503 filter_.append({'branch_name': branch_name})
501 if start_date and not end_date:
504 if start_date and not end_date:
502 filter_.append({'since': start_date})
505 filter_.append({'since': start_date})
503 if end_date and not start_date:
506 if end_date and not start_date:
504 filter_.append({'until': end_date})
507 filter_.append({'until': end_date})
505 if start_date and end_date:
508 if start_date and end_date:
506 filter_.append({'since': start_date})
509 filter_.append({'since': start_date})
507 filter_.append({'until': end_date})
510 filter_.append({'until': end_date})
508
511
509 # if start_pos or end_pos:
512 # if start_pos or end_pos:
510 # filter_.append({'start': start_pos})
513 # filter_.append({'start': start_pos})
511 # filter_.append({'end': end_pos})
514 # filter_.append({'end': end_pos})
512
515
513 if filter_:
516 if filter_:
514 revfilters = {
517 revfilters = {
515 'branch_name': branch_name,
518 'branch_name': branch_name,
516 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
519 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
517 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
520 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
518 'start': start_pos,
521 'start': start_pos,
519 'end': end_pos,
522 'end': end_pos,
520 }
523 }
521 commit_ids = self._get_commit_ids(filters=revfilters)
524 commit_ids = self._get_commit_ids(filters=revfilters)
522
525
523 else:
526 else:
524 commit_ids = self.commit_ids
527 commit_ids = self.commit_ids
525
528
526 if start_pos or end_pos:
529 if start_pos or end_pos:
527 commit_ids = commit_ids[start_pos: end_pos]
530 commit_ids = commit_ids[start_pos: end_pos]
528
531
529 return CollectionGenerator(self, commit_ids, pre_load=pre_load,
532 return CollectionGenerator(self, commit_ids, pre_load=pre_load,
530 translate_tag=translate_tags)
533 translate_tag=translate_tags)
531
534
532 def get_diff(
535 def get_diff(
533 self, commit1, commit2, path='', ignore_whitespace=False,
536 self, commit1, commit2, path='', ignore_whitespace=False,
534 context=3, path1=None):
537 context=3, path1=None):
535 """
538 """
536 Returns (git like) *diff*, as plain text. Shows changes introduced by
539 Returns (git like) *diff*, as plain text. Shows changes introduced by
537 ``commit2`` since ``commit1``.
540 ``commit2`` since ``commit1``.
538
541
539 :param commit1: Entry point from which diff is shown. Can be
542 :param commit1: Entry point from which diff is shown. Can be
540 ``self.EMPTY_COMMIT`` - in this case, patch showing all
543 ``self.EMPTY_COMMIT`` - in this case, patch showing all
541 the changes since empty state of the repository until ``commit2``
544 the changes since empty state of the repository until ``commit2``
542 :param commit2: Until which commits changes should be shown.
545 :param commit2: Until which commits changes should be shown.
543 :param ignore_whitespace: If set to ``True``, would not show whitespace
546 :param ignore_whitespace: If set to ``True``, would not show whitespace
544 changes. Defaults to ``False``.
547 changes. Defaults to ``False``.
545 :param context: How many lines before/after changed lines should be
548 :param context: How many lines before/after changed lines should be
546 shown. Defaults to ``3``.
549 shown. Defaults to ``3``.
547 """
550 """
548 self._validate_diff_commits(commit1, commit2)
551 self._validate_diff_commits(commit1, commit2)
549 if path1 is not None and path1 != path:
552 if path1 is not None and path1 != path:
550 raise ValueError("Diff of two different paths not supported.")
553 raise ValueError("Diff of two different paths not supported.")
551
554
552 if path:
555 if path:
553 file_filter = path
556 file_filter = path
554 else:
557 else:
555 file_filter = None
558 file_filter = None
556
559
557 diff = self._remote.diff(
560 diff = self._remote.diff(
558 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
561 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
559 opt_ignorews=ignore_whitespace,
562 opt_ignorews=ignore_whitespace,
560 context=context)
563 context=context)
561 return GitDiff(diff)
564 return GitDiff(diff)
562
565
563 def strip(self, commit_id, branch_name):
566 def strip(self, commit_id, branch_name):
564 commit = self.get_commit(commit_id=commit_id)
567 commit = self.get_commit(commit_id=commit_id)
565 if commit.merge:
568 if commit.merge:
566 raise Exception('Cannot reset to merge commit')
569 raise Exception('Cannot reset to merge commit')
567
570
568 # parent is going to be the new head now
571 # parent is going to be the new head now
569 commit = commit.parents[0]
572 commit = commit.parents[0]
570 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
573 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
571
574
572 # clear cached properties
575 # clear cached properties
573 self._invalidate_prop_cache('commit_ids')
576 self._invalidate_prop_cache('commit_ids')
574 self._invalidate_prop_cache('_refs')
577 self._invalidate_prop_cache('_refs')
575 self._invalidate_prop_cache('branches')
578 self._invalidate_prop_cache('branches')
576
579
577 return len(self.commit_ids)
580 return len(self.commit_ids)
578
581
579 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
582 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
580 if commit_id1 == commit_id2:
583 if commit_id1 == commit_id2:
581 return commit_id1
584 return commit_id1
582
585
583 if self != repo2:
586 if self != repo2:
584 commits = self._remote.get_missing_revs(
587 commits = self._remote.get_missing_revs(
585 commit_id1, commit_id2, repo2.path)
588 commit_id1, commit_id2, repo2.path)
586 if commits:
589 if commits:
587 commit = repo2.get_commit(commits[-1])
590 commit = repo2.get_commit(commits[-1])
588 if commit.parents:
591 if commit.parents:
589 ancestor_id = commit.parents[0].raw_id
592 ancestor_id = commit.parents[0].raw_id
590 else:
593 else:
591 ancestor_id = None
594 ancestor_id = None
592 else:
595 else:
593 # no commits from other repo, ancestor_id is the commit_id2
596 # no commits from other repo, ancestor_id is the commit_id2
594 ancestor_id = commit_id2
597 ancestor_id = commit_id2
595 else:
598 else:
596 output, __ = self.run_git_command(
599 output, __ = self.run_git_command(
597 ['merge-base', commit_id1, commit_id2])
600 ['merge-base', commit_id1, commit_id2])
598 ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]
601 ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]
599
602
600 return ancestor_id
603 return ancestor_id
601
604
602 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
605 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
603 repo1 = self
606 repo1 = self
604 ancestor_id = None
607 ancestor_id = None
605
608
606 if commit_id1 == commit_id2:
609 if commit_id1 == commit_id2:
607 commits = []
610 commits = []
608 elif repo1 != repo2:
611 elif repo1 != repo2:
609 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
612 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
610 repo2.path)
613 repo2.path)
611 commits = [
614 commits = [
612 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
615 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
613 for commit_id in reversed(missing_ids)]
616 for commit_id in reversed(missing_ids)]
614 else:
617 else:
615 output, __ = repo1.run_git_command(
618 output, __ = repo1.run_git_command(
616 ['log', '--reverse', '--pretty=format: %H', '-s',
619 ['log', '--reverse', '--pretty=format: %H', '-s',
617 '%s..%s' % (commit_id1, commit_id2)])
620 '%s..%s' % (commit_id1, commit_id2)])
618 commits = [
621 commits = [
619 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
622 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
620 for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]
623 for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]
621
624
622 return commits
625 return commits
623
626
624 @LazyProperty
627 @LazyProperty
625 def in_memory_commit(self):
628 def in_memory_commit(self):
626 """
629 """
627 Returns ``GitInMemoryCommit`` object for this repository.
630 Returns ``GitInMemoryCommit`` object for this repository.
628 """
631 """
629 return GitInMemoryCommit(self)
632 return GitInMemoryCommit(self)
630
633
631 def pull(self, url, commit_ids=None, update_after=False):
634 def pull(self, url, commit_ids=None, update_after=False):
632 """
635 """
633 Pull changes from external location. Pull is different in GIT
636 Pull changes from external location. Pull is different in GIT
634 that fetch since it's doing a checkout
637 that fetch since it's doing a checkout
635
638
636 :param commit_ids: Optional. Can be set to a list of commit ids
639 :param commit_ids: Optional. Can be set to a list of commit ids
637 which shall be pulled from the other repository.
640 which shall be pulled from the other repository.
638 """
641 """
639 refs = None
642 refs = None
640 if commit_ids is not None:
643 if commit_ids is not None:
641 remote_refs = self._remote.get_remote_refs(url)
644 remote_refs = self._remote.get_remote_refs(url)
642 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
645 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
643 self._remote.pull(url, refs=refs, update_after=update_after)
646 self._remote.pull(url, refs=refs, update_after=update_after)
644 self._remote.invalidate_vcs_cache()
647 self._remote.invalidate_vcs_cache()
645
648
646 def fetch(self, url, commit_ids=None):
649 def fetch(self, url, commit_ids=None):
647 """
650 """
648 Fetch all git objects from external location.
651 Fetch all git objects from external location.
649 """
652 """
650 self._remote.sync_fetch(url, refs=commit_ids)
653 self._remote.sync_fetch(url, refs=commit_ids)
651 self._remote.invalidate_vcs_cache()
654 self._remote.invalidate_vcs_cache()
652
655
653 def push(self, url):
656 def push(self, url):
654 refs = None
657 refs = None
655 self._remote.sync_push(url, refs=refs)
658 self._remote.sync_push(url, refs=refs)
656
659
657 def set_refs(self, ref_name, commit_id):
660 def set_refs(self, ref_name, commit_id):
658 self._remote.set_refs(ref_name, commit_id)
661 self._remote.set_refs(ref_name, commit_id)
659 self._invalidate_prop_cache('_refs')
662 self._invalidate_prop_cache('_refs')
660
663
661 def remove_ref(self, ref_name):
664 def remove_ref(self, ref_name):
662 self._remote.remove_ref(ref_name)
665 self._remote.remove_ref(ref_name)
663 self._invalidate_prop_cache('_refs')
666 self._invalidate_prop_cache('_refs')
664
667
668 def run_gc(self, prune=True):
669 cmd = ['gc', '--aggressive']
670 if prune:
671 cmd += ['--prune=now']
672 _stdout, stderr = self.run_git_command(cmd, fail_on_stderr=False)
673 return stderr
674
665 def _update_server_info(self):
675 def _update_server_info(self):
666 """
676 """
667 runs gits update-server-info command in this repo instance
677 runs gits update-server-info command in this repo instance
668 """
678 """
669 self._remote.update_server_info()
679 self._remote.update_server_info()
670
680
671 def _current_branch(self):
681 def _current_branch(self):
672 """
682 """
673 Return the name of the current branch.
683 Return the name of the current branch.
674
684
675 It only works for non bare repositories (i.e. repositories with a
685 It only works for non bare repositories (i.e. repositories with a
676 working copy)
686 working copy)
677 """
687 """
678 if self.bare:
688 if self.bare:
679 raise RepositoryError('Bare git repos do not have active branches')
689 raise RepositoryError('Bare git repos do not have active branches')
680
690
681 if self.is_empty():
691 if self.is_empty():
682 return None
692 return None
683
693
684 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
694 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
685 return stdout.strip()
695 return stdout.strip()
686
696
687 def _checkout(self, branch_name, create=False, force=False):
697 def _checkout(self, branch_name, create=False, force=False):
688 """
698 """
689 Checkout a branch in the working directory.
699 Checkout a branch in the working directory.
690
700
691 It tries to create the branch if create is True, failing if the branch
701 It tries to create the branch if create is True, failing if the branch
692 already exists.
702 already exists.
693
703
694 It only works for non bare repositories (i.e. repositories with a
704 It only works for non bare repositories (i.e. repositories with a
695 working copy)
705 working copy)
696 """
706 """
697 if self.bare:
707 if self.bare:
698 raise RepositoryError('Cannot checkout branches in a bare git repo')
708 raise RepositoryError('Cannot checkout branches in a bare git repo')
699
709
700 cmd = ['checkout']
710 cmd = ['checkout']
701 if force:
711 if force:
702 cmd.append('-f')
712 cmd.append('-f')
703 if create:
713 if create:
704 cmd.append('-b')
714 cmd.append('-b')
705 cmd.append(branch_name)
715 cmd.append(branch_name)
706 self.run_git_command(cmd, fail_on_stderr=False)
716 self.run_git_command(cmd, fail_on_stderr=False)
707
717
708 def _create_branch(self, branch_name, commit_id):
718 def _create_branch(self, branch_name, commit_id):
709 """
719 """
710 creates a branch in a GIT repo
720 creates a branch in a GIT repo
711 """
721 """
712 self._remote.create_branch(branch_name, commit_id)
722 self._remote.create_branch(branch_name, commit_id)
713
723
714 def _identify(self):
724 def _identify(self):
715 """
725 """
716 Return the current state of the working directory.
726 Return the current state of the working directory.
717 """
727 """
718 if self.bare:
728 if self.bare:
719 raise RepositoryError('Bare git repos do not have active branches')
729 raise RepositoryError('Bare git repos do not have active branches')
720
730
721 if self.is_empty():
731 if self.is_empty():
722 return None
732 return None
723
733
724 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
734 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
725 return stdout.strip()
735 return stdout.strip()
726
736
727 def _local_clone(self, clone_path, branch_name, source_branch=None):
737 def _local_clone(self, clone_path, branch_name, source_branch=None):
728 """
738 """
729 Create a local clone of the current repo.
739 Create a local clone of the current repo.
730 """
740 """
731 # N.B.(skreft): the --branch option is required as otherwise the shallow
741 # N.B.(skreft): the --branch option is required as otherwise the shallow
732 # clone will only fetch the active branch.
742 # clone will only fetch the active branch.
733 cmd = ['clone', '--branch', branch_name,
743 cmd = ['clone', '--branch', branch_name,
734 self.path, os.path.abspath(clone_path)]
744 self.path, os.path.abspath(clone_path)]
735
745
736 self.run_git_command(cmd, fail_on_stderr=False)
746 self.run_git_command(cmd, fail_on_stderr=False)
737
747
738 # if we get the different source branch, make sure we also fetch it for
748 # if we get the different source branch, make sure we also fetch it for
739 # merge conditions
749 # merge conditions
740 if source_branch and source_branch != branch_name:
750 if source_branch and source_branch != branch_name:
741 # check if the ref exists.
751 # check if the ref exists.
742 shadow_repo = GitRepository(os.path.abspath(clone_path))
752 shadow_repo = GitRepository(os.path.abspath(clone_path))
743 if shadow_repo.get_remote_ref(source_branch):
753 if shadow_repo.get_remote_ref(source_branch):
744 cmd = ['fetch', self.path, source_branch]
754 cmd = ['fetch', self.path, source_branch]
745 self.run_git_command(cmd, fail_on_stderr=False)
755 self.run_git_command(cmd, fail_on_stderr=False)
746
756
747 def _local_fetch(self, repository_path, branch_name, use_origin=False):
757 def _local_fetch(self, repository_path, branch_name, use_origin=False):
748 """
758 """
749 Fetch a branch from a local repository.
759 Fetch a branch from a local repository.
750 """
760 """
751 repository_path = os.path.abspath(repository_path)
761 repository_path = os.path.abspath(repository_path)
752 if repository_path == self.path:
762 if repository_path == self.path:
753 raise ValueError('Cannot fetch from the same repository')
763 raise ValueError('Cannot fetch from the same repository')
754
764
755 if use_origin:
765 if use_origin:
756 branch_name = '+{branch}:refs/heads/{branch}'.format(
766 branch_name = '+{branch}:refs/heads/{branch}'.format(
757 branch=branch_name)
767 branch=branch_name)
758
768
759 cmd = ['fetch', '--no-tags', '--update-head-ok',
769 cmd = ['fetch', '--no-tags', '--update-head-ok',
760 repository_path, branch_name]
770 repository_path, branch_name]
761 self.run_git_command(cmd, fail_on_stderr=False)
771 self.run_git_command(cmd, fail_on_stderr=False)
762
772
763 def _local_reset(self, branch_name):
773 def _local_reset(self, branch_name):
764 branch_name = '{}'.format(branch_name)
774 branch_name = '{}'.format(branch_name)
765 cmd = ['reset', '--hard', branch_name, '--']
775 cmd = ['reset', '--hard', branch_name, '--']
766 self.run_git_command(cmd, fail_on_stderr=False)
776 self.run_git_command(cmd, fail_on_stderr=False)
767
777
768 def _last_fetch_heads(self):
778 def _last_fetch_heads(self):
769 """
779 """
770 Return the last fetched heads that need merging.
780 Return the last fetched heads that need merging.
771
781
772 The algorithm is defined at
782 The algorithm is defined at
773 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
783 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
774 """
784 """
775 if not self.bare:
785 if not self.bare:
776 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
786 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
777 else:
787 else:
778 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
788 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
779
789
780 heads = []
790 heads = []
781 with open(fetch_heads_path) as f:
791 with open(fetch_heads_path) as f:
782 for line in f:
792 for line in f:
783 if ' not-for-merge ' in line:
793 if ' not-for-merge ' in line:
784 continue
794 continue
785 line = re.sub('\t.*', '', line, flags=re.DOTALL)
795 line = re.sub('\t.*', '', line, flags=re.DOTALL)
786 heads.append(line)
796 heads.append(line)
787
797
788 return heads
798 return heads
789
799
790 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
800 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
791 return GitRepository(shadow_repository_path, with_wire={"cache": cache})
801 return GitRepository(shadow_repository_path, with_wire={"cache": cache})
792
802
793 def _local_pull(self, repository_path, branch_name, ff_only=True):
803 def _local_pull(self, repository_path, branch_name, ff_only=True):
794 """
804 """
795 Pull a branch from a local repository.
805 Pull a branch from a local repository.
796 """
806 """
797 if self.bare:
807 if self.bare:
798 raise RepositoryError('Cannot pull into a bare git repository')
808 raise RepositoryError('Cannot pull into a bare git repository')
799 # N.B.(skreft): The --ff-only option is to make sure this is a
809 # N.B.(skreft): The --ff-only option is to make sure this is a
800 # fast-forward (i.e., we are only pulling new changes and there are no
810 # fast-forward (i.e., we are only pulling new changes and there are no
801 # conflicts with our current branch)
811 # conflicts with our current branch)
802 # Additionally, that option needs to go before --no-tags, otherwise git
812 # Additionally, that option needs to go before --no-tags, otherwise git
803 # pull complains about it being an unknown flag.
813 # pull complains about it being an unknown flag.
804 cmd = ['pull']
814 cmd = ['pull']
805 if ff_only:
815 if ff_only:
806 cmd.append('--ff-only')
816 cmd.append('--ff-only')
807 cmd.extend(['--no-tags', repository_path, branch_name])
817 cmd.extend(['--no-tags', repository_path, branch_name])
808 self.run_git_command(cmd, fail_on_stderr=False)
818 self.run_git_command(cmd, fail_on_stderr=False)
809
819
810 def _local_merge(self, merge_message, user_name, user_email, heads):
820 def _local_merge(self, merge_message, user_name, user_email, heads):
811 """
821 """
812 Merge the given head into the checked out branch.
822 Merge the given head into the checked out branch.
813
823
814 It will force a merge commit.
824 It will force a merge commit.
815
825
816 Currently it raises an error if the repo is empty, as it is not possible
826 Currently it raises an error if the repo is empty, as it is not possible
817 to create a merge commit in an empty repo.
827 to create a merge commit in an empty repo.
818
828
819 :param merge_message: The message to use for the merge commit.
829 :param merge_message: The message to use for the merge commit.
820 :param heads: the heads to merge.
830 :param heads: the heads to merge.
821 """
831 """
822 if self.bare:
832 if self.bare:
823 raise RepositoryError('Cannot merge into a bare git repository')
833 raise RepositoryError('Cannot merge into a bare git repository')
824
834
825 if not heads:
835 if not heads:
826 return
836 return
827
837
828 if self.is_empty():
838 if self.is_empty():
829 # TODO(skreft): do something more robust in this case.
839 # TODO(skreft): do something more robust in this case.
830 raise RepositoryError(
840 raise RepositoryError('Do not know how to merge into empty repositories yet')
831 'Do not know how to merge into empty repositories yet')
832 unresolved = None
841 unresolved = None
833
842
834 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
843 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
835 # commit message. We also specify the user who is doing the merge.
844 # commit message. We also specify the user who is doing the merge.
836 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
845 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
837 '-c', 'user.email=%s' % safe_str(user_email),
846 '-c', 'user.email=%s' % safe_str(user_email),
838 'merge', '--no-ff', '-m', safe_str(merge_message)]
847 'merge', '--no-ff', '-m', safe_str(merge_message)]
839 cmd.extend(heads)
848
849 merge_cmd = cmd + heads
850
840 try:
851 try:
841 output = self.run_git_command(cmd, fail_on_stderr=False)
852 self.run_git_command(merge_cmd, fail_on_stderr=False)
842 except RepositoryError:
853 except RepositoryError:
843 files = self.run_git_command(['diff', '--name-only', '--diff-filter', 'U'],
854 files = self.run_git_command(['diff', '--name-only', '--diff-filter', 'U'],
844 fail_on_stderr=False)[0].splitlines()
855 fail_on_stderr=False)[0].splitlines()
845 # NOTE(marcink): we add U notation for consistent with HG backend output
856 # NOTE(marcink): we add U notation for consistent with HG backend output
846 unresolved = ['U {}'.format(f) for f in files]
857 unresolved = ['U {}'.format(f) for f in files]
847
858
848 # Cleanup any merge leftovers
859 # Cleanup any merge leftovers
860 self._remote.invalidate_vcs_cache()
849 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
861 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
850
862
851 if unresolved:
863 if unresolved:
852 raise UnresolvedFilesInRepo(unresolved)
864 raise UnresolvedFilesInRepo(unresolved)
853 else:
865 else:
854 raise
866 raise
855
867
856 def _local_push(
868 def _local_push(
857 self, source_branch, repository_path, target_branch,
869 self, source_branch, repository_path, target_branch,
858 enable_hooks=False, rc_scm_data=None):
870 enable_hooks=False, rc_scm_data=None):
859 """
871 """
860 Push the source_branch to the given repository and target_branch.
872 Push the source_branch to the given repository and target_branch.
861
873
862 Currently it if the target_branch is not master and the target repo is
874 Currently it if the target_branch is not master and the target repo is
863 empty, the push will work, but then GitRepository won't be able to find
875 empty, the push will work, but then GitRepository won't be able to find
864 the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
876 the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
865 pointing to master, which does not exist).
877 pointing to master, which does not exist).
866
878
867 It does not run the hooks in the target repo.
879 It does not run the hooks in the target repo.
868 """
880 """
869 # TODO(skreft): deal with the case in which the target repo is empty,
881 # TODO(skreft): deal with the case in which the target repo is empty,
870 # and the target_branch is not master.
882 # and the target_branch is not master.
871 target_repo = GitRepository(repository_path)
883 target_repo = GitRepository(repository_path)
872 if (not target_repo.bare and
884 if (not target_repo.bare and
873 target_repo._current_branch() == target_branch):
885 target_repo._current_branch() == target_branch):
874 # Git prevents pushing to the checked out branch, so simulate it by
886 # Git prevents pushing to the checked out branch, so simulate it by
875 # pulling into the target repository.
887 # pulling into the target repository.
876 target_repo._local_pull(self.path, source_branch)
888 target_repo._local_pull(self.path, source_branch)
877 else:
889 else:
878 cmd = ['push', os.path.abspath(repository_path),
890 cmd = ['push', os.path.abspath(repository_path),
879 '%s:%s' % (source_branch, target_branch)]
891 '%s:%s' % (source_branch, target_branch)]
880 gitenv = {}
892 gitenv = {}
881 if rc_scm_data:
893 if rc_scm_data:
882 gitenv.update({'RC_SCM_DATA': rc_scm_data})
894 gitenv.update({'RC_SCM_DATA': rc_scm_data})
883
895
884 if not enable_hooks:
896 if not enable_hooks:
885 gitenv['RC_SKIP_HOOKS'] = '1'
897 gitenv['RC_SKIP_HOOKS'] = '1'
886 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
898 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
887
899
888 def _get_new_pr_branch(self, source_branch, target_branch):
900 def _get_new_pr_branch(self, source_branch, target_branch):
889 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
901 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
890 pr_branches = []
902 pr_branches = []
891 for branch in self.branches:
903 for branch in self.branches:
892 if branch.startswith(prefix):
904 if branch.startswith(prefix):
893 pr_branches.append(int(branch[len(prefix):]))
905 pr_branches.append(int(branch[len(prefix):]))
894
906
895 if not pr_branches:
907 if not pr_branches:
896 branch_id = 0
908 branch_id = 0
897 else:
909 else:
898 branch_id = max(pr_branches) + 1
910 branch_id = max(pr_branches) + 1
899
911
900 return '%s%d' % (prefix, branch_id)
912 return '%s%d' % (prefix, branch_id)
901
913
902 def _maybe_prepare_merge_workspace(
914 def _maybe_prepare_merge_workspace(
903 self, repo_id, workspace_id, target_ref, source_ref):
915 self, repo_id, workspace_id, target_ref, source_ref):
904 shadow_repository_path = self._get_shadow_repository_path(
916 shadow_repository_path = self._get_shadow_repository_path(
905 self.path, repo_id, workspace_id)
917 self.path, repo_id, workspace_id)
906 if not os.path.exists(shadow_repository_path):
918 if not os.path.exists(shadow_repository_path):
907 self._local_clone(
919 self._local_clone(
908 shadow_repository_path, target_ref.name, source_ref.name)
920 shadow_repository_path, target_ref.name, source_ref.name)
909 log.debug('Prepared %s shadow repository in %s',
921 log.debug('Prepared %s shadow repository in %s',
910 self.alias, shadow_repository_path)
922 self.alias, shadow_repository_path)
911
923
912 return shadow_repository_path
924 return shadow_repository_path
913
925
914 def _merge_repo(self, repo_id, workspace_id, target_ref,
926 def _merge_repo(self, repo_id, workspace_id, target_ref,
915 source_repo, source_ref, merge_message,
927 source_repo, source_ref, merge_message,
916 merger_name, merger_email, dry_run=False,
928 merger_name, merger_email, dry_run=False,
917 use_rebase=False, close_branch=False):
929 use_rebase=False, close_branch=False):
918
930
919 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
931 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
920 'rebase' if use_rebase else 'merge', dry_run)
932 'rebase' if use_rebase else 'merge', dry_run)
921 if target_ref.commit_id != self.branches[target_ref.name]:
933 if target_ref.commit_id != self.branches[target_ref.name]:
922 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
934 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
923 target_ref.commit_id, self.branches[target_ref.name])
935 target_ref.commit_id, self.branches[target_ref.name])
924 return MergeResponse(
936 return MergeResponse(
925 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
937 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
926 metadata={'target_ref': target_ref})
938 metadata={'target_ref': target_ref})
927
939
928 shadow_repository_path = self._maybe_prepare_merge_workspace(
940 shadow_repository_path = self._maybe_prepare_merge_workspace(
929 repo_id, workspace_id, target_ref, source_ref)
941 repo_id, workspace_id, target_ref, source_ref)
930 shadow_repo = self.get_shadow_instance(shadow_repository_path)
942 shadow_repo = self.get_shadow_instance(shadow_repository_path)
931
943
932 # checkout source, if it's different. Otherwise we could not
944 # checkout source, if it's different. Otherwise we could not
933 # fetch proper commits for merge testing
945 # fetch proper commits for merge testing
934 if source_ref.name != target_ref.name:
946 if source_ref.name != target_ref.name:
935 if shadow_repo.get_remote_ref(source_ref.name):
947 if shadow_repo.get_remote_ref(source_ref.name):
936 shadow_repo._checkout(source_ref.name, force=True)
948 shadow_repo._checkout(source_ref.name, force=True)
937
949
938 # checkout target, and fetch changes
950 # checkout target, and fetch changes
939 shadow_repo._checkout(target_ref.name, force=True)
951 shadow_repo._checkout(target_ref.name, force=True)
940
952
941 # fetch/reset pull the target, in case it is changed
953 # fetch/reset pull the target, in case it is changed
942 # this handles even force changes
954 # this handles even force changes
943 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
955 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
944 shadow_repo._local_reset(target_ref.name)
956 shadow_repo._local_reset(target_ref.name)
945
957
946 # Need to reload repo to invalidate the cache, or otherwise we cannot
958 # Need to reload repo to invalidate the cache, or otherwise we cannot
947 # retrieve the last target commit.
959 # retrieve the last target commit.
948 shadow_repo = self.get_shadow_instance(shadow_repository_path)
960 shadow_repo = self.get_shadow_instance(shadow_repository_path)
949 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
961 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
950 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
962 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
951 target_ref, target_ref.commit_id,
963 target_ref, target_ref.commit_id,
952 shadow_repo.branches[target_ref.name])
964 shadow_repo.branches[target_ref.name])
953 return MergeResponse(
965 return MergeResponse(
954 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
966 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
955 metadata={'target_ref': target_ref})
967 metadata={'target_ref': target_ref})
956
968
957 # calculate new branch
969 # calculate new branch
958 pr_branch = shadow_repo._get_new_pr_branch(
970 pr_branch = shadow_repo._get_new_pr_branch(
959 source_ref.name, target_ref.name)
971 source_ref.name, target_ref.name)
960 log.debug('using pull-request merge branch: `%s`', pr_branch)
972 log.debug('using pull-request merge branch: `%s`', pr_branch)
961 # checkout to temp branch, and fetch changes
973 # checkout to temp branch, and fetch changes
962 shadow_repo._checkout(pr_branch, create=True)
974 shadow_repo._checkout(pr_branch, create=True)
963 try:
975 try:
964 shadow_repo._local_fetch(source_repo.path, source_ref.name)
976 shadow_repo._local_fetch(source_repo.path, source_ref.name)
965 except RepositoryError:
977 except RepositoryError:
966 log.exception('Failure when doing local fetch on '
978 log.exception('Failure when doing local fetch on '
967 'shadow repo: %s', shadow_repo)
979 'shadow repo: %s', shadow_repo)
968 return MergeResponse(
980 return MergeResponse(
969 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
981 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
970 metadata={'source_ref': source_ref})
982 metadata={'source_ref': source_ref})
971
983
972 merge_ref = None
984 merge_ref = None
973 merge_failure_reason = MergeFailureReason.NONE
985 merge_failure_reason = MergeFailureReason.NONE
974 metadata = {}
986 metadata = {}
975 try:
987 try:
976 shadow_repo._local_merge(merge_message, merger_name, merger_email,
988 shadow_repo._local_merge(merge_message, merger_name, merger_email,
977 [source_ref.commit_id])
989 [source_ref.commit_id])
978 merge_possible = True
990 merge_possible = True
979
991
980 # Need to invalidate the cache, or otherwise we
992 # Need to invalidate the cache, or otherwise we
981 # cannot retrieve the merge commit.
993 # cannot retrieve the merge commit.
982 shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path)
994 shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path)
983 merge_commit_id = shadow_repo.branches[pr_branch]
995 merge_commit_id = shadow_repo.branches[pr_branch]
984
996
985 # Set a reference pointing to the merge commit. This reference may
997 # Set a reference pointing to the merge commit. This reference may
986 # be used to easily identify the last successful merge commit in
998 # be used to easily identify the last successful merge commit in
987 # the shadow repository.
999 # the shadow repository.
988 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
1000 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
989 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
1001 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
990 except RepositoryError as e:
1002 except RepositoryError as e:
991 log.exception('Failure when doing local merge on git shadow repo')
1003 log.exception('Failure when doing local merge on git shadow repo')
992 if isinstance(e, UnresolvedFilesInRepo):
1004 if isinstance(e, UnresolvedFilesInRepo):
993 metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))
1005 metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))
994
1006
995 merge_possible = False
1007 merge_possible = False
996 merge_failure_reason = MergeFailureReason.MERGE_FAILED
1008 merge_failure_reason = MergeFailureReason.MERGE_FAILED
997
1009
998 if merge_possible and not dry_run:
1010 if merge_possible and not dry_run:
999 try:
1011 try:
1000 shadow_repo._local_push(
1012 shadow_repo._local_push(
1001 pr_branch, self.path, target_ref.name, enable_hooks=True,
1013 pr_branch, self.path, target_ref.name, enable_hooks=True,
1002 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
1014 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
1003 merge_succeeded = True
1015 merge_succeeded = True
1004 except RepositoryError:
1016 except RepositoryError:
1005 log.exception(
1017 log.exception(
1006 'Failure when doing local push from the shadow '
1018 'Failure when doing local push from the shadow '
1007 'repository to the target repository at %s.', self.path)
1019 'repository to the target repository at %s.', self.path)
1008 merge_succeeded = False
1020 merge_succeeded = False
1009 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1021 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1010 metadata['target'] = 'git shadow repo'
1022 metadata['target'] = 'git shadow repo'
1011 metadata['merge_commit'] = pr_branch
1023 metadata['merge_commit'] = pr_branch
1012 else:
1024 else:
1013 merge_succeeded = False
1025 merge_succeeded = False
1014
1026
1015 return MergeResponse(
1027 return MergeResponse(
1016 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
1028 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
1017 metadata=metadata)
1029 metadata=metadata)
@@ -1,978 +1,979 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 HG repository module
22 HG repository module
23 """
23 """
24 import os
24 import os
25 import logging
25 import logging
26 import binascii
26 import binascii
27 import urllib
27 import urllib
28
28
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from rhodecode.lib.compat import OrderedDict
31 from rhodecode.lib.compat import OrderedDict
32 from rhodecode.lib.datelib import (
32 from rhodecode.lib.datelib import (
33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 from rhodecode.lib.utils import safe_unicode, safe_str
34 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.utils2 import CachedProperty
35 from rhodecode.lib.utils2 import CachedProperty
36 from rhodecode.lib.vcs import connection, exceptions
36 from rhodecode.lib.vcs import connection, exceptions
37 from rhodecode.lib.vcs.backends.base import (
37 from rhodecode.lib.vcs.backends.base import (
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 MergeFailureReason, Reference, BasePathPermissionChecker)
39 MergeFailureReason, Reference, BasePathPermissionChecker)
40 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
40 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
41 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
41 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
42 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
42 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
43 from rhodecode.lib.vcs.exceptions import (
43 from rhodecode.lib.vcs.exceptions import (
44 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
44 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
45 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
45 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
46 from rhodecode.lib.vcs.compat import configparser
46 from rhodecode.lib.vcs.compat import configparser
47
47
48 hexlify = binascii.hexlify
48 hexlify = binascii.hexlify
49 nullid = "\0" * 20
49 nullid = "\0" * 20
50
50
51 log = logging.getLogger(__name__)
51 log = logging.getLogger(__name__)
52
52
53
53
54 class MercurialRepository(BaseRepository):
54 class MercurialRepository(BaseRepository):
55 """
55 """
56 Mercurial repository backend
56 Mercurial repository backend
57 """
57 """
58 DEFAULT_BRANCH_NAME = 'default'
58 DEFAULT_BRANCH_NAME = 'default'
59
59
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 do_workspace_checkout=False, with_wire=None, bare=False):
61 do_workspace_checkout=False, with_wire=None, bare=False):
62 """
62 """
63 Raises RepositoryError if repository could not be find at the given
63 Raises RepositoryError if repository could not be find at the given
64 ``repo_path``.
64 ``repo_path``.
65
65
66 :param repo_path: local path of the repository
66 :param repo_path: local path of the repository
67 :param config: config object containing the repo configuration
67 :param config: config object containing the repo configuration
68 :param create=False: if set to True, would try to create repository if
68 :param create=False: if set to True, would try to create repository if
69 it does not exist rather than raising exception
69 it does not exist rather than raising exception
70 :param src_url=None: would try to clone repository from given location
70 :param src_url=None: would try to clone repository from given location
71 :param do_workspace_checkout=False: sets update of working copy after
71 :param do_workspace_checkout=False: sets update of working copy after
72 making a clone
72 making a clone
73 :param bare: not used, compatible with other VCS
73 :param bare: not used, compatible with other VCS
74 """
74 """
75
75
76 self.path = safe_str(os.path.abspath(repo_path))
76 self.path = safe_str(os.path.abspath(repo_path))
77 # mercurial since 4.4.X requires certain configuration to be present
77 # mercurial since 4.4.X requires certain configuration to be present
78 # because sometimes we init the repos with config we need to meet
78 # because sometimes we init the repos with config we need to meet
79 # special requirements
79 # special requirements
80 self.config = config if config else self.get_default_config(
80 self.config = config if config else self.get_default_config(
81 default=[('extensions', 'largefiles', '1')])
81 default=[('extensions', 'largefiles', '1')])
82 self.with_wire = with_wire or {"cache": False} # default should not use cache
82 self.with_wire = with_wire or {"cache": False} # default should not use cache
83
83
84 self._init_repo(create, src_url, do_workspace_checkout)
84 self._init_repo(create, src_url, do_workspace_checkout)
85
85
86 # caches
86 # caches
87 self._commit_ids = {}
87 self._commit_ids = {}
88
88
89 @LazyProperty
89 @LazyProperty
90 def _remote(self):
90 def _remote(self):
91 repo_id = self.path
91 repo_id = self.path
92 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
92 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
93
93
94 @CachedProperty
94 @CachedProperty
95 def commit_ids(self):
95 def commit_ids(self):
96 """
96 """
97 Returns list of commit ids, in ascending order. Being lazy
97 Returns list of commit ids, in ascending order. Being lazy
98 attribute allows external tools to inject shas from cache.
98 attribute allows external tools to inject shas from cache.
99 """
99 """
100 commit_ids = self._get_all_commit_ids()
100 commit_ids = self._get_all_commit_ids()
101 self._rebuild_cache(commit_ids)
101 self._rebuild_cache(commit_ids)
102 return commit_ids
102 return commit_ids
103
103
104 def _rebuild_cache(self, commit_ids):
104 def _rebuild_cache(self, commit_ids):
105 self._commit_ids = dict((commit_id, index)
105 self._commit_ids = dict((commit_id, index)
106 for index, commit_id in enumerate(commit_ids))
106 for index, commit_id in enumerate(commit_ids))
107
107
108 @CachedProperty
108 @CachedProperty
109 def branches(self):
109 def branches(self):
110 return self._get_branches()
110 return self._get_branches()
111
111
112 @CachedProperty
112 @CachedProperty
113 def branches_closed(self):
113 def branches_closed(self):
114 return self._get_branches(active=False, closed=True)
114 return self._get_branches(active=False, closed=True)
115
115
116 @CachedProperty
116 @CachedProperty
117 def branches_all(self):
117 def branches_all(self):
118 all_branches = {}
118 all_branches = {}
119 all_branches.update(self.branches)
119 all_branches.update(self.branches)
120 all_branches.update(self.branches_closed)
120 all_branches.update(self.branches_closed)
121 return all_branches
121 return all_branches
122
122
123 def _get_branches(self, active=True, closed=False):
123 def _get_branches(self, active=True, closed=False):
124 """
124 """
125 Gets branches for this repository
125 Gets branches for this repository
126 Returns only not closed active branches by default
126 Returns only not closed active branches by default
127
127
128 :param active: return also active branches
128 :param active: return also active branches
129 :param closed: return also closed branches
129 :param closed: return also closed branches
130
130
131 """
131 """
132 if self.is_empty():
132 if self.is_empty():
133 return {}
133 return {}
134
134
135 def get_name(ctx):
135 def get_name(ctx):
136 return ctx[0]
136 return ctx[0]
137
137
138 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
138 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
139 self._remote.branches(active, closed).items()]
139 self._remote.branches(active, closed).items()]
140
140
141 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
141 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
142
142
143 @CachedProperty
143 @CachedProperty
144 def tags(self):
144 def tags(self):
145 """
145 """
146 Gets tags for this repository
146 Gets tags for this repository
147 """
147 """
148 return self._get_tags()
148 return self._get_tags()
149
149
150 def _get_tags(self):
150 def _get_tags(self):
151 if self.is_empty():
151 if self.is_empty():
152 return {}
152 return {}
153
153
154 def get_name(ctx):
154 def get_name(ctx):
155 return ctx[0]
155 return ctx[0]
156
156
157 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
157 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
158 self._remote.tags().items()]
158 self._remote.tags().items()]
159
159
160 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
160 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
161
161
162 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
162 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
163 """
163 """
164 Creates and returns a tag for the given ``commit_id``.
164 Creates and returns a tag for the given ``commit_id``.
165
165
166 :param name: name for new tag
166 :param name: name for new tag
167 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
167 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
168 :param commit_id: commit id for which new tag would be created
168 :param commit_id: commit id for which new tag would be created
169 :param message: message of the tag's commit
169 :param message: message of the tag's commit
170 :param date: date of tag's commit
170 :param date: date of tag's commit
171
171
172 :raises TagAlreadyExistError: if tag with same name already exists
172 :raises TagAlreadyExistError: if tag with same name already exists
173 """
173 """
174 if name in self.tags:
174 if name in self.tags:
175 raise TagAlreadyExistError("Tag %s already exists" % name)
175 raise TagAlreadyExistError("Tag %s already exists" % name)
176
176
177 commit = self.get_commit(commit_id=commit_id)
177 commit = self.get_commit(commit_id=commit_id)
178 local = kwargs.setdefault('local', False)
178 local = kwargs.setdefault('local', False)
179
179
180 if message is None:
180 if message is None:
181 message = "Added tag %s for commit %s" % (name, commit.short_id)
181 message = "Added tag %s for commit %s" % (name, commit.short_id)
182
182
183 date, tz = date_to_timestamp_plus_offset(date)
183 date, tz = date_to_timestamp_plus_offset(date)
184
184
185 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
185 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
186 self._remote.invalidate_vcs_cache()
186 self._remote.invalidate_vcs_cache()
187
187
188 # Reinitialize tags
188 # Reinitialize tags
189 self._invalidate_prop_cache('tags')
189 self._invalidate_prop_cache('tags')
190 tag_id = self.tags[name]
190 tag_id = self.tags[name]
191
191
192 return self.get_commit(commit_id=tag_id)
192 return self.get_commit(commit_id=tag_id)
193
193
194 def remove_tag(self, name, user, message=None, date=None):
194 def remove_tag(self, name, user, message=None, date=None):
195 """
195 """
196 Removes tag with the given `name`.
196 Removes tag with the given `name`.
197
197
198 :param name: name of the tag to be removed
198 :param name: name of the tag to be removed
199 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
199 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
200 :param message: message of the tag's removal commit
200 :param message: message of the tag's removal commit
201 :param date: date of tag's removal commit
201 :param date: date of tag's removal commit
202
202
203 :raises TagDoesNotExistError: if tag with given name does not exists
203 :raises TagDoesNotExistError: if tag with given name does not exists
204 """
204 """
205 if name not in self.tags:
205 if name not in self.tags:
206 raise TagDoesNotExistError("Tag %s does not exist" % name)
206 raise TagDoesNotExistError("Tag %s does not exist" % name)
207
207
208 if message is None:
208 if message is None:
209 message = "Removed tag %s" % name
209 message = "Removed tag %s" % name
210 local = False
210 local = False
211
211
212 date, tz = date_to_timestamp_plus_offset(date)
212 date, tz = date_to_timestamp_plus_offset(date)
213
213
214 self._remote.tag(name, nullid, message, local, user, date, tz)
214 self._remote.tag(name, nullid, message, local, user, date, tz)
215 self._remote.invalidate_vcs_cache()
215 self._remote.invalidate_vcs_cache()
216 self._invalidate_prop_cache('tags')
216 self._invalidate_prop_cache('tags')
217
217
218 @LazyProperty
218 @LazyProperty
219 def bookmarks(self):
219 def bookmarks(self):
220 """
220 """
221 Gets bookmarks for this repository
221 Gets bookmarks for this repository
222 """
222 """
223 return self._get_bookmarks()
223 return self._get_bookmarks()
224
224
225 def _get_bookmarks(self):
225 def _get_bookmarks(self):
226 if self.is_empty():
226 if self.is_empty():
227 return {}
227 return {}
228
228
229 def get_name(ctx):
229 def get_name(ctx):
230 return ctx[0]
230 return ctx[0]
231
231
232 _bookmarks = [
232 _bookmarks = [
233 (safe_unicode(n), hexlify(h)) for n, h in
233 (safe_unicode(n), hexlify(h)) for n, h in
234 self._remote.bookmarks().items()]
234 self._remote.bookmarks().items()]
235
235
236 return OrderedDict(sorted(_bookmarks, key=get_name))
236 return OrderedDict(sorted(_bookmarks, key=get_name))
237
237
238 def _get_all_commit_ids(self):
238 def _get_all_commit_ids(self):
239 return self._remote.get_all_commit_ids('visible')
239 return self._remote.get_all_commit_ids('visible')
240
240
241 def get_diff(
241 def get_diff(
242 self, commit1, commit2, path='', ignore_whitespace=False,
242 self, commit1, commit2, path='', ignore_whitespace=False,
243 context=3, path1=None):
243 context=3, path1=None):
244 """
244 """
245 Returns (git like) *diff*, as plain text. Shows changes introduced by
245 Returns (git like) *diff*, as plain text. Shows changes introduced by
246 `commit2` since `commit1`.
246 `commit2` since `commit1`.
247
247
248 :param commit1: Entry point from which diff is shown. Can be
248 :param commit1: Entry point from which diff is shown. Can be
249 ``self.EMPTY_COMMIT`` - in this case, patch showing all
249 ``self.EMPTY_COMMIT`` - in this case, patch showing all
250 the changes since empty state of the repository until `commit2`
250 the changes since empty state of the repository until `commit2`
251 :param commit2: Until which commit changes should be shown.
251 :param commit2: Until which commit changes should be shown.
252 :param ignore_whitespace: If set to ``True``, would not show whitespace
252 :param ignore_whitespace: If set to ``True``, would not show whitespace
253 changes. Defaults to ``False``.
253 changes. Defaults to ``False``.
254 :param context: How many lines before/after changed lines should be
254 :param context: How many lines before/after changed lines should be
255 shown. Defaults to ``3``.
255 shown. Defaults to ``3``.
256 """
256 """
257 self._validate_diff_commits(commit1, commit2)
257 self._validate_diff_commits(commit1, commit2)
258 if path1 is not None and path1 != path:
258 if path1 is not None and path1 != path:
259 raise ValueError("Diff of two different paths not supported.")
259 raise ValueError("Diff of two different paths not supported.")
260
260
261 if path:
261 if path:
262 file_filter = [self.path, path]
262 file_filter = [self.path, path]
263 else:
263 else:
264 file_filter = None
264 file_filter = None
265
265
266 diff = self._remote.diff(
266 diff = self._remote.diff(
267 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
267 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
268 opt_git=True, opt_ignorews=ignore_whitespace,
268 opt_git=True, opt_ignorews=ignore_whitespace,
269 context=context)
269 context=context)
270 return MercurialDiff(diff)
270 return MercurialDiff(diff)
271
271
272 def strip(self, commit_id, branch=None):
272 def strip(self, commit_id, branch=None):
273 self._remote.strip(commit_id, update=False, backup="none")
273 self._remote.strip(commit_id, update=False, backup="none")
274
274
275 self._remote.invalidate_vcs_cache()
275 self._remote.invalidate_vcs_cache()
276 # clear cache
276 # clear cache
277 self._invalidate_prop_cache('commit_ids')
277 self._invalidate_prop_cache('commit_ids')
278
278
279 return len(self.commit_ids)
279 return len(self.commit_ids)
280
280
281 def verify(self):
281 def verify(self):
282 verify = self._remote.verify()
282 verify = self._remote.verify()
283
283
284 self._remote.invalidate_vcs_cache()
284 self._remote.invalidate_vcs_cache()
285 return verify
285 return verify
286
286
287 def hg_update_cache(self):
287 def hg_update_cache(self):
288 update_cache = self._remote.hg_update_cache()
288 update_cache = self._remote.hg_update_cache()
289
289
290 self._remote.invalidate_vcs_cache()
290 self._remote.invalidate_vcs_cache()
291 return update_cache
291 return update_cache
292
292
293 def hg_rebuild_fn_cache(self):
293 def hg_rebuild_fn_cache(self):
294 update_cache = self._remote.hg_rebuild_fn_cache()
294 update_cache = self._remote.hg_rebuild_fn_cache()
295
295
296 self._remote.invalidate_vcs_cache()
296 self._remote.invalidate_vcs_cache()
297 return update_cache
297 return update_cache
298
298
299 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
299 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
300 if commit_id1 == commit_id2:
300 if commit_id1 == commit_id2:
301 return commit_id1
301 return commit_id1
302
302
303 ancestors = self._remote.revs_from_revspec(
303 ancestors = self._remote.revs_from_revspec(
304 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
304 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
305 other_path=repo2.path)
305 other_path=repo2.path)
306 return repo2[ancestors[0]].raw_id if ancestors else None
306 return repo2[ancestors[0]].raw_id if ancestors else None
307
307
308 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
308 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
309 if commit_id1 == commit_id2:
309 if commit_id1 == commit_id2:
310 commits = []
310 commits = []
311 else:
311 else:
312 if merge:
312 if merge:
313 indexes = self._remote.revs_from_revspec(
313 indexes = self._remote.revs_from_revspec(
314 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
314 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
315 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
315 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
316 else:
316 else:
317 indexes = self._remote.revs_from_revspec(
317 indexes = self._remote.revs_from_revspec(
318 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
318 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
319 commit_id1, other_path=repo2.path)
319 commit_id1, other_path=repo2.path)
320
320
321 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
321 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
322 for idx in indexes]
322 for idx in indexes]
323
323
324 return commits
324 return commits
325
325
326 @staticmethod
326 @staticmethod
327 def check_url(url, config):
327 def check_url(url, config):
328 """
328 """
329 Function will check given url and try to verify if it's a valid
329 Function will check given url and try to verify if it's a valid
330 link. Sometimes it may happened that mercurial will issue basic
330 link. Sometimes it may happened that mercurial will issue basic
331 auth request that can cause whole API to hang when used from python
331 auth request that can cause whole API to hang when used from python
332 or other external calls.
332 or other external calls.
333
333
334 On failures it'll raise urllib2.HTTPError, exception is also thrown
334 On failures it'll raise urllib2.HTTPError, exception is also thrown
335 when the return code is non 200
335 when the return code is non 200
336 """
336 """
337 # check first if it's not an local url
337 # check first if it's not an local url
338 if os.path.isdir(url) or url.startswith('file:'):
338 if os.path.isdir(url) or url.startswith('file:'):
339 return True
339 return True
340
340
341 # Request the _remote to verify the url
341 # Request the _remote to verify the url
342 return connection.Hg.check_url(url, config.serialize())
342 return connection.Hg.check_url(url, config.serialize())
343
343
344 @staticmethod
344 @staticmethod
345 def is_valid_repository(path):
345 def is_valid_repository(path):
346 return os.path.isdir(os.path.join(path, '.hg'))
346 return os.path.isdir(os.path.join(path, '.hg'))
347
347
348 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
348 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
349 """
349 """
350 Function will check for mercurial repository in given path. If there
350 Function will check for mercurial repository in given path. If there
351 is no repository in that path it will raise an exception unless
351 is no repository in that path it will raise an exception unless
352 `create` parameter is set to True - in that case repository would
352 `create` parameter is set to True - in that case repository would
353 be created.
353 be created.
354
354
355 If `src_url` is given, would try to clone repository from the
355 If `src_url` is given, would try to clone repository from the
356 location at given clone_point. Additionally it'll make update to
356 location at given clone_point. Additionally it'll make update to
357 working copy accordingly to `do_workspace_checkout` flag.
357 working copy accordingly to `do_workspace_checkout` flag.
358 """
358 """
359 if create and os.path.exists(self.path):
359 if create and os.path.exists(self.path):
360 raise RepositoryError(
360 raise RepositoryError(
361 "Cannot create repository at %s, location already exist"
361 "Cannot create repository at %s, location already exist"
362 % self.path)
362 % self.path)
363
363
364 if src_url:
364 if src_url:
365 url = str(self._get_url(src_url))
365 url = str(self._get_url(src_url))
366 MercurialRepository.check_url(url, self.config)
366 MercurialRepository.check_url(url, self.config)
367
367
368 self._remote.clone(url, self.path, do_workspace_checkout)
368 self._remote.clone(url, self.path, do_workspace_checkout)
369
369
370 # Don't try to create if we've already cloned repo
370 # Don't try to create if we've already cloned repo
371 create = False
371 create = False
372
372
373 if create:
373 if create:
374 os.makedirs(self.path, mode=0o755)
374 os.makedirs(self.path, mode=0o755)
375 self._remote.localrepository(create)
375 self._remote.localrepository(create)
376
376
377 @LazyProperty
377 @LazyProperty
378 def in_memory_commit(self):
378 def in_memory_commit(self):
379 return MercurialInMemoryCommit(self)
379 return MercurialInMemoryCommit(self)
380
380
381 @LazyProperty
381 @LazyProperty
382 def description(self):
382 def description(self):
383 description = self._remote.get_config_value(
383 description = self._remote.get_config_value(
384 'web', 'description', untrusted=True)
384 'web', 'description', untrusted=True)
385 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
385 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
386
386
387 @LazyProperty
387 @LazyProperty
388 def contact(self):
388 def contact(self):
389 contact = (
389 contact = (
390 self._remote.get_config_value("web", "contact") or
390 self._remote.get_config_value("web", "contact") or
391 self._remote.get_config_value("ui", "username"))
391 self._remote.get_config_value("ui", "username"))
392 return safe_unicode(contact or self.DEFAULT_CONTACT)
392 return safe_unicode(contact or self.DEFAULT_CONTACT)
393
393
394 @LazyProperty
394 @LazyProperty
395 def last_change(self):
395 def last_change(self):
396 """
396 """
397 Returns last change made on this repository as
397 Returns last change made on this repository as
398 `datetime.datetime` object.
398 `datetime.datetime` object.
399 """
399 """
400 try:
400 try:
401 return self.get_commit().date
401 return self.get_commit().date
402 except RepositoryError:
402 except RepositoryError:
403 tzoffset = makedate()[1]
403 tzoffset = makedate()[1]
404 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
404 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
405
405
406 def _get_fs_mtime(self):
406 def _get_fs_mtime(self):
407 # fallback to filesystem
407 # fallback to filesystem
408 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
408 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
409 st_path = os.path.join(self.path, '.hg', "store")
409 st_path = os.path.join(self.path, '.hg', "store")
410 if os.path.exists(cl_path):
410 if os.path.exists(cl_path):
411 return os.stat(cl_path).st_mtime
411 return os.stat(cl_path).st_mtime
412 else:
412 else:
413 return os.stat(st_path).st_mtime
413 return os.stat(st_path).st_mtime
414
414
    def _get_url(self, url):
        """
        Returns normalized url. If schema is not given, would fall
        to filesystem
        (``file:///``) schema.
        """
        # NOTE: Python 2 code path — encodes to a byte string before the
        # substring checks below.
        url = url.encode('utf8')
        # 'default' is Mercurial's symbolic name for the default path and
        # must be passed through untouched; anything without a scheme is
        # treated as a local filesystem path.
        if url != 'default' and '://' not in url:
            url = "file:" + urllib.pathname2url(url)
        return url
425
425
426 def get_hook_location(self):
426 def get_hook_location(self):
427 """
427 """
428 returns absolute path to location where hooks are stored
428 returns absolute path to location where hooks are stored
429 """
429 """
430 return os.path.join(self.path, '.hg', '.hgrc')
430 return os.path.join(self.path, '.hg', '.hgrc')
431
431
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
                   translate_tag=None, maybe_unreachable=False):
        """
        Returns ``MercurialCommit`` object representing repository's
        commit at the given `commit_id` or `commit_idx`.

        :param commit_id: commit hash, bookmark, branch or tag name; mutually
            exclusive with `commit_idx`.
        :param commit_idx: numeric index into ``commit_ids``; may be negative.
        :param pre_load: optional list of attributes to pre-load on the commit.
        :param translate_tag: accepted for API compatibility — not used in
            this body.
        :param maybe_unreachable: accepted for API compatibility — not
            forwarded to the remote lookup in this body.
        :raises EmptyRepositoryError: if the repository has no commits.
        :raises CommitDoesNotExistError: if no commit matches the input.
        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        if commit_id is not None:
            self._validate_commit_id(commit_id)
            try:
                # we have cached idx, use it without contacting the remote
                idx = self._commit_ids[commit_id]
                return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
            except KeyError:
                # not cached — fall through to the remote lookup below
                pass

        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            try:
                _commit_id = self.commit_ids[commit_idx]
                if commit_idx < 0:
                    # normalize negative indices to their positive position
                    commit_idx = self.commit_ids.index(_commit_id)

                return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
            except IndexError:
                # out-of-range index: let the remote lookup report the error
                commit_id = commit_idx
        else:
            # neither id nor idx given: default to the tip commit
            commit_id = "tip"

        if isinstance(commit_id, unicode):
            commit_id = safe_str(commit_id)

        try:
            raw_id, idx = self._remote.lookup(commit_id, both=True)
        except CommitDoesNotExistError:
            # re-raise with a message naming the commit and repository
            msg = "Commit {} does not exist for `{}`".format(
                *map(safe_str, [commit_id, self.name]))
            raise CommitDoesNotExistError(msg)

        return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
473
474
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
        """
        Returns generator of ``MercurialCommit`` objects from start to end
        (both are inclusive)

        :param start_id: None, str(commit_id)
        :param end_id: None, str(commit_id)
        :param start_date: if specified, commits with commit date less than
          ``start_date`` would be filtered out from returned set
        :param end_date: if specified, commits with commit date greater than
          ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, commits not reachable from given
          branch would be filtered out from returned set
        :param show_hidden: Show hidden commits such as obsolete or hidden from
          Mercurial evolve
        :raise BranchDoesNotExistError: If given ``branch_name`` does not
            exist.
        :raise CommitDoesNotExistError: If commit for given ``start`` or
          ``end`` could not be found.
        """
        # actually we should check now if it's not an empty repo
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")
        self._validate_branch_name(branch_name)

        # NOTE(review): branch_ancestors is always False here, so the
        # 'ancestors(branch(...))' revset branch below is currently dead.
        branch_ancestors = False
        if start_id is not None:
            self._validate_commit_id(start_id)
            c_start = self.get_commit(commit_id=start_id)
            start_pos = self._commit_ids[c_start.raw_id]
        else:
            start_pos = None

        if end_id is not None:
            self._validate_commit_id(end_id)
            c_end = self.get_commit(commit_id=end_id)
            end_pos = max(0, self._commit_ids[c_end.raw_id])
        else:
            end_pos = None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        if end_pos is not None:
            # make the end bound inclusive for the slice below
            end_pos += 1

        # build a Mercurial revset expression from the requested filters
        commit_filter = []

        if branch_name and not branch_ancestors:
            commit_filter.append('branch("%s")' % (branch_name,))
        elif branch_name and branch_ancestors:
            commit_filter.append('ancestors(branch("%s"))' % (branch_name,))

        if start_date and not end_date:
            commit_filter.append('date(">%s")' % (start_date,))
        if end_date and not start_date:
            commit_filter.append('date("<%s")' % (end_date,))
        if start_date and end_date:
            commit_filter.append(
                'date(">%s") and date("<%s")' % (start_date, end_date))

        if not show_hidden:
            # exclude commits hidden by Mercurial evolve
            commit_filter.append('not obsolete()')
            commit_filter.append('not hidden()')

        # TODO: johbo: Figure out a simpler way for this solution
        collection_generator = CollectionGenerator
        if commit_filter:
            # filtered path: resolve the revset remotely and wrap results in
            # the index-based generator
            commit_filter = ' and '.join(map(safe_str, commit_filter))
            revisions = self._remote.rev_range([commit_filter])
            collection_generator = MercurialIndexBasedCollectionGenerator
        else:
            revisions = self.commit_ids

        if start_pos or end_pos:
            revisions = revisions[start_pos:end_pos]

        return collection_generator(self, revisions, pre_load=pre_load)
556
557
557 def pull(self, url, commit_ids=None):
558 def pull(self, url, commit_ids=None):
558 """
559 """
559 Pull changes from external location.
560 Pull changes from external location.
560
561
561 :param commit_ids: Optional. Can be set to a list of commit ids
562 :param commit_ids: Optional. Can be set to a list of commit ids
562 which shall be pulled from the other repository.
563 which shall be pulled from the other repository.
563 """
564 """
564 url = self._get_url(url)
565 url = self._get_url(url)
565 self._remote.pull(url, commit_ids=commit_ids)
566 self._remote.pull(url, commit_ids=commit_ids)
566 self._remote.invalidate_vcs_cache()
567 self._remote.invalidate_vcs_cache()
567
568
    def fetch(self, url, commit_ids=None):
        """
        Backward compatibility with GIT fetch==pull
        """
        # plain delegation — Mercurial has no separate fetch semantics here
        return self.pull(url, commit_ids=commit_ids)
573
574
574 def push(self, url):
575 def push(self, url):
575 url = self._get_url(url)
576 url = self._get_url(url)
576 self._remote.sync_push(url)
577 self._remote.sync_push(url)
577
578
578 def _local_clone(self, clone_path):
579 def _local_clone(self, clone_path):
579 """
580 """
580 Create a local clone of the current repo.
581 Create a local clone of the current repo.
581 """
582 """
582 self._remote.clone(self.path, clone_path, update_after_clone=True,
583 self._remote.clone(self.path, clone_path, update_after_clone=True,
583 hooks=False)
584 hooks=False)
584
585
    def _update(self, revision, clean=False):
        """
        Update the working copy to the specified revision.

        :param clean: discard uncommitted changes when updating.
        """
        log.debug('Doing checkout to commit: `%s` for %s', revision, self)
        self._remote.update(revision, clean=clean)
591
592
592 def _identify(self):
593 def _identify(self):
593 """
594 """
594 Return the current state of the working directory.
595 Return the current state of the working directory.
595 """
596 """
596 return self._remote.identify().strip().rstrip('+')
597 return self._remote.identify().strip().rstrip('+')
597
598
598 def _heads(self, branch=None):
599 def _heads(self, branch=None):
599 """
600 """
600 Return the commit ids of the repository heads.
601 Return the commit ids of the repository heads.
601 """
602 """
602 return self._remote.heads(branch=branch).strip().split(' ')
603 return self._remote.heads(branch=branch).strip().split(' ')
603
604
    def _ancestor(self, revision1, revision2):
        """
        Return the common ancestor of the two revisions.
        """
        return self._remote.ancestor(revision1, revision2)
609
610
610 def _local_push(
611 def _local_push(
611 self, revision, repository_path, push_branches=False,
612 self, revision, repository_path, push_branches=False,
612 enable_hooks=False):
613 enable_hooks=False):
613 """
614 """
614 Push the given revision to the specified repository.
615 Push the given revision to the specified repository.
615
616
616 :param push_branches: allow to create branches in the target repo.
617 :param push_branches: allow to create branches in the target repo.
617 """
618 """
618 self._remote.push(
619 self._remote.push(
619 [revision], repository_path, hooks=enable_hooks,
620 [revision], repository_path, hooks=enable_hooks,
620 push_branches=push_branches)
621 push_branches=push_branches)
621
622
    def _local_merge(self, target_ref, merge_message, user_name, user_email,
                     source_ref, use_rebase=False, dry_run=False):
        """
        Merge the given source_revision into the checked out revision.

        Returns the commit id of the merge and a boolean indicating if the
        commit needs to be pushed.

        :param use_rebase: rebase the source onto the target instead of
            creating a merge commit.
        :raises UnresolvedFilesInRepo: when the merge/rebase stops on
            conflicted files; args[0] carries the list of unresolved files.
        """
        # check out the target first so the merge/rebase applies onto it
        self._update(target_ref.commit_id, clean=True)

        ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
        is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)

        if ancestor == source_ref.commit_id:
            # Nothing to do, the changes were already integrated
            return target_ref.commit_id, False

        elif ancestor == target_ref.commit_id and is_the_same_branch:
            # In this case we should force a commit message
            # (fast-forward: the source already contains the target)
            return source_ref.commit_id, True

        unresolved = None
        if use_rebase:
            try:
                # temporary bookmark marks the source head to rebase from
                bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
                                                target_ref.commit_id)
                self.bookmark(bookmark_name, revision=source_ref.commit_id)
                self._remote.rebase(
                    source=source_ref.commit_id, dest=target_ref.commit_id)
                self._remote.invalidate_vcs_cache()
                self._update(bookmark_name, clean=True)
                return self._identify(), True
            except RepositoryError as e:
                # The rebase-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while rebasing shadow repo during merge.')
                if 'unresolved conflicts' in safe_str(e):
                    unresolved = self._remote.get_unresolved_files()
                    log.debug('unresolved files: %s', unresolved)

                # Cleanup any rebase leftovers
                self._remote.invalidate_vcs_cache()
                self._remote.rebase(abort=True)
                self._remote.invalidate_vcs_cache()
                self._remote.update(clean=True)
                if unresolved:
                    raise UnresolvedFilesInRepo(unresolved)
                else:
                    raise
        else:
            try:
                self._remote.merge(source_ref.commit_id)
                self._remote.invalidate_vcs_cache()
                self._remote.commit(
                    message=safe_str(merge_message),
                    username=safe_str('%s <%s>' % (user_name, user_email)))
                self._remote.invalidate_vcs_cache()
                return self._identify(), True
            except RepositoryError as e:
                # The merge-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while merging shadow repo during merge.')
                if 'unresolved merge conflicts' in safe_str(e):
                    unresolved = self._remote.get_unresolved_files()
                    log.debug('unresolved files: %s', unresolved)

                # Cleanup any merge leftovers
                self._remote.update(clean=True)
                if unresolved:
                    raise UnresolvedFilesInRepo(unresolved)
                else:
                    raise
694
695
    def _local_close(self, target_ref, user_name, user_email,
                     source_ref, close_message=''):
        """
        Close the branch of the given source_revision

        Returns the commit id of the close and a boolean indicating if the
        commit needs to be pushed.

        :param close_message: commit message for the closing commit;
            a default message naming the branch is used when empty.
        """
        # check out the branch head that is being closed
        self._update(source_ref.commit_id)
        message = close_message or "Closing branch: `{}`".format(source_ref.name)
        try:
            self._remote.commit(
                message=safe_str(message),
                username=safe_str('%s <%s>' % (user_name, user_email)),
                close_branch=True)
            self._remote.invalidate_vcs_cache()
            return self._identify(), True
        except RepositoryError:
            # Cleanup any commit leftovers
            self._remote.update(clean=True)
            raise
716
717
717 def _is_the_same_branch(self, target_ref, source_ref):
718 def _is_the_same_branch(self, target_ref, source_ref):
718 return (
719 return (
719 self._get_branch_name(target_ref) ==
720 self._get_branch_name(target_ref) ==
720 self._get_branch_name(source_ref))
721 self._get_branch_name(source_ref))
721
722
722 def _get_branch_name(self, ref):
723 def _get_branch_name(self, ref):
723 if ref.type == 'branch':
724 if ref.type == 'branch':
724 return ref.name
725 return ref.name
725 return self._remote.ctx_branch(ref.commit_id)
726 return self._remote.ctx_branch(ref.commit_id)
726
727
727 def _maybe_prepare_merge_workspace(
728 def _maybe_prepare_merge_workspace(
728 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
729 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
729 shadow_repository_path = self._get_shadow_repository_path(
730 shadow_repository_path = self._get_shadow_repository_path(
730 self.path, repo_id, workspace_id)
731 self.path, repo_id, workspace_id)
731 if not os.path.exists(shadow_repository_path):
732 if not os.path.exists(shadow_repository_path):
732 self._local_clone(shadow_repository_path)
733 self._local_clone(shadow_repository_path)
733 log.debug(
734 log.debug(
734 'Prepared shadow repository in %s', shadow_repository_path)
735 'Prepared shadow repository in %s', shadow_repository_path)
735
736
736 return shadow_repository_path
737 return shadow_repository_path
737
738
    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):
        """
        Merge ``source_ref`` of ``source_repo`` into ``target_ref`` of this
        repository via a shadow repository, and return a ``MergeResponse``
        describing possibility, success, resulting ref and failure reason.

        :param dry_run: perform the merge in the shadow repo but do not
            push the result back to this repository.
        :param use_rebase: rebase instead of creating a merge commit.
        :param close_branch: additionally close the source branch (only
            honoured for branch refs that differ from the target).
        """
        log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
                  'rebase' if use_rebase else 'merge', dry_run)
        # refuse to merge into a target commit that is not a current head
        if target_ref.commit_id not in self._heads():
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        try:
            # a branch with multiple heads is ambiguous as a merge target
            if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
                heads = '\n,'.join(self._heads(target_ref.name))
                metadata = {
                    'target_ref': target_ref,
                    'source_ref': source_ref,
                    'heads': heads
                }
                return MergeResponse(
                    False, False, None,
                    MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
                    metadata=metadata)
        except CommitDoesNotExistError:
            log.exception('Failure when looking up branch heads on hg target')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF,
                metadata={'target_ref': target_ref})

        shadow_repository_path = self._maybe_prepare_merge_workspace(
            repo_id, workspace_id, target_ref, source_ref)
        shadow_repo = self.get_shadow_instance(shadow_repository_path)

        log.debug('Pulling in target reference %s', target_ref)
        self._validate_pull_reference(target_ref)
        shadow_repo._local_pull(self.path, target_ref)

        try:
            log.debug('Pulling in source reference %s', source_ref)
            source_repo._validate_pull_reference(source_ref)
            shadow_repo._local_pull(source_repo.path, source_ref)
        except CommitDoesNotExistError:
            log.exception('Failure when doing local pull on hg shadow repo')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
                metadata={'source_ref': source_ref})

        merge_ref = None
        merge_commit_id = None
        close_commit_id = None
        merge_failure_reason = MergeFailureReason.NONE
        metadata = {}

        # enforce that close branch should be used only in case we source from
        # an actual Branch
        close_branch = close_branch and source_ref.type == 'branch'

        # don't allow to close branch if source and target are the same
        close_branch = close_branch and source_ref.name != target_ref.name

        needs_push_on_close = False
        if close_branch and not use_rebase and not dry_run:
            try:
                close_commit_id, needs_push_on_close = shadow_repo._local_close(
                    target_ref, merger_name, merger_email, source_ref)
                merge_possible = True
            except RepositoryError:
                log.exception('Failure when doing close branch on '
                              'shadow repo: %s', shadow_repo)
                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
        else:
            merge_possible = True

        needs_push = False
        if merge_possible:
            try:
                merge_commit_id, needs_push = shadow_repo._local_merge(
                    target_ref, merge_message, merger_name, merger_email,
                    source_ref, use_rebase=use_rebase, dry_run=dry_run)
                merge_possible = True

                # read the state of the close action, if it
                # maybe required a push
                needs_push = needs_push or needs_push_on_close

                # Set a bookmark pointing to the merge commit. This bookmark
                # may be used to easily identify the last successful merge
                # commit in the shadow repository.
                shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
                merge_ref = Reference('book', 'pr-merge', merge_commit_id)
            except SubrepoMergeError:
                log.exception(
                    'Subrepo merge error during local merge on hg shadow repo.')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
                needs_push = False
            except RepositoryError as e:
                log.exception('Failure when doing local merge on hg shadow repo')
                # surface the conflicted file list for the merge-check UI
                if isinstance(e, UnresolvedFilesInRepo):
                    metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))

                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
                needs_push = False

        if merge_possible and not dry_run:
            if needs_push:
                # In case the target is a bookmark, update it, so after pushing
                # the bookmarks is also updated in the target.
                if target_ref.type == 'book':
                    shadow_repo.bookmark(
                        target_ref.name, revision=merge_commit_id)
                try:
                    shadow_repo_with_hooks = self.get_shadow_instance(
                        shadow_repository_path,
                        enable_hooks=True)
                    # This is the actual merge action, we push from shadow
                    # into origin.
                    # Note: the push_branches option will push any new branch
                    # defined in the source repository to the target. This may
                    # be dangerous as branches are permanent in Mercurial.
                    # This feature was requested in issue #441.
                    shadow_repo_with_hooks._local_push(
                        merge_commit_id, self.path, push_branches=True,
                        enable_hooks=True)

                    # maybe we also need to push the close_commit_id
                    if close_commit_id:
                        shadow_repo_with_hooks._local_push(
                            close_commit_id, self.path, push_branches=True,
                            enable_hooks=True)
                    merge_succeeded = True
                except RepositoryError:
                    log.exception(
                        'Failure when doing local push from the shadow '
                        'repository to the target repository at %s.', self.path)
                    merge_succeeded = False
                    merge_failure_reason = MergeFailureReason.PUSH_FAILED
                    metadata['target'] = 'hg shadow repo'
                    metadata['merge_commit'] = merge_commit_id
            else:
                merge_succeeded = True
        else:
            merge_succeeded = False

        return MergeResponse(
            merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
            metadata=metadata)
888
889
889 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
890 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
890 config = self.config.copy()
891 config = self.config.copy()
891 if not enable_hooks:
892 if not enable_hooks:
892 config.clear_section('hooks')
893 config.clear_section('hooks')
893 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
894 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
894
895
895 def _validate_pull_reference(self, reference):
896 def _validate_pull_reference(self, reference):
896 if not (reference.name in self.bookmarks or
897 if not (reference.name in self.bookmarks or
897 reference.name in self.branches or
898 reference.name in self.branches or
898 self.get_commit(reference.commit_id)):
899 self.get_commit(reference.commit_id)):
899 raise CommitDoesNotExistError(
900 raise CommitDoesNotExistError(
900 'Unknown branch, bookmark or commit id')
901 'Unknown branch, bookmark or commit id')
901
902
902 def _local_pull(self, repository_path, reference):
903 def _local_pull(self, repository_path, reference):
903 """
904 """
904 Fetch a branch, bookmark or commit from a local repository.
905 Fetch a branch, bookmark or commit from a local repository.
905 """
906 """
906 repository_path = os.path.abspath(repository_path)
907 repository_path = os.path.abspath(repository_path)
907 if repository_path == self.path:
908 if repository_path == self.path:
908 raise ValueError('Cannot pull from the same repository')
909 raise ValueError('Cannot pull from the same repository')
909
910
910 reference_type_to_option_name = {
911 reference_type_to_option_name = {
911 'book': 'bookmark',
912 'book': 'bookmark',
912 'branch': 'branch',
913 'branch': 'branch',
913 }
914 }
914 option_name = reference_type_to_option_name.get(
915 option_name = reference_type_to_option_name.get(
915 reference.type, 'revision')
916 reference.type, 'revision')
916
917
917 if option_name == 'revision':
918 if option_name == 'revision':
918 ref = reference.commit_id
919 ref = reference.commit_id
919 else:
920 else:
920 ref = reference.name
921 ref = reference.name
921
922
922 options = {option_name: [ref]}
923 options = {option_name: [ref]}
923 self._remote.pull_cmd(repository_path, hooks=False, **options)
924 self._remote.pull_cmd(repository_path, hooks=False, **options)
924 self._remote.invalidate_vcs_cache()
925 self._remote.invalidate_vcs_cache()
925
926
926 def bookmark(self, bookmark, revision=None):
927 def bookmark(self, bookmark, revision=None):
927 if isinstance(bookmark, unicode):
928 if isinstance(bookmark, unicode):
928 bookmark = safe_str(bookmark)
929 bookmark = safe_str(bookmark)
929 self._remote.bookmark(bookmark, revision=revision)
930 self._remote.bookmark(bookmark, revision=revision)
930 self._remote.invalidate_vcs_cache()
931 self._remote.invalidate_vcs_cache()
931
932
932 def get_path_permissions(self, username):
933 def get_path_permissions(self, username):
933 hgacl_file = os.path.join(self.path, '.hg/hgacl')
934 hgacl_file = os.path.join(self.path, '.hg/hgacl')
934
935
935 def read_patterns(suffix):
936 def read_patterns(suffix):
936 svalue = None
937 svalue = None
937 for section, option in [
938 for section, option in [
938 ('narrowacl', username + suffix),
939 ('narrowacl', username + suffix),
939 ('narrowacl', 'default' + suffix),
940 ('narrowacl', 'default' + suffix),
940 ('narrowhgacl', username + suffix),
941 ('narrowhgacl', username + suffix),
941 ('narrowhgacl', 'default' + suffix)
942 ('narrowhgacl', 'default' + suffix)
942 ]:
943 ]:
943 try:
944 try:
944 svalue = hgacl.get(section, option)
945 svalue = hgacl.get(section, option)
945 break # stop at the first value we find
946 break # stop at the first value we find
946 except configparser.NoOptionError:
947 except configparser.NoOptionError:
947 pass
948 pass
948 if not svalue:
949 if not svalue:
949 return None
950 return None
950 result = ['/']
951 result = ['/']
951 for pattern in svalue.split():
952 for pattern in svalue.split():
952 result.append(pattern)
953 result.append(pattern)
953 if '*' not in pattern and '?' not in pattern:
954 if '*' not in pattern and '?' not in pattern:
954 result.append(pattern + '/*')
955 result.append(pattern + '/*')
955 return result
956 return result
956
957
957 if os.path.exists(hgacl_file):
958 if os.path.exists(hgacl_file):
958 try:
959 try:
959 hgacl = configparser.RawConfigParser()
960 hgacl = configparser.RawConfigParser()
960 hgacl.read(hgacl_file)
961 hgacl.read(hgacl_file)
961
962
962 includes = read_patterns('.includes')
963 includes = read_patterns('.includes')
963 excludes = read_patterns('.excludes')
964 excludes = read_patterns('.excludes')
964 return BasePathPermissionChecker.create_from_patterns(
965 return BasePathPermissionChecker.create_from_patterns(
965 includes, excludes)
966 includes, excludes)
966 except BaseException as e:
967 except BaseException as e:
967 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
968 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
968 hgacl_file, self.name, e)
969 hgacl_file, self.name, e)
969 raise exceptions.RepositoryRequirementError(msg)
970 raise exceptions.RepositoryRequirementError(msg)
970 else:
971 else:
971 return None
972 return None
972
973
973
974
974 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
975 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
975
976
976 def _commit_factory(self, commit_id):
977 def _commit_factory(self, commit_id):
977 return self.repo.get_commit(
978 return self.repo.get_commit(
978 commit_idx=commit_id, pre_load=self.pre_load)
979 commit_idx=commit_id, pre_load=self.pre_load)
@@ -1,369 +1,370 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 SVN repository module
22 SVN repository module
23 """
23 """
24
24
25 import logging
25 import logging
26 import os
26 import os
27 import urllib
27 import urllib
28
28
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from rhodecode.lib.compat import OrderedDict
31 from rhodecode.lib.compat import OrderedDict
32 from rhodecode.lib.datelib import date_astimestamp
32 from rhodecode.lib.datelib import date_astimestamp
33 from rhodecode.lib.utils import safe_str, safe_unicode
33 from rhodecode.lib.utils import safe_str, safe_unicode
34 from rhodecode.lib.utils2 import CachedProperty
34 from rhodecode.lib.utils2 import CachedProperty
35 from rhodecode.lib.vcs import connection, path as vcspath
35 from rhodecode.lib.vcs import connection, path as vcspath
36 from rhodecode.lib.vcs.backends import base
36 from rhodecode.lib.vcs.backends import base
37 from rhodecode.lib.vcs.backends.svn.commit import (
37 from rhodecode.lib.vcs.backends.svn.commit import (
38 SubversionCommit, _date_from_svn_properties)
38 SubversionCommit, _date_from_svn_properties)
39 from rhodecode.lib.vcs.backends.svn.diff import SubversionDiff
39 from rhodecode.lib.vcs.backends.svn.diff import SubversionDiff
40 from rhodecode.lib.vcs.backends.svn.inmemory import SubversionInMemoryCommit
40 from rhodecode.lib.vcs.backends.svn.inmemory import SubversionInMemoryCommit
41 from rhodecode.lib.vcs.conf import settings
41 from rhodecode.lib.vcs.conf import settings
42 from rhodecode.lib.vcs.exceptions import (
42 from rhodecode.lib.vcs.exceptions import (
43 CommitDoesNotExistError, EmptyRepositoryError, RepositoryError,
43 CommitDoesNotExistError, EmptyRepositoryError, RepositoryError,
44 VCSError, NodeDoesNotExistError)
44 VCSError, NodeDoesNotExistError)
45
45
46
46
47 log = logging.getLogger(__name__)
47 log = logging.getLogger(__name__)
48
48
49
49
50 class SubversionRepository(base.BaseRepository):
50 class SubversionRepository(base.BaseRepository):
51 """
51 """
52 Subversion backend implementation
52 Subversion backend implementation
53
53
54 .. important::
54 .. important::
55
55
56 It is very important to distinguish the commit index and the commit id
56 It is very important to distinguish the commit index and the commit id
57 which is assigned by Subversion. The first one is always handled as an
57 which is assigned by Subversion. The first one is always handled as an
58 `int` by this implementation. The commit id assigned by Subversion on
58 `int` by this implementation. The commit id assigned by Subversion on
59 the other side will always be a `str`.
59 the other side will always be a `str`.
60
60
61 There is a specific trap since the first commit will have the index
61 There is a specific trap since the first commit will have the index
62 ``0`` but the svn id will be ``"1"``.
62 ``0`` but the svn id will be ``"1"``.
63
63
64 """
64 """
65
65
66 # Note: Subversion does not really have a default branch name.
66 # Note: Subversion does not really have a default branch name.
67 DEFAULT_BRANCH_NAME = None
67 DEFAULT_BRANCH_NAME = None
68
68
69 contact = base.BaseRepository.DEFAULT_CONTACT
69 contact = base.BaseRepository.DEFAULT_CONTACT
70 description = base.BaseRepository.DEFAULT_DESCRIPTION
70 description = base.BaseRepository.DEFAULT_DESCRIPTION
71
71
72 def __init__(self, repo_path, config=None, create=False, src_url=None, with_wire=None,
72 def __init__(self, repo_path, config=None, create=False, src_url=None, with_wire=None,
73 bare=False, **kwargs):
73 bare=False, **kwargs):
74 self.path = safe_str(os.path.abspath(repo_path))
74 self.path = safe_str(os.path.abspath(repo_path))
75 self.config = config if config else self.get_default_config()
75 self.config = config if config else self.get_default_config()
76 self.with_wire = with_wire or {"cache": False} # default should not use cache
76 self.with_wire = with_wire or {"cache": False} # default should not use cache
77
77
78 self._init_repo(create, src_url)
78 self._init_repo(create, src_url)
79
79
80 # caches
80 # caches
81 self._commit_ids = {}
81 self._commit_ids = {}
82
82
83 @LazyProperty
83 @LazyProperty
84 def _remote(self):
84 def _remote(self):
85 repo_id = self.path
85 repo_id = self.path
86 return connection.Svn(self.path, repo_id, self.config, with_wire=self.with_wire)
86 return connection.Svn(self.path, repo_id, self.config, with_wire=self.with_wire)
87
87
88 def _init_repo(self, create, src_url):
88 def _init_repo(self, create, src_url):
89 if create and os.path.exists(self.path):
89 if create and os.path.exists(self.path):
90 raise RepositoryError(
90 raise RepositoryError(
91 "Cannot create repository at %s, location already exist"
91 "Cannot create repository at %s, location already exist"
92 % self.path)
92 % self.path)
93
93
94 if create:
94 if create:
95 self._remote.create_repository(settings.SVN_COMPATIBLE_VERSION)
95 self._remote.create_repository(settings.SVN_COMPATIBLE_VERSION)
96 if src_url:
96 if src_url:
97 src_url = _sanitize_url(src_url)
97 src_url = _sanitize_url(src_url)
98 self._remote.import_remote_repository(src_url)
98 self._remote.import_remote_repository(src_url)
99 else:
99 else:
100 self._check_path()
100 self._check_path()
101
101
102 @CachedProperty
102 @CachedProperty
103 def commit_ids(self):
103 def commit_ids(self):
104 head = self._remote.lookup(None)
104 head = self._remote.lookup(None)
105 return [str(r) for r in xrange(1, head + 1)]
105 return [str(r) for r in xrange(1, head + 1)]
106
106
107 def _rebuild_cache(self, commit_ids):
107 def _rebuild_cache(self, commit_ids):
108 pass
108 pass
109
109
110 def run_svn_command(self, cmd, **opts):
110 def run_svn_command(self, cmd, **opts):
111 """
111 """
112 Runs given ``cmd`` as svn command and returns tuple
112 Runs given ``cmd`` as svn command and returns tuple
113 (stdout, stderr).
113 (stdout, stderr).
114
114
115 :param cmd: full svn command to be executed
115 :param cmd: full svn command to be executed
116 :param opts: env options to pass into Subprocess command
116 :param opts: env options to pass into Subprocess command
117 """
117 """
118 if not isinstance(cmd, list):
118 if not isinstance(cmd, list):
119 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
119 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
120
120
121 skip_stderr_log = opts.pop('skip_stderr_log', False)
121 skip_stderr_log = opts.pop('skip_stderr_log', False)
122 out, err = self._remote.run_svn_command(cmd, **opts)
122 out, err = self._remote.run_svn_command(cmd, **opts)
123 if err and not skip_stderr_log:
123 if err and not skip_stderr_log:
124 log.debug('Stderr output of svn command "%s":\n%s', cmd, err)
124 log.debug('Stderr output of svn command "%s":\n%s', cmd, err)
125 return out, err
125 return out, err
126
126
127 @LazyProperty
127 @LazyProperty
128 def branches(self):
128 def branches(self):
129 return self._tags_or_branches('vcs_svn_branch')
129 return self._tags_or_branches('vcs_svn_branch')
130
130
131 @LazyProperty
131 @LazyProperty
132 def branches_closed(self):
132 def branches_closed(self):
133 return {}
133 return {}
134
134
135 @LazyProperty
135 @LazyProperty
136 def bookmarks(self):
136 def bookmarks(self):
137 return {}
137 return {}
138
138
139 @LazyProperty
139 @LazyProperty
140 def branches_all(self):
140 def branches_all(self):
141 # TODO: johbo: Implement proper branch support
141 # TODO: johbo: Implement proper branch support
142 all_branches = {}
142 all_branches = {}
143 all_branches.update(self.branches)
143 all_branches.update(self.branches)
144 all_branches.update(self.branches_closed)
144 all_branches.update(self.branches_closed)
145 return all_branches
145 return all_branches
146
146
147 @LazyProperty
147 @LazyProperty
148 def tags(self):
148 def tags(self):
149 return self._tags_or_branches('vcs_svn_tag')
149 return self._tags_or_branches('vcs_svn_tag')
150
150
151 def _tags_or_branches(self, config_section):
151 def _tags_or_branches(self, config_section):
152 found_items = {}
152 found_items = {}
153
153
154 if self.is_empty():
154 if self.is_empty():
155 return {}
155 return {}
156
156
157 for pattern in self._patterns_from_section(config_section):
157 for pattern in self._patterns_from_section(config_section):
158 pattern = vcspath.sanitize(pattern)
158 pattern = vcspath.sanitize(pattern)
159 tip = self.get_commit()
159 tip = self.get_commit()
160 try:
160 try:
161 if pattern.endswith('*'):
161 if pattern.endswith('*'):
162 basedir = tip.get_node(vcspath.dirname(pattern))
162 basedir = tip.get_node(vcspath.dirname(pattern))
163 directories = basedir.dirs
163 directories = basedir.dirs
164 else:
164 else:
165 directories = (tip.get_node(pattern), )
165 directories = (tip.get_node(pattern), )
166 except NodeDoesNotExistError:
166 except NodeDoesNotExistError:
167 continue
167 continue
168 found_items.update(
168 found_items.update(
169 (safe_unicode(n.path),
169 (safe_unicode(n.path),
170 self.commit_ids[-1])
170 self.commit_ids[-1])
171 for n in directories)
171 for n in directories)
172
172
173 def get_name(item):
173 def get_name(item):
174 return item[0]
174 return item[0]
175
175
176 return OrderedDict(sorted(found_items.items(), key=get_name))
176 return OrderedDict(sorted(found_items.items(), key=get_name))
177
177
178 def _patterns_from_section(self, section):
178 def _patterns_from_section(self, section):
179 return (pattern for key, pattern in self.config.items(section))
179 return (pattern for key, pattern in self.config.items(section))
180
180
181 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
181 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
182 if self != repo2:
182 if self != repo2:
183 raise ValueError(
183 raise ValueError(
184 "Subversion does not support getting common ancestor of"
184 "Subversion does not support getting common ancestor of"
185 " different repositories.")
185 " different repositories.")
186
186
187 if int(commit_id1) < int(commit_id2):
187 if int(commit_id1) < int(commit_id2):
188 return commit_id1
188 return commit_id1
189 return commit_id2
189 return commit_id2
190
190
191 def verify(self):
191 def verify(self):
192 verify = self._remote.verify()
192 verify = self._remote.verify()
193
193
194 self._remote.invalidate_vcs_cache()
194 self._remote.invalidate_vcs_cache()
195 return verify
195 return verify
196
196
197 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
197 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
198 # TODO: johbo: Implement better comparison, this is a very naive
198 # TODO: johbo: Implement better comparison, this is a very naive
199 # version which does not allow to compare branches, tags or folders
199 # version which does not allow to compare branches, tags or folders
200 # at all.
200 # at all.
201 if repo2 != self:
201 if repo2 != self:
202 raise ValueError(
202 raise ValueError(
203 "Subversion does not support comparison of of different "
203 "Subversion does not support comparison of of different "
204 "repositories.")
204 "repositories.")
205
205
206 if commit_id1 == commit_id2:
206 if commit_id1 == commit_id2:
207 return []
207 return []
208
208
209 commit_idx1 = self._get_commit_idx(commit_id1)
209 commit_idx1 = self._get_commit_idx(commit_id1)
210 commit_idx2 = self._get_commit_idx(commit_id2)
210 commit_idx2 = self._get_commit_idx(commit_id2)
211
211
212 commits = [
212 commits = [
213 self.get_commit(commit_idx=idx)
213 self.get_commit(commit_idx=idx)
214 for idx in range(commit_idx1 + 1, commit_idx2 + 1)]
214 for idx in range(commit_idx1 + 1, commit_idx2 + 1)]
215
215
216 return commits
216 return commits
217
217
218 def _get_commit_idx(self, commit_id):
218 def _get_commit_idx(self, commit_id):
219 try:
219 try:
220 svn_rev = int(commit_id)
220 svn_rev = int(commit_id)
221 except:
221 except:
222 # TODO: johbo: this might be only one case, HEAD, check this
222 # TODO: johbo: this might be only one case, HEAD, check this
223 svn_rev = self._remote.lookup(commit_id)
223 svn_rev = self._remote.lookup(commit_id)
224 commit_idx = svn_rev - 1
224 commit_idx = svn_rev - 1
225 if commit_idx >= len(self.commit_ids):
225 if commit_idx >= len(self.commit_ids):
226 raise CommitDoesNotExistError(
226 raise CommitDoesNotExistError(
227 "Commit at index %s does not exist." % (commit_idx, ))
227 "Commit at index %s does not exist." % (commit_idx, ))
228 return commit_idx
228 return commit_idx
229
229
230 @staticmethod
230 @staticmethod
231 def check_url(url, config):
231 def check_url(url, config):
232 """
232 """
233 Check if `url` is a valid source to import a Subversion repository.
233 Check if `url` is a valid source to import a Subversion repository.
234 """
234 """
235 # convert to URL if it's a local directory
235 # convert to URL if it's a local directory
236 if os.path.isdir(url):
236 if os.path.isdir(url):
237 url = 'file://' + urllib.pathname2url(url)
237 url = 'file://' + urllib.pathname2url(url)
238 return connection.Svn.check_url(url, config.serialize())
238 return connection.Svn.check_url(url, config.serialize())
239
239
240 @staticmethod
240 @staticmethod
241 def is_valid_repository(path):
241 def is_valid_repository(path):
242 try:
242 try:
243 SubversionRepository(path)
243 SubversionRepository(path)
244 return True
244 return True
245 except VCSError:
245 except VCSError:
246 pass
246 pass
247 return False
247 return False
248
248
249 def _check_path(self):
249 def _check_path(self):
250 if not os.path.exists(self.path):
250 if not os.path.exists(self.path):
251 raise VCSError('Path "%s" does not exist!' % (self.path, ))
251 raise VCSError('Path "%s" does not exist!' % (self.path, ))
252 if not self._remote.is_path_valid_repository(self.path):
252 if not self._remote.is_path_valid_repository(self.path):
253 raise VCSError(
253 raise VCSError(
254 'Path "%s" does not contain a Subversion repository' %
254 'Path "%s" does not contain a Subversion repository' %
255 (self.path, ))
255 (self.path, ))
256
256
257 @LazyProperty
257 @LazyProperty
258 def last_change(self):
258 def last_change(self):
259 """
259 """
260 Returns last change made on this repository as
260 Returns last change made on this repository as
261 `datetime.datetime` object.
261 `datetime.datetime` object.
262 """
262 """
263 # Subversion always has a first commit which has id "0" and contains
263 # Subversion always has a first commit which has id "0" and contains
264 # what we are looking for.
264 # what we are looking for.
265 last_id = len(self.commit_ids)
265 last_id = len(self.commit_ids)
266 properties = self._remote.revision_properties(last_id)
266 properties = self._remote.revision_properties(last_id)
267 return _date_from_svn_properties(properties)
267 return _date_from_svn_properties(properties)
268
268
269 @LazyProperty
269 @LazyProperty
270 def in_memory_commit(self):
270 def in_memory_commit(self):
271 return SubversionInMemoryCommit(self)
271 return SubversionInMemoryCommit(self)
272
272
273 def get_hook_location(self):
273 def get_hook_location(self):
274 """
274 """
275 returns absolute path to location where hooks are stored
275 returns absolute path to location where hooks are stored
276 """
276 """
277 return os.path.join(self.path, 'hooks')
277 return os.path.join(self.path, 'hooks')
278
278
279 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
279 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
280 translate_tag=None, maybe_unreachable=False):
280 if self.is_empty():
281 if self.is_empty():
281 raise EmptyRepositoryError("There are no commits yet")
282 raise EmptyRepositoryError("There are no commits yet")
282 if commit_id is not None:
283 if commit_id is not None:
283 self._validate_commit_id(commit_id)
284 self._validate_commit_id(commit_id)
284 elif commit_idx is not None:
285 elif commit_idx is not None:
285 self._validate_commit_idx(commit_idx)
286 self._validate_commit_idx(commit_idx)
286 try:
287 try:
287 commit_id = self.commit_ids[commit_idx]
288 commit_id = self.commit_ids[commit_idx]
288 except IndexError:
289 except IndexError:
289 raise CommitDoesNotExistError('No commit with idx: {}'.format(commit_idx))
290 raise CommitDoesNotExistError('No commit with idx: {}'.format(commit_idx))
290
291
291 commit_id = self._sanitize_commit_id(commit_id)
292 commit_id = self._sanitize_commit_id(commit_id)
292 commit = SubversionCommit(repository=self, commit_id=commit_id)
293 commit = SubversionCommit(repository=self, commit_id=commit_id)
293 return commit
294 return commit
294
295
295 def get_commits(
296 def get_commits(
296 self, start_id=None, end_id=None, start_date=None, end_date=None,
297 self, start_id=None, end_id=None, start_date=None, end_date=None,
297 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
298 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
298 if self.is_empty():
299 if self.is_empty():
299 raise EmptyRepositoryError("There are no commit_ids yet")
300 raise EmptyRepositoryError("There are no commit_ids yet")
300 self._validate_branch_name(branch_name)
301 self._validate_branch_name(branch_name)
301
302
302 if start_id is not None:
303 if start_id is not None:
303 self._validate_commit_id(start_id)
304 self._validate_commit_id(start_id)
304 if end_id is not None:
305 if end_id is not None:
305 self._validate_commit_id(end_id)
306 self._validate_commit_id(end_id)
306
307
307 start_raw_id = self._sanitize_commit_id(start_id)
308 start_raw_id = self._sanitize_commit_id(start_id)
308 start_pos = self.commit_ids.index(start_raw_id) if start_id else None
309 start_pos = self.commit_ids.index(start_raw_id) if start_id else None
309 end_raw_id = self._sanitize_commit_id(end_id)
310 end_raw_id = self._sanitize_commit_id(end_id)
310 end_pos = max(0, self.commit_ids.index(end_raw_id)) if end_id else None
311 end_pos = max(0, self.commit_ids.index(end_raw_id)) if end_id else None
311
312
312 if None not in [start_id, end_id] and start_pos > end_pos:
313 if None not in [start_id, end_id] and start_pos > end_pos:
313 raise RepositoryError(
314 raise RepositoryError(
314 "Start commit '%s' cannot be after end commit '%s'" %
315 "Start commit '%s' cannot be after end commit '%s'" %
315 (start_id, end_id))
316 (start_id, end_id))
316 if end_pos is not None:
317 if end_pos is not None:
317 end_pos += 1
318 end_pos += 1
318
319
319 # Date based filtering
320 # Date based filtering
320 if start_date or end_date:
321 if start_date or end_date:
321 start_raw_id, end_raw_id = self._remote.lookup_interval(
322 start_raw_id, end_raw_id = self._remote.lookup_interval(
322 date_astimestamp(start_date) if start_date else None,
323 date_astimestamp(start_date) if start_date else None,
323 date_astimestamp(end_date) if end_date else None)
324 date_astimestamp(end_date) if end_date else None)
324 start_pos = start_raw_id - 1
325 start_pos = start_raw_id - 1
325 end_pos = end_raw_id
326 end_pos = end_raw_id
326
327
327 commit_ids = self.commit_ids
328 commit_ids = self.commit_ids
328
329
329 # TODO: johbo: Reconsider impact of DEFAULT_BRANCH_NAME here
330 # TODO: johbo: Reconsider impact of DEFAULT_BRANCH_NAME here
330 if branch_name not in [None, self.DEFAULT_BRANCH_NAME]:
331 if branch_name not in [None, self.DEFAULT_BRANCH_NAME]:
331 svn_rev = long(self.commit_ids[-1])
332 svn_rev = long(self.commit_ids[-1])
332 commit_ids = self._remote.node_history(
333 commit_ids = self._remote.node_history(
333 path=branch_name, revision=svn_rev, limit=None)
334 path=branch_name, revision=svn_rev, limit=None)
334 commit_ids = [str(i) for i in reversed(commit_ids)]
335 commit_ids = [str(i) for i in reversed(commit_ids)]
335
336
336 if start_pos or end_pos:
337 if start_pos or end_pos:
337 commit_ids = commit_ids[start_pos:end_pos]
338 commit_ids = commit_ids[start_pos:end_pos]
338 return base.CollectionGenerator(self, commit_ids, pre_load=pre_load)
339 return base.CollectionGenerator(self, commit_ids, pre_load=pre_load)
339
340
340 def _sanitize_commit_id(self, commit_id):
341 def _sanitize_commit_id(self, commit_id):
341 if commit_id and commit_id.isdigit():
342 if commit_id and commit_id.isdigit():
342 if int(commit_id) <= len(self.commit_ids):
343 if int(commit_id) <= len(self.commit_ids):
343 return commit_id
344 return commit_id
344 else:
345 else:
345 raise CommitDoesNotExistError(
346 raise CommitDoesNotExistError(
346 "Commit %s does not exist." % (commit_id, ))
347 "Commit %s does not exist." % (commit_id, ))
347 if commit_id not in [
348 if commit_id not in [
348 None, 'HEAD', 'tip', self.DEFAULT_BRANCH_NAME]:
349 None, 'HEAD', 'tip', self.DEFAULT_BRANCH_NAME]:
349 raise CommitDoesNotExistError(
350 raise CommitDoesNotExistError(
350 "Commit id %s not understood." % (commit_id, ))
351 "Commit id %s not understood." % (commit_id, ))
351 svn_rev = self._remote.lookup('HEAD')
352 svn_rev = self._remote.lookup('HEAD')
352 return str(svn_rev)
353 return str(svn_rev)
353
354
354 def get_diff(
355 def get_diff(
355 self, commit1, commit2, path=None, ignore_whitespace=False,
356 self, commit1, commit2, path=None, ignore_whitespace=False,
356 context=3, path1=None):
357 context=3, path1=None):
357 self._validate_diff_commits(commit1, commit2)
358 self._validate_diff_commits(commit1, commit2)
358 svn_rev1 = long(commit1.raw_id)
359 svn_rev1 = long(commit1.raw_id)
359 svn_rev2 = long(commit2.raw_id)
360 svn_rev2 = long(commit2.raw_id)
360 diff = self._remote.diff(
361 diff = self._remote.diff(
361 svn_rev1, svn_rev2, path1=path1, path2=path,
362 svn_rev1, svn_rev2, path1=path1, path2=path,
362 ignore_whitespace=ignore_whitespace, context=context)
363 ignore_whitespace=ignore_whitespace, context=context)
363 return SubversionDiff(diff)
364 return SubversionDiff(diff)
364
365
365
366
366 def _sanitize_url(url):
367 def _sanitize_url(url):
367 if '://' not in url:
368 if '://' not in url:
368 url = 'file://' + urllib.pathname2url(url)
369 url = 'file://' + urllib.pathname2url(url)
369 return url
370 return url
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now