##// END OF EJS Templates
tests: fixing tests for pull-requests and changelog(commits)
marcink -
r3772:5c4b5f3d new-ui
parent child Browse files
Show More
@@ -1,81 +1,81 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import pytest
22 22
23 23 from rhodecode.model.db import ChangesetStatus
24 24 from rhodecode.api.tests.utils import (
25 25 build_data, api_call, assert_error, assert_ok)
26 26
27 27
28 28 @pytest.mark.usefixtures("testuser_api", "app")
29 29 class TestCommentCommit(object):
30 30 def test_api_comment_commit_on_empty_repo(self, backend):
31 31 repo = backend.create_repo()
32 32 id_, params = build_data(
33 33 self.apikey, 'comment_commit', repoid=repo.repo_name,
34 34 commit_id='tip', message='message', status_change=None)
35 35 response = api_call(self.app, params)
36 36 expected = 'There are no commits yet'
37 37 assert_error(id_, expected, given=response.body)
38 38
39 39 @pytest.mark.parametrize("commit_id, expected_err", [
40 ('abcabca', {'hg': 'Commit {commit} does not exist for {repo}',
41 'git': 'Commit {commit} does not exist for {repo}',
40 ('abcabca', {'hg': 'Commit {commit} does not exist for `{repo}`',
41 'git': 'Commit {commit} does not exist for `{repo}`',
42 42 'svn': 'Commit id {commit} not understood.'}),
43 ('idontexist', {'hg': 'Commit {commit} does not exist for {repo}',
44 'git': 'Commit {commit} does not exist for {repo}',
43 ('idontexist', {'hg': 'Commit {commit} does not exist for `{repo}`',
44 'git': 'Commit {commit} does not exist for `{repo}`',
45 45 'svn': 'Commit id {commit} not understood.'}),
46 46 ])
47 47 def test_api_comment_commit_wrong_hash(self, backend, commit_id, expected_err):
48 48 repo_name = backend.repo.repo_name
49 49 id_, params = build_data(
50 50 self.apikey, 'comment_commit', repoid=repo_name,
51 51 commit_id=commit_id, message='message', status_change=None)
52 52 response = api_call(self.app, params)
53 53
54 54 expected_err = expected_err[backend.alias]
55 55 expected_err = expected_err.format(
56 repo=backend.repo.scm_instance(), commit=commit_id)
56 repo=backend.repo.scm_instance().name, commit=commit_id)
57 57 assert_error(id_, expected_err, given=response.body)
58 58
59 59 @pytest.mark.parametrize("status_change, message, commit_id", [
60 60 (None, 'Hallo', 'tip'),
61 61 (ChangesetStatus.STATUS_APPROVED, 'Approved', 'tip'),
62 62 (ChangesetStatus.STATUS_REJECTED, 'Rejected', 'tip'),
63 63 ])
64 64 def test_api_comment_commit(
65 65 self, backend, status_change, message, commit_id,
66 66 no_notifications):
67 67
68 68 commit_id = backend.repo.scm_instance().get_commit(commit_id).raw_id
69 69
70 70 id_, params = build_data(
71 71 self.apikey, 'comment_commit', repoid=backend.repo_name,
72 72 commit_id=commit_id, message=message, status=status_change)
73 73 response = api_call(self.app, params)
74 74 repo = backend.repo.scm_instance()
75 75 expected = {
76 76 'msg': 'Commented on commit `%s` for repository `%s`' % (
77 77 repo.get_commit().raw_id, backend.repo_name),
78 78 'status_change': status_change,
79 79 'success': True
80 80 }
81 81 assert_ok(id_, expected, given=response.body)
@@ -1,83 +1,84 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import pytest
22 22 from rhodecode.model.db import Repository
23 23
24 24
25 25 def route_path(name, params=None, **kwargs):
26 26 import urllib
27 27
28 28 base_url = {
29 29 'pullrequest_show_all': '/{repo_name}/pull-request',
30 30 'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
31 31 }[name].format(**kwargs)
32 32
33 33 if params:
34 34 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
35 35 return base_url
36 36
37 37
38 38 @pytest.mark.backends("git", "hg")
39 39 @pytest.mark.usefixtures('autologin_user', 'app')
40 40 class TestPullRequestList(object):
41 41
42 42 @pytest.mark.parametrize('params, expected_title', [
43 ({'source': 0, 'closed': 1}, 'Closed Pull Requests'),
44 ({'source': 0, 'my': 1}, 'opened by me'),
45 ({'source': 0, 'awaiting_review': 1}, 'awaiting review'),
46 ({'source': 0, 'awaiting_my_review': 1}, 'awaiting my review'),
47 ({'source': 1}, 'Pull Requests from'),
43 ({'source': 0, 'closed': 1}, 'Closed'),
44 ({'source': 0, 'my': 1}, 'Opened by me'),
45 ({'source': 0, 'awaiting_review': 1}, 'Awaiting review'),
46 ({'source': 0, 'awaiting_my_review': 1}, 'Awaiting my review'),
47 ({'source': 1}, 'From this repo'),
48 48 ])
49 49 def test_showing_list_page(self, backend, pr_util, params, expected_title):
50 50 pull_request = pr_util.create_pull_request()
51 51
52 52 response = self.app.get(
53 53 route_path('pullrequest_show_all',
54 54 repo_name=pull_request.target_repo.repo_name,
55 55 params=params))
56 56
57 57 assert_response = response.assert_response()
58 assert_response.element_equals_to('.panel-title', expected_title)
59 element = assert_response.get_element('.panel-title')
60 element_text = assert_response._element_to_string(element)
58
59 element = assert_response.get_element('.title .active')
60 element_text = element.text_content()
61 assert expected_title == element_text
61 62
62 63 def test_showing_list_page_data(self, backend, pr_util, xhr_header):
63 64 pull_request = pr_util.create_pull_request()
64 65 response = self.app.get(
65 66 route_path('pullrequest_show_all_data',
66 67 repo_name=pull_request.target_repo.repo_name),
67 68 extra_environ=xhr_header)
68 69
69 70 assert response.json['recordsTotal'] == 1
70 71 assert response.json['data'][0]['description'] == 'Description'
71 72
72 73 def test_description_is_escaped_on_index_page(self, backend, pr_util, xhr_header):
73 74 xss_description = "<script>alert('Hi!')</script>"
74 75 pull_request = pr_util.create_pull_request(description=xss_description)
75 76
76 77 response = self.app.get(
77 78 route_path('pullrequest_show_all_data',
78 79 repo_name=pull_request.target_repo.repo_name),
79 80 extra_environ=xhr_header)
80 81
81 82 assert response.json['recordsTotal'] == 1
82 83 assert response.json['data'][0]['description'] == \
83 84 "&lt;script&gt;alert(&#39;Hi!&#39;)&lt;/script&gt;"
@@ -1,213 +1,218 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import re
22 22
23 23 import pytest
24 24
25 25 from rhodecode.apps.repository.views.repo_changelog import DEFAULT_CHANGELOG_SIZE
26 26 from rhodecode.tests import TestController
27 27
28 28 MATCH_HASH = re.compile(r'<span class="commit_hash">r(\d+):[\da-f]+</span>')
29 29
30 30
31 31 def route_path(name, params=None, **kwargs):
32 32 import urllib
33 33
34 34 base_url = {
35 35 'repo_changelog': '/{repo_name}/changelog',
36 36 'repo_commits': '/{repo_name}/commits',
37 37 'repo_commits_file': '/{repo_name}/commits/{commit_id}/{f_path}',
38 38 'repo_commits_elements': '/{repo_name}/commits_elements',
39 39 }[name].format(**kwargs)
40 40
41 41 if params:
42 42 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
43 43 return base_url
44 44
45 45
46 46 def assert_commits_on_page(response, indexes):
47 47 found_indexes = [int(idx) for idx in MATCH_HASH.findall(response.body)]
48 48 assert found_indexes == indexes
49 49
50 50
51 51 class TestChangelogController(TestController):
52 52
53 53 def test_commits_page(self, backend):
54 54 self.log_user()
55 55 response = self.app.get(
56 56 route_path('repo_commits', repo_name=backend.repo_name))
57 57
58 58 first_idx = -1
59 59 last_idx = -DEFAULT_CHANGELOG_SIZE
60 60 self.assert_commit_range_on_page(
61 61 response, first_idx, last_idx, backend)
62 62
63 63 def test_changelog(self, backend):
64 64 self.log_user()
65 65 response = self.app.get(
66 66 route_path('repo_changelog', repo_name=backend.repo_name))
67 67
68 68 first_idx = -1
69 69 last_idx = -DEFAULT_CHANGELOG_SIZE
70 70 self.assert_commit_range_on_page(
71 71 response, first_idx, last_idx, backend)
72 72
73 73 @pytest.mark.backends("hg", "git")
74 74 def test_changelog_filtered_by_branch(self, backend):
75 75 self.log_user()
76 76 self.app.get(
77 77 route_path('repo_changelog', repo_name=backend.repo_name,
78 78 params=dict(branch=backend.default_branch_name)),
79 79 status=200)
80 80
81 81 @pytest.mark.backends("hg", "git")
82 82 def test_commits_filtered_by_branch(self, backend):
83 83 self.log_user()
84 84 self.app.get(
85 85 route_path('repo_commits', repo_name=backend.repo_name,
86 86 params=dict(branch=backend.default_branch_name)),
87 87 status=200)
88 88
89 89 @pytest.mark.backends("svn")
90 90 def test_changelog_filtered_by_branch_svn(self, autologin_user, backend):
91 91 repo = backend['svn-simple-layout']
92 92 response = self.app.get(
93 93 route_path('repo_changelog', repo_name=repo.repo_name,
94 94 params=dict(branch='trunk')),
95 95 status=200)
96 96
97 97 assert_commits_on_page(response, indexes=[15, 12, 7, 3, 2, 1])
98 98
99 99 def test_commits_filtered_by_wrong_branch(self, backend):
100 100 self.log_user()
101 101 branch = 'wrong-branch-name'
102 102 response = self.app.get(
103 103 route_path('repo_commits', repo_name=backend.repo_name,
104 104 params=dict(branch=branch)),
105 105 status=302)
106 106 expected_url = '/{repo}/commits/{branch}'.format(
107 107 repo=backend.repo_name, branch=branch)
108 108 assert expected_url in response.location
109 109 response = response.follow()
110 110 expected_warning = 'Branch {} is not found.'.format(branch)
111 111 assert expected_warning in response.body
112 112
113 113 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
114 114 def test_changelog_filtered_by_branch_with_merges(
115 115 self, autologin_user, backend):
116 116
117 117 # Note: The changelog of branch "b" does not contain the commit "a1"
118 118 # although this is a parent of commit "b1". And branch "b" has commits
119 119 # which have a smaller index than commit "a1".
120 120 commits = [
121 121 {'message': 'a'},
122 122 {'message': 'b', 'branch': 'b'},
123 123 {'message': 'a1', 'parents': ['a']},
124 124 {'message': 'b1', 'branch': 'b', 'parents': ['b', 'a1']},
125 125 ]
126 126 backend.create_repo(commits)
127 127
128 128 self.app.get(
129 129 route_path('repo_changelog', repo_name=backend.repo_name,
130 130 params=dict(branch='b')),
131 131 status=200)
132 132
133 133 @pytest.mark.backends("hg")
134 134 def test_commits_closed_branches(self, autologin_user, backend):
135 135 repo = backend['closed_branch']
136 136 response = self.app.get(
137 137 route_path('repo_commits', repo_name=repo.repo_name,
138 138 params=dict(branch='experimental')),
139 139 status=200)
140 140
141 141 assert_commits_on_page(response, indexes=[3, 1])
142 142
143 143 def test_changelog_pagination(self, backend):
144 144 self.log_user()
145 145 # pagination, walk up to page 6
146 146 changelog_url = route_path(
147 147 'repo_commits', repo_name=backend.repo_name)
148 148
149 149 for page in range(1, 7):
150 150 response = self.app.get(changelog_url, {'page': page})
151 151
152 152 first_idx = -DEFAULT_CHANGELOG_SIZE * (page - 1) - 1
153 153 last_idx = -DEFAULT_CHANGELOG_SIZE * page
154 154 self.assert_commit_range_on_page(response, first_idx, last_idx, backend)
155 155
156 156 def assert_commit_range_on_page(
157 157 self, response, first_idx, last_idx, backend):
158 158 input_template = (
159 """<input class="commit-range" id="%(raw_id)s" """
159 """<input class="commit-range" """
160 """data-commit-id="%(raw_id)s" data-commit-idx="%(idx)s" id="%(raw_id)s" """
160 161 """name="%(raw_id)s" type="checkbox" value="1" />"""
161 162 )
163
162 164 commit_span_template = """<span class="commit_hash">r%s:%s</span>"""
163 165 repo = backend.repo
164 166
165 167 first_commit_on_page = repo.get_commit(commit_idx=first_idx)
166 168 response.mustcontain(
167 input_template % {'raw_id': first_commit_on_page.raw_id})
169 input_template % {'raw_id': first_commit_on_page.raw_id,
170 'idx': first_commit_on_page.idx})
171
168 172 response.mustcontain(commit_span_template % (
169 173 first_commit_on_page.idx, first_commit_on_page.short_id)
170 174 )
171 175
172 176 last_commit_on_page = repo.get_commit(commit_idx=last_idx)
173 177 response.mustcontain(
174 input_template % {'raw_id': last_commit_on_page.raw_id})
178 input_template % {'raw_id': last_commit_on_page.raw_id,
179 'idx': last_commit_on_page.idx})
175 180 response.mustcontain(commit_span_template % (
176 181 last_commit_on_page.idx, last_commit_on_page.short_id)
177 182 )
178 183
179 184 first_commit_of_next_page = repo.get_commit(commit_idx=last_idx - 1)
180 185 first_span_of_next_page = commit_span_template % (
181 186 first_commit_of_next_page.idx, first_commit_of_next_page.short_id)
182 187 assert first_span_of_next_page not in response
183 188
184 189 @pytest.mark.parametrize('test_path', [
185 190 'vcs/exceptions.py',
186 191 '/vcs/exceptions.py',
187 192 '//vcs/exceptions.py'
188 193 ])
189 194 def test_commits_with_filenode(self, backend, test_path):
190 195 self.log_user()
191 196 response = self.app.get(
192 197 route_path('repo_commits_file', repo_name=backend.repo_name,
193 198 commit_id='tip', f_path=test_path),
194 199 )
195 200
196 201 # history commits messages
197 202 response.mustcontain('Added exceptions module, this time for real')
198 203 response.mustcontain('Added not implemented hg backend test case')
199 204 response.mustcontain('Added BaseChangeset class')
200 205
201 206 def test_commits_with_filenode_that_is_dirnode(self, backend):
202 207 self.log_user()
203 208 self.app.get(
204 209 route_path('repo_commits_file', repo_name=backend.repo_name,
205 210 commit_id='tip', f_path='/tests'),
206 211 status=302)
207 212
208 213 def test_commits_with_filenode_not_existing(self, backend):
209 214 self.log_user()
210 215 self.app.get(
211 216 route_path('repo_commits_file', repo_name=backend.repo_name,
212 217 commit_id='tip', f_path='wrong_path'),
213 218 status=302)
@@ -1,535 +1,533 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 GIT commit module
23 23 """
24 24
25 25 import re
26 26 import stat
27 27 from itertools import chain
28 28 from StringIO import StringIO
29 29
30 30 from zope.cachedescriptors.property import Lazy as LazyProperty
31 31
32 32 from rhodecode.lib.datelib import utcdate_fromtimestamp
33 33 from rhodecode.lib.utils import safe_unicode, safe_str
34 34 from rhodecode.lib.utils2 import safe_int
35 35 from rhodecode.lib.vcs.conf import settings
36 36 from rhodecode.lib.vcs.backends import base
37 37 from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError
38 38 from rhodecode.lib.vcs.nodes import (
39 39 FileNode, DirNode, NodeKind, RootNode, SubModuleNode,
40 40 ChangedFileNodesGenerator, AddedFileNodesGenerator,
41 41 RemovedFileNodesGenerator, LargeFileNode)
42 42 from rhodecode.lib.vcs.compat import configparser
43 43
44 44
45 45 class GitCommit(base.BaseCommit):
46 46 """
47 47 Represents state of the repository at single commit id.
48 48 """
49 49 _author_property = 'author'
50 50 _committer_property = 'committer'
51 51 _date_property = 'commit_time'
52 52 _date_tz_property = 'commit_timezone'
53 53 _message_property = 'message'
54 54 _parents_property = 'parents'
55 55
56 56 _filter_pre_load = [
57 57 # done through a more complex tree walk on parents
58 58 "affected_files",
59 59 # based on repository cached property
60 60 "branch",
61 61 # done through subprocess not remote call
62 62 "children",
63 63 # done through a more complex tree walk on parents
64 64 "status",
65 65 # mercurial specific property not supported here
66 66 "_file_paths",
67 67 # mercurial specific property not supported here
68 68 'obsolete',
69 69 # mercurial specific property not supported here
70 70 'phase',
71 71 # mercurial specific property not supported here
72 72 'hidden'
73 73 ]
74 74
75 75 def __init__(self, repository, raw_id, idx, pre_load=None):
76 76 self.repository = repository
77 77 self._remote = repository._remote
78 78 # TODO: johbo: Tweak of raw_id should not be necessary
79 79 self.raw_id = safe_str(raw_id)
80 80 self.idx = idx
81 81
82 82 self._set_bulk_properties(pre_load)
83 83
84 84 # caches
85 85 self._stat_modes = {} # stat info for paths
86 86 self._paths = {} # path processed with parse_tree
87 87 self.nodes = {}
88 88 self._submodules = None
89 89
90 90 def _set_bulk_properties(self, pre_load):
91 91 if not pre_load:
92 92 return
93 93 pre_load = [entry for entry in pre_load
94 94 if entry not in self._filter_pre_load]
95 95 if not pre_load:
96 96 return
97 97
98 98 result = self._remote.bulk_request(self.raw_id, pre_load)
99 99 for attr, value in result.items():
100 100 if attr in ["author", "message"]:
101 101 if value:
102 102 value = safe_unicode(value)
103 103 elif attr == "date":
104 104 value = utcdate_fromtimestamp(*value)
105 105 elif attr == "parents":
106 106 value = self._make_commits(value)
107 107 self.__dict__[attr] = value
108 108
109 109 @LazyProperty
110 110 def _commit(self):
111 111 return self._remote[self.raw_id]
112 112
113 113 @LazyProperty
114 114 def _tree_id(self):
115 115 return self._remote[self._commit['tree']]['id']
116 116
117 117 @LazyProperty
118 118 def id(self):
119 119 return self.raw_id
120 120
121 121 @LazyProperty
122 122 def short_id(self):
123 123 return self.raw_id[:12]
124 124
125 125 @LazyProperty
126 126 def message(self):
127 127 return safe_unicode(
128 128 self._remote.commit_attribute(self.id, self._message_property))
129 129
130 130 @LazyProperty
131 131 def committer(self):
132 132 return safe_unicode(
133 133 self._remote.commit_attribute(self.id, self._committer_property))
134 134
135 135 @LazyProperty
136 136 def author(self):
137 137 return safe_unicode(
138 138 self._remote.commit_attribute(self.id, self._author_property))
139 139
140 140 @LazyProperty
141 141 def date(self):
142 142 unix_ts, tz = self._remote.get_object_attrs(
143 143 self.raw_id, self._date_property, self._date_tz_property)
144 144 return utcdate_fromtimestamp(unix_ts, tz)
145 145
146 146 @LazyProperty
147 147 def status(self):
148 148 """
149 149 Returns modified, added, removed, deleted files for current commit
150 150 """
151 151 return self.changed, self.added, self.removed
152 152
153 153 @LazyProperty
154 154 def tags(self):
155 155 tags = [safe_unicode(name) for name,
156 156 commit_id in self.repository.tags.iteritems()
157 157 if commit_id == self.raw_id]
158 158 return tags
159 159
160 160 @LazyProperty
161 161 def branch(self):
162 162 for name, commit_id in self.repository.branches.iteritems():
163 163 if commit_id == self.raw_id:
164 164 return safe_unicode(name)
165 165 return None
166 166
167 167 def _get_id_for_path(self, path):
168 168 path = safe_str(path)
169 169 if path in self._paths:
170 170 return self._paths[path]
171 171
172 172 tree_id = self._tree_id
173 173
174 174 path = path.strip('/')
175 175 if path == '':
176 176 data = [tree_id, "tree"]
177 177 self._paths[''] = data
178 178 return data
179 179
180 180 parts = path.split('/')
181 181 dirs, name = parts[:-1], parts[-1]
182 182 cur_dir = ''
183 183
184 184 # initially extract things from root dir
185 185 tree_items = self._remote.tree_items(tree_id)
186 186 self._process_tree_items(tree_items, cur_dir)
187 187
188 188 for dir in dirs:
189 189 if cur_dir:
190 190 cur_dir = '/'.join((cur_dir, dir))
191 191 else:
192 192 cur_dir = dir
193 193 dir_id = None
194 194 for item, stat_, id_, type_ in tree_items:
195 195 if item == dir:
196 196 dir_id = id_
197 197 break
198 198 if dir_id:
199 199 if type_ != "tree":
200 200 raise CommitError('%s is not a directory' % cur_dir)
201 201 # update tree
202 202 tree_items = self._remote.tree_items(dir_id)
203 203 else:
204 204 raise CommitError('%s have not been found' % cur_dir)
205 205
206 206 # cache all items from the given traversed tree
207 207 self._process_tree_items(tree_items, cur_dir)
208 208
209 209 if path not in self._paths:
210 210 raise self.no_node_at_path(path)
211 211
212 212 return self._paths[path]
213 213
214 214 def _process_tree_items(self, items, cur_dir):
215 215 for item, stat_, id_, type_ in items:
216 216 if cur_dir:
217 217 name = '/'.join((cur_dir, item))
218 218 else:
219 219 name = item
220 220 self._paths[name] = [id_, type_]
221 221 self._stat_modes[name] = stat_
222 222
223 223 def _get_kind(self, path):
224 224 path_id, type_ = self._get_id_for_path(path)
225 225 if type_ == 'blob':
226 226 return NodeKind.FILE
227 227 elif type_ == 'tree':
228 228 return NodeKind.DIR
229 229 elif type == 'link':
230 230 return NodeKind.SUBMODULE
231 231 return None
232 232
233 233 def _get_filectx(self, path):
234 234 path = self._fix_path(path)
235 235 if self._get_kind(path) != NodeKind.FILE:
236 236 raise CommitError(
237 "File does not exist for commit %s at '%s'" %
238 (self.raw_id, path))
237 "File does not exist for commit %s at '%s'" % (self.raw_id, path))
239 238 return path
240 239
241 240 def _get_file_nodes(self):
242 241 return chain(*(t[2] for t in self.walk()))
243 242
244 243 @LazyProperty
245 244 def parents(self):
246 245 """
247 246 Returns list of parent commits.
248 247 """
249 248 parent_ids = self._remote.commit_attribute(
250 249 self.id, self._parents_property)
251 250 return self._make_commits(parent_ids)
252 251
253 252 @LazyProperty
254 253 def children(self):
255 254 """
256 255 Returns list of child commits.
257 256 """
258 257 rev_filter = settings.GIT_REV_FILTER
259 258 output, __ = self.repository.run_git_command(
260 259 ['rev-list', '--children'] + rev_filter)
261 260
262 261 child_ids = []
263 262 pat = re.compile(r'^%s' % self.raw_id)
264 263 for l in output.splitlines():
265 264 if pat.match(l):
266 265 found_ids = l.split(' ')[1:]
267 266 child_ids.extend(found_ids)
268 267 return self._make_commits(child_ids)
269 268
270 269 def _make_commits(self, commit_ids, pre_load=None):
271 270 return [
272 271 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load,
273 272 translate_tag=False)
274 273 for commit_id in commit_ids]
275 274
276 275 def get_file_mode(self, path):
277 276 """
278 277 Returns stat mode of the file at the given `path`.
279 278 """
280 279 path = safe_str(path)
281 280 # ensure path is traversed
282 281 self._get_id_for_path(path)
283 282 return self._stat_modes[path]
284 283
285 284 def is_link(self, path):
286 285 return stat.S_ISLNK(self.get_file_mode(path))
287 286
288 287 def get_file_content(self, path):
289 288 """
290 289 Returns content of the file at given `path`.
291 290 """
292 291 id_, _ = self._get_id_for_path(path)
293 292 return self._remote.blob_as_pretty_string(id_)
294 293
295 294 def get_file_size(self, path):
296 295 """
297 296 Returns size of the file at given `path`.
298 297 """
299 298 id_, _ = self._get_id_for_path(path)
300 299 return self._remote.blob_raw_length(id_)
301 300
302 301 def get_path_history(self, path, limit=None, pre_load=None):
303 302 """
304 303 Returns history of file as reversed list of `GitCommit` objects for
305 304 which file at given `path` has been modified.
306 305
307 306 TODO: This function now uses an underlying 'git' command which works
308 307 quickly but ideally we should replace with an algorithm.
309 308 """
310 309 self._get_filectx(path)
311 310 f_path = safe_str(path)
312 311
313 312 # optimize for n==1, rev-list is much faster for that use-case
314 313 if limit == 1:
315 314 cmd = ['rev-list', '-1', self.raw_id, '--', f_path]
316 315 else:
317 316 cmd = ['log']
318 317 if limit:
319 318 cmd.extend(['-n', str(safe_int(limit, 0))])
320 319 cmd.extend(['--pretty=format: %H', '-s', self.raw_id, '--', f_path])
321 320
322 321 output, __ = self.repository.run_git_command(cmd)
323 322 commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)
324 323
325 324 return [
326 325 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
327 326 for commit_id in commit_ids]
328 327
329 328 def get_file_annotate(self, path, pre_load=None):
330 329 """
331 330 Returns a generator of four element tuples with
332 331 lineno, commit_id, commit lazy loader and line
333 332
334 333 TODO: This function now uses os underlying 'git' command which is
335 334 generally not good. Should be replaced with algorithm iterating
336 335 commits.
337 336 """
338 337 cmd = ['blame', '-l', '--root', '-r', self.raw_id, '--', path]
339 338 # -l ==> outputs long shas (and we need all 40 characters)
340 339 # --root ==> doesn't put '^' character for bounderies
341 340 # -r commit_id ==> blames for the given commit
342 341 output, __ = self.repository.run_git_command(cmd)
343 342
344 343 for i, blame_line in enumerate(output.split('\n')[:-1]):
345 344 line_no = i + 1
346 345 commit_id, line = re.split(r' ', blame_line, 1)
347 346 yield (
348 347 line_no, commit_id,
349 348 lambda: self.repository.get_commit(commit_id=commit_id,
350 349 pre_load=pre_load),
351 350 line)
352 351
353 352 def get_nodes(self, path):
354 353 if self._get_kind(path) != NodeKind.DIR:
355 354 raise CommitError(
356 "Directory does not exist for commit %s at "
357 " '%s'" % (self.raw_id, path))
355 "Directory does not exist for commit %s at '%s'" % (self.raw_id, path))
358 356 path = self._fix_path(path)
359 357 id_, _ = self._get_id_for_path(path)
360 358 tree_id = self._remote[id_]['id']
361 359 dirnodes = []
362 360 filenodes = []
363 361 alias = self.repository.alias
364 362 for name, stat_, id_, type_ in self._remote.tree_items(tree_id):
365 363 if type_ == 'link':
366 364 url = self._get_submodule_url('/'.join((path, name)))
367 365 dirnodes.append(SubModuleNode(
368 366 name, url=url, commit=id_, alias=alias))
369 367 continue
370 368
371 369 if path != '':
372 370 obj_path = '/'.join((path, name))
373 371 else:
374 372 obj_path = name
375 373 if obj_path not in self._stat_modes:
376 374 self._stat_modes[obj_path] = stat_
377 375
378 376 if type_ == 'tree':
379 377 dirnodes.append(DirNode(obj_path, commit=self))
380 378 elif type_ == 'blob':
381 379 filenodes.append(FileNode(obj_path, commit=self, mode=stat_))
382 380 else:
383 381 raise CommitError(
384 382 "Requested object should be Tree or Blob, is %s", type_)
385 383
386 384 nodes = dirnodes + filenodes
387 385 for node in nodes:
388 386 if node.path not in self.nodes:
389 387 self.nodes[node.path] = node
390 388 nodes.sort()
391 389 return nodes
392 390
393 391 def get_node(self, path, pre_load=None):
394 392 if isinstance(path, unicode):
395 393 path = path.encode('utf-8')
396 394 path = self._fix_path(path)
397 395 if path not in self.nodes:
398 396 try:
399 397 id_, type_ = self._get_id_for_path(path)
400 398 except CommitError:
401 399 raise NodeDoesNotExistError(
402 400 "Cannot find one of parents' directories for a given "
403 401 "path: %s" % path)
404 402
405 403 if type_ == 'link':
406 404 url = self._get_submodule_url(path)
407 405 node = SubModuleNode(path, url=url, commit=id_,
408 406 alias=self.repository.alias)
409 407 elif type_ == 'tree':
410 408 if path == '':
411 409 node = RootNode(commit=self)
412 410 else:
413 411 node = DirNode(path, commit=self)
414 412 elif type_ == 'blob':
415 413 node = FileNode(path, commit=self, pre_load=pre_load)
416 414 else:
417 415 raise self.no_node_at_path(path)
418 416
419 417 # cache node
420 418 self.nodes[path] = node
421 419 return self.nodes[path]
422 420
423 421 def get_largefile_node(self, path):
424 422 id_, _ = self._get_id_for_path(path)
425 423 pointer_spec = self._remote.is_large_file(id_)
426 424
427 425 if pointer_spec:
428 426 # content of that file regular FileNode is the hash of largefile
429 427 file_id = pointer_spec.get('oid_hash')
430 428 if self._remote.in_largefiles_store(file_id):
431 429 lf_path = self._remote.store_path(file_id)
432 430 return LargeFileNode(lf_path, commit=self, org_path=path)
433 431
434 432 @LazyProperty
435 433 def affected_files(self):
436 434 """
437 435 Gets a fast accessible file changes for given commit
438 436 """
439 437 added, modified, deleted = self._changes_cache
440 438 return list(added.union(modified).union(deleted))
441 439
442 440 @LazyProperty
443 441 def _changes_cache(self):
444 442 added = set()
445 443 modified = set()
446 444 deleted = set()
447 445 _r = self._remote
448 446
449 447 parents = self.parents
450 448 if not self.parents:
451 449 parents = [base.EmptyCommit()]
452 450 for parent in parents:
453 451 if isinstance(parent, base.EmptyCommit):
454 452 oid = None
455 453 else:
456 454 oid = parent.raw_id
457 455 changes = _r.tree_changes(oid, self.raw_id)
458 456 for (oldpath, newpath), (_, _), (_, _) in changes:
459 457 if newpath and oldpath:
460 458 modified.add(newpath)
461 459 elif newpath and not oldpath:
462 460 added.add(newpath)
463 461 elif not newpath and oldpath:
464 462 deleted.add(oldpath)
465 463 return added, modified, deleted
466 464
467 465 def _get_paths_for_status(self, status):
468 466 """
469 467 Returns sorted list of paths for given ``status``.
470 468
471 469 :param status: one of: *added*, *modified* or *deleted*
472 470 """
473 471 added, modified, deleted = self._changes_cache
474 472 return sorted({
475 473 'added': list(added),
476 474 'modified': list(modified),
477 475 'deleted': list(deleted)}[status]
478 476 )
479 477
480 478 @LazyProperty
481 479 def added(self):
482 480 """
483 481 Returns list of added ``FileNode`` objects.
484 482 """
485 483 if not self.parents:
486 484 return list(self._get_file_nodes())
487 485 return AddedFileNodesGenerator(
488 486 [n for n in self._get_paths_for_status('added')], self)
489 487
490 488 @LazyProperty
491 489 def changed(self):
492 490 """
493 491 Returns list of modified ``FileNode`` objects.
494 492 """
495 493 if not self.parents:
496 494 return []
497 495 return ChangedFileNodesGenerator(
498 496 [n for n in self._get_paths_for_status('modified')], self)
499 497
500 498 @LazyProperty
501 499 def removed(self):
502 500 """
503 501 Returns list of removed ``FileNode`` objects.
504 502 """
505 503 if not self.parents:
506 504 return []
507 505 return RemovedFileNodesGenerator(
508 506 [n for n in self._get_paths_for_status('deleted')], self)
509 507
510 508 def _get_submodule_url(self, submodule_path):
511 509 git_modules_path = '.gitmodules'
512 510
513 511 if self._submodules is None:
514 512 self._submodules = {}
515 513
516 514 try:
517 515 submodules_node = self.get_node(git_modules_path)
518 516 except NodeDoesNotExistError:
519 517 return None
520 518
521 519 content = submodules_node.content
522 520
523 521 # ConfigParser fails if there are whitespaces
524 522 content = '\n'.join(l.strip() for l in content.split('\n'))
525 523
526 524 parser = configparser.ConfigParser()
527 525 parser.readfp(StringIO(content))
528 526
529 527 for section in parser.sections():
530 528 path = parser.get(section, 'path')
531 529 url = parser.get(section, 'url')
532 530 if path and url:
533 531 self._submodules[path.strip('/')] = url
534 532
535 533 return self._submodules.get(submodule_path.strip('/'))
@@ -1,1037 +1,1037 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 GIT repository module
23 23 """
24 24
25 25 import logging
26 26 import os
27 27 import re
28 28 import time
29 29
30 30 from zope.cachedescriptors.property import Lazy as LazyProperty
31 31 from zope.cachedescriptors.property import CachedProperty
32 32
33 33 from rhodecode.lib.compat import OrderedDict
34 34 from rhodecode.lib.datelib import (
35 35 utcdate_fromtimestamp, makedate, date_astimestamp)
36 36 from rhodecode.lib.utils import safe_unicode, safe_str
37 37 from rhodecode.lib.vcs import connection, path as vcspath
38 38 from rhodecode.lib.vcs.backends.base import (
39 39 BaseRepository, CollectionGenerator, Config, MergeResponse,
40 40 MergeFailureReason, Reference)
41 41 from rhodecode.lib.vcs.backends.git.commit import GitCommit
42 42 from rhodecode.lib.vcs.backends.git.diff import GitDiff
43 43 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
44 44 from rhodecode.lib.vcs.exceptions import (
45 45 CommitDoesNotExistError, EmptyRepositoryError,
46 46 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
47 47
48 48
# Matches an abbreviated (12 hex chars) or full (40 hex chars) commit sha.
# The previous pattern `^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$` had stray square
# brackets around the alternation, which made '[' count as a valid "hex"
# character and left the 40-char branch effectively unanchored at the start.
SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')

log = logging.getLogger(__name__)
52 52
53 53
class GitRepository(BaseRepository):
    """
    Git repository backend.
    """
    # branch assumed when no branch is given explicitly
    DEFAULT_BRANCH_NAME = 'master'

    contact = BaseRepository.DEFAULT_CONTACT

    def __init__(self, repo_path, config=None, create=False, src_url=None,
                 do_workspace_checkout=False, with_wire=None, bare=False):
        """
        :param repo_path: local filesystem path of the repository
        :param config: optional config object; a default one is created
            when not given
        :param create: when True, create the repository at ``repo_path``
        :param src_url: optional url to clone/fetch from after creation
        :param do_workspace_checkout: when pulling, also update the working
            copy
        :param with_wire: extra options passed to the remote connection
        :param bare: create/treat the repository as bare (no working copy)
        """
        self.path = safe_str(os.path.abspath(repo_path))
        self.config = config if config else self.get_default_config()
        self.with_wire = with_wire

        self._init_repo(create, src_url, do_workspace_checkout, bare)

        # caches
        self._commit_ids = {}

        # dependent that trigger re-computation of commit_ids
        self._commit_ids_ver = 0

    @LazyProperty
    def _remote(self):
        # lazily-created connection to the vcsserver git backend
        return connection.Git(self.path, self.config, with_wire=self.with_wire)

    @LazyProperty
    def bare(self):
        # whether this repository is bare (answered by the remote)
        return self._remote.bare()

    @LazyProperty
    def head(self):
        # commit id the repository HEAD currently points at
        return self._remote.head()

    @CachedProperty('_commit_ids_ver')
    def commit_ids(self):
        """
        Returns list of commit ids, in ascending order.  Being lazy
        attribute allows external tools to inject commit ids from cache.

        Cached until ``_commit_ids_ver`` changes (see :meth:`strip`).
        """
        commit_ids = self._get_all_commit_ids()
        self._rebuild_cache(commit_ids)
        return commit_ids
98 98
99 99 def _rebuild_cache(self, commit_ids):
100 100 self._commit_ids = dict((commit_id, index)
101 101 for index, commit_id in enumerate(commit_ids))
102 102
    def run_git_command(self, cmd, **opts):
        """
        Runs given ``cmd`` as git command and returns tuple
        (stdout, stderr).

        :param cmd: git command to be executed, as a list of arguments
        :param opts: env options to pass into Subprocess command; the
            special key ``skip_stderr_log`` suppresses stderr debug logging
        :raises ValueError: if ``cmd`` is not a list
        """
        if not isinstance(cmd, list):
            raise ValueError('cmd must be a list, got %s instead' % type(cmd))

        skip_stderr_log = opts.pop('skip_stderr_log', False)
        out, err = self._remote.run_git_command(cmd, **opts)
        if err and not skip_stderr_log:
            log.debug('Stderr output of git command "%s":\n%s', cmd, err)
        return out, err

    @staticmethod
    def check_url(url, config):
        """
        Check the given url and try to verify that it is a valid link.
        Sometimes it may happen that git issues a basic auth request, which
        can cause the whole API to hang when used from python or other
        external calls.

        On failures it'll raise urllib2.HTTPError; an exception is also
        thrown when the return code is non 200.
        """
        # check first if it's not an url
        if os.path.isdir(url) or url.startswith('file:'):
            return True

        # strip scheme prefixes such as "git+" before handing over the url
        if '+' in url.split('://', 1)[0]:
            url = url.split('+', 1)[1]

        # Request the _remote to verify the url
        return connection.Git.check_url(url, config.serialize())

    @staticmethod
    def is_valid_repository(path):
        # fast path: a non-bare repository always has a `.git` directory
        if os.path.isdir(os.path.join(path, '.git')):
            return True
        # check case of bare repository: try to open it and see if the
        # backend accepts it
        try:
            GitRepository(path)
            return True
        except VCSError:
            pass
        return False
152 152
    def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
                   bare=False):
        """
        Create and/or open the repository on disk.

        :param create: create a new repository at ``self.path``
        :param src_url: optional url to clone/fetch from (validated first)
        :param do_workspace_checkout: update the working copy after pulling
        :param bare: initialize as a bare repository
        :raises RepositoryError: on invalid path, conflicting options, or
            underlying OS errors
        """
        if create and os.path.exists(self.path):
            raise RepositoryError(
                "Cannot create repository at %s, location already exist"
                % self.path)

        # a bare repo has no working copy, so a checkout makes no sense
        if bare and do_workspace_checkout:
            raise RepositoryError("Cannot update a bare repository")
        try:

            if src_url:
                # check URL before any actions
                GitRepository.check_url(src_url, self.config)

            if create:
                os.makedirs(self.path, mode=0o755)

                if bare:
                    self._remote.init_bare()
                else:
                    self._remote.init()

                if src_url and bare:
                    # bare repository only allows a fetch and checkout is not allowed
                    self.fetch(src_url, commit_ids=None)
                elif src_url:
                    self.pull(src_url, commit_ids=None,
                              update_after=do_workspace_checkout)

            else:
                if not self._remote.assert_correct_path():
                    raise RepositoryError(
                        'Path "%s" does not contain a Git repository' %
                        (self.path,))

        # TODO: johbo: check if we have to translate the OSError here
        except OSError as err:
            raise RepositoryError(err)

    def _get_all_commit_ids(self, filters=None):
        """
        Return all commit ids (ascending, date-ordered) via ``git rev-list``.

        :param filters: optional dict with ``since``, ``until`` and/or
            ``branch_name`` keys that narrow the rev-list output
        """
        # we must check if this repo is not empty, since later command
        # fails if it is. And it's cheaper to ask than throw the subprocess
        # errors

        head = self._remote.head(show_exc=False)
        if not head:
            return []

        rev_filter = ['--branches', '--tags']
        extra_filter = []

        if filters:
            if filters.get('since'):
                extra_filter.append('--since=%s' % (filters['since']))
            if filters.get('until'):
                extra_filter.append('--until=%s' % (filters['until']))
            if filters.get('branch_name'):
                # restrict to a single branch: drop --branches, keep tags
                rev_filter = ['--tags']
                extra_filter.append(filters['branch_name'])
            rev_filter.extend(extra_filter)

        # if filters.get('start') or filters.get('end'):
        #     # skip is offset, max-count is limit
        #     if filters.get('start'):
        #         extra_filter += ' --skip=%s' % filters['start']
        #     if filters.get('end'):
        #         extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))

        cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
        try:
            output, __ = self.run_git_command(cmd)
        except RepositoryError:
            # Can be raised for empty repositories
            return []
        return output.splitlines()
229 229
230 230 def _lookup_commit(self, commit_id_or_idx, translate_tag=True):
231 231 def is_null(value):
232 232 return len(value) == commit_id_or_idx.count('0')
233 233
234 234 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
235 235 return self.commit_ids[-1]
236 236
237 237 is_bstr = isinstance(commit_id_or_idx, (str, unicode))
238 238 if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
239 239 or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
240 240 try:
241 241 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
242 242 except Exception:
243 msg = "Commit %s does not exist for %s" % (commit_id_or_idx, self.name)
243 msg = "Commit {} does not exist for `{}`".format(commit_id_or_idx, self.name)
244 244 raise CommitDoesNotExistError(msg)
245 245
246 246 elif is_bstr:
247 247 # check full path ref, eg. refs/heads/master
248 248 ref_id = self._refs.get(commit_id_or_idx)
249 249 if ref_id:
250 250 return ref_id
251 251
252 252 # check branch name
253 253 branch_ids = self.branches.values()
254 254 ref_id = self._refs.get('refs/heads/%s' % commit_id_or_idx)
255 255 if ref_id:
256 256 return ref_id
257 257
258 258 # check tag name
259 259 ref_id = self._refs.get('refs/tags/%s' % commit_id_or_idx)
260 260 if ref_id:
261 261 return ref_id
262 262
263 263 if (not SHA_PATTERN.match(commit_id_or_idx) or
264 264 commit_id_or_idx not in self.commit_ids):
265 msg = "Commit %s does not exist for %s" % (commit_id_or_idx, self.name)
265 msg = "Commit {} does not exist for `{}`".format(commit_id_or_idx, self.name)
266 266 raise CommitDoesNotExistError(msg)
267 267
268 268 # Ensure we return full id
269 269 if not SHA_PATTERN.match(str(commit_id_or_idx)):
270 270 raise CommitDoesNotExistError(
271 271 "Given commit id %s not recognized" % commit_id_or_idx)
272 272 return commit_id_or_idx
273 273
    def get_hook_location(self):
        """
        returns absolute path to location where hooks are stored
        """
        loc = os.path.join(self.path, 'hooks')
        if not self.bare:
            # non-bare repos keep control files under .git/
            loc = os.path.join(self.path, '.git', 'hooks')
        return loc

    @LazyProperty
    def last_change(self):
        """
        Returns last change made on this repository as
        `datetime.datetime` object.
        """
        try:
            return self.get_commit().date
        except RepositoryError:
            # empty repository: fall back to filesystem mtime
            tzoffset = makedate()[1]
            return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)

    def _get_fs_mtime(self):
        """
        Return the mtime of the repository's index (or HEAD, when no
        index exists) as a filesystem-level "last change" approximation.
        """
        idx_loc = '' if self.bare else '.git'
        # fallback to filesystem
        in_path = os.path.join(self.path, idx_loc, "index")
        he_path = os.path.join(self.path, idx_loc, "HEAD")
        if os.path.exists(in_path):
            return os.stat(in_path).st_mtime
        else:
            return os.stat(he_path).st_mtime

    @LazyProperty
    def description(self):
        # repository description as reported by the remote, with a default
        # when none is set
        description = self._remote.get_description()
        return safe_unicode(description or self.DEFAULT_DESCRIPTION)
309 309
    def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
        """
        Return an ordered mapping of ref name -> sha for all refs whose
        name starts with ``prefix``.

        :param prefix: ref namespace to select, e.g. ``'refs/heads/'``
        :param reverse: sort names in descending order when True
        :param strip_prefix: drop ``prefix`` from the returned names
        """
        if self.is_empty():
            return OrderedDict()

        result = []
        for ref, sha in self._refs.iteritems():
            if ref.startswith(prefix):
                ref_name = ref
                if strip_prefix:
                    ref_name = ref[len(prefix):]
                result.append((safe_unicode(ref_name), sha))

        def get_name(entry):
            # sort key: the ref name part of the (name, sha) tuple
            return entry[0]

        return OrderedDict(sorted(result, key=get_name, reverse=reverse))

    def _get_branches(self):
        # mapping of branch name -> sha
        return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)

    @LazyProperty
    def branches(self):
        return self._get_branches()

    @LazyProperty
    def branches_closed(self):
        # git has no notion of closed branches (mercurial concept)
        return {}

    @LazyProperty
    def bookmarks(self):
        # git has no bookmarks (mercurial concept)
        return {}

    @LazyProperty
    def branches_all(self):
        all_branches = {}
        all_branches.update(self.branches)
        all_branches.update(self.branches_closed)
        return all_branches

    @LazyProperty
    def tags(self):
        return self._get_tags()

    def _get_tags(self):
        # mapping of tag name -> sha, newest-sorted name first
        return self._get_refs_entries(
            prefix='refs/tags/', strip_prefix=True, reverse=True)
356 356
    def tag(self, name, user, commit_id=None, message=None, date=None,
            **kwargs):
        # TODO: fix this method to apply annotated tags correct with message
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        if name in self.tags:
            raise TagAlreadyExistError("Tag %s already exists" % name)
        commit = self.get_commit(commit_id=commit_id)
        message = message or "Added tag %s for commit %s" % (
            name, commit.raw_id)
        self._remote.set_refs('refs/tags/%s' % name, commit._commit['id'])

        # refresh cached refs/tags so the new tag is visible immediately
        self._refs = self._get_refs()
        self.tags = self._get_tags()
        return commit

    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given ``name``.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exists
        """
        if name not in self.tags:
            raise TagDoesNotExistError("Tag %s does not exist" % name)
        tagpath = vcspath.join(
            self._remote.get_refs_path(), 'refs', 'tags', name)
        try:
            os.remove(tagpath)
            # refresh cached refs/tags after removal
            self._refs = self._get_refs()
            self.tags = self._get_tags()
        except OSError as e:
            raise RepositoryError(e.strerror)

    def _get_refs(self):
        # raw ref name -> sha mapping from the remote
        return self._remote.get_refs()

    @LazyProperty
    def _refs(self):
        return self._get_refs()

    @property
    def _ref_tree(self):
        """
        All refs arranged as a nested dict keyed by '/'-separated path
        components, e.g. tree['refs']['heads']['master'] -> sha.
        """
        node = tree = {}
        for ref, sha in self._refs.iteritems():
            path = ref.split('/')
            for bit in path[:-1]:
                node = node.setdefault(bit, {})
            node[path[-1]] = sha
            # reset the cursor back to the root for the next ref
            node = tree
        return tree

    def get_remote_ref(self, ref_name):
        """
        Return the sha the remote-tracking ref ``ref_name`` points at, or
        ``None`` when it cannot be resolved.
        """
        ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
        try:
            return self._refs[ref_key]
        # NOTE(review): broad `except Exception` also swallows failures from
        # the lazy `_refs` remote call, not just a missing key — presumably
        # intentional best-effort behavior; confirm before narrowing.
        except Exception:
            return
428 428
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=True):
        """
        Returns `GitCommit` object representing commit from git repository
        at the given `commit_id` or head (most recent commit) if None given.

        :param commit_id: commit reference (sha, branch, tag, ref path)
        :param commit_idx: numeric index into ``commit_ids`` (may be
            negative)
        :param pre_load: optional list of attributes to pre-load
        :param translate_tag: resolve annotated tags to their target commit
            via the remote
        :raises EmptyRepositoryError: when the repository has no commits
        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        if commit_id is not None:
            self._validate_commit_id(commit_id)
            try:
                # we have cached idx, use it without contacting the remote
                idx = self._commit_ids[commit_id]
                return GitCommit(self, commit_id, idx, pre_load=pre_load)
            except KeyError:
                pass

        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            try:
                _commit_id = self.commit_ids[commit_idx]
                if commit_idx < 0:
                    # normalize a negative index to its positive position
                    commit_idx = self.commit_ids.index(_commit_id)
                return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
            except IndexError:
                commit_id = commit_idx
        else:
            commit_id = "tip"

        commit_id = self._lookup_commit(commit_id)
        remote_idx = None
        if translate_tag:
            # Need to call remote to translate id for tagging scenario
            remote_data = self._remote.get_object(commit_id)
            commit_id = remote_data["commit_id"]
            remote_idx = remote_data["idx"]

        try:
            idx = self._commit_ids[commit_id]
        except KeyError:
            idx = remote_idx or 0

        return GitCommit(self, commit_id, idx, pre_load=pre_load)

    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
        """
        Returns generator of `GitCommit` objects from start to end (both
        are inclusive), in ascending date order.

        :param start_id: None, str(commit_id)
        :param end_id: None, str(commit_id)
        :param start_date: if specified, commits with commit date less than
            ``start_date`` would be filtered out from returned set
        :param end_date: if specified, commits with commit date greater than
            ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, commits not reachable from given
            branch would be filtered out from returned set
        :param show_hidden: Show hidden commits such as obsolete or hidden from
            Mercurial evolve
        :raise BranchDoesNotExistError: If given `branch_name` does not
            exist.
        :raise CommitDoesNotExistError: If commits for given `start` or
            `end` could not be found.

        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        self._validate_branch_name(branch_name)

        if start_id is not None:
            self._validate_commit_id(start_id)
        if end_id is not None:
            self._validate_commit_id(end_id)

        # translate start/end references into positions in commit_ids
        start_raw_id = self._lookup_commit(start_id)
        start_pos = self._commit_ids[start_raw_id] if start_id else None
        end_raw_id = self._lookup_commit(end_id)
        end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        if end_pos is not None:
            # make the end bound inclusive for slicing
            end_pos += 1

        filter_ = []
        if branch_name:
            filter_.append({'branch_name': branch_name})
        if start_date and not end_date:
            filter_.append({'since': start_date})
        if end_date and not start_date:
            filter_.append({'until': end_date})
        if start_date and end_date:
            filter_.append({'since': start_date})
            filter_.append({'until': end_date})

        # if start_pos or end_pos:
        #     filter_.append({'start': start_pos})
        #     filter_.append({'end': end_pos})

        if filter_:
            # re-run rev-list with the date/branch filters applied
            revfilters = {
                'branch_name': branch_name,
                'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
                'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
                'start': start_pos,
                'end': end_pos,
            }
            commit_ids = self._get_all_commit_ids(filters=revfilters)

            # pure python stuff, it's slow due to walker walking whole repo
            # def get_revs(walker):
            #     for walker_entry in walker:
            #         yield walker_entry.commit.id
            # revfilters = {}
            # commit_ids = list(reversed(list(get_revs(self._repo.get_walker(**revfilters)))))
        else:
            commit_ids = self.commit_ids

        if start_pos or end_pos:
            commit_ids = commit_ids[start_pos: end_pos]

        return CollectionGenerator(self, commit_ids, pre_load=pre_load,
                                   translate_tag=translate_tags)
558 558
    def get_diff(
            self, commit1, commit2, path='', ignore_whitespace=False,
            context=3, path1=None):
        """
        Returns (git like) *diff*, as plain text. Shows changes introduced by
        ``commit2`` since ``commit1``.

        :param commit1: Entry point from which diff is shown. Can be
            ``self.EMPTY_COMMIT`` - in this case, patch showing all
            the changes since empty state of the repository until ``commit2``
        :param commit2: Until which commits changes should be shown.
        :param path: optional path to restrict the diff to
        :param ignore_whitespace: If set to ``True``, would not show whitespace
            changes. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
            shown. Defaults to ``3``.
        :param path1: must equal ``path`` when given; diffing two different
            paths is not supported
        :raises ValueError: when ``path1`` differs from ``path``
        """
        self._validate_diff_commits(commit1, commit2)
        if path1 is not None and path1 != path:
            raise ValueError("Diff of two different paths not supported.")

        flags = [
            '-U%s' % context, '--full-index', '--binary', '-p',
            '-M', '--abbrev=40']
        if ignore_whitespace:
            flags.append('-w')

        if commit1 == self.EMPTY_COMMIT:
            # no parent to diff against: `git show` produces the full patch
            cmd = ['show'] + flags + [commit2.raw_id]
        else:
            cmd = ['diff'] + flags + [commit1.raw_id, commit2.raw_id]

        if path:
            cmd.extend(['--', path])

        stdout, __ = self.run_git_command(cmd)
        # If we used 'show' command, strip first few lines (until actual diff
        # starts)
        if commit1 == self.EMPTY_COMMIT:
            lines = stdout.splitlines()
            x = 0
            for line in lines:
                if line.startswith('diff'):
                    break
                x += 1
            # Append new line just like 'diff' command do
            stdout = '\n'.join(lines[x:]) + '\n'
        return GitDiff(stdout)

    def strip(self, commit_id, branch_name):
        """
        Reset ``branch_name`` to the parent of ``commit_id``, effectively
        removing the commit from the branch; returns the new commit count.
        """
        commit = self.get_commit(commit_id=commit_id)
        if commit.merge:
            raise Exception('Cannot reset to merge commit')

        # parent is going to be the new head now
        commit = commit.parents[0]
        self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)

        self._commit_ids_ver = time.time()
        # we updated _commit_ids_ver so accessing self.commit_ids will re-compute it
        return len(self.commit_ids)
619 619
    def get_common_ancestor(self, commit_id1, commit_id2, repo2):
        """
        Return the common ancestor commit id of ``commit_id1`` (in this
        repo) and ``commit_id2`` (in ``repo2``), or ``None`` when there is
        no shared history.
        """
        if commit_id1 == commit_id2:
            return commit_id1

        if self != repo2:
            commits = self._remote.get_missing_revs(
                commit_id1, commit_id2, repo2.path)
            if commits:
                # ancestor is the parent of the oldest missing commit
                commit = repo2.get_commit(commits[-1])
                if commit.parents:
                    ancestor_id = commit.parents[0].raw_id
                else:
                    ancestor_id = None
            else:
                # no commits from other repo, ancestor_id is the commit_id2
                ancestor_id = commit_id2
        else:
            # same repository: git can answer directly
            output, __ = self.run_git_command(
                ['merge-base', commit_id1, commit_id2])
            ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]

        return ancestor_id

    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        """
        Return the list of commits that are in ``repo2``/``commit_id2`` but
        not reachable from ``commit_id1``.

        :param merge: accepted for interface parity with other backends;
            not used by the git implementation
        """
        repo1 = self
        ancestor_id = None

        if commit_id1 == commit_id2:
            commits = []
        elif repo1 != repo2:
            missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
                                                        repo2.path)
            commits = [
                repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
                for commit_id in reversed(missing_ids)]
        else:
            output, __ = repo1.run_git_command(
                ['log', '--reverse', '--pretty=format: %H', '-s',
                 '%s..%s' % (commit_id1, commit_id2)])
            commits = [
                repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
                for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]

        return commits

    @LazyProperty
    def in_memory_commit(self):
        """
        Returns ``GitInMemoryCommit`` object for this repository.
        """
        return GitInMemoryCommit(self)
671 671
    def pull(self, url, commit_ids=None, update_after=False):
        """
        Pull changes from external location. Pull is different in GIT
        that fetch since it's doing a checkout

        :param url: location to pull from
        :param commit_ids: Optional. Can be set to a list of commit ids
            which shall be pulled from the other repository.
        :param update_after: update the working copy after pulling
        """
        refs = None
        if commit_ids is not None:
            # narrow the pull to only the refs pointing at the wanted ids
            remote_refs = self._remote.get_remote_refs(url)
            refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
        self._remote.pull(url, refs=refs, update_after=update_after)
        self._remote.invalidate_vcs_cache()

    def fetch(self, url, commit_ids=None):
        """
        Fetch all git objects from external location.
        """
        self._remote.sync_fetch(url, refs=commit_ids)
        self._remote.invalidate_vcs_cache()

    def push(self, url):
        # push all refs to the given url
        refs = None
        self._remote.sync_push(url, refs=refs)

    def set_refs(self, ref_name, commit_id):
        # point ``ref_name`` at ``commit_id``
        self._remote.set_refs(ref_name, commit_id)

    def remove_ref(self, ref_name):
        self._remote.remove_ref(ref_name)

    def _update_server_info(self):
        """
        runs gits update-server-info command in this repo instance
        """
        self._remote.update_server_info()
709 709
    def _current_branch(self):
        """
        Return the name of the current branch.

        It only works for non bare repositories (i.e. repositories with a
        working copy)

        :raises RepositoryError: when called on a bare repository
        """
        if self.bare:
            raise RepositoryError('Bare git repos do not have active branches')

        if self.is_empty():
            return None

        stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
        return stdout.strip()

    def _checkout(self, branch_name, create=False, force=False):
        """
        Checkout a branch in the working directory.

        It tries to create the branch if create is True, failing if the branch
        already exists.

        It only works for non bare repositories (i.e. repositories with a
        working copy)

        :raises RepositoryError: when called on a bare repository
        """
        if self.bare:
            raise RepositoryError('Cannot checkout branches in a bare git repo')

        cmd = ['checkout']
        if force:
            cmd.append('-f')
        if create:
            cmd.append('-b')
        cmd.append(branch_name)
        self.run_git_command(cmd, fail_on_stderr=False)

    def _identify(self):
        """
        Return the current state of the working directory.

        :raises RepositoryError: when called on a bare repository
        """
        if self.bare:
            raise RepositoryError('Bare git repos do not have active branches')

        if self.is_empty():
            return None

        stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
        return stdout.strip()

    def _local_clone(self, clone_path, branch_name, source_branch=None):
        """
        Create a local clone of the current repo.

        :param clone_path: destination path for the clone
        :param branch_name: branch to clone (and check out)
        :param source_branch: optional extra branch to fetch into the clone
        """
        # N.B.(skreft): the --branch option is required as otherwise the shallow
        # clone will only fetch the active branch.
        cmd = ['clone', '--branch', branch_name,
               self.path, os.path.abspath(clone_path)]

        self.run_git_command(cmd, fail_on_stderr=False)

        # if we get the different source branch, make sure we also fetch it for
        # merge conditions
        if source_branch and source_branch != branch_name:
            # check if the ref exists.
            shadow_repo = GitRepository(os.path.abspath(clone_path))
            if shadow_repo.get_remote_ref(source_branch):
                cmd = ['fetch', self.path, source_branch]
                self.run_git_command(cmd, fail_on_stderr=False)
779 779
    def _local_fetch(self, repository_path, branch_name, use_origin=False):
        """
        Fetch a branch from a local repository.

        :param repository_path: path of the repository to fetch from
        :param branch_name: branch to fetch
        :param use_origin: also update the local ``refs/heads`` ref of the
            same name (forced, non-fast-forward allowed)
        :raises ValueError: when fetching from this same repository
        """
        repository_path = os.path.abspath(repository_path)
        if repository_path == self.path:
            raise ValueError('Cannot fetch from the same repository')

        if use_origin:
            branch_name = '+{branch}:refs/heads/{branch}'.format(
                branch=branch_name)

        cmd = ['fetch', '--no-tags', '--update-head-ok',
               repository_path, branch_name]
        self.run_git_command(cmd, fail_on_stderr=False)

    def _local_reset(self, branch_name):
        """
        Hard-reset the working directory to ``branch_name``.
        """
        # NOTE(review): the format() call looks like a plain string
        # coercion no-op for str inputs — presumably defensive; confirm
        # before removing.
        branch_name = '{}'.format(branch_name)
        cmd = ['reset', '--hard', branch_name, '--']
        self.run_git_command(cmd, fail_on_stderr=False)

    def _last_fetch_heads(self):
        """
        Return the last fetched heads that need merging.

        The algorithm is defined at
        https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
        """
        if not self.bare:
            fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
        else:
            fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')

        heads = []
        with open(fetch_heads_path) as f:
            for line in f:
                if '    not-for-merge    ' in line:
                    continue
                # keep only the sha: drop everything from the first tab on
                line = re.sub('\t.*', '', line, flags=re.DOTALL)
                heads.append(line)

        return heads

    def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
        # ``enable_hooks`` is accepted for interface parity; the git shadow
        # repository is opened as-is
        return GitRepository(shadow_repository_path)

    def _local_pull(self, repository_path, branch_name, ff_only=True):
        """
        Pull a branch from a local repository.

        :raises RepositoryError: when called on a bare repository
        """
        if self.bare:
            raise RepositoryError('Cannot pull into a bare git repository')
        # N.B.(skreft): The --ff-only option is to make sure this is a
        # fast-forward (i.e., we are only pulling new changes and there are no
        # conflicts with our current branch)
        # Additionally, that option needs to go before --no-tags, otherwise git
        # pull complains about it being an unknown flag.
        cmd = ['pull']
        if ff_only:
            cmd.append('--ff-only')
        cmd.extend(['--no-tags', repository_path, branch_name])
        self.run_git_command(cmd, fail_on_stderr=False)
842 842
843 843 def _local_merge(self, merge_message, user_name, user_email, heads):
844 844 """
845 845 Merge the given head into the checked out branch.
846 846
847 847 It will force a merge commit.
848 848
849 849 Currently it raises an error if the repo is empty, as it is not possible
850 850 to create a merge commit in an empty repo.
851 851
852 852 :param merge_message: The message to use for the merge commit.
853 853 :param heads: the heads to merge.
854 854 """
855 855 if self.bare:
856 856 raise RepositoryError('Cannot merge into a bare git repository')
857 857
858 858 if not heads:
859 859 return
860 860
861 861 if self.is_empty():
862 862 # TODO(skreft): do somehting more robust in this case.
863 863 raise RepositoryError(
864 864 'Do not know how to merge into empty repositories yet')
865 865
866 866 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
867 867 # commit message. We also specify the user who is doing the merge.
868 868 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
869 869 '-c', 'user.email=%s' % safe_str(user_email),
870 870 'merge', '--no-ff', '-m', safe_str(merge_message)]
871 871 cmd.extend(heads)
872 872 try:
873 873 output = self.run_git_command(cmd, fail_on_stderr=False)
874 874 except RepositoryError:
875 875 # Cleanup any merge leftovers
876 876 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
877 877 raise
878 878
    def _local_push(
            self, source_branch, repository_path, target_branch,
            enable_hooks=False, rc_scm_data=None):
        """
        Push the source_branch to the given repository and target_branch.

        Currently it if the target_branch is not master and the target repo is
        empty, the push will work, but then GitRepository won't be able to find
        the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
        pointing to master, which does not exist).

        It does not run the hooks in the target repo.
        """
        # TODO(skreft): deal with the case in which the target repo is empty,
        # and the target_branch is not master.
        target_repo = GitRepository(repository_path)
        if (not target_repo.bare and
                target_repo._current_branch() == target_branch):
            # Git prevents pushing to the checked out branch, so simulate it by
            # pulling into the target repository.
            target_repo._local_pull(self.path, source_branch)
        else:
            cmd = ['push', os.path.abspath(repository_path),
                   '%s:%s' % (source_branch, target_branch)]
            gitenv = {}
            if rc_scm_data:
                gitenv.update({'RC_SCM_DATA': rc_scm_data})

            if not enable_hooks:
                # signal the hook machinery in the target repo to skip
                gitenv['RC_SKIP_HOOKS'] = '1'
            self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)

    def _get_new_pr_branch(self, source_branch, target_branch):
        """
        Return a fresh, unused pull-request branch name of the form
        ``pr_<source>-<target>_<n>`` where ``n`` is one past the highest
        existing suffix.
        """
        prefix = 'pr_%s-%s_' % (source_branch, target_branch)
        pr_branches = []
        for branch in self.branches:
            if branch.startswith(prefix):
                pr_branches.append(int(branch[len(prefix):]))

        if not pr_branches:
            branch_id = 0
        else:
            branch_id = max(pr_branches) + 1

        return '%s%d' % (prefix, branch_id)
924 924
925 925 def _maybe_prepare_merge_workspace(
926 926 self, repo_id, workspace_id, target_ref, source_ref):
927 927 shadow_repository_path = self._get_shadow_repository_path(
928 928 repo_id, workspace_id)
929 929 if not os.path.exists(shadow_repository_path):
930 930 self._local_clone(
931 931 shadow_repository_path, target_ref.name, source_ref.name)
932 932 log.debug(
933 933 'Prepared shadow repository in %s', shadow_repository_path)
934 934
935 935 return shadow_repository_path
936 936
    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):
        """
        Merge `source_ref` from `source_repo` into `target_ref` of this repo
        inside a dedicated shadow repository and, unless `dry_run`, push the
        result back to the target branch.

        Returns a MergeResponse (possible, succeeded, merge_ref, reason).
        NOTE(review): `use_rebase` and `close_branch` are accepted for
        interface compatibility but are only logged here — confirm intended.
        """
        log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
                  'rebase' if use_rebase else 'merge', dry_run)
        # Refuse to merge if the target branch moved since the merge was
        # requested; callers must re-evaluate against the new head.
        if target_ref.commit_id != self.branches[target_ref.name]:
            log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
                        target_ref.commit_id, self.branches[target_ref.name])
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        shadow_repository_path = self._maybe_prepare_merge_workspace(
            repo_id, workspace_id, target_ref, source_ref)
        shadow_repo = self._get_shadow_instance(shadow_repository_path)

        # checkout source, if it's different. Otherwise we could not
        # fetch proper commits for merge testing
        if source_ref.name != target_ref.name:
            if shadow_repo.get_remote_ref(source_ref.name):
                shadow_repo._checkout(source_ref.name, force=True)

        # checkout target, and fetch changes
        shadow_repo._checkout(target_ref.name, force=True)

        # fetch/reset pull the target, in case it is changed
        # this handles even force changes
        shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
        shadow_repo._local_reset(target_ref.name)

        # Need to reload repo to invalidate the cache, or otherwise we cannot
        # retrieve the last target commit.
        shadow_repo = self._get_shadow_instance(shadow_repository_path)
        # Same head check as above, but against the freshly-synced shadow repo.
        if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
            log.warning('Shadow Target ref %s commit mismatch %s vs %s',
                        target_ref, target_ref.commit_id,
                        shadow_repo.branches[target_ref.name])
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        # calculate new branch
        pr_branch = shadow_repo._get_new_pr_branch(
            source_ref.name, target_ref.name)
        log.debug('using pull-request merge branch: `%s`', pr_branch)
        # checkout to temp branch, and fetch changes
        shadow_repo._checkout(pr_branch, create=True)
        try:
            shadow_repo._local_fetch(source_repo.path, source_ref.name)
        except RepositoryError:
            log.exception('Failure when doing local fetch on '
                          'shadow repo: %s', shadow_repo)
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
                metadata={'source_ref': source_ref})

        merge_ref = None
        merge_failure_reason = MergeFailureReason.NONE
        metadata = {}
        try:
            shadow_repo._local_merge(merge_message, merger_name, merger_email,
                                     [source_ref.commit_id])
            merge_possible = True

            # Need to reload repo to invalidate the cache, or otherwise we
            # cannot retrieve the merge commit.
            shadow_repo = GitRepository(shadow_repository_path)
            merge_commit_id = shadow_repo.branches[pr_branch]

            # Set a reference pointing to the merge commit. This reference may
            # be used to easily identify the last successful merge commit in
            # the shadow repository.
            shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
            merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
        except RepositoryError:
            log.exception('Failure when doing local merge on git shadow repo')
            merge_possible = False
            merge_failure_reason = MergeFailureReason.MERGE_FAILED

        if merge_possible and not dry_run:
            try:
                # Push runs the target repo's hooks so pre-push policies apply.
                shadow_repo._local_push(
                    pr_branch, self.path, target_ref.name, enable_hooks=True,
                    rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
                merge_succeeded = True
            except RepositoryError:
                log.exception(
                    'Failure when doing local push from the shadow '
                    'repository to the target repository at %s.', self.path)
                merge_succeeded = False
                merge_failure_reason = MergeFailureReason.PUSH_FAILED
                metadata['target'] = 'git shadow repo'
                metadata['merge_commit'] = pr_branch
        else:
            merge_succeeded = False

        return MergeResponse(
            merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
            metadata=metadata)
@@ -1,949 +1,949 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 HG repository module
23 23 """
24 24 import os
25 25 import logging
26 26 import binascii
27 27 import time
28 28 import urllib
29 29
30 30 from zope.cachedescriptors.property import Lazy as LazyProperty
31 31 from zope.cachedescriptors.property import CachedProperty
32 32
33 33 from rhodecode.lib.compat import OrderedDict
34 34 from rhodecode.lib.datelib import (
35 35 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
36 36 from rhodecode.lib.utils import safe_unicode, safe_str
37 37 from rhodecode.lib.vcs import connection, exceptions
38 38 from rhodecode.lib.vcs.backends.base import (
39 39 BaseRepository, CollectionGenerator, Config, MergeResponse,
40 40 MergeFailureReason, Reference, BasePathPermissionChecker)
41 41 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
42 42 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
43 43 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
44 44 from rhodecode.lib.vcs.exceptions import (
45 45 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
46 46 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
47 47 from rhodecode.lib.vcs.compat import configparser
48 48
49 49 hexlify = binascii.hexlify
50 50 nullid = "\0" * 20
51 51
52 52 log = logging.getLogger(__name__)
53 53
54 54
55 55 class MercurialRepository(BaseRepository):
56 56 """
57 57 Mercurial repository backend
58 58 """
59 59 DEFAULT_BRANCH_NAME = 'default'
60 60
    def __init__(self, repo_path, config=None, create=False, src_url=None,
                 do_workspace_checkout=False, with_wire=None, bare=False):
        """
        Raises RepositoryError if repository could not be found at the given
        ``repo_path``.

        :param repo_path: local path of the repository
        :param config: config object containing the repo configuration
        :param create=False: if set to True, would try to create repository if
           it does not exist rather than raising exception
        :param src_url=None: would try to clone repository from given location
        :param do_workspace_checkout=False: sets update of working copy after
           making a clone
        :param with_wire: extra wire-protocol options passed to the remote
        :param bare: not used, compatible with other VCS
        """

        self.path = safe_str(os.path.abspath(repo_path))
        # mercurial since 4.4.X requires certain configuration to be present
        # because sometimes we init the repos with config we need to meet
        # special requirements
        self.config = config if config else self.get_default_config(
            default=[('extensions', 'largefiles', '1')])
        self.with_wire = with_wire

        self._init_repo(create, src_url, do_workspace_checkout)

        # caches: commit id -> index map, filled by _rebuild_cache()
        self._commit_ids = {}

        # dependent that trigger re-computation of commit_ids
        # (bump this value to invalidate the CachedProperty below)
        self._commit_ids_ver = 0
92 92
    @LazyProperty
    def _remote(self):
        # Lazily-created proxy to the Mercurial backend for this repo path.
        return connection.Hg(self.path, self.config, with_wire=self.with_wire)
96 96
    @CachedProperty('_commit_ids_ver')
    def commit_ids(self):
        """
        Returns list of commit ids, in ascending order. Being lazy
        attribute allows external tools to inject shas from cache.

        Cached until ``self._commit_ids_ver`` changes (see strip()).
        """
        commit_ids = self._get_all_commit_ids()
        self._rebuild_cache(commit_ids)
        return commit_ids
106 106
107 107 def _rebuild_cache(self, commit_ids):
108 108 self._commit_ids = dict((commit_id, index)
109 109 for index, commit_id in enumerate(commit_ids))
110 110
    @LazyProperty
    def branches(self):
        # Active (not closed) branches: name -> commit id.
        return self._get_branches()
114 114
    @LazyProperty
    def branches_closed(self):
        # Closed branches only: name -> commit id.
        return self._get_branches(active=False, closed=True)
118 118
119 119 @LazyProperty
120 120 def branches_all(self):
121 121 all_branches = {}
122 122 all_branches.update(self.branches)
123 123 all_branches.update(self.branches_closed)
124 124 return all_branches
125 125
126 126 def _get_branches(self, active=True, closed=False):
127 127 """
128 128 Gets branches for this repository
129 129 Returns only not closed active branches by default
130 130
131 131 :param active: return also active branches
132 132 :param closed: return also closed branches
133 133
134 134 """
135 135 if self.is_empty():
136 136 return {}
137 137
138 138 def get_name(ctx):
139 139 return ctx[0]
140 140
141 141 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
142 142 self._remote.branches(active, closed).items()]
143 143
144 144 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
145 145
    @LazyProperty
    def tags(self):
        """
        Gets tags for this repository (tag name -> commit id).
        """
        return self._get_tags()
152 152
153 153 def _get_tags(self):
154 154 if self.is_empty():
155 155 return {}
156 156
157 157 def get_name(ctx):
158 158 return ctx[0]
159 159
160 160 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
161 161 self._remote.tags().items()]
162 162
163 163 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
164 164
    def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit
        :param kwargs: supports ``local`` (default False) to create a
            repository-local tag instead of a versioned one

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        if name in self.tags:
            raise TagAlreadyExistError("Tag %s already exists" % name)

        commit = self.get_commit(commit_id=commit_id)
        local = kwargs.setdefault('local', False)

        if message is None:
            message = "Added tag %s for commit %s" % (name, commit.short_id)

        date, tz = date_to_timestamp_plus_offset(date)

        self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
        # tagging changed repo state; drop the remote's cached vcs data
        self._remote.invalidate_vcs_cache()

        # Reinitialize tags (overrides the LazyProperty-cached value)
        self.tags = self._get_tags()
        tag_id = self.tags[name]

        return self.get_commit(commit_id=tag_id)
196 196
    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given `name`.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exists
        """
        if name not in self.tags:
            raise TagDoesNotExistError("Tag %s does not exist" % name)

        if message is None:
            message = "Removed tag %s" % name
        # tag removal is always recorded as a versioned (non-local) tag change
        local = False

        date, tz = date_to_timestamp_plus_offset(date)

        # tagging `name` to the null revision removes it
        self._remote.tag(name, nullid, message, local, user, date, tz)
        self._remote.invalidate_vcs_cache()
        # refresh the LazyProperty-cached tag mapping
        self.tags = self._get_tags()
220 220
    @LazyProperty
    def bookmarks(self):
        """
        Gets bookmarks for this repository (bookmark name -> commit id).
        """
        return self._get_bookmarks()
227 227
228 228 def _get_bookmarks(self):
229 229 if self.is_empty():
230 230 return {}
231 231
232 232 def get_name(ctx):
233 233 return ctx[0]
234 234
235 235 _bookmarks = [
236 236 (safe_unicode(n), hexlify(h)) for n, h in
237 237 self._remote.bookmarks().items()]
238 238
239 239 return OrderedDict(sorted(_bookmarks, key=get_name))
240 240
    def _get_all_commit_ids(self):
        # All commit ids visible in the repo (excludes hidden/obsolete ones).
        return self._remote.get_all_commit_ids('visible')
243 243
    def get_diff(
            self, commit1, commit2, path='', ignore_whitespace=False,
            context=3, path1=None):
        """
        Returns (git like) *diff*, as plain text. Shows changes introduced by
        `commit2` since `commit1`.

        :param commit1: Entry point from which diff is shown. Can be
          ``self.EMPTY_COMMIT`` - in this case, patch showing all
          the changes since empty state of the repository until `commit2`
        :param commit2: Until which commit changes should be shown.
        :param path: if given, limit the diff to this path.
        :param ignore_whitespace: If set to ``True``, would not show whitespace
          changes. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
          shown. Defaults to ``3``.
        :param path1: must be None or equal to `path`; Mercurial cannot diff
          two different paths against each other.

        :raises ValueError: if `path1` differs from `path`.
        """
        self._validate_diff_commits(commit1, commit2)
        if path1 is not None and path1 != path:
            raise ValueError("Diff of two different paths not supported.")

        if path:
            file_filter = [self.path, path]
        else:
            file_filter = None

        diff = self._remote.diff(
            commit1.raw_id, commit2.raw_id, file_filter=file_filter,
            opt_git=True, opt_ignorews=ignore_whitespace,
            context=context)
        return MercurialDiff(diff)
274 274
    def strip(self, commit_id, branch=None):
        """Strip `commit_id` (and descendants) from the repository.

        Returns the new number of commits. ``branch`` is accepted for
        interface compatibility with other backends but unused here.
        """
        self._remote.strip(commit_id, update=False, backup="none")

        self._remote.invalidate_vcs_cache()
        self._commit_ids_ver = time.time()
        # we updated _commit_ids_ver so accessing self.commit_ids will re-compute it
        return len(self.commit_ids)
282 282
283 283 def verify(self):
284 284 verify = self._remote.verify()
285 285
286 286 self._remote.invalidate_vcs_cache()
287 287 return verify
288 288
289 289 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
290 290 if commit_id1 == commit_id2:
291 291 return commit_id1
292 292
293 293 ancestors = self._remote.revs_from_revspec(
294 294 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
295 295 other_path=repo2.path)
296 296 return repo2[ancestors[0]].raw_id if ancestors else None
297 297
    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        """Return commits of `repo2` between `commit_id1` and `commit_id2`.

        :param merge: when True, include everything reachable from
            `commit_id2` but not from `commit_id1` (merge preview);
            otherwise use the linear range between the two ids.
        """
        if commit_id1 == commit_id2:
            commits = []
        else:
            if merge:
                indexes = self._remote.revs_from_revspec(
                    "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
                    commit_id2, commit_id1, commit_id1, other_path=repo2.path)
            else:
                indexes = self._remote.revs_from_revspec(
                    "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
                    commit_id1, other_path=repo2.path)

            commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
                       for idx in indexes]

        return commits
315 315
    @staticmethod
    def check_url(url, config):
        """
        Function will check given url and try to verify if it's a valid
        link. Sometimes it may happened that mercurial will issue basic
        auth request that can cause whole API to hang when used from python
        or other external calls.

        On failures it'll raise urllib2.HTTPError, exception is also thrown
        when the return code is non 200
        """
        # check first if it's not a local url — those are always accepted
        if os.path.isdir(url) or url.startswith('file:'):
            return True

        # Request the _remote to verify the url
        return connection.Hg.check_url(url, config.serialize())
333 333
334 334 @staticmethod
335 335 def is_valid_repository(path):
336 336 return os.path.isdir(os.path.join(path, '.hg'))
337 337
    def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
        """
        Function will check for mercurial repository in given path. If there
        is no repository in that path it will raise an exception unless
        `create` parameter is set to True - in that case repository would
        be created.

        If `src_url` is given, would try to clone repository from the
        location at given clone_point. Additionally it'll make update to
        working copy accordingly to `do_workspace_checkout` flag.

        :raises RepositoryError: when `create` is set but the path exists.
        """
        if create and os.path.exists(self.path):
            raise RepositoryError(
                "Cannot create repository at %s, location already exist"
                % self.path)

        if src_url:
            url = str(self._get_url(src_url))
            # validate before cloning to fail fast on bad/unreachable urls
            MercurialRepository.check_url(url, self.config)

            self._remote.clone(url, self.path, do_workspace_checkout)

            # Don't try to create if we've already cloned repo
            create = False

        if create:
            os.makedirs(self.path, mode=0o755)

        self._remote.localrepository(create)
367 367
    @LazyProperty
    def in_memory_commit(self):
        # Fresh in-memory commit object bound to this repository.
        return MercurialInMemoryCommit(self)
371 371
    @LazyProperty
    def description(self):
        # Repository description from hgrc [web] section, or the default.
        description = self._remote.get_config_value(
            'web', 'description', untrusted=True)
        return safe_unicode(description or self.DEFAULT_DESCRIPTION)
377 377
    @LazyProperty
    def contact(self):
        # Contact from hgrc: [web] contact, falling back to [ui] username,
        # then to the class default.
        contact = (
            self._remote.get_config_value("web", "contact") or
            self._remote.get_config_value("ui", "username"))
        return safe_unicode(contact or self.DEFAULT_CONTACT)
384 384
    @LazyProperty
    def last_change(self):
        """
        Returns last change made on this repository as
        `datetime.datetime` object.
        """
        try:
            return self.get_commit().date
        except RepositoryError:
            # empty repository: fall back to the filesystem mtime
            tzoffset = makedate()[1]
            return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
396 396
397 397 def _get_fs_mtime(self):
398 398 # fallback to filesystem
399 399 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
400 400 st_path = os.path.join(self.path, '.hg', "store")
401 401 if os.path.exists(cl_path):
402 402 return os.stat(cl_path).st_mtime
403 403 else:
404 404 return os.stat(st_path).st_mtime
405 405
    def _get_url(self, url):
        """
        Returns normalized url. If schema is not given, would fall
        to filesystem
        (``file:///``) schema.

        NOTE(review): relies on Python 2 ``urllib.pathname2url`` and
        byte-string semantics of ``encode('utf8')``.
        """
        url = url.encode('utf8')
        if url != 'default' and '://' not in url:
            url = "file:" + urllib.pathname2url(url)
        return url
416 416
    def get_hook_location(self):
        """
        returns absolute path to location where hooks are stored
        (for Mercurial that is the repository's ``.hg/.hgrc`` file)
        """
        return os.path.join(self.path, '.hg', '.hgrc')
422 422
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
        """
        Returns ``MercurialCommit`` object representing repository's
        commit at the given `commit_id` or `commit_idx`.

        When neither is given, returns the "tip" commit. `translate_tag`
        is accepted for interface compatibility and unused here.

        :raises EmptyRepositoryError: when the repository has no commits.
        :raises CommitDoesNotExistError: when the id/idx cannot be resolved.
        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        if commit_id is not None:
            self._validate_commit_id(commit_id)
            try:
                # we have cached idx, use it without contacting the remote
                idx = self._commit_ids[commit_id]
                return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
            except KeyError:
                # not cached — fall through to the remote lookup below
                pass

        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            try:
                _commit_id = self.commit_ids[commit_idx]
                if commit_idx < 0:
                    # normalize negative indexes to their positive position
                    commit_idx = self.commit_ids.index(_commit_id)

                return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
            except IndexError:
                # out-of-range index: let the remote lookup raise properly
                commit_id = commit_idx
        else:
            commit_id = "tip"

        if isinstance(commit_id, unicode):
            commit_id = safe_str(commit_id)

        try:
            raw_id, idx = self._remote.lookup(commit_id, both=True)
        except CommitDoesNotExistError:
            msg = "Commit {} does not exist for `{}`".format(
                *map(safe_str, [commit_id, self.name]))
            raise CommitDoesNotExistError(msg)

        return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
464 464
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
        """
        Returns generator of ``MercurialCommit`` objects from start to end
        (both are inclusive)

        :param start_id: None, str(commit_id)
        :param end_id: None, str(commit_id)
        :param start_date: if specified, commits with commit date less than
          ``start_date`` would be filtered out from returned set
        :param end_date: if specified, commits with commit date greater than
          ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, commits not reachable from given
          branch would be filtered out from returned set
        :param show_hidden: Show hidden commits such as obsolete or hidden from
            Mercurial evolve
        :raise BranchDoesNotExistError: If given ``branch_name`` does not
            exist.
        :raise CommitDoesNotExistError: If commit for given ``start`` or
          ``end`` could not be found.
        """
        # actually we should check now if it's not an empty repo
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")
        self._validate_branch_name(branch_name)

        # NOTE(review): always False here, so the ancestors() revset branch
        # below is currently unreachable — confirm whether this is intended.
        branch_ancestors = False
        if start_id is not None:
            self._validate_commit_id(start_id)
            c_start = self.get_commit(commit_id=start_id)
            start_pos = self._commit_ids[c_start.raw_id]
        else:
            start_pos = None

        if end_id is not None:
            self._validate_commit_id(end_id)
            c_end = self.get_commit(commit_id=end_id)
            end_pos = max(0, self._commit_ids[c_end.raw_id])
        else:
            end_pos = None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        if end_pos is not None:
            # make the end inclusive for the slice below
            end_pos += 1

        # build a Mercurial revset from the requested filters
        commit_filter = []

        if branch_name and not branch_ancestors:
            commit_filter.append('branch("%s")' % (branch_name,))
        elif branch_name and branch_ancestors:
            commit_filter.append('ancestors(branch("%s"))' % (branch_name,))

        if start_date and not end_date:
            commit_filter.append('date(">%s")' % (start_date,))
        if end_date and not start_date:
            commit_filter.append('date("<%s")' % (end_date,))
        if start_date and end_date:
            commit_filter.append(
                'date(">%s") and date("<%s")' % (start_date, end_date))

        if not show_hidden:
            commit_filter.append('not obsolete()')
            commit_filter.append('not hidden()')

        # TODO: johbo: Figure out a simpler way for this solution
        collection_generator = CollectionGenerator
        if commit_filter:
            commit_filter = ' and '.join(map(safe_str, commit_filter))
            revisions = self._remote.rev_range([commit_filter])
            collection_generator = MercurialIndexBasedCollectionGenerator
        else:
            revisions = self.commit_ids

        if start_pos or end_pos:
            revisions = revisions[start_pos:end_pos]

        return collection_generator(self, revisions, pre_load=pre_load)
547 547
    def pull(self, url, commit_ids=None):
        """
        Pull changes from external location.

        :param commit_ids: Optional. Can be set to a list of commit ids
           which shall be pulled from the other repository.
        """
        url = self._get_url(url)
        self._remote.pull(url, commit_ids=commit_ids)
        # pulled commits change repo state; drop cached vcs data
        self._remote.invalidate_vcs_cache()
558 558
    def fetch(self, url, commit_ids=None):
        """
        Backward compatibility with GIT fetch==pull
        (in Mercurial there is no separate fetch operation).
        """
        return self.pull(url, commit_ids=commit_ids)
564 564
565 565 def push(self, url):
566 566 url = self._get_url(url)
567 567 self._remote.sync_push(url)
568 568
    def _local_clone(self, clone_path):
        """
        Create a local clone of the current repo.
        The working copy is checked out; repo hooks are not copied.
        """
        self._remote.clone(self.path, clone_path, update_after_clone=True,
                           hooks=False)
575 575
    def _update(self, revision, clean=False):
        """
        Update the working copy to the specified revision.

        :param clean: discard uncommitted changes when True.
        """
        log.debug('Doing checkout to commit: `%s` for %s', revision, self)
        self._remote.update(revision, clean=clean)
582 582
583 583 def _identify(self):
584 584 """
585 585 Return the current state of the working directory.
586 586 """
587 587 return self._remote.identify().strip().rstrip('+')
588 588
589 589 def _heads(self, branch=None):
590 590 """
591 591 Return the commit ids of the repository heads.
592 592 """
593 593 return self._remote.heads(branch=branch).strip().split(' ')
594 594
    def _ancestor(self, revision1, revision2):
        """
        Return the common ancestor of the two revisions.
        """
        return self._remote.ancestor(revision1, revision2)
600 600
    def _local_push(
            self, revision, repository_path, push_branches=False,
            enable_hooks=False):
        """
        Push the given revision to the specified repository.

        :param push_branches: allow to create branches in the target repo.
        :param enable_hooks: run the target repository's hooks when True.
        """
        self._remote.push(
            [revision], repository_path, hooks=enable_hooks,
            push_branches=push_branches)
612 612
    def _local_merge(self, target_ref, merge_message, user_name, user_email,
                     source_ref, use_rebase=False, dry_run=False):
        """
        Merge the given source_revision into the checked out revision.

        Returns the commit id of the merge and a boolean indicating if the
        commit needs to be pushed.

        NOTE(review): `dry_run` is accepted but not referenced in this
        method — confirm whether callers rely on it here.
        """
        self._update(target_ref.commit_id, clean=True)

        ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
        is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)

        if ancestor == source_ref.commit_id:
            # Nothing to do, the changes were already integrated
            return target_ref.commit_id, False

        elif ancestor == target_ref.commit_id and is_the_same_branch:
            # In this case we should force a commit message
            # (fast-forward: the source commit becomes the result, but a
            # push is still required)
            return source_ref.commit_id, True

        if use_rebase:
            try:
                # bookmark the source so we can check out the rebased result
                bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
                                                target_ref.commit_id)
                self.bookmark(bookmark_name, revision=source_ref.commit_id)
                self._remote.rebase(
                    source=source_ref.commit_id, dest=target_ref.commit_id)
                self._remote.invalidate_vcs_cache()
                self._update(bookmark_name, clean=True)
                return self._identify(), True
            except RepositoryError:
                # The rebase-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while rebasing shadow repo during merge.')

                # Cleanup any rebase leftovers
                self._remote.invalidate_vcs_cache()
                self._remote.rebase(abort=True)
                self._remote.invalidate_vcs_cache()
                self._remote.update(clean=True)
                raise
        else:
            try:
                self._remote.merge(source_ref.commit_id)
                self._remote.invalidate_vcs_cache()
                self._remote.commit(
                    message=safe_str(merge_message),
                    username=safe_str('%s <%s>' % (user_name, user_email)))
                self._remote.invalidate_vcs_cache()
                return self._identify(), True
            except RepositoryError:
                # Cleanup any merge leftovers
                self._remote.update(clean=True)
                raise
668 668
    def _local_close(self, target_ref, user_name, user_email,
                     source_ref, close_message=''):
        """
        Close the branch of the given source_revision

        Returns the commit id of the close and a boolean indicating if the
        commit needs to be pushed.

        NOTE(review): `target_ref` is accepted but not referenced here —
        kept for signature symmetry with _local_merge; confirm.
        """
        self._update(source_ref.commit_id)
        message = close_message or "Closing branch: `{}`".format(source_ref.name)
        try:
            self._remote.commit(
                message=safe_str(message),
                username=safe_str('%s <%s>' % (user_name, user_email)),
                close_branch=True)
            self._remote.invalidate_vcs_cache()
            return self._identify(), True
        except RepositoryError:
            # Cleanup any commit leftovers
            self._remote.update(clean=True)
            raise
690 690
691 691 def _is_the_same_branch(self, target_ref, source_ref):
692 692 return (
693 693 self._get_branch_name(target_ref) ==
694 694 self._get_branch_name(source_ref))
695 695
696 696 def _get_branch_name(self, ref):
697 697 if ref.type == 'branch':
698 698 return ref.name
699 699 return self._remote.ctx_branch(ref.commit_id)
700 700
701 701 def _maybe_prepare_merge_workspace(
702 702 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
703 703 shadow_repository_path = self._get_shadow_repository_path(
704 704 repo_id, workspace_id)
705 705 if not os.path.exists(shadow_repository_path):
706 706 self._local_clone(shadow_repository_path)
707 707 log.debug(
708 708 'Prepared shadow repository in %s', shadow_repository_path)
709 709
710 710 return shadow_repository_path
711 711
    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):
        """
        Merge ``source_ref`` from ``source_repo`` into ``target_ref`` of this
        repository. All work happens inside a shadow repository; the result
        is pushed back to this repository only on success and only when not
        in ``dry_run`` mode.

        :param repo_id: id used to locate the shadow workspace on disk.
        :param workspace_id: merge workspace identifier.
        :param target_ref: reference in this repo the merge lands on.
        :param source_repo: repository the source commits are pulled from.
        :param source_ref: reference to merge into ``target_ref``.
        :param merge_message: commit message for the merge commit.
        :param merger_name: author name for merge/close commits.
        :param merger_email: author email for merge/close commits.
        :param dry_run: when True, only test mergeability; never push.
        :param use_rebase: rebase the source onto the target instead of
            creating a merge commit.
        :param close_branch: additionally close the source branch; only
            honoured for branch refs whose name differs from the target.
        :return: ``MergeResponse`` describing possibility/success/failure.
        """
        log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
                  'rebase' if use_rebase else 'merge', dry_run)
        # Refuse to merge unless the target commit is a current repo head;
        # pushing the result would otherwise create new heads.
        if target_ref.commit_id not in self._heads():
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        try:
            # Multiple heads on the target branch make the merge target
            # ambiguous in Mercurial, so bail out with the head list.
            if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
                # NOTE(review): '\n,'.join places the comma after the newline;
                # ',\n'.join was probably intended -- confirm with UI output
                # before changing, as this string is user-visible metadata.
                heads = '\n,'.join(self._heads(target_ref.name))
                metadata = {
                    'target_ref': target_ref,
                    'source_ref': source_ref,
                    'heads': heads
                }
                return MergeResponse(
                    False, False, None,
                    MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
                    metadata=metadata)
        except CommitDoesNotExistError:
            log.exception('Failure when looking up branch heads on hg target')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF,
                metadata={'target_ref': target_ref})

        # All merge work happens in the shadow clone, never in this repo.
        shadow_repository_path = self._maybe_prepare_merge_workspace(
            repo_id, workspace_id, target_ref, source_ref)
        shadow_repo = self._get_shadow_instance(shadow_repository_path)

        log.debug('Pulling in target reference %s', target_ref)
        self._validate_pull_reference(target_ref)
        shadow_repo._local_pull(self.path, target_ref)

        try:
            log.debug('Pulling in source reference %s', source_ref)
            source_repo._validate_pull_reference(source_ref)
            shadow_repo._local_pull(source_repo.path, source_ref)
        except CommitDoesNotExistError:
            log.exception('Failure when doing local pull on hg shadow repo')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
                metadata={'source_ref': source_ref})

        merge_ref = None
        merge_commit_id = None
        close_commit_id = None
        merge_failure_reason = MergeFailureReason.NONE
        metadata = {}

        # enforce that close branch should be used only in case we source from
        # an actual Branch
        close_branch = close_branch and source_ref.type == 'branch'

        # don't allow to close branch if source and target are the same
        close_branch = close_branch and source_ref.name != target_ref.name

        needs_push_on_close = False
        if close_branch and not use_rebase and not dry_run:
            try:
                # Commit the branch close in the shadow repo first; the
                # resulting commit may need to be pushed later on.
                close_commit_id, needs_push_on_close = shadow_repo._local_close(
                    target_ref, merger_name, merger_email, source_ref)
                merge_possible = True
            except RepositoryError:
                log.exception('Failure when doing close branch on '
                              'shadow repo: %s', shadow_repo)
                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
        else:
            merge_possible = True

        needs_push = False
        if merge_possible:
            try:
                merge_commit_id, needs_push = shadow_repo._local_merge(
                    target_ref, merge_message, merger_name, merger_email,
                    source_ref, use_rebase=use_rebase, dry_run=dry_run)
                merge_possible = True

                # read the state of the close action, if it
                # maybe required a push
                needs_push = needs_push or needs_push_on_close

                # Set a bookmark pointing to the merge commit. This bookmark
                # may be used to easily identify the last successful merge
                # commit in the shadow repository.
                shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
                merge_ref = Reference('book', 'pr-merge', merge_commit_id)
            except SubrepoMergeError:
                log.exception(
                    'Subrepo merge error during local merge on hg shadow repo.')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
                needs_push = False
            except RepositoryError:
                log.exception('Failure when doing local merge on hg shadow repo')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
                needs_push = False

        if merge_possible and not dry_run:
            if needs_push:
                # In case the target is a bookmark, update it, so after pushing
                # the bookmarks is also updated in the target.
                if target_ref.type == 'book':
                    shadow_repo.bookmark(
                        target_ref.name, revision=merge_commit_id)
                try:
                    # Re-open the shadow repo with hooks enabled so the push
                    # back to the real target triggers the usual hooks.
                    shadow_repo_with_hooks = self._get_shadow_instance(
                        shadow_repository_path,
                        enable_hooks=True)
                    # This is the actual merge action, we push from shadow
                    # into origin.
                    # Note: the push_branches option will push any new branch
                    # defined in the source repository to the target. This may
                    # be dangerous as branches are permanent in Mercurial.
                    # This feature was requested in issue #441.
                    shadow_repo_with_hooks._local_push(
                        merge_commit_id, self.path, push_branches=True,
                        enable_hooks=True)

                    # maybe we also need to push the close_commit_id
                    if close_commit_id:
                        shadow_repo_with_hooks._local_push(
                            close_commit_id, self.path, push_branches=True,
                            enable_hooks=True)
                    merge_succeeded = True
                except RepositoryError:
                    log.exception(
                        'Failure when doing local push from the shadow '
                        'repository to the target repository at %s.', self.path)
                    merge_succeeded = False
                    merge_failure_reason = MergeFailureReason.PUSH_FAILED
                    metadata['target'] = 'hg shadow repo'
                    metadata['merge_commit'] = merge_commit_id
            else:
                # Nothing new to push (e.g. fast-forward already present).
                merge_succeeded = True
        else:
            merge_succeeded = False

        return MergeResponse(
            merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
            metadata=metadata)
859 859
860 860 def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
861 861 config = self.config.copy()
862 862 if not enable_hooks:
863 863 config.clear_section('hooks')
864 864 return MercurialRepository(shadow_repository_path, config)
865 865
866 866 def _validate_pull_reference(self, reference):
867 867 if not (reference.name in self.bookmarks or
868 868 reference.name in self.branches or
869 869 self.get_commit(reference.commit_id)):
870 870 raise CommitDoesNotExistError(
871 871 'Unknown branch, bookmark or commit id')
872 872
873 873 def _local_pull(self, repository_path, reference):
874 874 """
875 875 Fetch a branch, bookmark or commit from a local repository.
876 876 """
877 877 repository_path = os.path.abspath(repository_path)
878 878 if repository_path == self.path:
879 879 raise ValueError('Cannot pull from the same repository')
880 880
881 881 reference_type_to_option_name = {
882 882 'book': 'bookmark',
883 883 'branch': 'branch',
884 884 }
885 885 option_name = reference_type_to_option_name.get(
886 886 reference.type, 'revision')
887 887
888 888 if option_name == 'revision':
889 889 ref = reference.commit_id
890 890 else:
891 891 ref = reference.name
892 892
893 893 options = {option_name: [ref]}
894 894 self._remote.pull_cmd(repository_path, hooks=False, **options)
895 895 self._remote.invalidate_vcs_cache()
896 896
897 897 def bookmark(self, bookmark, revision=None):
898 898 if isinstance(bookmark, unicode):
899 899 bookmark = safe_str(bookmark)
900 900 self._remote.bookmark(bookmark, revision=revision)
901 901 self._remote.invalidate_vcs_cache()
902 902
903 903 def get_path_permissions(self, username):
904 904 hgacl_file = os.path.join(self.path, '.hg/hgacl')
905 905
906 906 def read_patterns(suffix):
907 907 svalue = None
908 908 for section, option in [
909 909 ('narrowacl', username + suffix),
910 910 ('narrowacl', 'default' + suffix),
911 911 ('narrowhgacl', username + suffix),
912 912 ('narrowhgacl', 'default' + suffix)
913 913 ]:
914 914 try:
915 915 svalue = hgacl.get(section, option)
916 916 break # stop at the first value we find
917 917 except configparser.NoOptionError:
918 918 pass
919 919 if not svalue:
920 920 return None
921 921 result = ['/']
922 922 for pattern in svalue.split():
923 923 result.append(pattern)
924 924 if '*' not in pattern and '?' not in pattern:
925 925 result.append(pattern + '/*')
926 926 return result
927 927
928 928 if os.path.exists(hgacl_file):
929 929 try:
930 930 hgacl = configparser.RawConfigParser()
931 931 hgacl.read(hgacl_file)
932 932
933 933 includes = read_patterns('.includes')
934 934 excludes = read_patterns('.excludes')
935 935 return BasePathPermissionChecker.create_from_patterns(
936 936 includes, excludes)
937 937 except BaseException as e:
938 938 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
939 939 hgacl_file, self.name, e)
940 940 raise exceptions.RepositoryRequirementError(msg)
941 941 else:
942 942 return None
943 943
944 944
class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
    """Commit collection whose entries are Mercurial revision indexes."""

    def _commit_factory(self, commit_id):
        # The stored values are sequential hg revision numbers, so they are
        # passed as ``commit_idx`` rather than as a commit hash.
        return self.repo.get_commit(
            commit_idx=commit_id, pre_load=self.pre_load)
General Comments 0
You need to be logged in to leave comments. Login now