##// END OF EJS Templates
tests: fixing tests for pull-requests and changelog(commits)
marcink -
r3772:5c4b5f3d new-ui
parent child Browse files
Show More
@@ -1,81 +1,81 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import pytest
21 import pytest
22
22
23 from rhodecode.model.db import ChangesetStatus
23 from rhodecode.model.db import ChangesetStatus
24 from rhodecode.api.tests.utils import (
24 from rhodecode.api.tests.utils import (
25 build_data, api_call, assert_error, assert_ok)
25 build_data, api_call, assert_error, assert_ok)
26
26
27
27
28 @pytest.mark.usefixtures("testuser_api", "app")
28 @pytest.mark.usefixtures("testuser_api", "app")
29 class TestCommentCommit(object):
29 class TestCommentCommit(object):
30 def test_api_comment_commit_on_empty_repo(self, backend):
30 def test_api_comment_commit_on_empty_repo(self, backend):
31 repo = backend.create_repo()
31 repo = backend.create_repo()
32 id_, params = build_data(
32 id_, params = build_data(
33 self.apikey, 'comment_commit', repoid=repo.repo_name,
33 self.apikey, 'comment_commit', repoid=repo.repo_name,
34 commit_id='tip', message='message', status_change=None)
34 commit_id='tip', message='message', status_change=None)
35 response = api_call(self.app, params)
35 response = api_call(self.app, params)
36 expected = 'There are no commits yet'
36 expected = 'There are no commits yet'
37 assert_error(id_, expected, given=response.body)
37 assert_error(id_, expected, given=response.body)
38
38
39 @pytest.mark.parametrize("commit_id, expected_err", [
39 @pytest.mark.parametrize("commit_id, expected_err", [
40 ('abcabca', {'hg': 'Commit {commit} does not exist for {repo}',
40 ('abcabca', {'hg': 'Commit {commit} does not exist for `{repo}`',
41 'git': 'Commit {commit} does not exist for {repo}',
41 'git': 'Commit {commit} does not exist for `{repo}`',
42 'svn': 'Commit id {commit} not understood.'}),
42 'svn': 'Commit id {commit} not understood.'}),
43 ('idontexist', {'hg': 'Commit {commit} does not exist for {repo}',
43 ('idontexist', {'hg': 'Commit {commit} does not exist for `{repo}`',
44 'git': 'Commit {commit} does not exist for {repo}',
44 'git': 'Commit {commit} does not exist for `{repo}`',
45 'svn': 'Commit id {commit} not understood.'}),
45 'svn': 'Commit id {commit} not understood.'}),
46 ])
46 ])
47 def test_api_comment_commit_wrong_hash(self, backend, commit_id, expected_err):
47 def test_api_comment_commit_wrong_hash(self, backend, commit_id, expected_err):
48 repo_name = backend.repo.repo_name
48 repo_name = backend.repo.repo_name
49 id_, params = build_data(
49 id_, params = build_data(
50 self.apikey, 'comment_commit', repoid=repo_name,
50 self.apikey, 'comment_commit', repoid=repo_name,
51 commit_id=commit_id, message='message', status_change=None)
51 commit_id=commit_id, message='message', status_change=None)
52 response = api_call(self.app, params)
52 response = api_call(self.app, params)
53
53
54 expected_err = expected_err[backend.alias]
54 expected_err = expected_err[backend.alias]
55 expected_err = expected_err.format(
55 expected_err = expected_err.format(
56 repo=backend.repo.scm_instance(), commit=commit_id)
56 repo=backend.repo.scm_instance().name, commit=commit_id)
57 assert_error(id_, expected_err, given=response.body)
57 assert_error(id_, expected_err, given=response.body)
58
58
59 @pytest.mark.parametrize("status_change, message, commit_id", [
59 @pytest.mark.parametrize("status_change, message, commit_id", [
60 (None, 'Hallo', 'tip'),
60 (None, 'Hallo', 'tip'),
61 (ChangesetStatus.STATUS_APPROVED, 'Approved', 'tip'),
61 (ChangesetStatus.STATUS_APPROVED, 'Approved', 'tip'),
62 (ChangesetStatus.STATUS_REJECTED, 'Rejected', 'tip'),
62 (ChangesetStatus.STATUS_REJECTED, 'Rejected', 'tip'),
63 ])
63 ])
64 def test_api_comment_commit(
64 def test_api_comment_commit(
65 self, backend, status_change, message, commit_id,
65 self, backend, status_change, message, commit_id,
66 no_notifications):
66 no_notifications):
67
67
68 commit_id = backend.repo.scm_instance().get_commit(commit_id).raw_id
68 commit_id = backend.repo.scm_instance().get_commit(commit_id).raw_id
69
69
70 id_, params = build_data(
70 id_, params = build_data(
71 self.apikey, 'comment_commit', repoid=backend.repo_name,
71 self.apikey, 'comment_commit', repoid=backend.repo_name,
72 commit_id=commit_id, message=message, status=status_change)
72 commit_id=commit_id, message=message, status=status_change)
73 response = api_call(self.app, params)
73 response = api_call(self.app, params)
74 repo = backend.repo.scm_instance()
74 repo = backend.repo.scm_instance()
75 expected = {
75 expected = {
76 'msg': 'Commented on commit `%s` for repository `%s`' % (
76 'msg': 'Commented on commit `%s` for repository `%s`' % (
77 repo.get_commit().raw_id, backend.repo_name),
77 repo.get_commit().raw_id, backend.repo_name),
78 'status_change': status_change,
78 'status_change': status_change,
79 'success': True
79 'success': True
80 }
80 }
81 assert_ok(id_, expected, given=response.body)
81 assert_ok(id_, expected, given=response.body)
@@ -1,83 +1,84 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import pytest
21 import pytest
22 from rhodecode.model.db import Repository
22 from rhodecode.model.db import Repository
23
23
24
24
25 def route_path(name, params=None, **kwargs):
25 def route_path(name, params=None, **kwargs):
26 import urllib
26 import urllib
27
27
28 base_url = {
28 base_url = {
29 'pullrequest_show_all': '/{repo_name}/pull-request',
29 'pullrequest_show_all': '/{repo_name}/pull-request',
30 'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
30 'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
31 }[name].format(**kwargs)
31 }[name].format(**kwargs)
32
32
33 if params:
33 if params:
34 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
34 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
35 return base_url
35 return base_url
36
36
37
37
38 @pytest.mark.backends("git", "hg")
38 @pytest.mark.backends("git", "hg")
39 @pytest.mark.usefixtures('autologin_user', 'app')
39 @pytest.mark.usefixtures('autologin_user', 'app')
40 class TestPullRequestList(object):
40 class TestPullRequestList(object):
41
41
42 @pytest.mark.parametrize('params, expected_title', [
42 @pytest.mark.parametrize('params, expected_title', [
43 ({'source': 0, 'closed': 1}, 'Closed Pull Requests'),
43 ({'source': 0, 'closed': 1}, 'Closed'),
44 ({'source': 0, 'my': 1}, 'opened by me'),
44 ({'source': 0, 'my': 1}, 'Opened by me'),
45 ({'source': 0, 'awaiting_review': 1}, 'awaiting review'),
45 ({'source': 0, 'awaiting_review': 1}, 'Awaiting review'),
46 ({'source': 0, 'awaiting_my_review': 1}, 'awaiting my review'),
46 ({'source': 0, 'awaiting_my_review': 1}, 'Awaiting my review'),
47 ({'source': 1}, 'Pull Requests from'),
47 ({'source': 1}, 'From this repo'),
48 ])
48 ])
49 def test_showing_list_page(self, backend, pr_util, params, expected_title):
49 def test_showing_list_page(self, backend, pr_util, params, expected_title):
50 pull_request = pr_util.create_pull_request()
50 pull_request = pr_util.create_pull_request()
51
51
52 response = self.app.get(
52 response = self.app.get(
53 route_path('pullrequest_show_all',
53 route_path('pullrequest_show_all',
54 repo_name=pull_request.target_repo.repo_name,
54 repo_name=pull_request.target_repo.repo_name,
55 params=params))
55 params=params))
56
56
57 assert_response = response.assert_response()
57 assert_response = response.assert_response()
58 assert_response.element_equals_to('.panel-title', expected_title)
58
59 element = assert_response.get_element('.panel-title')
59 element = assert_response.get_element('.title .active')
60 element_text = assert_response._element_to_string(element)
60 element_text = element.text_content()
61 assert expected_title == element_text
61
62
62 def test_showing_list_page_data(self, backend, pr_util, xhr_header):
63 def test_showing_list_page_data(self, backend, pr_util, xhr_header):
63 pull_request = pr_util.create_pull_request()
64 pull_request = pr_util.create_pull_request()
64 response = self.app.get(
65 response = self.app.get(
65 route_path('pullrequest_show_all_data',
66 route_path('pullrequest_show_all_data',
66 repo_name=pull_request.target_repo.repo_name),
67 repo_name=pull_request.target_repo.repo_name),
67 extra_environ=xhr_header)
68 extra_environ=xhr_header)
68
69
69 assert response.json['recordsTotal'] == 1
70 assert response.json['recordsTotal'] == 1
70 assert response.json['data'][0]['description'] == 'Description'
71 assert response.json['data'][0]['description'] == 'Description'
71
72
72 def test_description_is_escaped_on_index_page(self, backend, pr_util, xhr_header):
73 def test_description_is_escaped_on_index_page(self, backend, pr_util, xhr_header):
73 xss_description = "<script>alert('Hi!')</script>"
74 xss_description = "<script>alert('Hi!')</script>"
74 pull_request = pr_util.create_pull_request(description=xss_description)
75 pull_request = pr_util.create_pull_request(description=xss_description)
75
76
76 response = self.app.get(
77 response = self.app.get(
77 route_path('pullrequest_show_all_data',
78 route_path('pullrequest_show_all_data',
78 repo_name=pull_request.target_repo.repo_name),
79 repo_name=pull_request.target_repo.repo_name),
79 extra_environ=xhr_header)
80 extra_environ=xhr_header)
80
81
81 assert response.json['recordsTotal'] == 1
82 assert response.json['recordsTotal'] == 1
82 assert response.json['data'][0]['description'] == \
83 assert response.json['data'][0]['description'] == \
83 "&lt;script&gt;alert(&#39;Hi!&#39;)&lt;/script&gt;"
84 "&lt;script&gt;alert(&#39;Hi!&#39;)&lt;/script&gt;"
@@ -1,213 +1,218 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import re
21 import re
22
22
23 import pytest
23 import pytest
24
24
25 from rhodecode.apps.repository.views.repo_changelog import DEFAULT_CHANGELOG_SIZE
25 from rhodecode.apps.repository.views.repo_changelog import DEFAULT_CHANGELOG_SIZE
26 from rhodecode.tests import TestController
26 from rhodecode.tests import TestController
27
27
28 MATCH_HASH = re.compile(r'<span class="commit_hash">r(\d+):[\da-f]+</span>')
28 MATCH_HASH = re.compile(r'<span class="commit_hash">r(\d+):[\da-f]+</span>')
29
29
30
30
31 def route_path(name, params=None, **kwargs):
31 def route_path(name, params=None, **kwargs):
32 import urllib
32 import urllib
33
33
34 base_url = {
34 base_url = {
35 'repo_changelog': '/{repo_name}/changelog',
35 'repo_changelog': '/{repo_name}/changelog',
36 'repo_commits': '/{repo_name}/commits',
36 'repo_commits': '/{repo_name}/commits',
37 'repo_commits_file': '/{repo_name}/commits/{commit_id}/{f_path}',
37 'repo_commits_file': '/{repo_name}/commits/{commit_id}/{f_path}',
38 'repo_commits_elements': '/{repo_name}/commits_elements',
38 'repo_commits_elements': '/{repo_name}/commits_elements',
39 }[name].format(**kwargs)
39 }[name].format(**kwargs)
40
40
41 if params:
41 if params:
42 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
42 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
43 return base_url
43 return base_url
44
44
45
45
46 def assert_commits_on_page(response, indexes):
46 def assert_commits_on_page(response, indexes):
47 found_indexes = [int(idx) for idx in MATCH_HASH.findall(response.body)]
47 found_indexes = [int(idx) for idx in MATCH_HASH.findall(response.body)]
48 assert found_indexes == indexes
48 assert found_indexes == indexes
49
49
50
50
51 class TestChangelogController(TestController):
51 class TestChangelogController(TestController):
52
52
53 def test_commits_page(self, backend):
53 def test_commits_page(self, backend):
54 self.log_user()
54 self.log_user()
55 response = self.app.get(
55 response = self.app.get(
56 route_path('repo_commits', repo_name=backend.repo_name))
56 route_path('repo_commits', repo_name=backend.repo_name))
57
57
58 first_idx = -1
58 first_idx = -1
59 last_idx = -DEFAULT_CHANGELOG_SIZE
59 last_idx = -DEFAULT_CHANGELOG_SIZE
60 self.assert_commit_range_on_page(
60 self.assert_commit_range_on_page(
61 response, first_idx, last_idx, backend)
61 response, first_idx, last_idx, backend)
62
62
63 def test_changelog(self, backend):
63 def test_changelog(self, backend):
64 self.log_user()
64 self.log_user()
65 response = self.app.get(
65 response = self.app.get(
66 route_path('repo_changelog', repo_name=backend.repo_name))
66 route_path('repo_changelog', repo_name=backend.repo_name))
67
67
68 first_idx = -1
68 first_idx = -1
69 last_idx = -DEFAULT_CHANGELOG_SIZE
69 last_idx = -DEFAULT_CHANGELOG_SIZE
70 self.assert_commit_range_on_page(
70 self.assert_commit_range_on_page(
71 response, first_idx, last_idx, backend)
71 response, first_idx, last_idx, backend)
72
72
73 @pytest.mark.backends("hg", "git")
73 @pytest.mark.backends("hg", "git")
74 def test_changelog_filtered_by_branch(self, backend):
74 def test_changelog_filtered_by_branch(self, backend):
75 self.log_user()
75 self.log_user()
76 self.app.get(
76 self.app.get(
77 route_path('repo_changelog', repo_name=backend.repo_name,
77 route_path('repo_changelog', repo_name=backend.repo_name,
78 params=dict(branch=backend.default_branch_name)),
78 params=dict(branch=backend.default_branch_name)),
79 status=200)
79 status=200)
80
80
81 @pytest.mark.backends("hg", "git")
81 @pytest.mark.backends("hg", "git")
82 def test_commits_filtered_by_branch(self, backend):
82 def test_commits_filtered_by_branch(self, backend):
83 self.log_user()
83 self.log_user()
84 self.app.get(
84 self.app.get(
85 route_path('repo_commits', repo_name=backend.repo_name,
85 route_path('repo_commits', repo_name=backend.repo_name,
86 params=dict(branch=backend.default_branch_name)),
86 params=dict(branch=backend.default_branch_name)),
87 status=200)
87 status=200)
88
88
89 @pytest.mark.backends("svn")
89 @pytest.mark.backends("svn")
90 def test_changelog_filtered_by_branch_svn(self, autologin_user, backend):
90 def test_changelog_filtered_by_branch_svn(self, autologin_user, backend):
91 repo = backend['svn-simple-layout']
91 repo = backend['svn-simple-layout']
92 response = self.app.get(
92 response = self.app.get(
93 route_path('repo_changelog', repo_name=repo.repo_name,
93 route_path('repo_changelog', repo_name=repo.repo_name,
94 params=dict(branch='trunk')),
94 params=dict(branch='trunk')),
95 status=200)
95 status=200)
96
96
97 assert_commits_on_page(response, indexes=[15, 12, 7, 3, 2, 1])
97 assert_commits_on_page(response, indexes=[15, 12, 7, 3, 2, 1])
98
98
99 def test_commits_filtered_by_wrong_branch(self, backend):
99 def test_commits_filtered_by_wrong_branch(self, backend):
100 self.log_user()
100 self.log_user()
101 branch = 'wrong-branch-name'
101 branch = 'wrong-branch-name'
102 response = self.app.get(
102 response = self.app.get(
103 route_path('repo_commits', repo_name=backend.repo_name,
103 route_path('repo_commits', repo_name=backend.repo_name,
104 params=dict(branch=branch)),
104 params=dict(branch=branch)),
105 status=302)
105 status=302)
106 expected_url = '/{repo}/commits/{branch}'.format(
106 expected_url = '/{repo}/commits/{branch}'.format(
107 repo=backend.repo_name, branch=branch)
107 repo=backend.repo_name, branch=branch)
108 assert expected_url in response.location
108 assert expected_url in response.location
109 response = response.follow()
109 response = response.follow()
110 expected_warning = 'Branch {} is not found.'.format(branch)
110 expected_warning = 'Branch {} is not found.'.format(branch)
111 assert expected_warning in response.body
111 assert expected_warning in response.body
112
112
113 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
113 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
114 def test_changelog_filtered_by_branch_with_merges(
114 def test_changelog_filtered_by_branch_with_merges(
115 self, autologin_user, backend):
115 self, autologin_user, backend):
116
116
117 # Note: The changelog of branch "b" does not contain the commit "a1"
117 # Note: The changelog of branch "b" does not contain the commit "a1"
118 # although this is a parent of commit "b1". And branch "b" has commits
118 # although this is a parent of commit "b1". And branch "b" has commits
119 # which have a smaller index than commit "a1".
119 # which have a smaller index than commit "a1".
120 commits = [
120 commits = [
121 {'message': 'a'},
121 {'message': 'a'},
122 {'message': 'b', 'branch': 'b'},
122 {'message': 'b', 'branch': 'b'},
123 {'message': 'a1', 'parents': ['a']},
123 {'message': 'a1', 'parents': ['a']},
124 {'message': 'b1', 'branch': 'b', 'parents': ['b', 'a1']},
124 {'message': 'b1', 'branch': 'b', 'parents': ['b', 'a1']},
125 ]
125 ]
126 backend.create_repo(commits)
126 backend.create_repo(commits)
127
127
128 self.app.get(
128 self.app.get(
129 route_path('repo_changelog', repo_name=backend.repo_name,
129 route_path('repo_changelog', repo_name=backend.repo_name,
130 params=dict(branch='b')),
130 params=dict(branch='b')),
131 status=200)
131 status=200)
132
132
133 @pytest.mark.backends("hg")
133 @pytest.mark.backends("hg")
134 def test_commits_closed_branches(self, autologin_user, backend):
134 def test_commits_closed_branches(self, autologin_user, backend):
135 repo = backend['closed_branch']
135 repo = backend['closed_branch']
136 response = self.app.get(
136 response = self.app.get(
137 route_path('repo_commits', repo_name=repo.repo_name,
137 route_path('repo_commits', repo_name=repo.repo_name,
138 params=dict(branch='experimental')),
138 params=dict(branch='experimental')),
139 status=200)
139 status=200)
140
140
141 assert_commits_on_page(response, indexes=[3, 1])
141 assert_commits_on_page(response, indexes=[3, 1])
142
142
143 def test_changelog_pagination(self, backend):
143 def test_changelog_pagination(self, backend):
144 self.log_user()
144 self.log_user()
145 # pagination, walk up to page 6
145 # pagination, walk up to page 6
146 changelog_url = route_path(
146 changelog_url = route_path(
147 'repo_commits', repo_name=backend.repo_name)
147 'repo_commits', repo_name=backend.repo_name)
148
148
149 for page in range(1, 7):
149 for page in range(1, 7):
150 response = self.app.get(changelog_url, {'page': page})
150 response = self.app.get(changelog_url, {'page': page})
151
151
152 first_idx = -DEFAULT_CHANGELOG_SIZE * (page - 1) - 1
152 first_idx = -DEFAULT_CHANGELOG_SIZE * (page - 1) - 1
153 last_idx = -DEFAULT_CHANGELOG_SIZE * page
153 last_idx = -DEFAULT_CHANGELOG_SIZE * page
154 self.assert_commit_range_on_page(response, first_idx, last_idx, backend)
154 self.assert_commit_range_on_page(response, first_idx, last_idx, backend)
155
155
156 def assert_commit_range_on_page(
156 def assert_commit_range_on_page(
157 self, response, first_idx, last_idx, backend):
157 self, response, first_idx, last_idx, backend):
158 input_template = (
158 input_template = (
159 """<input class="commit-range" id="%(raw_id)s" """
159 """<input class="commit-range" """
160 """data-commit-id="%(raw_id)s" data-commit-idx="%(idx)s" id="%(raw_id)s" """
160 """name="%(raw_id)s" type="checkbox" value="1" />"""
161 """name="%(raw_id)s" type="checkbox" value="1" />"""
161 )
162 )
163
162 commit_span_template = """<span class="commit_hash">r%s:%s</span>"""
164 commit_span_template = """<span class="commit_hash">r%s:%s</span>"""
163 repo = backend.repo
165 repo = backend.repo
164
166
165 first_commit_on_page = repo.get_commit(commit_idx=first_idx)
167 first_commit_on_page = repo.get_commit(commit_idx=first_idx)
166 response.mustcontain(
168 response.mustcontain(
167 input_template % {'raw_id': first_commit_on_page.raw_id})
169 input_template % {'raw_id': first_commit_on_page.raw_id,
170 'idx': first_commit_on_page.idx})
171
168 response.mustcontain(commit_span_template % (
172 response.mustcontain(commit_span_template % (
169 first_commit_on_page.idx, first_commit_on_page.short_id)
173 first_commit_on_page.idx, first_commit_on_page.short_id)
170 )
174 )
171
175
172 last_commit_on_page = repo.get_commit(commit_idx=last_idx)
176 last_commit_on_page = repo.get_commit(commit_idx=last_idx)
173 response.mustcontain(
177 response.mustcontain(
174 input_template % {'raw_id': last_commit_on_page.raw_id})
178 input_template % {'raw_id': last_commit_on_page.raw_id,
179 'idx': last_commit_on_page.idx})
175 response.mustcontain(commit_span_template % (
180 response.mustcontain(commit_span_template % (
176 last_commit_on_page.idx, last_commit_on_page.short_id)
181 last_commit_on_page.idx, last_commit_on_page.short_id)
177 )
182 )
178
183
179 first_commit_of_next_page = repo.get_commit(commit_idx=last_idx - 1)
184 first_commit_of_next_page = repo.get_commit(commit_idx=last_idx - 1)
180 first_span_of_next_page = commit_span_template % (
185 first_span_of_next_page = commit_span_template % (
181 first_commit_of_next_page.idx, first_commit_of_next_page.short_id)
186 first_commit_of_next_page.idx, first_commit_of_next_page.short_id)
182 assert first_span_of_next_page not in response
187 assert first_span_of_next_page not in response
183
188
184 @pytest.mark.parametrize('test_path', [
189 @pytest.mark.parametrize('test_path', [
185 'vcs/exceptions.py',
190 'vcs/exceptions.py',
186 '/vcs/exceptions.py',
191 '/vcs/exceptions.py',
187 '//vcs/exceptions.py'
192 '//vcs/exceptions.py'
188 ])
193 ])
189 def test_commits_with_filenode(self, backend, test_path):
194 def test_commits_with_filenode(self, backend, test_path):
190 self.log_user()
195 self.log_user()
191 response = self.app.get(
196 response = self.app.get(
192 route_path('repo_commits_file', repo_name=backend.repo_name,
197 route_path('repo_commits_file', repo_name=backend.repo_name,
193 commit_id='tip', f_path=test_path),
198 commit_id='tip', f_path=test_path),
194 )
199 )
195
200
196 # history commits messages
201 # history commits messages
197 response.mustcontain('Added exceptions module, this time for real')
202 response.mustcontain('Added exceptions module, this time for real')
198 response.mustcontain('Added not implemented hg backend test case')
203 response.mustcontain('Added not implemented hg backend test case')
199 response.mustcontain('Added BaseChangeset class')
204 response.mustcontain('Added BaseChangeset class')
200
205
201 def test_commits_with_filenode_that_is_dirnode(self, backend):
206 def test_commits_with_filenode_that_is_dirnode(self, backend):
202 self.log_user()
207 self.log_user()
203 self.app.get(
208 self.app.get(
204 route_path('repo_commits_file', repo_name=backend.repo_name,
209 route_path('repo_commits_file', repo_name=backend.repo_name,
205 commit_id='tip', f_path='/tests'),
210 commit_id='tip', f_path='/tests'),
206 status=302)
211 status=302)
207
212
208 def test_commits_with_filenode_not_existing(self, backend):
213 def test_commits_with_filenode_not_existing(self, backend):
209 self.log_user()
214 self.log_user()
210 self.app.get(
215 self.app.get(
211 route_path('repo_commits_file', repo_name=backend.repo_name,
216 route_path('repo_commits_file', repo_name=backend.repo_name,
212 commit_id='tip', f_path='wrong_path'),
217 commit_id='tip', f_path='wrong_path'),
213 status=302)
218 status=302)
@@ -1,535 +1,533 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 GIT commit module
22 GIT commit module
23 """
23 """
24
24
25 import re
25 import re
26 import stat
26 import stat
27 from itertools import chain
27 from itertools import chain
28 from StringIO import StringIO
28 from StringIO import StringIO
29
29
30 from zope.cachedescriptors.property import Lazy as LazyProperty
30 from zope.cachedescriptors.property import Lazy as LazyProperty
31
31
32 from rhodecode.lib.datelib import utcdate_fromtimestamp
32 from rhodecode.lib.datelib import utcdate_fromtimestamp
33 from rhodecode.lib.utils import safe_unicode, safe_str
33 from rhodecode.lib.utils import safe_unicode, safe_str
34 from rhodecode.lib.utils2 import safe_int
34 from rhodecode.lib.utils2 import safe_int
35 from rhodecode.lib.vcs.conf import settings
35 from rhodecode.lib.vcs.conf import settings
36 from rhodecode.lib.vcs.backends import base
36 from rhodecode.lib.vcs.backends import base
37 from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError
37 from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError
38 from rhodecode.lib.vcs.nodes import (
38 from rhodecode.lib.vcs.nodes import (
39 FileNode, DirNode, NodeKind, RootNode, SubModuleNode,
39 FileNode, DirNode, NodeKind, RootNode, SubModuleNode,
40 ChangedFileNodesGenerator, AddedFileNodesGenerator,
40 ChangedFileNodesGenerator, AddedFileNodesGenerator,
41 RemovedFileNodesGenerator, LargeFileNode)
41 RemovedFileNodesGenerator, LargeFileNode)
42 from rhodecode.lib.vcs.compat import configparser
42 from rhodecode.lib.vcs.compat import configparser
43
43
44
44
45 class GitCommit(base.BaseCommit):
45 class GitCommit(base.BaseCommit):
46 """
46 """
47 Represents state of the repository at single commit id.
47 Represents state of the repository at single commit id.
48 """
48 """
49 _author_property = 'author'
49 _author_property = 'author'
50 _committer_property = 'committer'
50 _committer_property = 'committer'
51 _date_property = 'commit_time'
51 _date_property = 'commit_time'
52 _date_tz_property = 'commit_timezone'
52 _date_tz_property = 'commit_timezone'
53 _message_property = 'message'
53 _message_property = 'message'
54 _parents_property = 'parents'
54 _parents_property = 'parents'
55
55
56 _filter_pre_load = [
56 _filter_pre_load = [
57 # done through a more complex tree walk on parents
57 # done through a more complex tree walk on parents
58 "affected_files",
58 "affected_files",
59 # based on repository cached property
59 # based on repository cached property
60 "branch",
60 "branch",
61 # done through subprocess not remote call
61 # done through subprocess not remote call
62 "children",
62 "children",
63 # done through a more complex tree walk on parents
63 # done through a more complex tree walk on parents
64 "status",
64 "status",
65 # mercurial specific property not supported here
65 # mercurial specific property not supported here
66 "_file_paths",
66 "_file_paths",
67 # mercurial specific property not supported here
67 # mercurial specific property not supported here
68 'obsolete',
68 'obsolete',
69 # mercurial specific property not supported here
69 # mercurial specific property not supported here
70 'phase',
70 'phase',
71 # mercurial specific property not supported here
71 # mercurial specific property not supported here
72 'hidden'
72 'hidden'
73 ]
73 ]
74
74
75 def __init__(self, repository, raw_id, idx, pre_load=None):
75 def __init__(self, repository, raw_id, idx, pre_load=None):
76 self.repository = repository
76 self.repository = repository
77 self._remote = repository._remote
77 self._remote = repository._remote
78 # TODO: johbo: Tweak of raw_id should not be necessary
78 # TODO: johbo: Tweak of raw_id should not be necessary
79 self.raw_id = safe_str(raw_id)
79 self.raw_id = safe_str(raw_id)
80 self.idx = idx
80 self.idx = idx
81
81
82 self._set_bulk_properties(pre_load)
82 self._set_bulk_properties(pre_load)
83
83
84 # caches
84 # caches
85 self._stat_modes = {} # stat info for paths
85 self._stat_modes = {} # stat info for paths
86 self._paths = {} # path processed with parse_tree
86 self._paths = {} # path processed with parse_tree
87 self.nodes = {}
87 self.nodes = {}
88 self._submodules = None
88 self._submodules = None
89
89
90 def _set_bulk_properties(self, pre_load):
90 def _set_bulk_properties(self, pre_load):
91 if not pre_load:
91 if not pre_load:
92 return
92 return
93 pre_load = [entry for entry in pre_load
93 pre_load = [entry for entry in pre_load
94 if entry not in self._filter_pre_load]
94 if entry not in self._filter_pre_load]
95 if not pre_load:
95 if not pre_load:
96 return
96 return
97
97
98 result = self._remote.bulk_request(self.raw_id, pre_load)
98 result = self._remote.bulk_request(self.raw_id, pre_load)
99 for attr, value in result.items():
99 for attr, value in result.items():
100 if attr in ["author", "message"]:
100 if attr in ["author", "message"]:
101 if value:
101 if value:
102 value = safe_unicode(value)
102 value = safe_unicode(value)
103 elif attr == "date":
103 elif attr == "date":
104 value = utcdate_fromtimestamp(*value)
104 value = utcdate_fromtimestamp(*value)
105 elif attr == "parents":
105 elif attr == "parents":
106 value = self._make_commits(value)
106 value = self._make_commits(value)
107 self.__dict__[attr] = value
107 self.__dict__[attr] = value
108
108
109 @LazyProperty
109 @LazyProperty
110 def _commit(self):
110 def _commit(self):
111 return self._remote[self.raw_id]
111 return self._remote[self.raw_id]
112
112
113 @LazyProperty
113 @LazyProperty
114 def _tree_id(self):
114 def _tree_id(self):
115 return self._remote[self._commit['tree']]['id']
115 return self._remote[self._commit['tree']]['id']
116
116
117 @LazyProperty
117 @LazyProperty
118 def id(self):
118 def id(self):
119 return self.raw_id
119 return self.raw_id
120
120
121 @LazyProperty
121 @LazyProperty
122 def short_id(self):
122 def short_id(self):
123 return self.raw_id[:12]
123 return self.raw_id[:12]
124
124
125 @LazyProperty
125 @LazyProperty
126 def message(self):
126 def message(self):
127 return safe_unicode(
127 return safe_unicode(
128 self._remote.commit_attribute(self.id, self._message_property))
128 self._remote.commit_attribute(self.id, self._message_property))
129
129
130 @LazyProperty
130 @LazyProperty
131 def committer(self):
131 def committer(self):
132 return safe_unicode(
132 return safe_unicode(
133 self._remote.commit_attribute(self.id, self._committer_property))
133 self._remote.commit_attribute(self.id, self._committer_property))
134
134
135 @LazyProperty
135 @LazyProperty
136 def author(self):
136 def author(self):
137 return safe_unicode(
137 return safe_unicode(
138 self._remote.commit_attribute(self.id, self._author_property))
138 self._remote.commit_attribute(self.id, self._author_property))
139
139
140 @LazyProperty
140 @LazyProperty
141 def date(self):
141 def date(self):
142 unix_ts, tz = self._remote.get_object_attrs(
142 unix_ts, tz = self._remote.get_object_attrs(
143 self.raw_id, self._date_property, self._date_tz_property)
143 self.raw_id, self._date_property, self._date_tz_property)
144 return utcdate_fromtimestamp(unix_ts, tz)
144 return utcdate_fromtimestamp(unix_ts, tz)
145
145
146 @LazyProperty
146 @LazyProperty
147 def status(self):
147 def status(self):
148 """
148 """
149 Returns modified, added, removed, deleted files for current commit
149 Returns modified, added, removed, deleted files for current commit
150 """
150 """
151 return self.changed, self.added, self.removed
151 return self.changed, self.added, self.removed
152
152
153 @LazyProperty
153 @LazyProperty
154 def tags(self):
154 def tags(self):
155 tags = [safe_unicode(name) for name,
155 tags = [safe_unicode(name) for name,
156 commit_id in self.repository.tags.iteritems()
156 commit_id in self.repository.tags.iteritems()
157 if commit_id == self.raw_id]
157 if commit_id == self.raw_id]
158 return tags
158 return tags
159
159
160 @LazyProperty
160 @LazyProperty
161 def branch(self):
161 def branch(self):
162 for name, commit_id in self.repository.branches.iteritems():
162 for name, commit_id in self.repository.branches.iteritems():
163 if commit_id == self.raw_id:
163 if commit_id == self.raw_id:
164 return safe_unicode(name)
164 return safe_unicode(name)
165 return None
165 return None
166
166
167 def _get_id_for_path(self, path):
167 def _get_id_for_path(self, path):
168 path = safe_str(path)
168 path = safe_str(path)
169 if path in self._paths:
169 if path in self._paths:
170 return self._paths[path]
170 return self._paths[path]
171
171
172 tree_id = self._tree_id
172 tree_id = self._tree_id
173
173
174 path = path.strip('/')
174 path = path.strip('/')
175 if path == '':
175 if path == '':
176 data = [tree_id, "tree"]
176 data = [tree_id, "tree"]
177 self._paths[''] = data
177 self._paths[''] = data
178 return data
178 return data
179
179
180 parts = path.split('/')
180 parts = path.split('/')
181 dirs, name = parts[:-1], parts[-1]
181 dirs, name = parts[:-1], parts[-1]
182 cur_dir = ''
182 cur_dir = ''
183
183
184 # initially extract things from root dir
184 # initially extract things from root dir
185 tree_items = self._remote.tree_items(tree_id)
185 tree_items = self._remote.tree_items(tree_id)
186 self._process_tree_items(tree_items, cur_dir)
186 self._process_tree_items(tree_items, cur_dir)
187
187
188 for dir in dirs:
188 for dir in dirs:
189 if cur_dir:
189 if cur_dir:
190 cur_dir = '/'.join((cur_dir, dir))
190 cur_dir = '/'.join((cur_dir, dir))
191 else:
191 else:
192 cur_dir = dir
192 cur_dir = dir
193 dir_id = None
193 dir_id = None
194 for item, stat_, id_, type_ in tree_items:
194 for item, stat_, id_, type_ in tree_items:
195 if item == dir:
195 if item == dir:
196 dir_id = id_
196 dir_id = id_
197 break
197 break
198 if dir_id:
198 if dir_id:
199 if type_ != "tree":
199 if type_ != "tree":
200 raise CommitError('%s is not a directory' % cur_dir)
200 raise CommitError('%s is not a directory' % cur_dir)
201 # update tree
201 # update tree
202 tree_items = self._remote.tree_items(dir_id)
202 tree_items = self._remote.tree_items(dir_id)
203 else:
203 else:
204 raise CommitError('%s have not been found' % cur_dir)
204 raise CommitError('%s have not been found' % cur_dir)
205
205
206 # cache all items from the given traversed tree
206 # cache all items from the given traversed tree
207 self._process_tree_items(tree_items, cur_dir)
207 self._process_tree_items(tree_items, cur_dir)
208
208
209 if path not in self._paths:
209 if path not in self._paths:
210 raise self.no_node_at_path(path)
210 raise self.no_node_at_path(path)
211
211
212 return self._paths[path]
212 return self._paths[path]
213
213
214 def _process_tree_items(self, items, cur_dir):
214 def _process_tree_items(self, items, cur_dir):
215 for item, stat_, id_, type_ in items:
215 for item, stat_, id_, type_ in items:
216 if cur_dir:
216 if cur_dir:
217 name = '/'.join((cur_dir, item))
217 name = '/'.join((cur_dir, item))
218 else:
218 else:
219 name = item
219 name = item
220 self._paths[name] = [id_, type_]
220 self._paths[name] = [id_, type_]
221 self._stat_modes[name] = stat_
221 self._stat_modes[name] = stat_
222
222
223 def _get_kind(self, path):
223 def _get_kind(self, path):
224 path_id, type_ = self._get_id_for_path(path)
224 path_id, type_ = self._get_id_for_path(path)
225 if type_ == 'blob':
225 if type_ == 'blob':
226 return NodeKind.FILE
226 return NodeKind.FILE
227 elif type_ == 'tree':
227 elif type_ == 'tree':
228 return NodeKind.DIR
228 return NodeKind.DIR
229 elif type == 'link':
229 elif type == 'link':
230 return NodeKind.SUBMODULE
230 return NodeKind.SUBMODULE
231 return None
231 return None
232
232
233 def _get_filectx(self, path):
233 def _get_filectx(self, path):
234 path = self._fix_path(path)
234 path = self._fix_path(path)
235 if self._get_kind(path) != NodeKind.FILE:
235 if self._get_kind(path) != NodeKind.FILE:
236 raise CommitError(
236 raise CommitError(
237 "File does not exist for commit %s at '%s'" %
237 "File does not exist for commit %s at '%s'" % (self.raw_id, path))
238 (self.raw_id, path))
239 return path
238 return path
240
239
241 def _get_file_nodes(self):
240 def _get_file_nodes(self):
242 return chain(*(t[2] for t in self.walk()))
241 return chain(*(t[2] for t in self.walk()))
243
242
244 @LazyProperty
243 @LazyProperty
245 def parents(self):
244 def parents(self):
246 """
245 """
247 Returns list of parent commits.
246 Returns list of parent commits.
248 """
247 """
249 parent_ids = self._remote.commit_attribute(
248 parent_ids = self._remote.commit_attribute(
250 self.id, self._parents_property)
249 self.id, self._parents_property)
251 return self._make_commits(parent_ids)
250 return self._make_commits(parent_ids)
252
251
253 @LazyProperty
252 @LazyProperty
254 def children(self):
253 def children(self):
255 """
254 """
256 Returns list of child commits.
255 Returns list of child commits.
257 """
256 """
258 rev_filter = settings.GIT_REV_FILTER
257 rev_filter = settings.GIT_REV_FILTER
259 output, __ = self.repository.run_git_command(
258 output, __ = self.repository.run_git_command(
260 ['rev-list', '--children'] + rev_filter)
259 ['rev-list', '--children'] + rev_filter)
261
260
262 child_ids = []
261 child_ids = []
263 pat = re.compile(r'^%s' % self.raw_id)
262 pat = re.compile(r'^%s' % self.raw_id)
264 for l in output.splitlines():
263 for l in output.splitlines():
265 if pat.match(l):
264 if pat.match(l):
266 found_ids = l.split(' ')[1:]
265 found_ids = l.split(' ')[1:]
267 child_ids.extend(found_ids)
266 child_ids.extend(found_ids)
268 return self._make_commits(child_ids)
267 return self._make_commits(child_ids)
269
268
270 def _make_commits(self, commit_ids, pre_load=None):
269 def _make_commits(self, commit_ids, pre_load=None):
271 return [
270 return [
272 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load,
271 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load,
273 translate_tag=False)
272 translate_tag=False)
274 for commit_id in commit_ids]
273 for commit_id in commit_ids]
275
274
276 def get_file_mode(self, path):
275 def get_file_mode(self, path):
277 """
276 """
278 Returns stat mode of the file at the given `path`.
277 Returns stat mode of the file at the given `path`.
279 """
278 """
280 path = safe_str(path)
279 path = safe_str(path)
281 # ensure path is traversed
280 # ensure path is traversed
282 self._get_id_for_path(path)
281 self._get_id_for_path(path)
283 return self._stat_modes[path]
282 return self._stat_modes[path]
284
283
285 def is_link(self, path):
284 def is_link(self, path):
286 return stat.S_ISLNK(self.get_file_mode(path))
285 return stat.S_ISLNK(self.get_file_mode(path))
287
286
288 def get_file_content(self, path):
287 def get_file_content(self, path):
289 """
288 """
290 Returns content of the file at given `path`.
289 Returns content of the file at given `path`.
291 """
290 """
292 id_, _ = self._get_id_for_path(path)
291 id_, _ = self._get_id_for_path(path)
293 return self._remote.blob_as_pretty_string(id_)
292 return self._remote.blob_as_pretty_string(id_)
294
293
295 def get_file_size(self, path):
294 def get_file_size(self, path):
296 """
295 """
297 Returns size of the file at given `path`.
296 Returns size of the file at given `path`.
298 """
297 """
299 id_, _ = self._get_id_for_path(path)
298 id_, _ = self._get_id_for_path(path)
300 return self._remote.blob_raw_length(id_)
299 return self._remote.blob_raw_length(id_)
301
300
302 def get_path_history(self, path, limit=None, pre_load=None):
301 def get_path_history(self, path, limit=None, pre_load=None):
303 """
302 """
304 Returns history of file as reversed list of `GitCommit` objects for
303 Returns history of file as reversed list of `GitCommit` objects for
305 which file at given `path` has been modified.
304 which file at given `path` has been modified.
306
305
307 TODO: This function now uses an underlying 'git' command which works
306 TODO: This function now uses an underlying 'git' command which works
308 quickly but ideally we should replace with an algorithm.
307 quickly but ideally we should replace with an algorithm.
309 """
308 """
310 self._get_filectx(path)
309 self._get_filectx(path)
311 f_path = safe_str(path)
310 f_path = safe_str(path)
312
311
313 # optimize for n==1, rev-list is much faster for that use-case
312 # optimize for n==1, rev-list is much faster for that use-case
314 if limit == 1:
313 if limit == 1:
315 cmd = ['rev-list', '-1', self.raw_id, '--', f_path]
314 cmd = ['rev-list', '-1', self.raw_id, '--', f_path]
316 else:
315 else:
317 cmd = ['log']
316 cmd = ['log']
318 if limit:
317 if limit:
319 cmd.extend(['-n', str(safe_int(limit, 0))])
318 cmd.extend(['-n', str(safe_int(limit, 0))])
320 cmd.extend(['--pretty=format: %H', '-s', self.raw_id, '--', f_path])
319 cmd.extend(['--pretty=format: %H', '-s', self.raw_id, '--', f_path])
321
320
322 output, __ = self.repository.run_git_command(cmd)
321 output, __ = self.repository.run_git_command(cmd)
323 commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)
322 commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)
324
323
325 return [
324 return [
326 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
325 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
327 for commit_id in commit_ids]
326 for commit_id in commit_ids]
328
327
329 def get_file_annotate(self, path, pre_load=None):
328 def get_file_annotate(self, path, pre_load=None):
330 """
329 """
331 Returns a generator of four element tuples with
330 Returns a generator of four element tuples with
332 lineno, commit_id, commit lazy loader and line
331 lineno, commit_id, commit lazy loader and line
333
332
334 TODO: This function now uses os underlying 'git' command which is
333 TODO: This function now uses os underlying 'git' command which is
335 generally not good. Should be replaced with algorithm iterating
334 generally not good. Should be replaced with algorithm iterating
336 commits.
335 commits.
337 """
336 """
338 cmd = ['blame', '-l', '--root', '-r', self.raw_id, '--', path]
337 cmd = ['blame', '-l', '--root', '-r', self.raw_id, '--', path]
339 # -l ==> outputs long shas (and we need all 40 characters)
338 # -l ==> outputs long shas (and we need all 40 characters)
340 # --root ==> doesn't put '^' character for bounderies
339 # --root ==> doesn't put '^' character for bounderies
341 # -r commit_id ==> blames for the given commit
340 # -r commit_id ==> blames for the given commit
342 output, __ = self.repository.run_git_command(cmd)
341 output, __ = self.repository.run_git_command(cmd)
343
342
344 for i, blame_line in enumerate(output.split('\n')[:-1]):
343 for i, blame_line in enumerate(output.split('\n')[:-1]):
345 line_no = i + 1
344 line_no = i + 1
346 commit_id, line = re.split(r' ', blame_line, 1)
345 commit_id, line = re.split(r' ', blame_line, 1)
347 yield (
346 yield (
348 line_no, commit_id,
347 line_no, commit_id,
349 lambda: self.repository.get_commit(commit_id=commit_id,
348 lambda: self.repository.get_commit(commit_id=commit_id,
350 pre_load=pre_load),
349 pre_load=pre_load),
351 line)
350 line)
352
351
353 def get_nodes(self, path):
352 def get_nodes(self, path):
354 if self._get_kind(path) != NodeKind.DIR:
353 if self._get_kind(path) != NodeKind.DIR:
355 raise CommitError(
354 raise CommitError(
356 "Directory does not exist for commit %s at "
355 "Directory does not exist for commit %s at '%s'" % (self.raw_id, path))
357 " '%s'" % (self.raw_id, path))
358 path = self._fix_path(path)
356 path = self._fix_path(path)
359 id_, _ = self._get_id_for_path(path)
357 id_, _ = self._get_id_for_path(path)
360 tree_id = self._remote[id_]['id']
358 tree_id = self._remote[id_]['id']
361 dirnodes = []
359 dirnodes = []
362 filenodes = []
360 filenodes = []
363 alias = self.repository.alias
361 alias = self.repository.alias
364 for name, stat_, id_, type_ in self._remote.tree_items(tree_id):
362 for name, stat_, id_, type_ in self._remote.tree_items(tree_id):
365 if type_ == 'link':
363 if type_ == 'link':
366 url = self._get_submodule_url('/'.join((path, name)))
364 url = self._get_submodule_url('/'.join((path, name)))
367 dirnodes.append(SubModuleNode(
365 dirnodes.append(SubModuleNode(
368 name, url=url, commit=id_, alias=alias))
366 name, url=url, commit=id_, alias=alias))
369 continue
367 continue
370
368
371 if path != '':
369 if path != '':
372 obj_path = '/'.join((path, name))
370 obj_path = '/'.join((path, name))
373 else:
371 else:
374 obj_path = name
372 obj_path = name
375 if obj_path not in self._stat_modes:
373 if obj_path not in self._stat_modes:
376 self._stat_modes[obj_path] = stat_
374 self._stat_modes[obj_path] = stat_
377
375
378 if type_ == 'tree':
376 if type_ == 'tree':
379 dirnodes.append(DirNode(obj_path, commit=self))
377 dirnodes.append(DirNode(obj_path, commit=self))
380 elif type_ == 'blob':
378 elif type_ == 'blob':
381 filenodes.append(FileNode(obj_path, commit=self, mode=stat_))
379 filenodes.append(FileNode(obj_path, commit=self, mode=stat_))
382 else:
380 else:
383 raise CommitError(
381 raise CommitError(
384 "Requested object should be Tree or Blob, is %s", type_)
382 "Requested object should be Tree or Blob, is %s", type_)
385
383
386 nodes = dirnodes + filenodes
384 nodes = dirnodes + filenodes
387 for node in nodes:
385 for node in nodes:
388 if node.path not in self.nodes:
386 if node.path not in self.nodes:
389 self.nodes[node.path] = node
387 self.nodes[node.path] = node
390 nodes.sort()
388 nodes.sort()
391 return nodes
389 return nodes
392
390
393 def get_node(self, path, pre_load=None):
391 def get_node(self, path, pre_load=None):
394 if isinstance(path, unicode):
392 if isinstance(path, unicode):
395 path = path.encode('utf-8')
393 path = path.encode('utf-8')
396 path = self._fix_path(path)
394 path = self._fix_path(path)
397 if path not in self.nodes:
395 if path not in self.nodes:
398 try:
396 try:
399 id_, type_ = self._get_id_for_path(path)
397 id_, type_ = self._get_id_for_path(path)
400 except CommitError:
398 except CommitError:
401 raise NodeDoesNotExistError(
399 raise NodeDoesNotExistError(
402 "Cannot find one of parents' directories for a given "
400 "Cannot find one of parents' directories for a given "
403 "path: %s" % path)
401 "path: %s" % path)
404
402
405 if type_ == 'link':
403 if type_ == 'link':
406 url = self._get_submodule_url(path)
404 url = self._get_submodule_url(path)
407 node = SubModuleNode(path, url=url, commit=id_,
405 node = SubModuleNode(path, url=url, commit=id_,
408 alias=self.repository.alias)
406 alias=self.repository.alias)
409 elif type_ == 'tree':
407 elif type_ == 'tree':
410 if path == '':
408 if path == '':
411 node = RootNode(commit=self)
409 node = RootNode(commit=self)
412 else:
410 else:
413 node = DirNode(path, commit=self)
411 node = DirNode(path, commit=self)
414 elif type_ == 'blob':
412 elif type_ == 'blob':
415 node = FileNode(path, commit=self, pre_load=pre_load)
413 node = FileNode(path, commit=self, pre_load=pre_load)
416 else:
414 else:
417 raise self.no_node_at_path(path)
415 raise self.no_node_at_path(path)
418
416
419 # cache node
417 # cache node
420 self.nodes[path] = node
418 self.nodes[path] = node
421 return self.nodes[path]
419 return self.nodes[path]
422
420
423 def get_largefile_node(self, path):
421 def get_largefile_node(self, path):
424 id_, _ = self._get_id_for_path(path)
422 id_, _ = self._get_id_for_path(path)
425 pointer_spec = self._remote.is_large_file(id_)
423 pointer_spec = self._remote.is_large_file(id_)
426
424
427 if pointer_spec:
425 if pointer_spec:
428 # content of that file regular FileNode is the hash of largefile
426 # content of that file regular FileNode is the hash of largefile
429 file_id = pointer_spec.get('oid_hash')
427 file_id = pointer_spec.get('oid_hash')
430 if self._remote.in_largefiles_store(file_id):
428 if self._remote.in_largefiles_store(file_id):
431 lf_path = self._remote.store_path(file_id)
429 lf_path = self._remote.store_path(file_id)
432 return LargeFileNode(lf_path, commit=self, org_path=path)
430 return LargeFileNode(lf_path, commit=self, org_path=path)
433
431
434 @LazyProperty
432 @LazyProperty
435 def affected_files(self):
433 def affected_files(self):
436 """
434 """
437 Gets a fast accessible file changes for given commit
435 Gets a fast accessible file changes for given commit
438 """
436 """
439 added, modified, deleted = self._changes_cache
437 added, modified, deleted = self._changes_cache
440 return list(added.union(modified).union(deleted))
438 return list(added.union(modified).union(deleted))
441
439
442 @LazyProperty
440 @LazyProperty
443 def _changes_cache(self):
441 def _changes_cache(self):
444 added = set()
442 added = set()
445 modified = set()
443 modified = set()
446 deleted = set()
444 deleted = set()
447 _r = self._remote
445 _r = self._remote
448
446
449 parents = self.parents
447 parents = self.parents
450 if not self.parents:
448 if not self.parents:
451 parents = [base.EmptyCommit()]
449 parents = [base.EmptyCommit()]
452 for parent in parents:
450 for parent in parents:
453 if isinstance(parent, base.EmptyCommit):
451 if isinstance(parent, base.EmptyCommit):
454 oid = None
452 oid = None
455 else:
453 else:
456 oid = parent.raw_id
454 oid = parent.raw_id
457 changes = _r.tree_changes(oid, self.raw_id)
455 changes = _r.tree_changes(oid, self.raw_id)
458 for (oldpath, newpath), (_, _), (_, _) in changes:
456 for (oldpath, newpath), (_, _), (_, _) in changes:
459 if newpath and oldpath:
457 if newpath and oldpath:
460 modified.add(newpath)
458 modified.add(newpath)
461 elif newpath and not oldpath:
459 elif newpath and not oldpath:
462 added.add(newpath)
460 added.add(newpath)
463 elif not newpath and oldpath:
461 elif not newpath and oldpath:
464 deleted.add(oldpath)
462 deleted.add(oldpath)
465 return added, modified, deleted
463 return added, modified, deleted
466
464
467 def _get_paths_for_status(self, status):
465 def _get_paths_for_status(self, status):
468 """
466 """
469 Returns sorted list of paths for given ``status``.
467 Returns sorted list of paths for given ``status``.
470
468
471 :param status: one of: *added*, *modified* or *deleted*
469 :param status: one of: *added*, *modified* or *deleted*
472 """
470 """
473 added, modified, deleted = self._changes_cache
471 added, modified, deleted = self._changes_cache
474 return sorted({
472 return sorted({
475 'added': list(added),
473 'added': list(added),
476 'modified': list(modified),
474 'modified': list(modified),
477 'deleted': list(deleted)}[status]
475 'deleted': list(deleted)}[status]
478 )
476 )
479
477
480 @LazyProperty
478 @LazyProperty
481 def added(self):
479 def added(self):
482 """
480 """
483 Returns list of added ``FileNode`` objects.
481 Returns list of added ``FileNode`` objects.
484 """
482 """
485 if not self.parents:
483 if not self.parents:
486 return list(self._get_file_nodes())
484 return list(self._get_file_nodes())
487 return AddedFileNodesGenerator(
485 return AddedFileNodesGenerator(
488 [n for n in self._get_paths_for_status('added')], self)
486 [n for n in self._get_paths_for_status('added')], self)
489
487
490 @LazyProperty
488 @LazyProperty
491 def changed(self):
489 def changed(self):
492 """
490 """
493 Returns list of modified ``FileNode`` objects.
491 Returns list of modified ``FileNode`` objects.
494 """
492 """
495 if not self.parents:
493 if not self.parents:
496 return []
494 return []
497 return ChangedFileNodesGenerator(
495 return ChangedFileNodesGenerator(
498 [n for n in self._get_paths_for_status('modified')], self)
496 [n for n in self._get_paths_for_status('modified')], self)
499
497
500 @LazyProperty
498 @LazyProperty
501 def removed(self):
499 def removed(self):
502 """
500 """
503 Returns list of removed ``FileNode`` objects.
501 Returns list of removed ``FileNode`` objects.
504 """
502 """
505 if not self.parents:
503 if not self.parents:
506 return []
504 return []
507 return RemovedFileNodesGenerator(
505 return RemovedFileNodesGenerator(
508 [n for n in self._get_paths_for_status('deleted')], self)
506 [n for n in self._get_paths_for_status('deleted')], self)
509
507
510 def _get_submodule_url(self, submodule_path):
508 def _get_submodule_url(self, submodule_path):
511 git_modules_path = '.gitmodules'
509 git_modules_path = '.gitmodules'
512
510
513 if self._submodules is None:
511 if self._submodules is None:
514 self._submodules = {}
512 self._submodules = {}
515
513
516 try:
514 try:
517 submodules_node = self.get_node(git_modules_path)
515 submodules_node = self.get_node(git_modules_path)
518 except NodeDoesNotExistError:
516 except NodeDoesNotExistError:
519 return None
517 return None
520
518
521 content = submodules_node.content
519 content = submodules_node.content
522
520
523 # ConfigParser fails if there are whitespaces
521 # ConfigParser fails if there are whitespaces
524 content = '\n'.join(l.strip() for l in content.split('\n'))
522 content = '\n'.join(l.strip() for l in content.split('\n'))
525
523
526 parser = configparser.ConfigParser()
524 parser = configparser.ConfigParser()
527 parser.readfp(StringIO(content))
525 parser.readfp(StringIO(content))
528
526
529 for section in parser.sections():
527 for section in parser.sections():
530 path = parser.get(section, 'path')
528 path = parser.get(section, 'path')
531 url = parser.get(section, 'url')
529 url = parser.get(section, 'url')
532 if path and url:
530 if path and url:
533 self._submodules[path.strip('/')] = url
531 self._submodules[path.strip('/')] = url
534
532
535 return self._submodules.get(submodule_path.strip('/'))
533 return self._submodules.get(submodule_path.strip('/'))
@@ -1,1037 +1,1037 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 GIT repository module
22 GIT repository module
23 """
23 """
24
24
25 import logging
25 import logging
26 import os
26 import os
27 import re
27 import re
28 import time
28 import time
29
29
30 from zope.cachedescriptors.property import Lazy as LazyProperty
30 from zope.cachedescriptors.property import Lazy as LazyProperty
31 from zope.cachedescriptors.property import CachedProperty
31 from zope.cachedescriptors.property import CachedProperty
32
32
33 from rhodecode.lib.compat import OrderedDict
33 from rhodecode.lib.compat import OrderedDict
34 from rhodecode.lib.datelib import (
34 from rhodecode.lib.datelib import (
35 utcdate_fromtimestamp, makedate, date_astimestamp)
35 utcdate_fromtimestamp, makedate, date_astimestamp)
36 from rhodecode.lib.utils import safe_unicode, safe_str
36 from rhodecode.lib.utils import safe_unicode, safe_str
37 from rhodecode.lib.vcs import connection, path as vcspath
37 from rhodecode.lib.vcs import connection, path as vcspath
38 from rhodecode.lib.vcs.backends.base import (
38 from rhodecode.lib.vcs.backends.base import (
39 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 BaseRepository, CollectionGenerator, Config, MergeResponse,
40 MergeFailureReason, Reference)
40 MergeFailureReason, Reference)
41 from rhodecode.lib.vcs.backends.git.commit import GitCommit
41 from rhodecode.lib.vcs.backends.git.commit import GitCommit
42 from rhodecode.lib.vcs.backends.git.diff import GitDiff
42 from rhodecode.lib.vcs.backends.git.diff import GitDiff
43 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
43 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
44 from rhodecode.lib.vcs.exceptions import (
44 from rhodecode.lib.vcs.exceptions import (
45 CommitDoesNotExistError, EmptyRepositoryError,
45 CommitDoesNotExistError, EmptyRepositoryError,
46 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
46 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
47
47
48
48
49 SHA_PATTERN = re.compile(r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$')
49 SHA_PATTERN = re.compile(r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$')
50
50
51 log = logging.getLogger(__name__)
51 log = logging.getLogger(__name__)
52
52
53
53
54 class GitRepository(BaseRepository):
54 class GitRepository(BaseRepository):
55 """
55 """
56 Git repository backend.
56 Git repository backend.
57 """
57 """
58 DEFAULT_BRANCH_NAME = 'master'
58 DEFAULT_BRANCH_NAME = 'master'
59
59
60 contact = BaseRepository.DEFAULT_CONTACT
60 contact = BaseRepository.DEFAULT_CONTACT
61
61
62 def __init__(self, repo_path, config=None, create=False, src_url=None,
62 def __init__(self, repo_path, config=None, create=False, src_url=None,
63 do_workspace_checkout=False, with_wire=None, bare=False):
63 do_workspace_checkout=False, with_wire=None, bare=False):
64
64
65 self.path = safe_str(os.path.abspath(repo_path))
65 self.path = safe_str(os.path.abspath(repo_path))
66 self.config = config if config else self.get_default_config()
66 self.config = config if config else self.get_default_config()
67 self.with_wire = with_wire
67 self.with_wire = with_wire
68
68
69 self._init_repo(create, src_url, do_workspace_checkout, bare)
69 self._init_repo(create, src_url, do_workspace_checkout, bare)
70
70
71 # caches
71 # caches
72 self._commit_ids = {}
72 self._commit_ids = {}
73
73
74 # dependent that trigger re-computation of commit_ids
74 # dependent that trigger re-computation of commit_ids
75 self._commit_ids_ver = 0
75 self._commit_ids_ver = 0
76
76
77 @LazyProperty
77 @LazyProperty
78 def _remote(self):
78 def _remote(self):
79 return connection.Git(self.path, self.config, with_wire=self.with_wire)
79 return connection.Git(self.path, self.config, with_wire=self.with_wire)
80
80
81 @LazyProperty
81 @LazyProperty
82 def bare(self):
82 def bare(self):
83 return self._remote.bare()
83 return self._remote.bare()
84
84
85 @LazyProperty
85 @LazyProperty
86 def head(self):
86 def head(self):
87 return self._remote.head()
87 return self._remote.head()
88
88
89 @CachedProperty('_commit_ids_ver')
89 @CachedProperty('_commit_ids_ver')
90 def commit_ids(self):
90 def commit_ids(self):
91 """
91 """
92 Returns list of commit ids, in ascending order. Being lazy
92 Returns list of commit ids, in ascending order. Being lazy
93 attribute allows external tools to inject commit ids from cache.
93 attribute allows external tools to inject commit ids from cache.
94 """
94 """
95 commit_ids = self._get_all_commit_ids()
95 commit_ids = self._get_all_commit_ids()
96 self._rebuild_cache(commit_ids)
96 self._rebuild_cache(commit_ids)
97 return commit_ids
97 return commit_ids
98
98
99 def _rebuild_cache(self, commit_ids):
99 def _rebuild_cache(self, commit_ids):
100 self._commit_ids = dict((commit_id, index)
100 self._commit_ids = dict((commit_id, index)
101 for index, commit_id in enumerate(commit_ids))
101 for index, commit_id in enumerate(commit_ids))
102
102
103 def run_git_command(self, cmd, **opts):
103 def run_git_command(self, cmd, **opts):
104 """
104 """
105 Runs given ``cmd`` as git command and returns tuple
105 Runs given ``cmd`` as git command and returns tuple
106 (stdout, stderr).
106 (stdout, stderr).
107
107
108 :param cmd: git command to be executed
108 :param cmd: git command to be executed
109 :param opts: env options to pass into Subprocess command
109 :param opts: env options to pass into Subprocess command
110 """
110 """
111 if not isinstance(cmd, list):
111 if not isinstance(cmd, list):
112 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
112 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
113
113
114 skip_stderr_log = opts.pop('skip_stderr_log', False)
114 skip_stderr_log = opts.pop('skip_stderr_log', False)
115 out, err = self._remote.run_git_command(cmd, **opts)
115 out, err = self._remote.run_git_command(cmd, **opts)
116 if err and not skip_stderr_log:
116 if err and not skip_stderr_log:
117 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
117 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
118 return out, err
118 return out, err
119
119
120 @staticmethod
120 @staticmethod
121 def check_url(url, config):
121 def check_url(url, config):
122 """
122 """
123 Function will check given url and try to verify if it's a valid
123 Function will check given url and try to verify if it's a valid
124 link. Sometimes it may happened that git will issue basic
124 link. Sometimes it may happened that git will issue basic
125 auth request that can cause whole API to hang when used from python
125 auth request that can cause whole API to hang when used from python
126 or other external calls.
126 or other external calls.
127
127
128 On failures it'll raise urllib2.HTTPError, exception is also thrown
128 On failures it'll raise urllib2.HTTPError, exception is also thrown
129 when the return code is non 200
129 when the return code is non 200
130 """
130 """
131 # check first if it's not an url
131 # check first if it's not an url
132 if os.path.isdir(url) or url.startswith('file:'):
132 if os.path.isdir(url) or url.startswith('file:'):
133 return True
133 return True
134
134
135 if '+' in url.split('://', 1)[0]:
135 if '+' in url.split('://', 1)[0]:
136 url = url.split('+', 1)[1]
136 url = url.split('+', 1)[1]
137
137
138 # Request the _remote to verify the url
138 # Request the _remote to verify the url
139 return connection.Git.check_url(url, config.serialize())
139 return connection.Git.check_url(url, config.serialize())
140
140
141 @staticmethod
141 @staticmethod
142 def is_valid_repository(path):
142 def is_valid_repository(path):
143 if os.path.isdir(os.path.join(path, '.git')):
143 if os.path.isdir(os.path.join(path, '.git')):
144 return True
144 return True
145 # check case of bare repository
145 # check case of bare repository
146 try:
146 try:
147 GitRepository(path)
147 GitRepository(path)
148 return True
148 return True
149 except VCSError:
149 except VCSError:
150 pass
150 pass
151 return False
151 return False
152
152
153 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
153 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
154 bare=False):
154 bare=False):
155 if create and os.path.exists(self.path):
155 if create and os.path.exists(self.path):
156 raise RepositoryError(
156 raise RepositoryError(
157 "Cannot create repository at %s, location already exist"
157 "Cannot create repository at %s, location already exist"
158 % self.path)
158 % self.path)
159
159
160 if bare and do_workspace_checkout:
160 if bare and do_workspace_checkout:
161 raise RepositoryError("Cannot update a bare repository")
161 raise RepositoryError("Cannot update a bare repository")
162 try:
162 try:
163
163
164 if src_url:
164 if src_url:
165 # check URL before any actions
165 # check URL before any actions
166 GitRepository.check_url(src_url, self.config)
166 GitRepository.check_url(src_url, self.config)
167
167
168 if create:
168 if create:
169 os.makedirs(self.path, mode=0o755)
169 os.makedirs(self.path, mode=0o755)
170
170
171 if bare:
171 if bare:
172 self._remote.init_bare()
172 self._remote.init_bare()
173 else:
173 else:
174 self._remote.init()
174 self._remote.init()
175
175
176 if src_url and bare:
176 if src_url and bare:
177 # bare repository only allows a fetch and checkout is not allowed
177 # bare repository only allows a fetch and checkout is not allowed
178 self.fetch(src_url, commit_ids=None)
178 self.fetch(src_url, commit_ids=None)
179 elif src_url:
179 elif src_url:
180 self.pull(src_url, commit_ids=None,
180 self.pull(src_url, commit_ids=None,
181 update_after=do_workspace_checkout)
181 update_after=do_workspace_checkout)
182
182
183 else:
183 else:
184 if not self._remote.assert_correct_path():
184 if not self._remote.assert_correct_path():
185 raise RepositoryError(
185 raise RepositoryError(
186 'Path "%s" does not contain a Git repository' %
186 'Path "%s" does not contain a Git repository' %
187 (self.path,))
187 (self.path,))
188
188
189 # TODO: johbo: check if we have to translate the OSError here
189 # TODO: johbo: check if we have to translate the OSError here
190 except OSError as err:
190 except OSError as err:
191 raise RepositoryError(err)
191 raise RepositoryError(err)
192
192
193 def _get_all_commit_ids(self, filters=None):
193 def _get_all_commit_ids(self, filters=None):
194 # we must check if this repo is not empty, since later command
194 # we must check if this repo is not empty, since later command
195 # fails if it is. And it's cheaper to ask than throw the subprocess
195 # fails if it is. And it's cheaper to ask than throw the subprocess
196 # errors
196 # errors
197
197
198 head = self._remote.head(show_exc=False)
198 head = self._remote.head(show_exc=False)
199 if not head:
199 if not head:
200 return []
200 return []
201
201
202 rev_filter = ['--branches', '--tags']
202 rev_filter = ['--branches', '--tags']
203 extra_filter = []
203 extra_filter = []
204
204
205 if filters:
205 if filters:
206 if filters.get('since'):
206 if filters.get('since'):
207 extra_filter.append('--since=%s' % (filters['since']))
207 extra_filter.append('--since=%s' % (filters['since']))
208 if filters.get('until'):
208 if filters.get('until'):
209 extra_filter.append('--until=%s' % (filters['until']))
209 extra_filter.append('--until=%s' % (filters['until']))
210 if filters.get('branch_name'):
210 if filters.get('branch_name'):
211 rev_filter = ['--tags']
211 rev_filter = ['--tags']
212 extra_filter.append(filters['branch_name'])
212 extra_filter.append(filters['branch_name'])
213 rev_filter.extend(extra_filter)
213 rev_filter.extend(extra_filter)
214
214
215 # if filters.get('start') or filters.get('end'):
215 # if filters.get('start') or filters.get('end'):
216 # # skip is offset, max-count is limit
216 # # skip is offset, max-count is limit
217 # if filters.get('start'):
217 # if filters.get('start'):
218 # extra_filter += ' --skip=%s' % filters['start']
218 # extra_filter += ' --skip=%s' % filters['start']
219 # if filters.get('end'):
219 # if filters.get('end'):
220 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
220 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
221
221
222 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
222 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
223 try:
223 try:
224 output, __ = self.run_git_command(cmd)
224 output, __ = self.run_git_command(cmd)
225 except RepositoryError:
225 except RepositoryError:
226 # Can be raised for empty repositories
226 # Can be raised for empty repositories
227 return []
227 return []
228 return output.splitlines()
228 return output.splitlines()
229
229
230 def _lookup_commit(self, commit_id_or_idx, translate_tag=True):
230 def _lookup_commit(self, commit_id_or_idx, translate_tag=True):
231 def is_null(value):
231 def is_null(value):
232 return len(value) == commit_id_or_idx.count('0')
232 return len(value) == commit_id_or_idx.count('0')
233
233
234 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
234 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
235 return self.commit_ids[-1]
235 return self.commit_ids[-1]
236
236
237 is_bstr = isinstance(commit_id_or_idx, (str, unicode))
237 is_bstr = isinstance(commit_id_or_idx, (str, unicode))
238 if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
238 if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
239 or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
239 or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
240 try:
240 try:
241 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
241 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
242 except Exception:
242 except Exception:
243 msg = "Commit %s does not exist for %s" % (commit_id_or_idx, self.name)
243 msg = "Commit {} does not exist for `{}`".format(commit_id_or_idx, self.name)
244 raise CommitDoesNotExistError(msg)
244 raise CommitDoesNotExistError(msg)
245
245
246 elif is_bstr:
246 elif is_bstr:
247 # check full path ref, eg. refs/heads/master
247 # check full path ref, eg. refs/heads/master
248 ref_id = self._refs.get(commit_id_or_idx)
248 ref_id = self._refs.get(commit_id_or_idx)
249 if ref_id:
249 if ref_id:
250 return ref_id
250 return ref_id
251
251
252 # check branch name
252 # check branch name
253 branch_ids = self.branches.values()
253 branch_ids = self.branches.values()
254 ref_id = self._refs.get('refs/heads/%s' % commit_id_or_idx)
254 ref_id = self._refs.get('refs/heads/%s' % commit_id_or_idx)
255 if ref_id:
255 if ref_id:
256 return ref_id
256 return ref_id
257
257
258 # check tag name
258 # check tag name
259 ref_id = self._refs.get('refs/tags/%s' % commit_id_or_idx)
259 ref_id = self._refs.get('refs/tags/%s' % commit_id_or_idx)
260 if ref_id:
260 if ref_id:
261 return ref_id
261 return ref_id
262
262
263 if (not SHA_PATTERN.match(commit_id_or_idx) or
263 if (not SHA_PATTERN.match(commit_id_or_idx) or
264 commit_id_or_idx not in self.commit_ids):
264 commit_id_or_idx not in self.commit_ids):
265 msg = "Commit %s does not exist for %s" % (commit_id_or_idx, self.name)
265 msg = "Commit {} does not exist for `{}`".format(commit_id_or_idx, self.name)
266 raise CommitDoesNotExistError(msg)
266 raise CommitDoesNotExistError(msg)
267
267
268 # Ensure we return full id
268 # Ensure we return full id
269 if not SHA_PATTERN.match(str(commit_id_or_idx)):
269 if not SHA_PATTERN.match(str(commit_id_or_idx)):
270 raise CommitDoesNotExistError(
270 raise CommitDoesNotExistError(
271 "Given commit id %s not recognized" % commit_id_or_idx)
271 "Given commit id %s not recognized" % commit_id_or_idx)
272 return commit_id_or_idx
272 return commit_id_or_idx
273
273
274 def get_hook_location(self):
274 def get_hook_location(self):
275 """
275 """
276 returns absolute path to location where hooks are stored
276 returns absolute path to location where hooks are stored
277 """
277 """
278 loc = os.path.join(self.path, 'hooks')
278 loc = os.path.join(self.path, 'hooks')
279 if not self.bare:
279 if not self.bare:
280 loc = os.path.join(self.path, '.git', 'hooks')
280 loc = os.path.join(self.path, '.git', 'hooks')
281 return loc
281 return loc
282
282
283 @LazyProperty
283 @LazyProperty
284 def last_change(self):
284 def last_change(self):
285 """
285 """
286 Returns last change made on this repository as
286 Returns last change made on this repository as
287 `datetime.datetime` object.
287 `datetime.datetime` object.
288 """
288 """
289 try:
289 try:
290 return self.get_commit().date
290 return self.get_commit().date
291 except RepositoryError:
291 except RepositoryError:
292 tzoffset = makedate()[1]
292 tzoffset = makedate()[1]
293 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
293 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
294
294
295 def _get_fs_mtime(self):
295 def _get_fs_mtime(self):
296 idx_loc = '' if self.bare else '.git'
296 idx_loc = '' if self.bare else '.git'
297 # fallback to filesystem
297 # fallback to filesystem
298 in_path = os.path.join(self.path, idx_loc, "index")
298 in_path = os.path.join(self.path, idx_loc, "index")
299 he_path = os.path.join(self.path, idx_loc, "HEAD")
299 he_path = os.path.join(self.path, idx_loc, "HEAD")
300 if os.path.exists(in_path):
300 if os.path.exists(in_path):
301 return os.stat(in_path).st_mtime
301 return os.stat(in_path).st_mtime
302 else:
302 else:
303 return os.stat(he_path).st_mtime
303 return os.stat(he_path).st_mtime
304
304
305 @LazyProperty
305 @LazyProperty
306 def description(self):
306 def description(self):
307 description = self._remote.get_description()
307 description = self._remote.get_description()
308 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
308 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
309
309
310 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
310 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
311 if self.is_empty():
311 if self.is_empty():
312 return OrderedDict()
312 return OrderedDict()
313
313
314 result = []
314 result = []
315 for ref, sha in self._refs.iteritems():
315 for ref, sha in self._refs.iteritems():
316 if ref.startswith(prefix):
316 if ref.startswith(prefix):
317 ref_name = ref
317 ref_name = ref
318 if strip_prefix:
318 if strip_prefix:
319 ref_name = ref[len(prefix):]
319 ref_name = ref[len(prefix):]
320 result.append((safe_unicode(ref_name), sha))
320 result.append((safe_unicode(ref_name), sha))
321
321
322 def get_name(entry):
322 def get_name(entry):
323 return entry[0]
323 return entry[0]
324
324
325 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
325 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
326
326
327 def _get_branches(self):
327 def _get_branches(self):
328 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
328 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
329
329
330 @LazyProperty
330 @LazyProperty
331 def branches(self):
331 def branches(self):
332 return self._get_branches()
332 return self._get_branches()
333
333
334 @LazyProperty
334 @LazyProperty
335 def branches_closed(self):
335 def branches_closed(self):
336 return {}
336 return {}
337
337
338 @LazyProperty
338 @LazyProperty
339 def bookmarks(self):
339 def bookmarks(self):
340 return {}
340 return {}
341
341
342 @LazyProperty
342 @LazyProperty
343 def branches_all(self):
343 def branches_all(self):
344 all_branches = {}
344 all_branches = {}
345 all_branches.update(self.branches)
345 all_branches.update(self.branches)
346 all_branches.update(self.branches_closed)
346 all_branches.update(self.branches_closed)
347 return all_branches
347 return all_branches
348
348
349 @LazyProperty
349 @LazyProperty
350 def tags(self):
350 def tags(self):
351 return self._get_tags()
351 return self._get_tags()
352
352
353 def _get_tags(self):
353 def _get_tags(self):
354 return self._get_refs_entries(
354 return self._get_refs_entries(
355 prefix='refs/tags/', strip_prefix=True, reverse=True)
355 prefix='refs/tags/', strip_prefix=True, reverse=True)
356
356
357 def tag(self, name, user, commit_id=None, message=None, date=None,
357 def tag(self, name, user, commit_id=None, message=None, date=None,
358 **kwargs):
358 **kwargs):
359 # TODO: fix this method to apply annotated tags correct with message
359 # TODO: fix this method to apply annotated tags correct with message
360 """
360 """
361 Creates and returns a tag for the given ``commit_id``.
361 Creates and returns a tag for the given ``commit_id``.
362
362
363 :param name: name for new tag
363 :param name: name for new tag
364 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
364 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
365 :param commit_id: commit id for which new tag would be created
365 :param commit_id: commit id for which new tag would be created
366 :param message: message of the tag's commit
366 :param message: message of the tag's commit
367 :param date: date of tag's commit
367 :param date: date of tag's commit
368
368
369 :raises TagAlreadyExistError: if tag with same name already exists
369 :raises TagAlreadyExistError: if tag with same name already exists
370 """
370 """
371 if name in self.tags:
371 if name in self.tags:
372 raise TagAlreadyExistError("Tag %s already exists" % name)
372 raise TagAlreadyExistError("Tag %s already exists" % name)
373 commit = self.get_commit(commit_id=commit_id)
373 commit = self.get_commit(commit_id=commit_id)
374 message = message or "Added tag %s for commit %s" % (
374 message = message or "Added tag %s for commit %s" % (
375 name, commit.raw_id)
375 name, commit.raw_id)
376 self._remote.set_refs('refs/tags/%s' % name, commit._commit['id'])
376 self._remote.set_refs('refs/tags/%s' % name, commit._commit['id'])
377
377
378 self._refs = self._get_refs()
378 self._refs = self._get_refs()
379 self.tags = self._get_tags()
379 self.tags = self._get_tags()
380 return commit
380 return commit
381
381
382 def remove_tag(self, name, user, message=None, date=None):
382 def remove_tag(self, name, user, message=None, date=None):
383 """
383 """
384 Removes tag with the given ``name``.
384 Removes tag with the given ``name``.
385
385
386 :param name: name of the tag to be removed
386 :param name: name of the tag to be removed
387 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
387 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
388 :param message: message of the tag's removal commit
388 :param message: message of the tag's removal commit
389 :param date: date of tag's removal commit
389 :param date: date of tag's removal commit
390
390
391 :raises TagDoesNotExistError: if tag with given name does not exists
391 :raises TagDoesNotExistError: if tag with given name does not exists
392 """
392 """
393 if name not in self.tags:
393 if name not in self.tags:
394 raise TagDoesNotExistError("Tag %s does not exist" % name)
394 raise TagDoesNotExistError("Tag %s does not exist" % name)
395 tagpath = vcspath.join(
395 tagpath = vcspath.join(
396 self._remote.get_refs_path(), 'refs', 'tags', name)
396 self._remote.get_refs_path(), 'refs', 'tags', name)
397 try:
397 try:
398 os.remove(tagpath)
398 os.remove(tagpath)
399 self._refs = self._get_refs()
399 self._refs = self._get_refs()
400 self.tags = self._get_tags()
400 self.tags = self._get_tags()
401 except OSError as e:
401 except OSError as e:
402 raise RepositoryError(e.strerror)
402 raise RepositoryError(e.strerror)
403
403
404 def _get_refs(self):
404 def _get_refs(self):
405 return self._remote.get_refs()
405 return self._remote.get_refs()
406
406
407 @LazyProperty
407 @LazyProperty
408 def _refs(self):
408 def _refs(self):
409 return self._get_refs()
409 return self._get_refs()
410
410
411 @property
411 @property
412 def _ref_tree(self):
412 def _ref_tree(self):
413 node = tree = {}
413 node = tree = {}
414 for ref, sha in self._refs.iteritems():
414 for ref, sha in self._refs.iteritems():
415 path = ref.split('/')
415 path = ref.split('/')
416 for bit in path[:-1]:
416 for bit in path[:-1]:
417 node = node.setdefault(bit, {})
417 node = node.setdefault(bit, {})
418 node[path[-1]] = sha
418 node[path[-1]] = sha
419 node = tree
419 node = tree
420 return tree
420 return tree
421
421
422 def get_remote_ref(self, ref_name):
422 def get_remote_ref(self, ref_name):
423 ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
423 ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
424 try:
424 try:
425 return self._refs[ref_key]
425 return self._refs[ref_key]
426 except Exception:
426 except Exception:
427 return
427 return
428
428
429 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=True):
429 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=True):
430 """
430 """
431 Returns `GitCommit` object representing commit from git repository
431 Returns `GitCommit` object representing commit from git repository
432 at the given `commit_id` or head (most recent commit) if None given.
432 at the given `commit_id` or head (most recent commit) if None given.
433 """
433 """
434 if self.is_empty():
434 if self.is_empty():
435 raise EmptyRepositoryError("There are no commits yet")
435 raise EmptyRepositoryError("There are no commits yet")
436
436
437 if commit_id is not None:
437 if commit_id is not None:
438 self._validate_commit_id(commit_id)
438 self._validate_commit_id(commit_id)
439 try:
439 try:
440 # we have cached idx, use it without contacting the remote
440 # we have cached idx, use it without contacting the remote
441 idx = self._commit_ids[commit_id]
441 idx = self._commit_ids[commit_id]
442 return GitCommit(self, commit_id, idx, pre_load=pre_load)
442 return GitCommit(self, commit_id, idx, pre_load=pre_load)
443 except KeyError:
443 except KeyError:
444 pass
444 pass
445
445
446 elif commit_idx is not None:
446 elif commit_idx is not None:
447 self._validate_commit_idx(commit_idx)
447 self._validate_commit_idx(commit_idx)
448 try:
448 try:
449 _commit_id = self.commit_ids[commit_idx]
449 _commit_id = self.commit_ids[commit_idx]
450 if commit_idx < 0:
450 if commit_idx < 0:
451 commit_idx = self.commit_ids.index(_commit_id)
451 commit_idx = self.commit_ids.index(_commit_id)
452 return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
452 return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
453 except IndexError:
453 except IndexError:
454 commit_id = commit_idx
454 commit_id = commit_idx
455 else:
455 else:
456 commit_id = "tip"
456 commit_id = "tip"
457
457
458 commit_id = self._lookup_commit(commit_id)
458 commit_id = self._lookup_commit(commit_id)
459 remote_idx = None
459 remote_idx = None
460 if translate_tag:
460 if translate_tag:
461 # Need to call remote to translate id for tagging scenario
461 # Need to call remote to translate id for tagging scenario
462 remote_data = self._remote.get_object(commit_id)
462 remote_data = self._remote.get_object(commit_id)
463 commit_id = remote_data["commit_id"]
463 commit_id = remote_data["commit_id"]
464 remote_idx = remote_data["idx"]
464 remote_idx = remote_data["idx"]
465
465
466 try:
466 try:
467 idx = self._commit_ids[commit_id]
467 idx = self._commit_ids[commit_id]
468 except KeyError:
468 except KeyError:
469 idx = remote_idx or 0
469 idx = remote_idx or 0
470
470
471 return GitCommit(self, commit_id, idx, pre_load=pre_load)
471 return GitCommit(self, commit_id, idx, pre_load=pre_load)
472
472
473 def get_commits(
473 def get_commits(
474 self, start_id=None, end_id=None, start_date=None, end_date=None,
474 self, start_id=None, end_id=None, start_date=None, end_date=None,
475 branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
475 branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
476 """
476 """
477 Returns generator of `GitCommit` objects from start to end (both
477 Returns generator of `GitCommit` objects from start to end (both
478 are inclusive), in ascending date order.
478 are inclusive), in ascending date order.
479
479
480 :param start_id: None, str(commit_id)
480 :param start_id: None, str(commit_id)
481 :param end_id: None, str(commit_id)
481 :param end_id: None, str(commit_id)
482 :param start_date: if specified, commits with commit date less than
482 :param start_date: if specified, commits with commit date less than
483 ``start_date`` would be filtered out from returned set
483 ``start_date`` would be filtered out from returned set
484 :param end_date: if specified, commits with commit date greater than
484 :param end_date: if specified, commits with commit date greater than
485 ``end_date`` would be filtered out from returned set
485 ``end_date`` would be filtered out from returned set
486 :param branch_name: if specified, commits not reachable from given
486 :param branch_name: if specified, commits not reachable from given
487 branch would be filtered out from returned set
487 branch would be filtered out from returned set
488 :param show_hidden: Show hidden commits such as obsolete or hidden from
488 :param show_hidden: Show hidden commits such as obsolete or hidden from
489 Mercurial evolve
489 Mercurial evolve
490 :raise BranchDoesNotExistError: If given `branch_name` does not
490 :raise BranchDoesNotExistError: If given `branch_name` does not
491 exist.
491 exist.
492 :raise CommitDoesNotExistError: If commits for given `start` or
492 :raise CommitDoesNotExistError: If commits for given `start` or
493 `end` could not be found.
493 `end` could not be found.
494
494
495 """
495 """
496 if self.is_empty():
496 if self.is_empty():
497 raise EmptyRepositoryError("There are no commits yet")
497 raise EmptyRepositoryError("There are no commits yet")
498
498
499 self._validate_branch_name(branch_name)
499 self._validate_branch_name(branch_name)
500
500
501 if start_id is not None:
501 if start_id is not None:
502 self._validate_commit_id(start_id)
502 self._validate_commit_id(start_id)
503 if end_id is not None:
503 if end_id is not None:
504 self._validate_commit_id(end_id)
504 self._validate_commit_id(end_id)
505
505
506 start_raw_id = self._lookup_commit(start_id)
506 start_raw_id = self._lookup_commit(start_id)
507 start_pos = self._commit_ids[start_raw_id] if start_id else None
507 start_pos = self._commit_ids[start_raw_id] if start_id else None
508 end_raw_id = self._lookup_commit(end_id)
508 end_raw_id = self._lookup_commit(end_id)
509 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
509 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
510
510
511 if None not in [start_id, end_id] and start_pos > end_pos:
511 if None not in [start_id, end_id] and start_pos > end_pos:
512 raise RepositoryError(
512 raise RepositoryError(
513 "Start commit '%s' cannot be after end commit '%s'" %
513 "Start commit '%s' cannot be after end commit '%s'" %
514 (start_id, end_id))
514 (start_id, end_id))
515
515
516 if end_pos is not None:
516 if end_pos is not None:
517 end_pos += 1
517 end_pos += 1
518
518
519 filter_ = []
519 filter_ = []
520 if branch_name:
520 if branch_name:
521 filter_.append({'branch_name': branch_name})
521 filter_.append({'branch_name': branch_name})
522 if start_date and not end_date:
522 if start_date and not end_date:
523 filter_.append({'since': start_date})
523 filter_.append({'since': start_date})
524 if end_date and not start_date:
524 if end_date and not start_date:
525 filter_.append({'until': end_date})
525 filter_.append({'until': end_date})
526 if start_date and end_date:
526 if start_date and end_date:
527 filter_.append({'since': start_date})
527 filter_.append({'since': start_date})
528 filter_.append({'until': end_date})
528 filter_.append({'until': end_date})
529
529
530 # if start_pos or end_pos:
530 # if start_pos or end_pos:
531 # filter_.append({'start': start_pos})
531 # filter_.append({'start': start_pos})
532 # filter_.append({'end': end_pos})
532 # filter_.append({'end': end_pos})
533
533
534 if filter_:
534 if filter_:
535 revfilters = {
535 revfilters = {
536 'branch_name': branch_name,
536 'branch_name': branch_name,
537 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
537 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
538 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
538 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
539 'start': start_pos,
539 'start': start_pos,
540 'end': end_pos,
540 'end': end_pos,
541 }
541 }
542 commit_ids = self._get_all_commit_ids(filters=revfilters)
542 commit_ids = self._get_all_commit_ids(filters=revfilters)
543
543
544 # pure python stuff, it's slow due to walker walking whole repo
544 # pure python stuff, it's slow due to walker walking whole repo
545 # def get_revs(walker):
545 # def get_revs(walker):
546 # for walker_entry in walker:
546 # for walker_entry in walker:
547 # yield walker_entry.commit.id
547 # yield walker_entry.commit.id
548 # revfilters = {}
548 # revfilters = {}
549 # commit_ids = list(reversed(list(get_revs(self._repo.get_walker(**revfilters)))))
549 # commit_ids = list(reversed(list(get_revs(self._repo.get_walker(**revfilters)))))
550 else:
550 else:
551 commit_ids = self.commit_ids
551 commit_ids = self.commit_ids
552
552
553 if start_pos or end_pos:
553 if start_pos or end_pos:
554 commit_ids = commit_ids[start_pos: end_pos]
554 commit_ids = commit_ids[start_pos: end_pos]
555
555
556 return CollectionGenerator(self, commit_ids, pre_load=pre_load,
556 return CollectionGenerator(self, commit_ids, pre_load=pre_load,
557 translate_tag=translate_tags)
557 translate_tag=translate_tags)
558
558
559 def get_diff(
559 def get_diff(
560 self, commit1, commit2, path='', ignore_whitespace=False,
560 self, commit1, commit2, path='', ignore_whitespace=False,
561 context=3, path1=None):
561 context=3, path1=None):
562 """
562 """
563 Returns (git like) *diff*, as plain text. Shows changes introduced by
563 Returns (git like) *diff*, as plain text. Shows changes introduced by
564 ``commit2`` since ``commit1``.
564 ``commit2`` since ``commit1``.
565
565
566 :param commit1: Entry point from which diff is shown. Can be
566 :param commit1: Entry point from which diff is shown. Can be
567 ``self.EMPTY_COMMIT`` - in this case, patch showing all
567 ``self.EMPTY_COMMIT`` - in this case, patch showing all
568 the changes since empty state of the repository until ``commit2``
568 the changes since empty state of the repository until ``commit2``
569 :param commit2: Until which commits changes should be shown.
569 :param commit2: Until which commits changes should be shown.
570 :param ignore_whitespace: If set to ``True``, would not show whitespace
570 :param ignore_whitespace: If set to ``True``, would not show whitespace
571 changes. Defaults to ``False``.
571 changes. Defaults to ``False``.
572 :param context: How many lines before/after changed lines should be
572 :param context: How many lines before/after changed lines should be
573 shown. Defaults to ``3``.
573 shown. Defaults to ``3``.
574 """
574 """
575 self._validate_diff_commits(commit1, commit2)
575 self._validate_diff_commits(commit1, commit2)
576 if path1 is not None and path1 != path:
576 if path1 is not None and path1 != path:
577 raise ValueError("Diff of two different paths not supported.")
577 raise ValueError("Diff of two different paths not supported.")
578
578
579 flags = [
579 flags = [
580 '-U%s' % context, '--full-index', '--binary', '-p',
580 '-U%s' % context, '--full-index', '--binary', '-p',
581 '-M', '--abbrev=40']
581 '-M', '--abbrev=40']
582 if ignore_whitespace:
582 if ignore_whitespace:
583 flags.append('-w')
583 flags.append('-w')
584
584
585 if commit1 == self.EMPTY_COMMIT:
585 if commit1 == self.EMPTY_COMMIT:
586 cmd = ['show'] + flags + [commit2.raw_id]
586 cmd = ['show'] + flags + [commit2.raw_id]
587 else:
587 else:
588 cmd = ['diff'] + flags + [commit1.raw_id, commit2.raw_id]
588 cmd = ['diff'] + flags + [commit1.raw_id, commit2.raw_id]
589
589
590 if path:
590 if path:
591 cmd.extend(['--', path])
591 cmd.extend(['--', path])
592
592
593 stdout, __ = self.run_git_command(cmd)
593 stdout, __ = self.run_git_command(cmd)
594 # If we used 'show' command, strip first few lines (until actual diff
594 # If we used 'show' command, strip first few lines (until actual diff
595 # starts)
595 # starts)
596 if commit1 == self.EMPTY_COMMIT:
596 if commit1 == self.EMPTY_COMMIT:
597 lines = stdout.splitlines()
597 lines = stdout.splitlines()
598 x = 0
598 x = 0
599 for line in lines:
599 for line in lines:
600 if line.startswith('diff'):
600 if line.startswith('diff'):
601 break
601 break
602 x += 1
602 x += 1
603 # Append new line just like 'diff' command do
603 # Append new line just like 'diff' command do
604 stdout = '\n'.join(lines[x:]) + '\n'
604 stdout = '\n'.join(lines[x:]) + '\n'
605 return GitDiff(stdout)
605 return GitDiff(stdout)
606
606
    def strip(self, commit_id, branch_name):
        """
        Strip ``commit_id`` from ``branch_name`` by resetting the branch ref
        to the commit's first parent.

        Raises for merge commits (no unambiguous parent to reset to).
        Returns the new total number of commits in the repository.
        """
        commit = self.get_commit(commit_id=commit_id)
        if commit.merge:
            raise Exception('Cannot reset to merge commit')

        # parent is going to be the new head now
        commit = commit.parents[0]
        self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)

        # bump the version stamp so the cached commit id list is discarded
        self._commit_ids_ver = time.time()
        # we updated _commit_ids_ver so accessing self.commit_ids will re-compute it
        return len(self.commit_ids)
619
619
620 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
620 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
621 if commit_id1 == commit_id2:
621 if commit_id1 == commit_id2:
622 return commit_id1
622 return commit_id1
623
623
624 if self != repo2:
624 if self != repo2:
625 commits = self._remote.get_missing_revs(
625 commits = self._remote.get_missing_revs(
626 commit_id1, commit_id2, repo2.path)
626 commit_id1, commit_id2, repo2.path)
627 if commits:
627 if commits:
628 commit = repo2.get_commit(commits[-1])
628 commit = repo2.get_commit(commits[-1])
629 if commit.parents:
629 if commit.parents:
630 ancestor_id = commit.parents[0].raw_id
630 ancestor_id = commit.parents[0].raw_id
631 else:
631 else:
632 ancestor_id = None
632 ancestor_id = None
633 else:
633 else:
634 # no commits from other repo, ancestor_id is the commit_id2
634 # no commits from other repo, ancestor_id is the commit_id2
635 ancestor_id = commit_id2
635 ancestor_id = commit_id2
636 else:
636 else:
637 output, __ = self.run_git_command(
637 output, __ = self.run_git_command(
638 ['merge-base', commit_id1, commit_id2])
638 ['merge-base', commit_id1, commit_id2])
639 ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]
639 ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]
640
640
641 return ancestor_id
641 return ancestor_id
642
642
643 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
643 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
644 repo1 = self
644 repo1 = self
645 ancestor_id = None
645 ancestor_id = None
646
646
647 if commit_id1 == commit_id2:
647 if commit_id1 == commit_id2:
648 commits = []
648 commits = []
649 elif repo1 != repo2:
649 elif repo1 != repo2:
650 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
650 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
651 repo2.path)
651 repo2.path)
652 commits = [
652 commits = [
653 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
653 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
654 for commit_id in reversed(missing_ids)]
654 for commit_id in reversed(missing_ids)]
655 else:
655 else:
656 output, __ = repo1.run_git_command(
656 output, __ = repo1.run_git_command(
657 ['log', '--reverse', '--pretty=format: %H', '-s',
657 ['log', '--reverse', '--pretty=format: %H', '-s',
658 '%s..%s' % (commit_id1, commit_id2)])
658 '%s..%s' % (commit_id1, commit_id2)])
659 commits = [
659 commits = [
660 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
660 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
661 for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]
661 for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]
662
662
663 return commits
663 return commits
664
664
    @LazyProperty
    def in_memory_commit(self):
        """
        Returns ``GitInMemoryCommit`` object for this repository.

        NOTE(review): decorated with ``LazyProperty``, so presumably computed
        once and cached per instance — confirm against the decorator.
        """
        return GitInMemoryCommit(self)
671
671
672 def pull(self, url, commit_ids=None, update_after=False):
672 def pull(self, url, commit_ids=None, update_after=False):
673 """
673 """
674 Pull changes from external location. Pull is different in GIT
674 Pull changes from external location. Pull is different in GIT
675 that fetch since it's doing a checkout
675 that fetch since it's doing a checkout
676
676
677 :param commit_ids: Optional. Can be set to a list of commit ids
677 :param commit_ids: Optional. Can be set to a list of commit ids
678 which shall be pulled from the other repository.
678 which shall be pulled from the other repository.
679 """
679 """
680 refs = None
680 refs = None
681 if commit_ids is not None:
681 if commit_ids is not None:
682 remote_refs = self._remote.get_remote_refs(url)
682 remote_refs = self._remote.get_remote_refs(url)
683 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
683 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
684 self._remote.pull(url, refs=refs, update_after=update_after)
684 self._remote.pull(url, refs=refs, update_after=update_after)
685 self._remote.invalidate_vcs_cache()
685 self._remote.invalidate_vcs_cache()
686
686
    def fetch(self, url, commit_ids=None):
        """
        Fetch all git objects from external location.

        :param commit_ids: Optional; forwarded as ``refs`` to the remote
            sync_fetch call to restrict what is fetched.
        """
        self._remote.sync_fetch(url, refs=commit_ids)
        self._remote.invalidate_vcs_cache()
693
693
694 def push(self, url):
694 def push(self, url):
695 refs = None
695 refs = None
696 self._remote.sync_push(url, refs=refs)
696 self._remote.sync_push(url, refs=refs)
697
697
    def set_refs(self, ref_name, commit_id):
        """Create or update git ref ``ref_name`` to point at ``commit_id``."""
        self._remote.set_refs(ref_name, commit_id)
700
700
    def remove_ref(self, ref_name):
        """Delete the git ref ``ref_name``."""
        self._remote.remove_ref(ref_name)
703
703
    def _update_server_info(self):
        """
        Run git's ``update-server-info`` command in this repo instance.
        """
        self._remote.update_server_info()
709
709
710 def _current_branch(self):
710 def _current_branch(self):
711 """
711 """
712 Return the name of the current branch.
712 Return the name of the current branch.
713
713
714 It only works for non bare repositories (i.e. repositories with a
714 It only works for non bare repositories (i.e. repositories with a
715 working copy)
715 working copy)
716 """
716 """
717 if self.bare:
717 if self.bare:
718 raise RepositoryError('Bare git repos do not have active branches')
718 raise RepositoryError('Bare git repos do not have active branches')
719
719
720 if self.is_empty():
720 if self.is_empty():
721 return None
721 return None
722
722
723 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
723 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
724 return stdout.strip()
724 return stdout.strip()
725
725
726 def _checkout(self, branch_name, create=False, force=False):
726 def _checkout(self, branch_name, create=False, force=False):
727 """
727 """
728 Checkout a branch in the working directory.
728 Checkout a branch in the working directory.
729
729
730 It tries to create the branch if create is True, failing if the branch
730 It tries to create the branch if create is True, failing if the branch
731 already exists.
731 already exists.
732
732
733 It only works for non bare repositories (i.e. repositories with a
733 It only works for non bare repositories (i.e. repositories with a
734 working copy)
734 working copy)
735 """
735 """
736 if self.bare:
736 if self.bare:
737 raise RepositoryError('Cannot checkout branches in a bare git repo')
737 raise RepositoryError('Cannot checkout branches in a bare git repo')
738
738
739 cmd = ['checkout']
739 cmd = ['checkout']
740 if force:
740 if force:
741 cmd.append('-f')
741 cmd.append('-f')
742 if create:
742 if create:
743 cmd.append('-b')
743 cmd.append('-b')
744 cmd.append(branch_name)
744 cmd.append(branch_name)
745 self.run_git_command(cmd, fail_on_stderr=False)
745 self.run_git_command(cmd, fail_on_stderr=False)
746
746
747 def _identify(self):
747 def _identify(self):
748 """
748 """
749 Return the current state of the working directory.
749 Return the current state of the working directory.
750 """
750 """
751 if self.bare:
751 if self.bare:
752 raise RepositoryError('Bare git repos do not have active branches')
752 raise RepositoryError('Bare git repos do not have active branches')
753
753
754 if self.is_empty():
754 if self.is_empty():
755 return None
755 return None
756
756
757 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
757 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
758 return stdout.strip()
758 return stdout.strip()
759
759
760 def _local_clone(self, clone_path, branch_name, source_branch=None):
760 def _local_clone(self, clone_path, branch_name, source_branch=None):
761 """
761 """
762 Create a local clone of the current repo.
762 Create a local clone of the current repo.
763 """
763 """
764 # N.B.(skreft): the --branch option is required as otherwise the shallow
764 # N.B.(skreft): the --branch option is required as otherwise the shallow
765 # clone will only fetch the active branch.
765 # clone will only fetch the active branch.
766 cmd = ['clone', '--branch', branch_name,
766 cmd = ['clone', '--branch', branch_name,
767 self.path, os.path.abspath(clone_path)]
767 self.path, os.path.abspath(clone_path)]
768
768
769 self.run_git_command(cmd, fail_on_stderr=False)
769 self.run_git_command(cmd, fail_on_stderr=False)
770
770
771 # if we get the different source branch, make sure we also fetch it for
771 # if we get the different source branch, make sure we also fetch it for
772 # merge conditions
772 # merge conditions
773 if source_branch and source_branch != branch_name:
773 if source_branch and source_branch != branch_name:
774 # check if the ref exists.
774 # check if the ref exists.
775 shadow_repo = GitRepository(os.path.abspath(clone_path))
775 shadow_repo = GitRepository(os.path.abspath(clone_path))
776 if shadow_repo.get_remote_ref(source_branch):
776 if shadow_repo.get_remote_ref(source_branch):
777 cmd = ['fetch', self.path, source_branch]
777 cmd = ['fetch', self.path, source_branch]
778 self.run_git_command(cmd, fail_on_stderr=False)
778 self.run_git_command(cmd, fail_on_stderr=False)
779
779
780 def _local_fetch(self, repository_path, branch_name, use_origin=False):
780 def _local_fetch(self, repository_path, branch_name, use_origin=False):
781 """
781 """
782 Fetch a branch from a local repository.
782 Fetch a branch from a local repository.
783 """
783 """
784 repository_path = os.path.abspath(repository_path)
784 repository_path = os.path.abspath(repository_path)
785 if repository_path == self.path:
785 if repository_path == self.path:
786 raise ValueError('Cannot fetch from the same repository')
786 raise ValueError('Cannot fetch from the same repository')
787
787
788 if use_origin:
788 if use_origin:
789 branch_name = '+{branch}:refs/heads/{branch}'.format(
789 branch_name = '+{branch}:refs/heads/{branch}'.format(
790 branch=branch_name)
790 branch=branch_name)
791
791
792 cmd = ['fetch', '--no-tags', '--update-head-ok',
792 cmd = ['fetch', '--no-tags', '--update-head-ok',
793 repository_path, branch_name]
793 repository_path, branch_name]
794 self.run_git_command(cmd, fail_on_stderr=False)
794 self.run_git_command(cmd, fail_on_stderr=False)
795
795
796 def _local_reset(self, branch_name):
796 def _local_reset(self, branch_name):
797 branch_name = '{}'.format(branch_name)
797 branch_name = '{}'.format(branch_name)
798 cmd = ['reset', '--hard', branch_name, '--']
798 cmd = ['reset', '--hard', branch_name, '--']
799 self.run_git_command(cmd, fail_on_stderr=False)
799 self.run_git_command(cmd, fail_on_stderr=False)
800
800
801 def _last_fetch_heads(self):
801 def _last_fetch_heads(self):
802 """
802 """
803 Return the last fetched heads that need merging.
803 Return the last fetched heads that need merging.
804
804
805 The algorithm is defined at
805 The algorithm is defined at
806 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
806 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
807 """
807 """
808 if not self.bare:
808 if not self.bare:
809 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
809 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
810 else:
810 else:
811 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
811 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
812
812
813 heads = []
813 heads = []
814 with open(fetch_heads_path) as f:
814 with open(fetch_heads_path) as f:
815 for line in f:
815 for line in f:
816 if ' not-for-merge ' in line:
816 if ' not-for-merge ' in line:
817 continue
817 continue
818 line = re.sub('\t.*', '', line, flags=re.DOTALL)
818 line = re.sub('\t.*', '', line, flags=re.DOTALL)
819 heads.append(line)
819 heads.append(line)
820
820
821 return heads
821 return heads
822
822
    def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
        """Return a ``GitRepository`` wrapping the shadow repo at the given path."""
        # NOTE(review): enable_hooks is accepted for interface compatibility
        # but is not used for git shadow repositories.
        return GitRepository(shadow_repository_path)
825
825
826 def _local_pull(self, repository_path, branch_name, ff_only=True):
826 def _local_pull(self, repository_path, branch_name, ff_only=True):
827 """
827 """
828 Pull a branch from a local repository.
828 Pull a branch from a local repository.
829 """
829 """
830 if self.bare:
830 if self.bare:
831 raise RepositoryError('Cannot pull into a bare git repository')
831 raise RepositoryError('Cannot pull into a bare git repository')
832 # N.B.(skreft): The --ff-only option is to make sure this is a
832 # N.B.(skreft): The --ff-only option is to make sure this is a
833 # fast-forward (i.e., we are only pulling new changes and there are no
833 # fast-forward (i.e., we are only pulling new changes and there are no
834 # conflicts with our current branch)
834 # conflicts with our current branch)
835 # Additionally, that option needs to go before --no-tags, otherwise git
835 # Additionally, that option needs to go before --no-tags, otherwise git
836 # pull complains about it being an unknown flag.
836 # pull complains about it being an unknown flag.
837 cmd = ['pull']
837 cmd = ['pull']
838 if ff_only:
838 if ff_only:
839 cmd.append('--ff-only')
839 cmd.append('--ff-only')
840 cmd.extend(['--no-tags', repository_path, branch_name])
840 cmd.extend(['--no-tags', repository_path, branch_name])
841 self.run_git_command(cmd, fail_on_stderr=False)
841 self.run_git_command(cmd, fail_on_stderr=False)
842
842
843 def _local_merge(self, merge_message, user_name, user_email, heads):
843 def _local_merge(self, merge_message, user_name, user_email, heads):
844 """
844 """
845 Merge the given head into the checked out branch.
845 Merge the given head into the checked out branch.
846
846
847 It will force a merge commit.
847 It will force a merge commit.
848
848
849 Currently it raises an error if the repo is empty, as it is not possible
849 Currently it raises an error if the repo is empty, as it is not possible
850 to create a merge commit in an empty repo.
850 to create a merge commit in an empty repo.
851
851
852 :param merge_message: The message to use for the merge commit.
852 :param merge_message: The message to use for the merge commit.
853 :param heads: the heads to merge.
853 :param heads: the heads to merge.
854 """
854 """
855 if self.bare:
855 if self.bare:
856 raise RepositoryError('Cannot merge into a bare git repository')
856 raise RepositoryError('Cannot merge into a bare git repository')
857
857
858 if not heads:
858 if not heads:
859 return
859 return
860
860
861 if self.is_empty():
861 if self.is_empty():
862 # TODO(skreft): do somehting more robust in this case.
862 # TODO(skreft): do somehting more robust in this case.
863 raise RepositoryError(
863 raise RepositoryError(
864 'Do not know how to merge into empty repositories yet')
864 'Do not know how to merge into empty repositories yet')
865
865
866 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
866 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
867 # commit message. We also specify the user who is doing the merge.
867 # commit message. We also specify the user who is doing the merge.
868 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
868 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
869 '-c', 'user.email=%s' % safe_str(user_email),
869 '-c', 'user.email=%s' % safe_str(user_email),
870 'merge', '--no-ff', '-m', safe_str(merge_message)]
870 'merge', '--no-ff', '-m', safe_str(merge_message)]
871 cmd.extend(heads)
871 cmd.extend(heads)
872 try:
872 try:
873 output = self.run_git_command(cmd, fail_on_stderr=False)
873 output = self.run_git_command(cmd, fail_on_stderr=False)
874 except RepositoryError:
874 except RepositoryError:
875 # Cleanup any merge leftovers
875 # Cleanup any merge leftovers
876 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
876 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
877 raise
877 raise
878
878
879 def _local_push(
879 def _local_push(
880 self, source_branch, repository_path, target_branch,
880 self, source_branch, repository_path, target_branch,
881 enable_hooks=False, rc_scm_data=None):
881 enable_hooks=False, rc_scm_data=None):
882 """
882 """
883 Push the source_branch to the given repository and target_branch.
883 Push the source_branch to the given repository and target_branch.
884
884
885 Currently it if the target_branch is not master and the target repo is
885 Currently it if the target_branch is not master and the target repo is
886 empty, the push will work, but then GitRepository won't be able to find
886 empty, the push will work, but then GitRepository won't be able to find
887 the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
887 the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
888 pointing to master, which does not exist).
888 pointing to master, which does not exist).
889
889
890 It does not run the hooks in the target repo.
890 It does not run the hooks in the target repo.
891 """
891 """
892 # TODO(skreft): deal with the case in which the target repo is empty,
892 # TODO(skreft): deal with the case in which the target repo is empty,
893 # and the target_branch is not master.
893 # and the target_branch is not master.
894 target_repo = GitRepository(repository_path)
894 target_repo = GitRepository(repository_path)
895 if (not target_repo.bare and
895 if (not target_repo.bare and
896 target_repo._current_branch() == target_branch):
896 target_repo._current_branch() == target_branch):
897 # Git prevents pushing to the checked out branch, so simulate it by
897 # Git prevents pushing to the checked out branch, so simulate it by
898 # pulling into the target repository.
898 # pulling into the target repository.
899 target_repo._local_pull(self.path, source_branch)
899 target_repo._local_pull(self.path, source_branch)
900 else:
900 else:
901 cmd = ['push', os.path.abspath(repository_path),
901 cmd = ['push', os.path.abspath(repository_path),
902 '%s:%s' % (source_branch, target_branch)]
902 '%s:%s' % (source_branch, target_branch)]
903 gitenv = {}
903 gitenv = {}
904 if rc_scm_data:
904 if rc_scm_data:
905 gitenv.update({'RC_SCM_DATA': rc_scm_data})
905 gitenv.update({'RC_SCM_DATA': rc_scm_data})
906
906
907 if not enable_hooks:
907 if not enable_hooks:
908 gitenv['RC_SKIP_HOOKS'] = '1'
908 gitenv['RC_SKIP_HOOKS'] = '1'
909 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
909 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
910
910
911 def _get_new_pr_branch(self, source_branch, target_branch):
911 def _get_new_pr_branch(self, source_branch, target_branch):
912 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
912 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
913 pr_branches = []
913 pr_branches = []
914 for branch in self.branches:
914 for branch in self.branches:
915 if branch.startswith(prefix):
915 if branch.startswith(prefix):
916 pr_branches.append(int(branch[len(prefix):]))
916 pr_branches.append(int(branch[len(prefix):]))
917
917
918 if not pr_branches:
918 if not pr_branches:
919 branch_id = 0
919 branch_id = 0
920 else:
920 else:
921 branch_id = max(pr_branches) + 1
921 branch_id = max(pr_branches) + 1
922
922
923 return '%s%d' % (prefix, branch_id)
923 return '%s%d' % (prefix, branch_id)
924
924
    def _maybe_prepare_merge_workspace(
            self, repo_id, workspace_id, target_ref, source_ref):
        """
        Ensure the shadow repository used for merge testing exists and
        return its path; it is cloned only on first use.
        """
        shadow_repository_path = self._get_shadow_repository_path(
            repo_id, workspace_id)
        if not os.path.exists(shadow_repository_path):
            # first use: clone the target branch (plus source, if different)
            self._local_clone(
                shadow_repository_path, target_ref.name, source_ref.name)
            log.debug(
                'Prepared shadow repository in %s', shadow_repository_path)

        return shadow_repository_path
936
936
937 def _merge_repo(self, repo_id, workspace_id, target_ref,
937 def _merge_repo(self, repo_id, workspace_id, target_ref,
938 source_repo, source_ref, merge_message,
938 source_repo, source_ref, merge_message,
939 merger_name, merger_email, dry_run=False,
939 merger_name, merger_email, dry_run=False,
940 use_rebase=False, close_branch=False):
940 use_rebase=False, close_branch=False):
941
941
942 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
942 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
943 'rebase' if use_rebase else 'merge', dry_run)
943 'rebase' if use_rebase else 'merge', dry_run)
944 if target_ref.commit_id != self.branches[target_ref.name]:
944 if target_ref.commit_id != self.branches[target_ref.name]:
945 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
945 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
946 target_ref.commit_id, self.branches[target_ref.name])
946 target_ref.commit_id, self.branches[target_ref.name])
947 return MergeResponse(
947 return MergeResponse(
948 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
948 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
949 metadata={'target_ref': target_ref})
949 metadata={'target_ref': target_ref})
950
950
951 shadow_repository_path = self._maybe_prepare_merge_workspace(
951 shadow_repository_path = self._maybe_prepare_merge_workspace(
952 repo_id, workspace_id, target_ref, source_ref)
952 repo_id, workspace_id, target_ref, source_ref)
953 shadow_repo = self._get_shadow_instance(shadow_repository_path)
953 shadow_repo = self._get_shadow_instance(shadow_repository_path)
954
954
955 # checkout source, if it's different. Otherwise we could not
955 # checkout source, if it's different. Otherwise we could not
956 # fetch proper commits for merge testing
956 # fetch proper commits for merge testing
957 if source_ref.name != target_ref.name:
957 if source_ref.name != target_ref.name:
958 if shadow_repo.get_remote_ref(source_ref.name):
958 if shadow_repo.get_remote_ref(source_ref.name):
959 shadow_repo._checkout(source_ref.name, force=True)
959 shadow_repo._checkout(source_ref.name, force=True)
960
960
961 # checkout target, and fetch changes
961 # checkout target, and fetch changes
962 shadow_repo._checkout(target_ref.name, force=True)
962 shadow_repo._checkout(target_ref.name, force=True)
963
963
964 # fetch/reset pull the target, in case it is changed
964 # fetch/reset pull the target, in case it is changed
965 # this handles even force changes
965 # this handles even force changes
966 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
966 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
967 shadow_repo._local_reset(target_ref.name)
967 shadow_repo._local_reset(target_ref.name)
968
968
969 # Need to reload repo to invalidate the cache, or otherwise we cannot
969 # Need to reload repo to invalidate the cache, or otherwise we cannot
970 # retrieve the last target commit.
970 # retrieve the last target commit.
971 shadow_repo = self._get_shadow_instance(shadow_repository_path)
971 shadow_repo = self._get_shadow_instance(shadow_repository_path)
972 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
972 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
973 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
973 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
974 target_ref, target_ref.commit_id,
974 target_ref, target_ref.commit_id,
975 shadow_repo.branches[target_ref.name])
975 shadow_repo.branches[target_ref.name])
976 return MergeResponse(
976 return MergeResponse(
977 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
977 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
978 metadata={'target_ref': target_ref})
978 metadata={'target_ref': target_ref})
979
979
980 # calculate new branch
980 # calculate new branch
981 pr_branch = shadow_repo._get_new_pr_branch(
981 pr_branch = shadow_repo._get_new_pr_branch(
982 source_ref.name, target_ref.name)
982 source_ref.name, target_ref.name)
983 log.debug('using pull-request merge branch: `%s`', pr_branch)
983 log.debug('using pull-request merge branch: `%s`', pr_branch)
984 # checkout to temp branch, and fetch changes
984 # checkout to temp branch, and fetch changes
985 shadow_repo._checkout(pr_branch, create=True)
985 shadow_repo._checkout(pr_branch, create=True)
986 try:
986 try:
987 shadow_repo._local_fetch(source_repo.path, source_ref.name)
987 shadow_repo._local_fetch(source_repo.path, source_ref.name)
988 except RepositoryError:
988 except RepositoryError:
989 log.exception('Failure when doing local fetch on '
989 log.exception('Failure when doing local fetch on '
990 'shadow repo: %s', shadow_repo)
990 'shadow repo: %s', shadow_repo)
991 return MergeResponse(
991 return MergeResponse(
992 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
992 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
993 metadata={'source_ref': source_ref})
993 metadata={'source_ref': source_ref})
994
994
995 merge_ref = None
995 merge_ref = None
996 merge_failure_reason = MergeFailureReason.NONE
996 merge_failure_reason = MergeFailureReason.NONE
997 metadata = {}
997 metadata = {}
998 try:
998 try:
999 shadow_repo._local_merge(merge_message, merger_name, merger_email,
999 shadow_repo._local_merge(merge_message, merger_name, merger_email,
1000 [source_ref.commit_id])
1000 [source_ref.commit_id])
1001 merge_possible = True
1001 merge_possible = True
1002
1002
1003 # Need to reload repo to invalidate the cache, or otherwise we
1003 # Need to reload repo to invalidate the cache, or otherwise we
1004 # cannot retrieve the merge commit.
1004 # cannot retrieve the merge commit.
1005 shadow_repo = GitRepository(shadow_repository_path)
1005 shadow_repo = GitRepository(shadow_repository_path)
1006 merge_commit_id = shadow_repo.branches[pr_branch]
1006 merge_commit_id = shadow_repo.branches[pr_branch]
1007
1007
1008 # Set a reference pointing to the merge commit. This reference may
1008 # Set a reference pointing to the merge commit. This reference may
1009 # be used to easily identify the last successful merge commit in
1009 # be used to easily identify the last successful merge commit in
1010 # the shadow repository.
1010 # the shadow repository.
1011 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
1011 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
1012 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
1012 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
1013 except RepositoryError:
1013 except RepositoryError:
1014 log.exception('Failure when doing local merge on git shadow repo')
1014 log.exception('Failure when doing local merge on git shadow repo')
1015 merge_possible = False
1015 merge_possible = False
1016 merge_failure_reason = MergeFailureReason.MERGE_FAILED
1016 merge_failure_reason = MergeFailureReason.MERGE_FAILED
1017
1017
1018 if merge_possible and not dry_run:
1018 if merge_possible and not dry_run:
1019 try:
1019 try:
1020 shadow_repo._local_push(
1020 shadow_repo._local_push(
1021 pr_branch, self.path, target_ref.name, enable_hooks=True,
1021 pr_branch, self.path, target_ref.name, enable_hooks=True,
1022 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
1022 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
1023 merge_succeeded = True
1023 merge_succeeded = True
1024 except RepositoryError:
1024 except RepositoryError:
1025 log.exception(
1025 log.exception(
1026 'Failure when doing local push from the shadow '
1026 'Failure when doing local push from the shadow '
1027 'repository to the target repository at %s.', self.path)
1027 'repository to the target repository at %s.', self.path)
1028 merge_succeeded = False
1028 merge_succeeded = False
1029 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1029 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1030 metadata['target'] = 'git shadow repo'
1030 metadata['target'] = 'git shadow repo'
1031 metadata['merge_commit'] = pr_branch
1031 metadata['merge_commit'] = pr_branch
1032 else:
1032 else:
1033 merge_succeeded = False
1033 merge_succeeded = False
1034
1034
1035 return MergeResponse(
1035 return MergeResponse(
1036 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
1036 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
1037 metadata=metadata)
1037 metadata=metadata)
@@ -1,949 +1,949 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 HG repository module
22 HG repository module
23 """
23 """
24 import os
24 import os
25 import logging
25 import logging
26 import binascii
26 import binascii
27 import time
27 import time
28 import urllib
28 import urllib
29
29
30 from zope.cachedescriptors.property import Lazy as LazyProperty
30 from zope.cachedescriptors.property import Lazy as LazyProperty
31 from zope.cachedescriptors.property import CachedProperty
31 from zope.cachedescriptors.property import CachedProperty
32
32
33 from rhodecode.lib.compat import OrderedDict
33 from rhodecode.lib.compat import OrderedDict
34 from rhodecode.lib.datelib import (
34 from rhodecode.lib.datelib import (
35 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
35 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
36 from rhodecode.lib.utils import safe_unicode, safe_str
36 from rhodecode.lib.utils import safe_unicode, safe_str
37 from rhodecode.lib.vcs import connection, exceptions
37 from rhodecode.lib.vcs import connection, exceptions
38 from rhodecode.lib.vcs.backends.base import (
38 from rhodecode.lib.vcs.backends.base import (
39 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 BaseRepository, CollectionGenerator, Config, MergeResponse,
40 MergeFailureReason, Reference, BasePathPermissionChecker)
40 MergeFailureReason, Reference, BasePathPermissionChecker)
41 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
41 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
42 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
42 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
43 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
43 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
44 from rhodecode.lib.vcs.exceptions import (
44 from rhodecode.lib.vcs.exceptions import (
45 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
45 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
46 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
46 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
47 from rhodecode.lib.vcs.compat import configparser
47 from rhodecode.lib.vcs.compat import configparser
48
48
49 hexlify = binascii.hexlify
49 hexlify = binascii.hexlify
50 nullid = "\0" * 20
50 nullid = "\0" * 20
51
51
52 log = logging.getLogger(__name__)
52 log = logging.getLogger(__name__)
53
53
54
54
55 class MercurialRepository(BaseRepository):
55 class MercurialRepository(BaseRepository):
56 """
56 """
57 Mercurial repository backend
57 Mercurial repository backend
58 """
58 """
59 DEFAULT_BRANCH_NAME = 'default'
59 DEFAULT_BRANCH_NAME = 'default'
60
60
61 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 def __init__(self, repo_path, config=None, create=False, src_url=None,
62 do_workspace_checkout=False, with_wire=None, bare=False):
62 do_workspace_checkout=False, with_wire=None, bare=False):
63 """
63 """
64 Raises RepositoryError if repository could not be find at the given
64 Raises RepositoryError if repository could not be find at the given
65 ``repo_path``.
65 ``repo_path``.
66
66
67 :param repo_path: local path of the repository
67 :param repo_path: local path of the repository
68 :param config: config object containing the repo configuration
68 :param config: config object containing the repo configuration
69 :param create=False: if set to True, would try to create repository if
69 :param create=False: if set to True, would try to create repository if
70 it does not exist rather than raising exception
70 it does not exist rather than raising exception
71 :param src_url=None: would try to clone repository from given location
71 :param src_url=None: would try to clone repository from given location
72 :param do_workspace_checkout=False: sets update of working copy after
72 :param do_workspace_checkout=False: sets update of working copy after
73 making a clone
73 making a clone
74 :param bare: not used, compatible with other VCS
74 :param bare: not used, compatible with other VCS
75 """
75 """
76
76
77 self.path = safe_str(os.path.abspath(repo_path))
77 self.path = safe_str(os.path.abspath(repo_path))
78 # mercurial since 4.4.X requires certain configuration to be present
78 # mercurial since 4.4.X requires certain configuration to be present
79 # because sometimes we init the repos with config we need to meet
79 # because sometimes we init the repos with config we need to meet
80 # special requirements
80 # special requirements
81 self.config = config if config else self.get_default_config(
81 self.config = config if config else self.get_default_config(
82 default=[('extensions', 'largefiles', '1')])
82 default=[('extensions', 'largefiles', '1')])
83 self.with_wire = with_wire
83 self.with_wire = with_wire
84
84
85 self._init_repo(create, src_url, do_workspace_checkout)
85 self._init_repo(create, src_url, do_workspace_checkout)
86
86
87 # caches
87 # caches
88 self._commit_ids = {}
88 self._commit_ids = {}
89
89
90 # dependent that trigger re-computation of commit_ids
90 # dependent that trigger re-computation of commit_ids
91 self._commit_ids_ver = 0
91 self._commit_ids_ver = 0
92
92
93 @LazyProperty
93 @LazyProperty
94 def _remote(self):
94 def _remote(self):
95 return connection.Hg(self.path, self.config, with_wire=self.with_wire)
95 return connection.Hg(self.path, self.config, with_wire=self.with_wire)
96
96
97 @CachedProperty('_commit_ids_ver')
97 @CachedProperty('_commit_ids_ver')
98 def commit_ids(self):
98 def commit_ids(self):
99 """
99 """
100 Returns list of commit ids, in ascending order. Being lazy
100 Returns list of commit ids, in ascending order. Being lazy
101 attribute allows external tools to inject shas from cache.
101 attribute allows external tools to inject shas from cache.
102 """
102 """
103 commit_ids = self._get_all_commit_ids()
103 commit_ids = self._get_all_commit_ids()
104 self._rebuild_cache(commit_ids)
104 self._rebuild_cache(commit_ids)
105 return commit_ids
105 return commit_ids
106
106
107 def _rebuild_cache(self, commit_ids):
107 def _rebuild_cache(self, commit_ids):
108 self._commit_ids = dict((commit_id, index)
108 self._commit_ids = dict((commit_id, index)
109 for index, commit_id in enumerate(commit_ids))
109 for index, commit_id in enumerate(commit_ids))
110
110
111 @LazyProperty
111 @LazyProperty
112 def branches(self):
112 def branches(self):
113 return self._get_branches()
113 return self._get_branches()
114
114
115 @LazyProperty
115 @LazyProperty
116 def branches_closed(self):
116 def branches_closed(self):
117 return self._get_branches(active=False, closed=True)
117 return self._get_branches(active=False, closed=True)
118
118
119 @LazyProperty
119 @LazyProperty
120 def branches_all(self):
120 def branches_all(self):
121 all_branches = {}
121 all_branches = {}
122 all_branches.update(self.branches)
122 all_branches.update(self.branches)
123 all_branches.update(self.branches_closed)
123 all_branches.update(self.branches_closed)
124 return all_branches
124 return all_branches
125
125
126 def _get_branches(self, active=True, closed=False):
126 def _get_branches(self, active=True, closed=False):
127 """
127 """
128 Gets branches for this repository
128 Gets branches for this repository
129 Returns only not closed active branches by default
129 Returns only not closed active branches by default
130
130
131 :param active: return also active branches
131 :param active: return also active branches
132 :param closed: return also closed branches
132 :param closed: return also closed branches
133
133
134 """
134 """
135 if self.is_empty():
135 if self.is_empty():
136 return {}
136 return {}
137
137
138 def get_name(ctx):
138 def get_name(ctx):
139 return ctx[0]
139 return ctx[0]
140
140
141 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
141 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
142 self._remote.branches(active, closed).items()]
142 self._remote.branches(active, closed).items()]
143
143
144 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
144 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
145
145
146 @LazyProperty
146 @LazyProperty
147 def tags(self):
147 def tags(self):
148 """
148 """
149 Gets tags for this repository
149 Gets tags for this repository
150 """
150 """
151 return self._get_tags()
151 return self._get_tags()
152
152
153 def _get_tags(self):
153 def _get_tags(self):
154 if self.is_empty():
154 if self.is_empty():
155 return {}
155 return {}
156
156
157 def get_name(ctx):
157 def get_name(ctx):
158 return ctx[0]
158 return ctx[0]
159
159
160 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
160 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
161 self._remote.tags().items()]
161 self._remote.tags().items()]
162
162
163 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
163 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
164
164
165 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
165 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
166 """
166 """
167 Creates and returns a tag for the given ``commit_id``.
167 Creates and returns a tag for the given ``commit_id``.
168
168
169 :param name: name for new tag
169 :param name: name for new tag
170 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
170 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
171 :param commit_id: commit id for which new tag would be created
171 :param commit_id: commit id for which new tag would be created
172 :param message: message of the tag's commit
172 :param message: message of the tag's commit
173 :param date: date of tag's commit
173 :param date: date of tag's commit
174
174
175 :raises TagAlreadyExistError: if tag with same name already exists
175 :raises TagAlreadyExistError: if tag with same name already exists
176 """
176 """
177 if name in self.tags:
177 if name in self.tags:
178 raise TagAlreadyExistError("Tag %s already exists" % name)
178 raise TagAlreadyExistError("Tag %s already exists" % name)
179
179
180 commit = self.get_commit(commit_id=commit_id)
180 commit = self.get_commit(commit_id=commit_id)
181 local = kwargs.setdefault('local', False)
181 local = kwargs.setdefault('local', False)
182
182
183 if message is None:
183 if message is None:
184 message = "Added tag %s for commit %s" % (name, commit.short_id)
184 message = "Added tag %s for commit %s" % (name, commit.short_id)
185
185
186 date, tz = date_to_timestamp_plus_offset(date)
186 date, tz = date_to_timestamp_plus_offset(date)
187
187
188 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
188 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
189 self._remote.invalidate_vcs_cache()
189 self._remote.invalidate_vcs_cache()
190
190
191 # Reinitialize tags
191 # Reinitialize tags
192 self.tags = self._get_tags()
192 self.tags = self._get_tags()
193 tag_id = self.tags[name]
193 tag_id = self.tags[name]
194
194
195 return self.get_commit(commit_id=tag_id)
195 return self.get_commit(commit_id=tag_id)
196
196
197 def remove_tag(self, name, user, message=None, date=None):
197 def remove_tag(self, name, user, message=None, date=None):
198 """
198 """
199 Removes tag with the given `name`.
199 Removes tag with the given `name`.
200
200
201 :param name: name of the tag to be removed
201 :param name: name of the tag to be removed
202 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
202 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
203 :param message: message of the tag's removal commit
203 :param message: message of the tag's removal commit
204 :param date: date of tag's removal commit
204 :param date: date of tag's removal commit
205
205
206 :raises TagDoesNotExistError: if tag with given name does not exists
206 :raises TagDoesNotExistError: if tag with given name does not exists
207 """
207 """
208 if name not in self.tags:
208 if name not in self.tags:
209 raise TagDoesNotExistError("Tag %s does not exist" % name)
209 raise TagDoesNotExistError("Tag %s does not exist" % name)
210
210
211 if message is None:
211 if message is None:
212 message = "Removed tag %s" % name
212 message = "Removed tag %s" % name
213 local = False
213 local = False
214
214
215 date, tz = date_to_timestamp_plus_offset(date)
215 date, tz = date_to_timestamp_plus_offset(date)
216
216
217 self._remote.tag(name, nullid, message, local, user, date, tz)
217 self._remote.tag(name, nullid, message, local, user, date, tz)
218 self._remote.invalidate_vcs_cache()
218 self._remote.invalidate_vcs_cache()
219 self.tags = self._get_tags()
219 self.tags = self._get_tags()
220
220
221 @LazyProperty
221 @LazyProperty
222 def bookmarks(self):
222 def bookmarks(self):
223 """
223 """
224 Gets bookmarks for this repository
224 Gets bookmarks for this repository
225 """
225 """
226 return self._get_bookmarks()
226 return self._get_bookmarks()
227
227
228 def _get_bookmarks(self):
228 def _get_bookmarks(self):
229 if self.is_empty():
229 if self.is_empty():
230 return {}
230 return {}
231
231
232 def get_name(ctx):
232 def get_name(ctx):
233 return ctx[0]
233 return ctx[0]
234
234
235 _bookmarks = [
235 _bookmarks = [
236 (safe_unicode(n), hexlify(h)) for n, h in
236 (safe_unicode(n), hexlify(h)) for n, h in
237 self._remote.bookmarks().items()]
237 self._remote.bookmarks().items()]
238
238
239 return OrderedDict(sorted(_bookmarks, key=get_name))
239 return OrderedDict(sorted(_bookmarks, key=get_name))
240
240
241 def _get_all_commit_ids(self):
241 def _get_all_commit_ids(self):
242 return self._remote.get_all_commit_ids('visible')
242 return self._remote.get_all_commit_ids('visible')
243
243
244 def get_diff(
244 def get_diff(
245 self, commit1, commit2, path='', ignore_whitespace=False,
245 self, commit1, commit2, path='', ignore_whitespace=False,
246 context=3, path1=None):
246 context=3, path1=None):
247 """
247 """
248 Returns (git like) *diff*, as plain text. Shows changes introduced by
248 Returns (git like) *diff*, as plain text. Shows changes introduced by
249 `commit2` since `commit1`.
249 `commit2` since `commit1`.
250
250
251 :param commit1: Entry point from which diff is shown. Can be
251 :param commit1: Entry point from which diff is shown. Can be
252 ``self.EMPTY_COMMIT`` - in this case, patch showing all
252 ``self.EMPTY_COMMIT`` - in this case, patch showing all
253 the changes since empty state of the repository until `commit2`
253 the changes since empty state of the repository until `commit2`
254 :param commit2: Until which commit changes should be shown.
254 :param commit2: Until which commit changes should be shown.
255 :param ignore_whitespace: If set to ``True``, would not show whitespace
255 :param ignore_whitespace: If set to ``True``, would not show whitespace
256 changes. Defaults to ``False``.
256 changes. Defaults to ``False``.
257 :param context: How many lines before/after changed lines should be
257 :param context: How many lines before/after changed lines should be
258 shown. Defaults to ``3``.
258 shown. Defaults to ``3``.
259 """
259 """
260 self._validate_diff_commits(commit1, commit2)
260 self._validate_diff_commits(commit1, commit2)
261 if path1 is not None and path1 != path:
261 if path1 is not None and path1 != path:
262 raise ValueError("Diff of two different paths not supported.")
262 raise ValueError("Diff of two different paths not supported.")
263
263
264 if path:
264 if path:
265 file_filter = [self.path, path]
265 file_filter = [self.path, path]
266 else:
266 else:
267 file_filter = None
267 file_filter = None
268
268
269 diff = self._remote.diff(
269 diff = self._remote.diff(
270 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
270 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
271 opt_git=True, opt_ignorews=ignore_whitespace,
271 opt_git=True, opt_ignorews=ignore_whitespace,
272 context=context)
272 context=context)
273 return MercurialDiff(diff)
273 return MercurialDiff(diff)
274
274
275 def strip(self, commit_id, branch=None):
275 def strip(self, commit_id, branch=None):
276 self._remote.strip(commit_id, update=False, backup="none")
276 self._remote.strip(commit_id, update=False, backup="none")
277
277
278 self._remote.invalidate_vcs_cache()
278 self._remote.invalidate_vcs_cache()
279 self._commit_ids_ver = time.time()
279 self._commit_ids_ver = time.time()
280 # we updated _commit_ids_ver so accessing self.commit_ids will re-compute it
280 # we updated _commit_ids_ver so accessing self.commit_ids will re-compute it
281 return len(self.commit_ids)
281 return len(self.commit_ids)
282
282
283 def verify(self):
283 def verify(self):
284 verify = self._remote.verify()
284 verify = self._remote.verify()
285
285
286 self._remote.invalidate_vcs_cache()
286 self._remote.invalidate_vcs_cache()
287 return verify
287 return verify
288
288
289 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
289 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
290 if commit_id1 == commit_id2:
290 if commit_id1 == commit_id2:
291 return commit_id1
291 return commit_id1
292
292
293 ancestors = self._remote.revs_from_revspec(
293 ancestors = self._remote.revs_from_revspec(
294 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
294 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
295 other_path=repo2.path)
295 other_path=repo2.path)
296 return repo2[ancestors[0]].raw_id if ancestors else None
296 return repo2[ancestors[0]].raw_id if ancestors else None
297
297
298 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
298 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
299 if commit_id1 == commit_id2:
299 if commit_id1 == commit_id2:
300 commits = []
300 commits = []
301 else:
301 else:
302 if merge:
302 if merge:
303 indexes = self._remote.revs_from_revspec(
303 indexes = self._remote.revs_from_revspec(
304 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
304 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
305 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
305 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
306 else:
306 else:
307 indexes = self._remote.revs_from_revspec(
307 indexes = self._remote.revs_from_revspec(
308 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
308 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
309 commit_id1, other_path=repo2.path)
309 commit_id1, other_path=repo2.path)
310
310
311 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
311 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
312 for idx in indexes]
312 for idx in indexes]
313
313
314 return commits
314 return commits
315
315
316 @staticmethod
316 @staticmethod
317 def check_url(url, config):
317 def check_url(url, config):
318 """
318 """
319 Function will check given url and try to verify if it's a valid
319 Function will check given url and try to verify if it's a valid
320 link. Sometimes it may happened that mercurial will issue basic
320 link. Sometimes it may happened that mercurial will issue basic
321 auth request that can cause whole API to hang when used from python
321 auth request that can cause whole API to hang when used from python
322 or other external calls.
322 or other external calls.
323
323
324 On failures it'll raise urllib2.HTTPError, exception is also thrown
324 On failures it'll raise urllib2.HTTPError, exception is also thrown
325 when the return code is non 200
325 when the return code is non 200
326 """
326 """
327 # check first if it's not an local url
327 # check first if it's not an local url
328 if os.path.isdir(url) or url.startswith('file:'):
328 if os.path.isdir(url) or url.startswith('file:'):
329 return True
329 return True
330
330
331 # Request the _remote to verify the url
331 # Request the _remote to verify the url
332 return connection.Hg.check_url(url, config.serialize())
332 return connection.Hg.check_url(url, config.serialize())
333
333
334 @staticmethod
334 @staticmethod
335 def is_valid_repository(path):
335 def is_valid_repository(path):
336 return os.path.isdir(os.path.join(path, '.hg'))
336 return os.path.isdir(os.path.join(path, '.hg'))
337
337
338 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
338 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
339 """
339 """
340 Function will check for mercurial repository in given path. If there
340 Function will check for mercurial repository in given path. If there
341 is no repository in that path it will raise an exception unless
341 is no repository in that path it will raise an exception unless
342 `create` parameter is set to True - in that case repository would
342 `create` parameter is set to True - in that case repository would
343 be created.
343 be created.
344
344
345 If `src_url` is given, would try to clone repository from the
345 If `src_url` is given, would try to clone repository from the
346 location at given clone_point. Additionally it'll make update to
346 location at given clone_point. Additionally it'll make update to
347 working copy accordingly to `do_workspace_checkout` flag.
347 working copy accordingly to `do_workspace_checkout` flag.
348 """
348 """
349 if create and os.path.exists(self.path):
349 if create and os.path.exists(self.path):
350 raise RepositoryError(
350 raise RepositoryError(
351 "Cannot create repository at %s, location already exist"
351 "Cannot create repository at %s, location already exist"
352 % self.path)
352 % self.path)
353
353
354 if src_url:
354 if src_url:
355 url = str(self._get_url(src_url))
355 url = str(self._get_url(src_url))
356 MercurialRepository.check_url(url, self.config)
356 MercurialRepository.check_url(url, self.config)
357
357
358 self._remote.clone(url, self.path, do_workspace_checkout)
358 self._remote.clone(url, self.path, do_workspace_checkout)
359
359
360 # Don't try to create if we've already cloned repo
360 # Don't try to create if we've already cloned repo
361 create = False
361 create = False
362
362
363 if create:
363 if create:
364 os.makedirs(self.path, mode=0o755)
364 os.makedirs(self.path, mode=0o755)
365
365
366 self._remote.localrepository(create)
366 self._remote.localrepository(create)
367
367
368 @LazyProperty
368 @LazyProperty
369 def in_memory_commit(self):
369 def in_memory_commit(self):
370 return MercurialInMemoryCommit(self)
370 return MercurialInMemoryCommit(self)
371
371
372 @LazyProperty
372 @LazyProperty
373 def description(self):
373 def description(self):
374 description = self._remote.get_config_value(
374 description = self._remote.get_config_value(
375 'web', 'description', untrusted=True)
375 'web', 'description', untrusted=True)
376 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
376 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
377
377
378 @LazyProperty
378 @LazyProperty
379 def contact(self):
379 def contact(self):
380 contact = (
380 contact = (
381 self._remote.get_config_value("web", "contact") or
381 self._remote.get_config_value("web", "contact") or
382 self._remote.get_config_value("ui", "username"))
382 self._remote.get_config_value("ui", "username"))
383 return safe_unicode(contact or self.DEFAULT_CONTACT)
383 return safe_unicode(contact or self.DEFAULT_CONTACT)
384
384
385 @LazyProperty
385 @LazyProperty
386 def last_change(self):
386 def last_change(self):
387 """
387 """
388 Returns last change made on this repository as
388 Returns last change made on this repository as
389 `datetime.datetime` object.
389 `datetime.datetime` object.
390 """
390 """
391 try:
391 try:
392 return self.get_commit().date
392 return self.get_commit().date
393 except RepositoryError:
393 except RepositoryError:
394 tzoffset = makedate()[1]
394 tzoffset = makedate()[1]
395 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
395 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
396
396
397 def _get_fs_mtime(self):
397 def _get_fs_mtime(self):
398 # fallback to filesystem
398 # fallback to filesystem
399 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
399 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
400 st_path = os.path.join(self.path, '.hg', "store")
400 st_path = os.path.join(self.path, '.hg', "store")
401 if os.path.exists(cl_path):
401 if os.path.exists(cl_path):
402 return os.stat(cl_path).st_mtime
402 return os.stat(cl_path).st_mtime
403 else:
403 else:
404 return os.stat(st_path).st_mtime
404 return os.stat(st_path).st_mtime
405
405
406 def _get_url(self, url):
406 def _get_url(self, url):
407 """
407 """
408 Returns normalized url. If schema is not given, would fall
408 Returns normalized url. If schema is not given, would fall
409 to filesystem
409 to filesystem
410 (``file:///``) schema.
410 (``file:///``) schema.
411 """
411 """
412 url = url.encode('utf8')
412 url = url.encode('utf8')
413 if url != 'default' and '://' not in url:
413 if url != 'default' and '://' not in url:
414 url = "file:" + urllib.pathname2url(url)
414 url = "file:" + urllib.pathname2url(url)
415 return url
415 return url
416
416
417 def get_hook_location(self):
417 def get_hook_location(self):
418 """
418 """
419 returns absolute path to location where hooks are stored
419 returns absolute path to location where hooks are stored
420 """
420 """
421 return os.path.join(self.path, '.hg', '.hgrc')
421 return os.path.join(self.path, '.hg', '.hgrc')
422
422
423 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
423 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
424 """
424 """
425 Returns ``MercurialCommit`` object representing repository's
425 Returns ``MercurialCommit`` object representing repository's
426 commit at the given `commit_id` or `commit_idx`.
426 commit at the given `commit_id` or `commit_idx`.
427 """
427 """
428 if self.is_empty():
428 if self.is_empty():
429 raise EmptyRepositoryError("There are no commits yet")
429 raise EmptyRepositoryError("There are no commits yet")
430
430
431 if commit_id is not None:
431 if commit_id is not None:
432 self._validate_commit_id(commit_id)
432 self._validate_commit_id(commit_id)
433 try:
433 try:
434 # we have cached idx, use it without contacting the remote
434 # we have cached idx, use it without contacting the remote
435 idx = self._commit_ids[commit_id]
435 idx = self._commit_ids[commit_id]
436 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
436 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
437 except KeyError:
437 except KeyError:
438 pass
438 pass
439
439
440 elif commit_idx is not None:
440 elif commit_idx is not None:
441 self._validate_commit_idx(commit_idx)
441 self._validate_commit_idx(commit_idx)
442 try:
442 try:
443 _commit_id = self.commit_ids[commit_idx]
443 _commit_id = self.commit_ids[commit_idx]
444 if commit_idx < 0:
444 if commit_idx < 0:
445 commit_idx = self.commit_ids.index(_commit_id)
445 commit_idx = self.commit_ids.index(_commit_id)
446
446
447 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
447 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
448 except IndexError:
448 except IndexError:
449 commit_id = commit_idx
449 commit_id = commit_idx
450 else:
450 else:
451 commit_id = "tip"
451 commit_id = "tip"
452
452
453 if isinstance(commit_id, unicode):
453 if isinstance(commit_id, unicode):
454 commit_id = safe_str(commit_id)
454 commit_id = safe_str(commit_id)
455
455
456 try:
456 try:
457 raw_id, idx = self._remote.lookup(commit_id, both=True)
457 raw_id, idx = self._remote.lookup(commit_id, both=True)
458 except CommitDoesNotExistError:
458 except CommitDoesNotExistError:
459 msg = "Commit {} does not exist for {}".format(
459 msg = "Commit {} does not exist for `{}`".format(
460 *map(safe_str, [commit_id, self.name]))
460 *map(safe_str, [commit_id, self.name]))
461 raise CommitDoesNotExistError(msg)
461 raise CommitDoesNotExistError(msg)
462
462
463 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
463 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
464
464
465 def get_commits(
465 def get_commits(
466 self, start_id=None, end_id=None, start_date=None, end_date=None,
466 self, start_id=None, end_id=None, start_date=None, end_date=None,
467 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
467 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
468 """
468 """
469 Returns generator of ``MercurialCommit`` objects from start to end
469 Returns generator of ``MercurialCommit`` objects from start to end
470 (both are inclusive)
470 (both are inclusive)
471
471
472 :param start_id: None, str(commit_id)
472 :param start_id: None, str(commit_id)
473 :param end_id: None, str(commit_id)
473 :param end_id: None, str(commit_id)
474 :param start_date: if specified, commits with commit date less than
474 :param start_date: if specified, commits with commit date less than
475 ``start_date`` would be filtered out from returned set
475 ``start_date`` would be filtered out from returned set
476 :param end_date: if specified, commits with commit date greater than
476 :param end_date: if specified, commits with commit date greater than
477 ``end_date`` would be filtered out from returned set
477 ``end_date`` would be filtered out from returned set
478 :param branch_name: if specified, commits not reachable from given
478 :param branch_name: if specified, commits not reachable from given
479 branch would be filtered out from returned set
479 branch would be filtered out from returned set
480 :param show_hidden: Show hidden commits such as obsolete or hidden from
480 :param show_hidden: Show hidden commits such as obsolete or hidden from
481 Mercurial evolve
481 Mercurial evolve
482 :raise BranchDoesNotExistError: If given ``branch_name`` does not
482 :raise BranchDoesNotExistError: If given ``branch_name`` does not
483 exist.
483 exist.
484 :raise CommitDoesNotExistError: If commit for given ``start`` or
484 :raise CommitDoesNotExistError: If commit for given ``start`` or
485 ``end`` could not be found.
485 ``end`` could not be found.
486 """
486 """
487 # actually we should check now if it's not an empty repo
487 # actually we should check now if it's not an empty repo
488 if self.is_empty():
488 if self.is_empty():
489 raise EmptyRepositoryError("There are no commits yet")
489 raise EmptyRepositoryError("There are no commits yet")
490 self._validate_branch_name(branch_name)
490 self._validate_branch_name(branch_name)
491
491
492 branch_ancestors = False
492 branch_ancestors = False
493 if start_id is not None:
493 if start_id is not None:
494 self._validate_commit_id(start_id)
494 self._validate_commit_id(start_id)
495 c_start = self.get_commit(commit_id=start_id)
495 c_start = self.get_commit(commit_id=start_id)
496 start_pos = self._commit_ids[c_start.raw_id]
496 start_pos = self._commit_ids[c_start.raw_id]
497 else:
497 else:
498 start_pos = None
498 start_pos = None
499
499
500 if end_id is not None:
500 if end_id is not None:
501 self._validate_commit_id(end_id)
501 self._validate_commit_id(end_id)
502 c_end = self.get_commit(commit_id=end_id)
502 c_end = self.get_commit(commit_id=end_id)
503 end_pos = max(0, self._commit_ids[c_end.raw_id])
503 end_pos = max(0, self._commit_ids[c_end.raw_id])
504 else:
504 else:
505 end_pos = None
505 end_pos = None
506
506
507 if None not in [start_id, end_id] and start_pos > end_pos:
507 if None not in [start_id, end_id] and start_pos > end_pos:
508 raise RepositoryError(
508 raise RepositoryError(
509 "Start commit '%s' cannot be after end commit '%s'" %
509 "Start commit '%s' cannot be after end commit '%s'" %
510 (start_id, end_id))
510 (start_id, end_id))
511
511
512 if end_pos is not None:
512 if end_pos is not None:
513 end_pos += 1
513 end_pos += 1
514
514
515 commit_filter = []
515 commit_filter = []
516
516
517 if branch_name and not branch_ancestors:
517 if branch_name and not branch_ancestors:
518 commit_filter.append('branch("%s")' % (branch_name,))
518 commit_filter.append('branch("%s")' % (branch_name,))
519 elif branch_name and branch_ancestors:
519 elif branch_name and branch_ancestors:
520 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
520 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
521
521
522 if start_date and not end_date:
522 if start_date and not end_date:
523 commit_filter.append('date(">%s")' % (start_date,))
523 commit_filter.append('date(">%s")' % (start_date,))
524 if end_date and not start_date:
524 if end_date and not start_date:
525 commit_filter.append('date("<%s")' % (end_date,))
525 commit_filter.append('date("<%s")' % (end_date,))
526 if start_date and end_date:
526 if start_date and end_date:
527 commit_filter.append(
527 commit_filter.append(
528 'date(">%s") and date("<%s")' % (start_date, end_date))
528 'date(">%s") and date("<%s")' % (start_date, end_date))
529
529
530 if not show_hidden:
530 if not show_hidden:
531 commit_filter.append('not obsolete()')
531 commit_filter.append('not obsolete()')
532 commit_filter.append('not hidden()')
532 commit_filter.append('not hidden()')
533
533
534 # TODO: johbo: Figure out a simpler way for this solution
534 # TODO: johbo: Figure out a simpler way for this solution
535 collection_generator = CollectionGenerator
535 collection_generator = CollectionGenerator
536 if commit_filter:
536 if commit_filter:
537 commit_filter = ' and '.join(map(safe_str, commit_filter))
537 commit_filter = ' and '.join(map(safe_str, commit_filter))
538 revisions = self._remote.rev_range([commit_filter])
538 revisions = self._remote.rev_range([commit_filter])
539 collection_generator = MercurialIndexBasedCollectionGenerator
539 collection_generator = MercurialIndexBasedCollectionGenerator
540 else:
540 else:
541 revisions = self.commit_ids
541 revisions = self.commit_ids
542
542
543 if start_pos or end_pos:
543 if start_pos or end_pos:
544 revisions = revisions[start_pos:end_pos]
544 revisions = revisions[start_pos:end_pos]
545
545
546 return collection_generator(self, revisions, pre_load=pre_load)
546 return collection_generator(self, revisions, pre_load=pre_load)
547
547
548 def pull(self, url, commit_ids=None):
548 def pull(self, url, commit_ids=None):
549 """
549 """
550 Pull changes from external location.
550 Pull changes from external location.
551
551
552 :param commit_ids: Optional. Can be set to a list of commit ids
552 :param commit_ids: Optional. Can be set to a list of commit ids
553 which shall be pulled from the other repository.
553 which shall be pulled from the other repository.
554 """
554 """
555 url = self._get_url(url)
555 url = self._get_url(url)
556 self._remote.pull(url, commit_ids=commit_ids)
556 self._remote.pull(url, commit_ids=commit_ids)
557 self._remote.invalidate_vcs_cache()
557 self._remote.invalidate_vcs_cache()
558
558
559 def fetch(self, url, commit_ids=None):
559 def fetch(self, url, commit_ids=None):
560 """
560 """
561 Backward compatibility with GIT fetch==pull
561 Backward compatibility with GIT fetch==pull
562 """
562 """
563 return self.pull(url, commit_ids=commit_ids)
563 return self.pull(url, commit_ids=commit_ids)
564
564
565 def push(self, url):
565 def push(self, url):
566 url = self._get_url(url)
566 url = self._get_url(url)
567 self._remote.sync_push(url)
567 self._remote.sync_push(url)
568
568
569 def _local_clone(self, clone_path):
569 def _local_clone(self, clone_path):
570 """
570 """
571 Create a local clone of the current repo.
571 Create a local clone of the current repo.
572 """
572 """
573 self._remote.clone(self.path, clone_path, update_after_clone=True,
573 self._remote.clone(self.path, clone_path, update_after_clone=True,
574 hooks=False)
574 hooks=False)
575
575
576 def _update(self, revision, clean=False):
576 def _update(self, revision, clean=False):
577 """
577 """
578 Update the working copy to the specified revision.
578 Update the working copy to the specified revision.
579 """
579 """
580 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
580 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
581 self._remote.update(revision, clean=clean)
581 self._remote.update(revision, clean=clean)
582
582
583 def _identify(self):
583 def _identify(self):
584 """
584 """
585 Return the current state of the working directory.
585 Return the current state of the working directory.
586 """
586 """
587 return self._remote.identify().strip().rstrip('+')
587 return self._remote.identify().strip().rstrip('+')
588
588
589 def _heads(self, branch=None):
589 def _heads(self, branch=None):
590 """
590 """
591 Return the commit ids of the repository heads.
591 Return the commit ids of the repository heads.
592 """
592 """
593 return self._remote.heads(branch=branch).strip().split(' ')
593 return self._remote.heads(branch=branch).strip().split(' ')
594
594
595 def _ancestor(self, revision1, revision2):
595 def _ancestor(self, revision1, revision2):
596 """
596 """
597 Return the common ancestor of the two revisions.
597 Return the common ancestor of the two revisions.
598 """
598 """
599 return self._remote.ancestor(revision1, revision2)
599 return self._remote.ancestor(revision1, revision2)
600
600
601 def _local_push(
601 def _local_push(
602 self, revision, repository_path, push_branches=False,
602 self, revision, repository_path, push_branches=False,
603 enable_hooks=False):
603 enable_hooks=False):
604 """
604 """
605 Push the given revision to the specified repository.
605 Push the given revision to the specified repository.
606
606
607 :param push_branches: allow to create branches in the target repo.
607 :param push_branches: allow to create branches in the target repo.
608 """
608 """
609 self._remote.push(
609 self._remote.push(
610 [revision], repository_path, hooks=enable_hooks,
610 [revision], repository_path, hooks=enable_hooks,
611 push_branches=push_branches)
611 push_branches=push_branches)
612
612
613 def _local_merge(self, target_ref, merge_message, user_name, user_email,
613 def _local_merge(self, target_ref, merge_message, user_name, user_email,
614 source_ref, use_rebase=False, dry_run=False):
614 source_ref, use_rebase=False, dry_run=False):
615 """
615 """
616 Merge the given source_revision into the checked out revision.
616 Merge the given source_revision into the checked out revision.
617
617
618 Returns the commit id of the merge and a boolean indicating if the
618 Returns the commit id of the merge and a boolean indicating if the
619 commit needs to be pushed.
619 commit needs to be pushed.
620 """
620 """
621 self._update(target_ref.commit_id, clean=True)
621 self._update(target_ref.commit_id, clean=True)
622
622
623 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
623 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
624 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
624 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
625
625
626 if ancestor == source_ref.commit_id:
626 if ancestor == source_ref.commit_id:
627 # Nothing to do, the changes were already integrated
627 # Nothing to do, the changes were already integrated
628 return target_ref.commit_id, False
628 return target_ref.commit_id, False
629
629
630 elif ancestor == target_ref.commit_id and is_the_same_branch:
630 elif ancestor == target_ref.commit_id and is_the_same_branch:
631 # In this case we should force a commit message
631 # In this case we should force a commit message
632 return source_ref.commit_id, True
632 return source_ref.commit_id, True
633
633
634 if use_rebase:
634 if use_rebase:
635 try:
635 try:
636 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
636 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
637 target_ref.commit_id)
637 target_ref.commit_id)
638 self.bookmark(bookmark_name, revision=source_ref.commit_id)
638 self.bookmark(bookmark_name, revision=source_ref.commit_id)
639 self._remote.rebase(
639 self._remote.rebase(
640 source=source_ref.commit_id, dest=target_ref.commit_id)
640 source=source_ref.commit_id, dest=target_ref.commit_id)
641 self._remote.invalidate_vcs_cache()
641 self._remote.invalidate_vcs_cache()
642 self._update(bookmark_name, clean=True)
642 self._update(bookmark_name, clean=True)
643 return self._identify(), True
643 return self._identify(), True
644 except RepositoryError:
644 except RepositoryError:
645 # The rebase-abort may raise another exception which 'hides'
645 # The rebase-abort may raise another exception which 'hides'
646 # the original one, therefore we log it here.
646 # the original one, therefore we log it here.
647 log.exception('Error while rebasing shadow repo during merge.')
647 log.exception('Error while rebasing shadow repo during merge.')
648
648
649 # Cleanup any rebase leftovers
649 # Cleanup any rebase leftovers
650 self._remote.invalidate_vcs_cache()
650 self._remote.invalidate_vcs_cache()
651 self._remote.rebase(abort=True)
651 self._remote.rebase(abort=True)
652 self._remote.invalidate_vcs_cache()
652 self._remote.invalidate_vcs_cache()
653 self._remote.update(clean=True)
653 self._remote.update(clean=True)
654 raise
654 raise
655 else:
655 else:
656 try:
656 try:
657 self._remote.merge(source_ref.commit_id)
657 self._remote.merge(source_ref.commit_id)
658 self._remote.invalidate_vcs_cache()
658 self._remote.invalidate_vcs_cache()
659 self._remote.commit(
659 self._remote.commit(
660 message=safe_str(merge_message),
660 message=safe_str(merge_message),
661 username=safe_str('%s <%s>' % (user_name, user_email)))
661 username=safe_str('%s <%s>' % (user_name, user_email)))
662 self._remote.invalidate_vcs_cache()
662 self._remote.invalidate_vcs_cache()
663 return self._identify(), True
663 return self._identify(), True
664 except RepositoryError:
664 except RepositoryError:
665 # Cleanup any merge leftovers
665 # Cleanup any merge leftovers
666 self._remote.update(clean=True)
666 self._remote.update(clean=True)
667 raise
667 raise
668
668
669 def _local_close(self, target_ref, user_name, user_email,
669 def _local_close(self, target_ref, user_name, user_email,
670 source_ref, close_message=''):
670 source_ref, close_message=''):
671 """
671 """
672 Close the branch of the given source_revision
672 Close the branch of the given source_revision
673
673
674 Returns the commit id of the close and a boolean indicating if the
674 Returns the commit id of the close and a boolean indicating if the
675 commit needs to be pushed.
675 commit needs to be pushed.
676 """
676 """
677 self._update(source_ref.commit_id)
677 self._update(source_ref.commit_id)
678 message = close_message or "Closing branch: `{}`".format(source_ref.name)
678 message = close_message or "Closing branch: `{}`".format(source_ref.name)
679 try:
679 try:
680 self._remote.commit(
680 self._remote.commit(
681 message=safe_str(message),
681 message=safe_str(message),
682 username=safe_str('%s <%s>' % (user_name, user_email)),
682 username=safe_str('%s <%s>' % (user_name, user_email)),
683 close_branch=True)
683 close_branch=True)
684 self._remote.invalidate_vcs_cache()
684 self._remote.invalidate_vcs_cache()
685 return self._identify(), True
685 return self._identify(), True
686 except RepositoryError:
686 except RepositoryError:
687 # Cleanup any commit leftovers
687 # Cleanup any commit leftovers
688 self._remote.update(clean=True)
688 self._remote.update(clean=True)
689 raise
689 raise
690
690
691 def _is_the_same_branch(self, target_ref, source_ref):
691 def _is_the_same_branch(self, target_ref, source_ref):
692 return (
692 return (
693 self._get_branch_name(target_ref) ==
693 self._get_branch_name(target_ref) ==
694 self._get_branch_name(source_ref))
694 self._get_branch_name(source_ref))
695
695
696 def _get_branch_name(self, ref):
696 def _get_branch_name(self, ref):
697 if ref.type == 'branch':
697 if ref.type == 'branch':
698 return ref.name
698 return ref.name
699 return self._remote.ctx_branch(ref.commit_id)
699 return self._remote.ctx_branch(ref.commit_id)
700
700
701 def _maybe_prepare_merge_workspace(
701 def _maybe_prepare_merge_workspace(
702 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
702 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
703 shadow_repository_path = self._get_shadow_repository_path(
703 shadow_repository_path = self._get_shadow_repository_path(
704 repo_id, workspace_id)
704 repo_id, workspace_id)
705 if not os.path.exists(shadow_repository_path):
705 if not os.path.exists(shadow_repository_path):
706 self._local_clone(shadow_repository_path)
706 self._local_clone(shadow_repository_path)
707 log.debug(
707 log.debug(
708 'Prepared shadow repository in %s', shadow_repository_path)
708 'Prepared shadow repository in %s', shadow_repository_path)
709
709
710 return shadow_repository_path
710 return shadow_repository_path
711
711
712 def _merge_repo(self, repo_id, workspace_id, target_ref,
712 def _merge_repo(self, repo_id, workspace_id, target_ref,
713 source_repo, source_ref, merge_message,
713 source_repo, source_ref, merge_message,
714 merger_name, merger_email, dry_run=False,
714 merger_name, merger_email, dry_run=False,
715 use_rebase=False, close_branch=False):
715 use_rebase=False, close_branch=False):
716
716
717 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
717 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
718 'rebase' if use_rebase else 'merge', dry_run)
718 'rebase' if use_rebase else 'merge', dry_run)
719 if target_ref.commit_id not in self._heads():
719 if target_ref.commit_id not in self._heads():
720 return MergeResponse(
720 return MergeResponse(
721 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
721 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
722 metadata={'target_ref': target_ref})
722 metadata={'target_ref': target_ref})
723
723
724 try:
724 try:
725 if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
725 if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
726 heads = '\n,'.join(self._heads(target_ref.name))
726 heads = '\n,'.join(self._heads(target_ref.name))
727 metadata = {
727 metadata = {
728 'target_ref': target_ref,
728 'target_ref': target_ref,
729 'source_ref': source_ref,
729 'source_ref': source_ref,
730 'heads': heads
730 'heads': heads
731 }
731 }
732 return MergeResponse(
732 return MergeResponse(
733 False, False, None,
733 False, False, None,
734 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
734 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
735 metadata=metadata)
735 metadata=metadata)
736 except CommitDoesNotExistError:
736 except CommitDoesNotExistError:
737 log.exception('Failure when looking up branch heads on hg target')
737 log.exception('Failure when looking up branch heads on hg target')
738 return MergeResponse(
738 return MergeResponse(
739 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
739 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
740 metadata={'target_ref': target_ref})
740 metadata={'target_ref': target_ref})
741
741
742 shadow_repository_path = self._maybe_prepare_merge_workspace(
742 shadow_repository_path = self._maybe_prepare_merge_workspace(
743 repo_id, workspace_id, target_ref, source_ref)
743 repo_id, workspace_id, target_ref, source_ref)
744 shadow_repo = self._get_shadow_instance(shadow_repository_path)
744 shadow_repo = self._get_shadow_instance(shadow_repository_path)
745
745
746 log.debug('Pulling in target reference %s', target_ref)
746 log.debug('Pulling in target reference %s', target_ref)
747 self._validate_pull_reference(target_ref)
747 self._validate_pull_reference(target_ref)
748 shadow_repo._local_pull(self.path, target_ref)
748 shadow_repo._local_pull(self.path, target_ref)
749
749
750 try:
750 try:
751 log.debug('Pulling in source reference %s', source_ref)
751 log.debug('Pulling in source reference %s', source_ref)
752 source_repo._validate_pull_reference(source_ref)
752 source_repo._validate_pull_reference(source_ref)
753 shadow_repo._local_pull(source_repo.path, source_ref)
753 shadow_repo._local_pull(source_repo.path, source_ref)
754 except CommitDoesNotExistError:
754 except CommitDoesNotExistError:
755 log.exception('Failure when doing local pull on hg shadow repo')
755 log.exception('Failure when doing local pull on hg shadow repo')
756 return MergeResponse(
756 return MergeResponse(
757 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
757 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
758 metadata={'source_ref': source_ref})
758 metadata={'source_ref': source_ref})
759
759
760 merge_ref = None
760 merge_ref = None
761 merge_commit_id = None
761 merge_commit_id = None
762 close_commit_id = None
762 close_commit_id = None
763 merge_failure_reason = MergeFailureReason.NONE
763 merge_failure_reason = MergeFailureReason.NONE
764 metadata = {}
764 metadata = {}
765
765
766 # enforce that close branch should be used only in case we source from
766 # enforce that close branch should be used only in case we source from
767 # an actual Branch
767 # an actual Branch
768 close_branch = close_branch and source_ref.type == 'branch'
768 close_branch = close_branch and source_ref.type == 'branch'
769
769
770 # don't allow to close branch if source and target are the same
770 # don't allow to close branch if source and target are the same
771 close_branch = close_branch and source_ref.name != target_ref.name
771 close_branch = close_branch and source_ref.name != target_ref.name
772
772
773 needs_push_on_close = False
773 needs_push_on_close = False
774 if close_branch and not use_rebase and not dry_run:
774 if close_branch and not use_rebase and not dry_run:
775 try:
775 try:
776 close_commit_id, needs_push_on_close = shadow_repo._local_close(
776 close_commit_id, needs_push_on_close = shadow_repo._local_close(
777 target_ref, merger_name, merger_email, source_ref)
777 target_ref, merger_name, merger_email, source_ref)
778 merge_possible = True
778 merge_possible = True
779 except RepositoryError:
779 except RepositoryError:
780 log.exception('Failure when doing close branch on '
780 log.exception('Failure when doing close branch on '
781 'shadow repo: %s', shadow_repo)
781 'shadow repo: %s', shadow_repo)
782 merge_possible = False
782 merge_possible = False
783 merge_failure_reason = MergeFailureReason.MERGE_FAILED
783 merge_failure_reason = MergeFailureReason.MERGE_FAILED
784 else:
784 else:
785 merge_possible = True
785 merge_possible = True
786
786
787 needs_push = False
787 needs_push = False
788 if merge_possible:
788 if merge_possible:
789 try:
789 try:
790 merge_commit_id, needs_push = shadow_repo._local_merge(
790 merge_commit_id, needs_push = shadow_repo._local_merge(
791 target_ref, merge_message, merger_name, merger_email,
791 target_ref, merge_message, merger_name, merger_email,
792 source_ref, use_rebase=use_rebase, dry_run=dry_run)
792 source_ref, use_rebase=use_rebase, dry_run=dry_run)
793 merge_possible = True
793 merge_possible = True
794
794
795 # read the state of the close action, if it
795 # read the state of the close action, if it
796 # maybe required a push
796 # maybe required a push
797 needs_push = needs_push or needs_push_on_close
797 needs_push = needs_push or needs_push_on_close
798
798
799 # Set a bookmark pointing to the merge commit. This bookmark
799 # Set a bookmark pointing to the merge commit. This bookmark
800 # may be used to easily identify the last successful merge
800 # may be used to easily identify the last successful merge
801 # commit in the shadow repository.
801 # commit in the shadow repository.
802 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
802 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
803 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
803 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
804 except SubrepoMergeError:
804 except SubrepoMergeError:
805 log.exception(
805 log.exception(
806 'Subrepo merge error during local merge on hg shadow repo.')
806 'Subrepo merge error during local merge on hg shadow repo.')
807 merge_possible = False
807 merge_possible = False
808 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
808 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
809 needs_push = False
809 needs_push = False
810 except RepositoryError:
810 except RepositoryError:
811 log.exception('Failure when doing local merge on hg shadow repo')
811 log.exception('Failure when doing local merge on hg shadow repo')
812 merge_possible = False
812 merge_possible = False
813 merge_failure_reason = MergeFailureReason.MERGE_FAILED
813 merge_failure_reason = MergeFailureReason.MERGE_FAILED
814 needs_push = False
814 needs_push = False
815
815
816 if merge_possible and not dry_run:
816 if merge_possible and not dry_run:
817 if needs_push:
817 if needs_push:
818 # In case the target is a bookmark, update it, so after pushing
818 # In case the target is a bookmark, update it, so after pushing
819 # the bookmarks is also updated in the target.
819 # the bookmarks is also updated in the target.
820 if target_ref.type == 'book':
820 if target_ref.type == 'book':
821 shadow_repo.bookmark(
821 shadow_repo.bookmark(
822 target_ref.name, revision=merge_commit_id)
822 target_ref.name, revision=merge_commit_id)
823 try:
823 try:
824 shadow_repo_with_hooks = self._get_shadow_instance(
824 shadow_repo_with_hooks = self._get_shadow_instance(
825 shadow_repository_path,
825 shadow_repository_path,
826 enable_hooks=True)
826 enable_hooks=True)
827 # This is the actual merge action, we push from shadow
827 # This is the actual merge action, we push from shadow
828 # into origin.
828 # into origin.
829 # Note: the push_branches option will push any new branch
829 # Note: the push_branches option will push any new branch
830 # defined in the source repository to the target. This may
830 # defined in the source repository to the target. This may
831 # be dangerous as branches are permanent in Mercurial.
831 # be dangerous as branches are permanent in Mercurial.
832 # This feature was requested in issue #441.
832 # This feature was requested in issue #441.
833 shadow_repo_with_hooks._local_push(
833 shadow_repo_with_hooks._local_push(
834 merge_commit_id, self.path, push_branches=True,
834 merge_commit_id, self.path, push_branches=True,
835 enable_hooks=True)
835 enable_hooks=True)
836
836
837 # maybe we also need to push the close_commit_id
837 # maybe we also need to push the close_commit_id
838 if close_commit_id:
838 if close_commit_id:
839 shadow_repo_with_hooks._local_push(
839 shadow_repo_with_hooks._local_push(
840 close_commit_id, self.path, push_branches=True,
840 close_commit_id, self.path, push_branches=True,
841 enable_hooks=True)
841 enable_hooks=True)
842 merge_succeeded = True
842 merge_succeeded = True
843 except RepositoryError:
843 except RepositoryError:
844 log.exception(
844 log.exception(
845 'Failure when doing local push from the shadow '
845 'Failure when doing local push from the shadow '
846 'repository to the target repository at %s.', self.path)
846 'repository to the target repository at %s.', self.path)
847 merge_succeeded = False
847 merge_succeeded = False
848 merge_failure_reason = MergeFailureReason.PUSH_FAILED
848 merge_failure_reason = MergeFailureReason.PUSH_FAILED
849 metadata['target'] = 'hg shadow repo'
849 metadata['target'] = 'hg shadow repo'
850 metadata['merge_commit'] = merge_commit_id
850 metadata['merge_commit'] = merge_commit_id
851 else:
851 else:
852 merge_succeeded = True
852 merge_succeeded = True
853 else:
853 else:
854 merge_succeeded = False
854 merge_succeeded = False
855
855
856 return MergeResponse(
856 return MergeResponse(
857 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
857 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
858 metadata=metadata)
858 metadata=metadata)
859
859
860 def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
860 def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
861 config = self.config.copy()
861 config = self.config.copy()
862 if not enable_hooks:
862 if not enable_hooks:
863 config.clear_section('hooks')
863 config.clear_section('hooks')
864 return MercurialRepository(shadow_repository_path, config)
864 return MercurialRepository(shadow_repository_path, config)
865
865
866 def _validate_pull_reference(self, reference):
866 def _validate_pull_reference(self, reference):
867 if not (reference.name in self.bookmarks or
867 if not (reference.name in self.bookmarks or
868 reference.name in self.branches or
868 reference.name in self.branches or
869 self.get_commit(reference.commit_id)):
869 self.get_commit(reference.commit_id)):
870 raise CommitDoesNotExistError(
870 raise CommitDoesNotExistError(
871 'Unknown branch, bookmark or commit id')
871 'Unknown branch, bookmark or commit id')
872
872
873 def _local_pull(self, repository_path, reference):
873 def _local_pull(self, repository_path, reference):
874 """
874 """
875 Fetch a branch, bookmark or commit from a local repository.
875 Fetch a branch, bookmark or commit from a local repository.
876 """
876 """
877 repository_path = os.path.abspath(repository_path)
877 repository_path = os.path.abspath(repository_path)
878 if repository_path == self.path:
878 if repository_path == self.path:
879 raise ValueError('Cannot pull from the same repository')
879 raise ValueError('Cannot pull from the same repository')
880
880
881 reference_type_to_option_name = {
881 reference_type_to_option_name = {
882 'book': 'bookmark',
882 'book': 'bookmark',
883 'branch': 'branch',
883 'branch': 'branch',
884 }
884 }
885 option_name = reference_type_to_option_name.get(
885 option_name = reference_type_to_option_name.get(
886 reference.type, 'revision')
886 reference.type, 'revision')
887
887
888 if option_name == 'revision':
888 if option_name == 'revision':
889 ref = reference.commit_id
889 ref = reference.commit_id
890 else:
890 else:
891 ref = reference.name
891 ref = reference.name
892
892
893 options = {option_name: [ref]}
893 options = {option_name: [ref]}
894 self._remote.pull_cmd(repository_path, hooks=False, **options)
894 self._remote.pull_cmd(repository_path, hooks=False, **options)
895 self._remote.invalidate_vcs_cache()
895 self._remote.invalidate_vcs_cache()
896
896
897 def bookmark(self, bookmark, revision=None):
897 def bookmark(self, bookmark, revision=None):
898 if isinstance(bookmark, unicode):
898 if isinstance(bookmark, unicode):
899 bookmark = safe_str(bookmark)
899 bookmark = safe_str(bookmark)
900 self._remote.bookmark(bookmark, revision=revision)
900 self._remote.bookmark(bookmark, revision=revision)
901 self._remote.invalidate_vcs_cache()
901 self._remote.invalidate_vcs_cache()
902
902
    def get_path_permissions(self, username):
        """
        Build a path permission checker for `username` from the optional
        ``.hg/hgacl`` file of this repository.

        Returns a checker created via
        ``BasePathPermissionChecker.create_from_patterns`` from the include
        and exclude patterns found in the file, or ``None`` when no ACL
        file exists.

        :raises exceptions.RepositoryRequirementError: when the ACL file is
            present but cannot be read or parsed.
        """
        hgacl_file = os.path.join(self.path, '.hg/hgacl')

        def read_patterns(suffix):
            # Resolve the pattern value for ``suffix`` ('.includes' or
            # '.excludes'), preferring user-specific entries over 'default'
            # ones and the 'narrowacl' section over 'narrowhgacl'.
            svalue = None
            for section, option in [
                    ('narrowacl', username + suffix),
                    ('narrowacl', 'default' + suffix),
                    ('narrowhgacl', username + suffix),
                    ('narrowhgacl', 'default' + suffix)
            ]:
                try:
                    svalue = hgacl.get(section, option)
                    break  # stop at the first value we find
                except configparser.NoOptionError:
                    pass
            if not svalue:
                return None
            # '/' is always part of the resulting pattern list
            result = ['/']
            for pattern in svalue.split():
                result.append(pattern)
                # a literal (non-glob) pattern also grants its whole subtree
                if '*' not in pattern and '?' not in pattern:
                    result.append(pattern + '/*')
            return result

        if os.path.exists(hgacl_file):
            try:
                hgacl = configparser.RawConfigParser()
                hgacl.read(hgacl_file)

                includes = read_patterns('.includes')
                excludes = read_patterns('.excludes')
                return BasePathPermissionChecker.create_from_patterns(
                    includes, excludes)
            except BaseException as e:
                # NOTE(review): deliberately broad — any parsing failure
                # (including a missing section) is turned into a hard
                # RepositoryRequirementError rather than being ignored.
                msg = 'Cannot read ACL settings from {} on {}: {}'.format(
                    hgacl_file, self.name, e)
                raise exceptions.RepositoryRequirementError(msg)
        else:
            # no ACL file -> no path-level restrictions
            return None
943
943
944
944
class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
    """
    Commit collection generator that resolves commits by repository index
    rather than by raw commit id.
    """

    def _commit_factory(self, commit_id):
        """Return the commit at index ``commit_id`` of the wrapped repo."""
        # despite the name, ``commit_id`` is a numeric index here
        idx = commit_id
        return self.repo.get_commit(commit_idx=idx, pre_load=self.pre_load)
General Comments 0
You need to be logged in to leave comments. Login now