##// END OF EJS Templates
maintenance: run rebuildfncache for Mercurial.
marcink -
r4214:9425ab17 stable
parent child Browse files
Show More
@@ -1,184 +1,193 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2017-2019 RhodeCode GmbH
3 # Copyright (C) 2017-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 import logging
20 import logging
21
21
22 log = logging.getLogger(__name__)
22 log = logging.getLogger(__name__)
23
23
24
24
25 class MaintenanceTask(object):
25 class MaintenanceTask(object):
26 human_name = 'undefined'
26 human_name = 'undefined'
27
27
28 def __init__(self, db_repo):
28 def __init__(self, db_repo):
29 self.db_repo = db_repo
29 self.db_repo = db_repo
30
30
31 def run(self):
31 def run(self):
32 """Execute task and return task human value"""
32 """Execute task and return task human value"""
33 raise NotImplementedError()
33 raise NotImplementedError()
34
34
35
35
36 class GitGC(MaintenanceTask):
36 class GitGC(MaintenanceTask):
37 human_name = 'GIT Garbage collect'
37 human_name = 'GIT Garbage collect'
38
38
39 def _count_objects(self, repo):
39 def _count_objects(self, repo):
40 stdout, stderr = repo.run_git_command(
40 stdout, stderr = repo.run_git_command(
41 ['count-objects', '-v'], fail_on_stderr=False)
41 ['count-objects', '-v'], fail_on_stderr=False)
42
42
43 errors = ' '
43 errors = ' '
44 objects = ' '.join(stdout.splitlines())
44 objects = ' '.join(stdout.splitlines())
45
45
46 if stderr:
46 if stderr:
47 errors = '\nSTD ERR:' + '\n'.join(stderr.splitlines())
47 errors = '\nSTD ERR:' + '\n'.join(stderr.splitlines())
48 return objects + errors
48 return objects + errors
49
49
50 def run(self):
50 def run(self):
51 output = []
51 output = []
52 instance = self.db_repo.scm_instance()
52 instance = self.db_repo.scm_instance()
53
53
54 objects_before = self._count_objects(instance)
54 objects_before = self._count_objects(instance)
55
55
56 log.debug('GIT objects:%s', objects_before)
56 log.debug('GIT objects:%s', objects_before)
57 cmd = ['gc', '--aggressive']
57 cmd = ['gc', '--aggressive']
58 stdout, stderr = instance.run_git_command(cmd, fail_on_stderr=False)
58 stdout, stderr = instance.run_git_command(cmd, fail_on_stderr=False)
59
59
60 out = 'executed {}'.format(' '.join(cmd))
60 out = 'executed {}'.format(' '.join(cmd))
61 output.append(out)
61 output.append(out)
62
62
63 out = ''
63 out = ''
64 if stderr:
64 if stderr:
65 out += ''.join(stderr.splitlines())
65 out += ''.join(stderr.splitlines())
66
66
67 if stdout:
67 if stdout:
68 out += ''.join(stdout.splitlines())
68 out += ''.join(stdout.splitlines())
69
69
70 if out:
70 if out:
71 output.append(out)
71 output.append(out)
72
72
73 objects_after = self._count_objects(instance)
73 objects_after = self._count_objects(instance)
74 log.debug('GIT objects:%s', objects_after)
74 log.debug('GIT objects:%s', objects_after)
75 output.append('objects before :' + objects_before)
75 output.append('objects before :' + objects_before)
76 output.append('objects after :' + objects_after)
76 output.append('objects after :' + objects_after)
77
77
78 return '\n'.join(output)
78 return '\n'.join(output)
79
79
80
80
81 class GitFSCK(MaintenanceTask):
81 class GitFSCK(MaintenanceTask):
82 human_name = 'GIT FSCK'
82 human_name = 'GIT FSCK'
83
83
84 def run(self):
84 def run(self):
85 output = []
85 output = []
86 instance = self.db_repo.scm_instance()
86 instance = self.db_repo.scm_instance()
87
87
88 cmd = ['fsck', '--full']
88 cmd = ['fsck', '--full']
89 stdout, stderr = instance.run_git_command(cmd, fail_on_stderr=False)
89 stdout, stderr = instance.run_git_command(cmd, fail_on_stderr=False)
90
90
91 out = 'executed {}'.format(' '.join(cmd))
91 out = 'executed {}'.format(' '.join(cmd))
92 output.append(out)
92 output.append(out)
93
93
94 out = ''
94 out = ''
95 if stderr:
95 if stderr:
96 out += ''.join(stderr.splitlines())
96 out += ''.join(stderr.splitlines())
97
97
98 if stdout:
98 if stdout:
99 out += ''.join(stdout.splitlines())
99 out += ''.join(stdout.splitlines())
100
100
101 if out:
101 if out:
102 output.append(out)
102 output.append(out)
103
103
104 return '\n'.join(output)
104 return '\n'.join(output)
105
105
106
106
107 class GitRepack(MaintenanceTask):
107 class GitRepack(MaintenanceTask):
108 human_name = 'GIT Repack'
108 human_name = 'GIT Repack'
109
109
110 def run(self):
110 def run(self):
111 output = []
111 output = []
112 instance = self.db_repo.scm_instance()
112 instance = self.db_repo.scm_instance()
113 cmd = ['repack', '-a', '-d',
113 cmd = ['repack', '-a', '-d',
114 '--window-memory', '10m', '--max-pack-size', '100m']
114 '--window-memory', '10m', '--max-pack-size', '100m']
115 stdout, stderr = instance.run_git_command(cmd, fail_on_stderr=False)
115 stdout, stderr = instance.run_git_command(cmd, fail_on_stderr=False)
116
116
117 out = 'executed {}'.format(' '.join(cmd))
117 out = 'executed {}'.format(' '.join(cmd))
118 output.append(out)
118 output.append(out)
119 out = ''
119 out = ''
120
120
121 if stderr:
121 if stderr:
122 out += ''.join(stderr.splitlines())
122 out += ''.join(stderr.splitlines())
123
123
124 if stdout:
124 if stdout:
125 out += ''.join(stdout.splitlines())
125 out += ''.join(stdout.splitlines())
126
126
127 if out:
127 if out:
128 output.append(out)
128 output.append(out)
129
129
130 return '\n'.join(output)
130 return '\n'.join(output)
131
131
132
132
133 class HGVerify(MaintenanceTask):
133 class HGVerify(MaintenanceTask):
134 human_name = 'HG Verify repo'
134 human_name = 'HG Verify repo'
135
135
136 def run(self):
136 def run(self):
137 instance = self.db_repo.scm_instance()
137 instance = self.db_repo.scm_instance()
138 res = instance.verify()
138 res = instance.verify()
139 return res
139 return res
140
140
141
141
142 class HGUpdateCaches(MaintenanceTask):
142 class HGUpdateCaches(MaintenanceTask):
143 human_name = 'HG update caches'
143 human_name = 'HG update caches'
144
144
145 def run(self):
145 def run(self):
146 instance = self.db_repo.scm_instance()
146 instance = self.db_repo.scm_instance()
147 res = instance.hg_update_cache()
147 res = instance.hg_update_cache()
148 return res
148 return res
149
149
150
150
151 class HGRebuildFnCaches(MaintenanceTask):
152 human_name = 'HG rebuild fn caches'
153
154 def run(self):
155 instance = self.db_repo.scm_instance()
156 res = instance.hg_rebuild_fn_cache()
157 return res
158
159
151 class SVNVerify(MaintenanceTask):
160 class SVNVerify(MaintenanceTask):
152 human_name = 'SVN Verify repo'
161 human_name = 'SVN Verify repo'
153
162
154 def run(self):
163 def run(self):
155 instance = self.db_repo.scm_instance()
164 instance = self.db_repo.scm_instance()
156 res = instance.verify()
165 res = instance.verify()
157 return res
166 return res
158
167
159
168
160 class RepoMaintenance(object):
169 class RepoMaintenance(object):
161 """
170 """
162 Performs maintenance of repository based on its type
171 Performs maintenance of repository based on its type
163 """
172 """
164 tasks = {
173 tasks = {
165 'hg': [HGVerify, HGUpdateCaches],
174 'hg': [HGVerify, HGUpdateCaches, HGRebuildFnCaches],
166 'git': [GitFSCK, GitGC, GitRepack],
175 'git': [GitFSCK, GitGC, GitRepack],
167 'svn': [SVNVerify],
176 'svn': [SVNVerify],
168 }
177 }
169
178
170 def get_tasks_for_repo(self, db_repo):
179 def get_tasks_for_repo(self, db_repo):
171 """
180 """
172 fetches human names of tasks pending for execution for given type of repo
181 fetches human names of tasks pending for execution for given type of repo
173 """
182 """
174 tasks = []
183 tasks = []
175 for task in self.tasks[db_repo.repo_type]:
184 for task in self.tasks[db_repo.repo_type]:
176 tasks.append(task.human_name)
185 tasks.append(task.human_name)
177 return tasks
186 return tasks
178
187
179 def execute(self, db_repo):
188 def execute(self, db_repo):
180 executed_tasks = []
189 executed_tasks = []
181 for task in self.tasks[db_repo.repo_type]:
190 for task in self.tasks[db_repo.repo_type]:
182 output = task.human_name + ':\n' + task(db_repo).run() + '\n--\n'
191 output = task.human_name + ':\n' + task(db_repo).run() + '\n--\n'
183 executed_tasks.append(output)
192 executed_tasks.append(output)
184 return executed_tasks
193 return executed_tasks
@@ -1,972 +1,978 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 HG repository module
22 HG repository module
23 """
23 """
24 import os
24 import os
25 import logging
25 import logging
26 import binascii
26 import binascii
27 import urllib
27 import urllib
28
28
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from rhodecode.lib.compat import OrderedDict
31 from rhodecode.lib.compat import OrderedDict
32 from rhodecode.lib.datelib import (
32 from rhodecode.lib.datelib import (
33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 from rhodecode.lib.utils import safe_unicode, safe_str
34 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.utils2 import CachedProperty
35 from rhodecode.lib.utils2 import CachedProperty
36 from rhodecode.lib.vcs import connection, exceptions
36 from rhodecode.lib.vcs import connection, exceptions
37 from rhodecode.lib.vcs.backends.base import (
37 from rhodecode.lib.vcs.backends.base import (
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 MergeFailureReason, Reference, BasePathPermissionChecker)
39 MergeFailureReason, Reference, BasePathPermissionChecker)
40 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
40 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
41 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
41 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
42 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
42 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
43 from rhodecode.lib.vcs.exceptions import (
43 from rhodecode.lib.vcs.exceptions import (
44 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
44 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
45 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
45 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
46 from rhodecode.lib.vcs.compat import configparser
46 from rhodecode.lib.vcs.compat import configparser
47
47
48 hexlify = binascii.hexlify
48 hexlify = binascii.hexlify
49 nullid = "\0" * 20
49 nullid = "\0" * 20
50
50
51 log = logging.getLogger(__name__)
51 log = logging.getLogger(__name__)
52
52
53
53
54 class MercurialRepository(BaseRepository):
54 class MercurialRepository(BaseRepository):
55 """
55 """
56 Mercurial repository backend
56 Mercurial repository backend
57 """
57 """
58 DEFAULT_BRANCH_NAME = 'default'
58 DEFAULT_BRANCH_NAME = 'default'
59
59
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 do_workspace_checkout=False, with_wire=None, bare=False):
61 do_workspace_checkout=False, with_wire=None, bare=False):
62 """
62 """
63 Raises RepositoryError if repository could not be found at the given
63 Raises RepositoryError if repository could not be found at the given
64 ``repo_path``.
64 ``repo_path``.
65
65
66 :param repo_path: local path of the repository
66 :param repo_path: local path of the repository
67 :param config: config object containing the repo configuration
67 :param config: config object containing the repo configuration
68 :param create=False: if set to True, would try to create repository if
68 :param create=False: if set to True, would try to create repository if
69 it does not exist rather than raising exception
69 it does not exist rather than raising exception
70 :param src_url=None: would try to clone repository from given location
70 :param src_url=None: would try to clone repository from given location
71 :param do_workspace_checkout=False: sets update of working copy after
71 :param do_workspace_checkout=False: sets update of working copy after
72 making a clone
72 making a clone
73 :param bare: not used, compatible with other VCS
73 :param bare: not used, compatible with other VCS
74 """
74 """
75
75
76 self.path = safe_str(os.path.abspath(repo_path))
76 self.path = safe_str(os.path.abspath(repo_path))
77 # mercurial since 4.4.X requires certain configuration to be present
77 # mercurial since 4.4.X requires certain configuration to be present
78 # because sometimes we init the repos with config we need to meet
78 # because sometimes we init the repos with config we need to meet
79 # special requirements
79 # special requirements
80 self.config = config if config else self.get_default_config(
80 self.config = config if config else self.get_default_config(
81 default=[('extensions', 'largefiles', '1')])
81 default=[('extensions', 'largefiles', '1')])
82 self.with_wire = with_wire or {"cache": False} # default should not use cache
82 self.with_wire = with_wire or {"cache": False} # default should not use cache
83
83
84 self._init_repo(create, src_url, do_workspace_checkout)
84 self._init_repo(create, src_url, do_workspace_checkout)
85
85
86 # caches
86 # caches
87 self._commit_ids = {}
87 self._commit_ids = {}
88
88
89 @LazyProperty
89 @LazyProperty
90 def _remote(self):
90 def _remote(self):
91 repo_id = self.path
91 repo_id = self.path
92 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
92 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
93
93
94 @CachedProperty
94 @CachedProperty
95 def commit_ids(self):
95 def commit_ids(self):
96 """
96 """
97 Returns list of commit ids, in ascending order. Being lazy
97 Returns list of commit ids, in ascending order. Being lazy
98 attribute allows external tools to inject shas from cache.
98 attribute allows external tools to inject shas from cache.
99 """
99 """
100 commit_ids = self._get_all_commit_ids()
100 commit_ids = self._get_all_commit_ids()
101 self._rebuild_cache(commit_ids)
101 self._rebuild_cache(commit_ids)
102 return commit_ids
102 return commit_ids
103
103
104 def _rebuild_cache(self, commit_ids):
104 def _rebuild_cache(self, commit_ids):
105 self._commit_ids = dict((commit_id, index)
105 self._commit_ids = dict((commit_id, index)
106 for index, commit_id in enumerate(commit_ids))
106 for index, commit_id in enumerate(commit_ids))
107
107
108 @CachedProperty
108 @CachedProperty
109 def branches(self):
109 def branches(self):
110 return self._get_branches()
110 return self._get_branches()
111
111
112 @CachedProperty
112 @CachedProperty
113 def branches_closed(self):
113 def branches_closed(self):
114 return self._get_branches(active=False, closed=True)
114 return self._get_branches(active=False, closed=True)
115
115
116 @CachedProperty
116 @CachedProperty
117 def branches_all(self):
117 def branches_all(self):
118 all_branches = {}
118 all_branches = {}
119 all_branches.update(self.branches)
119 all_branches.update(self.branches)
120 all_branches.update(self.branches_closed)
120 all_branches.update(self.branches_closed)
121 return all_branches
121 return all_branches
122
122
123 def _get_branches(self, active=True, closed=False):
123 def _get_branches(self, active=True, closed=False):
124 """
124 """
125 Gets branches for this repository
125 Gets branches for this repository
126 Returns only not closed active branches by default
126 Returns only not closed active branches by default
127
127
128 :param active: return also active branches
128 :param active: return also active branches
129 :param closed: return also closed branches
129 :param closed: return also closed branches
130
130
131 """
131 """
132 if self.is_empty():
132 if self.is_empty():
133 return {}
133 return {}
134
134
135 def get_name(ctx):
135 def get_name(ctx):
136 return ctx[0]
136 return ctx[0]
137
137
138 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
138 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
139 self._remote.branches(active, closed).items()]
139 self._remote.branches(active, closed).items()]
140
140
141 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
141 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
142
142
143 @CachedProperty
143 @CachedProperty
144 def tags(self):
144 def tags(self):
145 """
145 """
146 Gets tags for this repository
146 Gets tags for this repository
147 """
147 """
148 return self._get_tags()
148 return self._get_tags()
149
149
150 def _get_tags(self):
150 def _get_tags(self):
151 if self.is_empty():
151 if self.is_empty():
152 return {}
152 return {}
153
153
154 def get_name(ctx):
154 def get_name(ctx):
155 return ctx[0]
155 return ctx[0]
156
156
157 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
157 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
158 self._remote.tags().items()]
158 self._remote.tags().items()]
159
159
160 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
160 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
161
161
162 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
162 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
163 """
163 """
164 Creates and returns a tag for the given ``commit_id``.
164 Creates and returns a tag for the given ``commit_id``.
165
165
166 :param name: name for new tag
166 :param name: name for new tag
167 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
167 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
168 :param commit_id: commit id for which new tag would be created
168 :param commit_id: commit id for which new tag would be created
169 :param message: message of the tag's commit
169 :param message: message of the tag's commit
170 :param date: date of tag's commit
170 :param date: date of tag's commit
171
171
172 :raises TagAlreadyExistError: if tag with same name already exists
172 :raises TagAlreadyExistError: if tag with same name already exists
173 """
173 """
174 if name in self.tags:
174 if name in self.tags:
175 raise TagAlreadyExistError("Tag %s already exists" % name)
175 raise TagAlreadyExistError("Tag %s already exists" % name)
176
176
177 commit = self.get_commit(commit_id=commit_id)
177 commit = self.get_commit(commit_id=commit_id)
178 local = kwargs.setdefault('local', False)
178 local = kwargs.setdefault('local', False)
179
179
180 if message is None:
180 if message is None:
181 message = "Added tag %s for commit %s" % (name, commit.short_id)
181 message = "Added tag %s for commit %s" % (name, commit.short_id)
182
182
183 date, tz = date_to_timestamp_plus_offset(date)
183 date, tz = date_to_timestamp_plus_offset(date)
184
184
185 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
185 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
186 self._remote.invalidate_vcs_cache()
186 self._remote.invalidate_vcs_cache()
187
187
188 # Reinitialize tags
188 # Reinitialize tags
189 self._invalidate_prop_cache('tags')
189 self._invalidate_prop_cache('tags')
190 tag_id = self.tags[name]
190 tag_id = self.tags[name]
191
191
192 return self.get_commit(commit_id=tag_id)
192 return self.get_commit(commit_id=tag_id)
193
193
194 def remove_tag(self, name, user, message=None, date=None):
194 def remove_tag(self, name, user, message=None, date=None):
195 """
195 """
196 Removes tag with the given `name`.
196 Removes tag with the given `name`.
197
197
198 :param name: name of the tag to be removed
198 :param name: name of the tag to be removed
199 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
199 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
200 :param message: message of the tag's removal commit
200 :param message: message of the tag's removal commit
201 :param date: date of tag's removal commit
201 :param date: date of tag's removal commit
202
202
203 :raises TagDoesNotExistError: if tag with given name does not exists
203 :raises TagDoesNotExistError: if tag with given name does not exists
204 """
204 """
205 if name not in self.tags:
205 if name not in self.tags:
206 raise TagDoesNotExistError("Tag %s does not exist" % name)
206 raise TagDoesNotExistError("Tag %s does not exist" % name)
207
207
208 if message is None:
208 if message is None:
209 message = "Removed tag %s" % name
209 message = "Removed tag %s" % name
210 local = False
210 local = False
211
211
212 date, tz = date_to_timestamp_plus_offset(date)
212 date, tz = date_to_timestamp_plus_offset(date)
213
213
214 self._remote.tag(name, nullid, message, local, user, date, tz)
214 self._remote.tag(name, nullid, message, local, user, date, tz)
215 self._remote.invalidate_vcs_cache()
215 self._remote.invalidate_vcs_cache()
216 self._invalidate_prop_cache('tags')
216 self._invalidate_prop_cache('tags')
217
217
218 @LazyProperty
218 @LazyProperty
219 def bookmarks(self):
219 def bookmarks(self):
220 """
220 """
221 Gets bookmarks for this repository
221 Gets bookmarks for this repository
222 """
222 """
223 return self._get_bookmarks()
223 return self._get_bookmarks()
224
224
225 def _get_bookmarks(self):
225 def _get_bookmarks(self):
226 if self.is_empty():
226 if self.is_empty():
227 return {}
227 return {}
228
228
229 def get_name(ctx):
229 def get_name(ctx):
230 return ctx[0]
230 return ctx[0]
231
231
232 _bookmarks = [
232 _bookmarks = [
233 (safe_unicode(n), hexlify(h)) for n, h in
233 (safe_unicode(n), hexlify(h)) for n, h in
234 self._remote.bookmarks().items()]
234 self._remote.bookmarks().items()]
235
235
236 return OrderedDict(sorted(_bookmarks, key=get_name))
236 return OrderedDict(sorted(_bookmarks, key=get_name))
237
237
238 def _get_all_commit_ids(self):
238 def _get_all_commit_ids(self):
239 return self._remote.get_all_commit_ids('visible')
239 return self._remote.get_all_commit_ids('visible')
240
240
241 def get_diff(
241 def get_diff(
242 self, commit1, commit2, path='', ignore_whitespace=False,
242 self, commit1, commit2, path='', ignore_whitespace=False,
243 context=3, path1=None):
243 context=3, path1=None):
244 """
244 """
245 Returns (git like) *diff*, as plain text. Shows changes introduced by
245 Returns (git like) *diff*, as plain text. Shows changes introduced by
246 `commit2` since `commit1`.
246 `commit2` since `commit1`.
247
247
248 :param commit1: Entry point from which diff is shown. Can be
248 :param commit1: Entry point from which diff is shown. Can be
249 ``self.EMPTY_COMMIT`` - in this case, patch showing all
249 ``self.EMPTY_COMMIT`` - in this case, patch showing all
250 the changes since empty state of the repository until `commit2`
250 the changes since empty state of the repository until `commit2`
251 :param commit2: Until which commit changes should be shown.
251 :param commit2: Until which commit changes should be shown.
252 :param ignore_whitespace: If set to ``True``, would not show whitespace
252 :param ignore_whitespace: If set to ``True``, would not show whitespace
253 changes. Defaults to ``False``.
253 changes. Defaults to ``False``.
254 :param context: How many lines before/after changed lines should be
254 :param context: How many lines before/after changed lines should be
255 shown. Defaults to ``3``.
255 shown. Defaults to ``3``.
256 """
256 """
257 self._validate_diff_commits(commit1, commit2)
257 self._validate_diff_commits(commit1, commit2)
258 if path1 is not None and path1 != path:
258 if path1 is not None and path1 != path:
259 raise ValueError("Diff of two different paths not supported.")
259 raise ValueError("Diff of two different paths not supported.")
260
260
261 if path:
261 if path:
262 file_filter = [self.path, path]
262 file_filter = [self.path, path]
263 else:
263 else:
264 file_filter = None
264 file_filter = None
265
265
266 diff = self._remote.diff(
266 diff = self._remote.diff(
267 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
267 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
268 opt_git=True, opt_ignorews=ignore_whitespace,
268 opt_git=True, opt_ignorews=ignore_whitespace,
269 context=context)
269 context=context)
270 return MercurialDiff(diff)
270 return MercurialDiff(diff)
271
271
272 def strip(self, commit_id, branch=None):
272 def strip(self, commit_id, branch=None):
273 self._remote.strip(commit_id, update=False, backup="none")
273 self._remote.strip(commit_id, update=False, backup="none")
274
274
275 self._remote.invalidate_vcs_cache()
275 self._remote.invalidate_vcs_cache()
276 # clear cache
276 # clear cache
277 self._invalidate_prop_cache('commit_ids')
277 self._invalidate_prop_cache('commit_ids')
278
278
279 return len(self.commit_ids)
279 return len(self.commit_ids)
280
280
281 def verify(self):
281 def verify(self):
282 verify = self._remote.verify()
282 verify = self._remote.verify()
283
283
284 self._remote.invalidate_vcs_cache()
284 self._remote.invalidate_vcs_cache()
285 return verify
285 return verify
286
286
287 def hg_update_cache(self):
287 def hg_update_cache(self):
288 update_cache = self._remote.hg_update_cache()
288 update_cache = self._remote.hg_update_cache()
289
289
290 self._remote.invalidate_vcs_cache()
290 self._remote.invalidate_vcs_cache()
291 return update_cache
291 return update_cache
292
292
293 def hg_rebuild_fn_cache(self):
294 update_cache = self._remote.hg_rebuild_fn_cache()
295
296 self._remote.invalidate_vcs_cache()
297 return update_cache
298
293 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
299 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
294 if commit_id1 == commit_id2:
300 if commit_id1 == commit_id2:
295 return commit_id1
301 return commit_id1
296
302
297 ancestors = self._remote.revs_from_revspec(
303 ancestors = self._remote.revs_from_revspec(
298 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
304 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
299 other_path=repo2.path)
305 other_path=repo2.path)
300 return repo2[ancestors[0]].raw_id if ancestors else None
306 return repo2[ancestors[0]].raw_id if ancestors else None
301
307
302 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
308 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
303 if commit_id1 == commit_id2:
309 if commit_id1 == commit_id2:
304 commits = []
310 commits = []
305 else:
311 else:
306 if merge:
312 if merge:
307 indexes = self._remote.revs_from_revspec(
313 indexes = self._remote.revs_from_revspec(
308 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
314 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
309 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
315 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
310 else:
316 else:
311 indexes = self._remote.revs_from_revspec(
317 indexes = self._remote.revs_from_revspec(
312 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
318 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
313 commit_id1, other_path=repo2.path)
319 commit_id1, other_path=repo2.path)
314
320
315 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
321 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
316 for idx in indexes]
322 for idx in indexes]
317
323
318 return commits
324 return commits
319
325
320 @staticmethod
326 @staticmethod
321 def check_url(url, config):
327 def check_url(url, config):
322 """
328 """
323 Function will check given url and try to verify if it's a valid
329 Function will check given url and try to verify if it's a valid
324 link. Sometimes it may happen that mercurial will issue basic
330 link. Sometimes it may happen that mercurial will issue basic
325 auth request that can cause whole API to hang when used from python
331 auth request that can cause whole API to hang when used from python
326 or other external calls.
332 or other external calls.
327
333
328 On failures it'll raise urllib2.HTTPError, exception is also thrown
334 On failures it'll raise urllib2.HTTPError, exception is also thrown
329 when the return code is non 200
335 when the return code is non 200
330 """
336 """
331 # check first if it's not an local url
337 # check first if it's not an local url
332 if os.path.isdir(url) or url.startswith('file:'):
338 if os.path.isdir(url) or url.startswith('file:'):
333 return True
339 return True
334
340
335 # Request the _remote to verify the url
341 # Request the _remote to verify the url
336 return connection.Hg.check_url(url, config.serialize())
342 return connection.Hg.check_url(url, config.serialize())
337
343
@staticmethod
def is_valid_repository(path):
    """Return True when `path` contains a ``.hg`` control directory."""
    hg_dir = os.path.join(path, '.hg')
    return os.path.isdir(hg_dir)
341
347
def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
    """
    Function will check for mercurial repository in given path. If there
    is no repository in that path it will raise an exception unless
    `create` parameter is set to True - in that case repository would
    be created.

    If `src_url` is given, would try to clone repository from the
    location at given clone_point. Additionally it'll make update to
    working copy accordingly to `do_workspace_checkout` flag.
    """
    if create and os.path.exists(self.path):
        raise RepositoryError(
            "Cannot create repository at %s, location already exist"
            % self.path)

    if src_url:
        # validate the source before cloning from it
        url = str(self._get_url(src_url))
        MercurialRepository.check_url(url, self.config)
        self._remote.clone(url, self.path, do_workspace_checkout)

        # Don't try to create if we've already cloned repo
        create = False

    if create:
        os.makedirs(self.path, mode=0o755)
        self._remote.localrepository(create)
370
376
@LazyProperty
def in_memory_commit(self):
    """In-memory commit object bound to this repository (cached)."""
    commit = MercurialInMemoryCommit(self)
    return commit
374
380
@LazyProperty
def description(self):
    """Repository description from ``[web] description``, or the default."""
    desc = self._remote.get_config_value(
        'web', 'description', untrusted=True)
    return safe_unicode(desc or self.DEFAULT_DESCRIPTION)
380
386
@LazyProperty
def contact(self):
    """Contact from ``[web] contact``, falling back to ``[ui] username``."""
    contact = self._remote.get_config_value("web", "contact")
    if not contact:
        contact = self._remote.get_config_value("ui", "username")
    return safe_unicode(contact or self.DEFAULT_CONTACT)
387
393
@LazyProperty
def last_change(self):
    """
    Returns last change made on this repository as
    `datetime.datetime` object.
    """
    try:
        return self.get_commit().date
    except RepositoryError:
        # empty repository: fall back to the mtime of the control files
        tz_offset = makedate()[1]
        return utcdate_fromtimestamp(self._get_fs_mtime(), tz_offset)
399
405
def _get_fs_mtime(self):
    """Modification time of the changelog (or store dir) as fs fallback."""
    hg_dir = os.path.join(self.path, '.hg')
    changelog_path = os.path.join(hg_dir, "00changelog.i")
    store_path = os.path.join(hg_dir, "store")
    # prefer the changelog file; fresh repos may only have the store dir
    probe = changelog_path if os.path.exists(changelog_path) else store_path
    return os.stat(probe).st_mtime
408
414
def _get_url(self, url):
    """
    Returns normalized url. If schema is not given, would fall
    to filesystem
    (``file:///``) schema.
    """
    url = url.encode('utf8')
    has_scheme = '://' in url
    # 'default' is a valid hg path alias and must be passed through as-is
    if url != 'default' and not has_scheme:
        url = "file:" + urllib.pathname2url(url)
    return url
419
425
def get_hook_location(self):
    """
    returns absolute path to location where hooks are stored
    """
    # Mercurial keeps hook configuration in the repo-local hgrc file
    return os.path.join(self.path, '.hg', '.hgrc')
425
431
def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
    """
    Returns ``MercurialCommit`` object representing repository's
    commit at the given `commit_id` or `commit_idx`.
    """
    if self.is_empty():
        raise EmptyRepositoryError("There are no commits yet")

    if commit_id is not None:
        self._validate_commit_id(commit_id)
        try:
            # we have cached idx, use it without contacting the remote
            idx = self._commit_ids[commit_id]
            return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
        except KeyError:
            # fall through to a remote lookup below
            pass

    elif commit_idx is not None:
        self._validate_commit_idx(commit_idx)
        try:
            _commit_id = self.commit_ids[commit_idx]
            if commit_idx < 0:
                # normalize negative indexes to their positive position
                commit_idx = self.commit_ids.index(_commit_id)

            return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
        except IndexError:
            # let the remote try to resolve the out-of-range index
            commit_id = commit_idx
    else:
        # no selector given: resolve the repository tip
        commit_id = "tip"

    if isinstance(commit_id, unicode):
        commit_id = safe_str(commit_id)

    try:
        raw_id, idx = self._remote.lookup(commit_id, both=True)
    except CommitDoesNotExistError:
        msg = "Commit {} does not exist for `{}`".format(
            safe_str(commit_id), safe_str(self.name))
        raise CommitDoesNotExistError(msg)

    return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
467
473
def get_commits(
        self, start_id=None, end_id=None, start_date=None, end_date=None,
        branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
    """
    Returns generator of ``MercurialCommit`` objects from start to end
    (both are inclusive)

    :param start_id: None, str(commit_id)
    :param end_id: None, str(commit_id)
    :param start_date: if specified, commits with commit date less than
      ``start_date`` would be filtered out from returned set
    :param end_date: if specified, commits with commit date greater than
      ``end_date`` would be filtered out from returned set
    :param branch_name: if specified, commits not reachable from given
      branch would be filtered out from returned set
    :param show_hidden: Show hidden commits such as obsolete or hidden from
        Mercurial evolve
    :raise BranchDoesNotExistError: If given ``branch_name`` does not
        exist.
    :raise CommitDoesNotExistError: If commit for given ``start`` or
      ``end`` could not be found.
    """
    # actually we should check now if it's not an empty repo
    if self.is_empty():
        raise EmptyRepositoryError("There are no commits yet")
    self._validate_branch_name(branch_name)

    branch_ancestors = False

    # resolve the slice bounds via the cached commit-id -> index mapping
    if start_id is not None:
        self._validate_commit_id(start_id)
        start_commit = self.get_commit(commit_id=start_id)
        start_pos = self._commit_ids[start_commit.raw_id]
    else:
        start_pos = None

    if end_id is not None:
        self._validate_commit_id(end_id)
        end_commit = self.get_commit(commit_id=end_id)
        end_pos = max(0, self._commit_ids[end_commit.raw_id])
    else:
        end_pos = None

    if None not in [start_id, end_id] and start_pos > end_pos:
        raise RepositoryError(
            "Start commit '%s' cannot be after end commit '%s'" %
            (start_id, end_id))

    if end_pos is not None:
        # make the end bound inclusive for slicing
        end_pos += 1

    # build a Mercurial revset expression from the requested filters
    revset_filters = []

    if branch_name and not branch_ancestors:
        revset_filters.append('branch("%s")' % (branch_name,))
    elif branch_name and branch_ancestors:
        revset_filters.append('ancestors(branch("%s"))' % (branch_name,))

    if start_date and end_date:
        revset_filters.append(
            'date(">%s") and date("<%s")' % (start_date, end_date))
    elif start_date:
        revset_filters.append('date(">%s")' % (start_date,))
    elif end_date:
        revset_filters.append('date("<%s")' % (end_date,))

    if not show_hidden:
        revset_filters.append('not obsolete()')
        revset_filters.append('not hidden()')

    # TODO: johbo: Figure out a simpler way for this solution
    collection_generator = CollectionGenerator
    if revset_filters:
        revset = ' and '.join(map(safe_str, revset_filters))
        revisions = self._remote.rev_range([revset])
        collection_generator = MercurialIndexBasedCollectionGenerator
    else:
        revisions = self.commit_ids

    if start_pos or end_pos:
        revisions = revisions[start_pos:end_pos]

    return collection_generator(self, revisions, pre_load=pre_load)
550
556
def pull(self, url, commit_ids=None):
    """
    Pull changes from external location.

    :param commit_ids: Optional. Can be set to a list of commit ids
       which shall be pulled from the other repository.
    """
    normalized_url = self._get_url(url)
    self._remote.pull(normalized_url, commit_ids=commit_ids)
    # the pulled commits invalidate any cached vcs state
    self._remote.invalidate_vcs_cache()
561
567
def fetch(self, url, commit_ids=None):
    """
    Backward compatibility with GIT fetch==pull
    """
    # Mercurial has no separate fetch; delegate straight to pull
    return self.pull(url, commit_ids=commit_ids)
567
573
def push(self, url):
    """Push local changes to the repository at `url` (sync push)."""
    target_url = self._get_url(url)
    self._remote.sync_push(target_url)
571
577
def _local_clone(self, clone_path):
    """
    Create a local clone of the current repo.
    """
    # hooks are disabled: shadow/workspace clones must not fire them
    self._remote.clone(
        self.path, clone_path, update_after_clone=True, hooks=False)
578
584
def _update(self, revision, clean=False):
    """
    Update the working copy to the specified revision.

    :param clean: discard uncommitted changes (``hg update --clean``)
    """
    log.debug('Doing checkout to commit: `%s` for %s', revision, self)
    self._remote.update(revision, clean=clean)
585
591
def _identify(self):
    """
    Return the current state of the working directory.
    """
    ident = self._remote.identify()
    # strip the trailing '+' marker that flags local modifications
    return ident.strip().rstrip('+')
591
597
def _heads(self, branch=None):
    """
    Return the commit ids of the repository heads.

    :param branch: optionally restrict heads to the given branch
    """
    raw_heads = self._remote.heads(branch=branch)
    return raw_heads.strip().split(' ')
597
603
def _ancestor(self, revision1, revision2):
    """
    Return the common ancestor of the two revisions.
    """
    return self._remote.ancestor(revision1, revision2)
603
609
def _local_push(
        self, revision, repository_path, push_branches=False,
        enable_hooks=False):
    """
    Push the given revision to the specified repository.

    :param push_branches: allow to create branches in the target repo.
    :param enable_hooks: run repository hooks on the receiving side
    """
    self._remote.push(
        [revision], repository_path, hooks=enable_hooks,
        push_branches=push_branches)
615
621
def _local_merge(self, target_ref, merge_message, user_name, user_email,
                 source_ref, use_rebase=False, dry_run=False):
    """
    Merge the given source_ref into the checked out revision.

    Returns the commit id of the merge and a boolean indicating if the
    commit needs to be pushed.

    :param target_ref: reference of the merge target; checked out first
    :param merge_message: message for the merge commit (merge mode only)
    :param user_name: author name for the merge commit
    :param user_email: author email for the merge commit
    :param source_ref: reference of the commits being merged in
    :param use_rebase: rebase source onto target instead of creating a
        merge commit
    :param dry_run: kept for interface compatibility; unused here
    :raises UnresolvedFilesInRepo: if the merge/rebase stops on
        conflicting files
    """
    self._update(target_ref.commit_id, clean=True)

    ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
    is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)

    if ancestor == source_ref.commit_id:
        # Nothing to do, the changes were already integrated
        return target_ref.commit_id, False

    elif ancestor == target_ref.commit_id and is_the_same_branch:
        # Fast-forward case: target is a direct ancestor on the same
        # branch. In this case we should force a commit message.
        return source_ref.commit_id, True

    if use_rebase:
        return self._merge_via_rebase(target_ref, source_ref)
    return self._merge_via_merge_commit(
        source_ref, merge_message, user_name, user_email)

def _merge_via_rebase(self, target_ref, source_ref):
    # Rebase source onto target; on failure abort the rebase, restore a
    # clean working copy and re-raise (or raise UnresolvedFilesInRepo).
    unresolved = None
    try:
        bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
                                        target_ref.commit_id)
        self.bookmark(bookmark_name, revision=source_ref.commit_id)
        self._remote.rebase(
            source=source_ref.commit_id, dest=target_ref.commit_id)
        self._remote.invalidate_vcs_cache()
        self._update(bookmark_name, clean=True)
        return self._identify(), True
    except RepositoryError as e:
        # The rebase-abort may raise another exception which 'hides'
        # the original one, therefore we log it here.
        log.exception('Error while rebasing shadow repo during merge.')
        if 'unresolved conflicts' in safe_str(e):
            unresolved = self._remote.get_unresolved_files()
            log.debug('unresolved files: %s', unresolved)

        # Cleanup any rebase leftovers
        self._remote.invalidate_vcs_cache()
        self._remote.rebase(abort=True)
        self._remote.invalidate_vcs_cache()
        self._remote.update(clean=True)
        if unresolved:
            raise UnresolvedFilesInRepo(unresolved)
        else:
            raise

def _merge_via_merge_commit(self, source_ref, merge_message, user_name,
                            user_email):
    # Create an explicit merge commit; on failure restore a clean
    # working copy and re-raise (or raise UnresolvedFilesInRepo).
    unresolved = None
    try:
        self._remote.merge(source_ref.commit_id)
        self._remote.invalidate_vcs_cache()
        self._remote.commit(
            message=safe_str(merge_message),
            username=safe_str('%s <%s>' % (user_name, user_email)))
        self._remote.invalidate_vcs_cache()
        return self._identify(), True
    except RepositoryError as e:
        # The merge-abort may raise another exception which 'hides'
        # the original one, therefore we log it here.
        log.exception('Error while merging shadow repo during merge.')
        if 'unresolved merge conflicts' in safe_str(e):
            unresolved = self._remote.get_unresolved_files()
            log.debug('unresolved files: %s', unresolved)

        # Cleanup any merge leftovers
        self._remote.update(clean=True)
        if unresolved:
            raise UnresolvedFilesInRepo(unresolved)
        else:
            raise
688
694
def _local_close(self, target_ref, user_name, user_email,
                 source_ref, close_message=''):
    """
    Close the branch of the given source_revision

    Returns the commit id of the close and a boolean indicating if the
    commit needs to be pushed.
    """
    self._update(source_ref.commit_id)
    message = close_message or "Closing branch: `{}`".format(source_ref.name)
    username = safe_str('%s <%s>' % (user_name, user_email))
    try:
        self._remote.commit(
            message=safe_str(message),
            username=username,
            close_branch=True)
        self._remote.invalidate_vcs_cache()
        return self._identify(), True
    except RepositoryError:
        # Cleanup any commit leftovers
        self._remote.update(clean=True)
        raise
710
716
def _is_the_same_branch(self, target_ref, source_ref):
    """True when both refs resolve to the same branch name."""
    target_branch = self._get_branch_name(target_ref)
    source_branch = self._get_branch_name(source_ref)
    return target_branch == source_branch
715
721
def _get_branch_name(self, ref):
    """Branch name of `ref`; non-branch refs are resolved via their commit."""
    if ref.type == 'branch':
        return ref.name
    return self._remote.ctx_branch(ref.commit_id)
720
726
def _maybe_prepare_merge_workspace(
        self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
    """Ensure a shadow repository exists for this workspace; return its path."""
    shadow_path = self._get_shadow_repository_path(
        self.path, repo_id, workspace_id)
    if not os.path.exists(shadow_path):
        # first use of this workspace: clone the repo into the shadow dir
        self._local_clone(shadow_path)
        log.debug('Prepared shadow repository in %s', shadow_path)

    return shadow_path
731
737
732 def _merge_repo(self, repo_id, workspace_id, target_ref,
738 def _merge_repo(self, repo_id, workspace_id, target_ref,
733 source_repo, source_ref, merge_message,
739 source_repo, source_ref, merge_message,
734 merger_name, merger_email, dry_run=False,
740 merger_name, merger_email, dry_run=False,
735 use_rebase=False, close_branch=False):
741 use_rebase=False, close_branch=False):
736
742
737 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
743 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
738 'rebase' if use_rebase else 'merge', dry_run)
744 'rebase' if use_rebase else 'merge', dry_run)
739 if target_ref.commit_id not in self._heads():
745 if target_ref.commit_id not in self._heads():
740 return MergeResponse(
746 return MergeResponse(
741 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
747 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
742 metadata={'target_ref': target_ref})
748 metadata={'target_ref': target_ref})
743
749
744 try:
750 try:
745 if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
751 if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
746 heads = '\n,'.join(self._heads(target_ref.name))
752 heads = '\n,'.join(self._heads(target_ref.name))
747 metadata = {
753 metadata = {
748 'target_ref': target_ref,
754 'target_ref': target_ref,
749 'source_ref': source_ref,
755 'source_ref': source_ref,
750 'heads': heads
756 'heads': heads
751 }
757 }
752 return MergeResponse(
758 return MergeResponse(
753 False, False, None,
759 False, False, None,
754 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
760 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
755 metadata=metadata)
761 metadata=metadata)
756 except CommitDoesNotExistError:
762 except CommitDoesNotExistError:
757 log.exception('Failure when looking up branch heads on hg target')
763 log.exception('Failure when looking up branch heads on hg target')
758 return MergeResponse(
764 return MergeResponse(
759 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
765 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
760 metadata={'target_ref': target_ref})
766 metadata={'target_ref': target_ref})
761
767
762 shadow_repository_path = self._maybe_prepare_merge_workspace(
768 shadow_repository_path = self._maybe_prepare_merge_workspace(
763 repo_id, workspace_id, target_ref, source_ref)
769 repo_id, workspace_id, target_ref, source_ref)
764 shadow_repo = self.get_shadow_instance(shadow_repository_path)
770 shadow_repo = self.get_shadow_instance(shadow_repository_path)
765
771
766 log.debug('Pulling in target reference %s', target_ref)
772 log.debug('Pulling in target reference %s', target_ref)
767 self._validate_pull_reference(target_ref)
773 self._validate_pull_reference(target_ref)
768 shadow_repo._local_pull(self.path, target_ref)
774 shadow_repo._local_pull(self.path, target_ref)
769
775
770 try:
776 try:
771 log.debug('Pulling in source reference %s', source_ref)
777 log.debug('Pulling in source reference %s', source_ref)
772 source_repo._validate_pull_reference(source_ref)
778 source_repo._validate_pull_reference(source_ref)
773 shadow_repo._local_pull(source_repo.path, source_ref)
779 shadow_repo._local_pull(source_repo.path, source_ref)
774 except CommitDoesNotExistError:
780 except CommitDoesNotExistError:
775 log.exception('Failure when doing local pull on hg shadow repo')
781 log.exception('Failure when doing local pull on hg shadow repo')
776 return MergeResponse(
782 return MergeResponse(
777 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
783 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
778 metadata={'source_ref': source_ref})
784 metadata={'source_ref': source_ref})
779
785
780 merge_ref = None
786 merge_ref = None
781 merge_commit_id = None
787 merge_commit_id = None
782 close_commit_id = None
788 close_commit_id = None
783 merge_failure_reason = MergeFailureReason.NONE
789 merge_failure_reason = MergeFailureReason.NONE
784 metadata = {}
790 metadata = {}
785
791
786 # enforce that close branch should be used only in case we source from
792 # enforce that close branch should be used only in case we source from
787 # an actual Branch
793 # an actual Branch
788 close_branch = close_branch and source_ref.type == 'branch'
794 close_branch = close_branch and source_ref.type == 'branch'
789
795
790 # don't allow to close branch if source and target are the same
796 # don't allow to close branch if source and target are the same
791 close_branch = close_branch and source_ref.name != target_ref.name
797 close_branch = close_branch and source_ref.name != target_ref.name
792
798
793 needs_push_on_close = False
799 needs_push_on_close = False
794 if close_branch and not use_rebase and not dry_run:
800 if close_branch and not use_rebase and not dry_run:
795 try:
801 try:
796 close_commit_id, needs_push_on_close = shadow_repo._local_close(
802 close_commit_id, needs_push_on_close = shadow_repo._local_close(
797 target_ref, merger_name, merger_email, source_ref)
803 target_ref, merger_name, merger_email, source_ref)
798 merge_possible = True
804 merge_possible = True
799 except RepositoryError:
805 except RepositoryError:
800 log.exception('Failure when doing close branch on '
806 log.exception('Failure when doing close branch on '
801 'shadow repo: %s', shadow_repo)
807 'shadow repo: %s', shadow_repo)
802 merge_possible = False
808 merge_possible = False
803 merge_failure_reason = MergeFailureReason.MERGE_FAILED
809 merge_failure_reason = MergeFailureReason.MERGE_FAILED
804 else:
810 else:
805 merge_possible = True
811 merge_possible = True
806
812
807 needs_push = False
813 needs_push = False
808 if merge_possible:
814 if merge_possible:
809 try:
815 try:
810 merge_commit_id, needs_push = shadow_repo._local_merge(
816 merge_commit_id, needs_push = shadow_repo._local_merge(
811 target_ref, merge_message, merger_name, merger_email,
817 target_ref, merge_message, merger_name, merger_email,
812 source_ref, use_rebase=use_rebase, dry_run=dry_run)
818 source_ref, use_rebase=use_rebase, dry_run=dry_run)
813 merge_possible = True
819 merge_possible = True
814
820
815 # read the state of the close action, if it
821 # read the state of the close action, if it
816 # maybe required a push
822 # maybe required a push
817 needs_push = needs_push or needs_push_on_close
823 needs_push = needs_push or needs_push_on_close
818
824
819 # Set a bookmark pointing to the merge commit. This bookmark
825 # Set a bookmark pointing to the merge commit. This bookmark
820 # may be used to easily identify the last successful merge
826 # may be used to easily identify the last successful merge
821 # commit in the shadow repository.
827 # commit in the shadow repository.
822 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
828 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
823 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
829 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
824 except SubrepoMergeError:
830 except SubrepoMergeError:
825 log.exception(
831 log.exception(
826 'Subrepo merge error during local merge on hg shadow repo.')
832 'Subrepo merge error during local merge on hg shadow repo.')
827 merge_possible = False
833 merge_possible = False
828 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
834 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
829 needs_push = False
835 needs_push = False
830 except RepositoryError as e:
836 except RepositoryError as e:
831 log.exception('Failure when doing local merge on hg shadow repo')
837 log.exception('Failure when doing local merge on hg shadow repo')
832 if isinstance(e, UnresolvedFilesInRepo):
838 if isinstance(e, UnresolvedFilesInRepo):
833 metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))
839 metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))
834
840
835 merge_possible = False
841 merge_possible = False
836 merge_failure_reason = MergeFailureReason.MERGE_FAILED
842 merge_failure_reason = MergeFailureReason.MERGE_FAILED
837 needs_push = False
843 needs_push = False
838
844
839 if merge_possible and not dry_run:
845 if merge_possible and not dry_run:
840 if needs_push:
846 if needs_push:
841 # In case the target is a bookmark, update it, so after pushing
847 # In case the target is a bookmark, update it, so after pushing
842 # the bookmarks is also updated in the target.
848 # the bookmarks is also updated in the target.
843 if target_ref.type == 'book':
849 if target_ref.type == 'book':
844 shadow_repo.bookmark(
850 shadow_repo.bookmark(
845 target_ref.name, revision=merge_commit_id)
851 target_ref.name, revision=merge_commit_id)
846 try:
852 try:
847 shadow_repo_with_hooks = self.get_shadow_instance(
853 shadow_repo_with_hooks = self.get_shadow_instance(
848 shadow_repository_path,
854 shadow_repository_path,
849 enable_hooks=True)
855 enable_hooks=True)
850 # This is the actual merge action, we push from shadow
856 # This is the actual merge action, we push from shadow
851 # into origin.
857 # into origin.
852 # Note: the push_branches option will push any new branch
858 # Note: the push_branches option will push any new branch
853 # defined in the source repository to the target. This may
859 # defined in the source repository to the target. This may
854 # be dangerous as branches are permanent in Mercurial.
860 # be dangerous as branches are permanent in Mercurial.
855 # This feature was requested in issue #441.
861 # This feature was requested in issue #441.
856 shadow_repo_with_hooks._local_push(
862 shadow_repo_with_hooks._local_push(
857 merge_commit_id, self.path, push_branches=True,
863 merge_commit_id, self.path, push_branches=True,
858 enable_hooks=True)
864 enable_hooks=True)
859
865
860 # maybe we also need to push the close_commit_id
866 # maybe we also need to push the close_commit_id
861 if close_commit_id:
867 if close_commit_id:
862 shadow_repo_with_hooks._local_push(
868 shadow_repo_with_hooks._local_push(
863 close_commit_id, self.path, push_branches=True,
869 close_commit_id, self.path, push_branches=True,
864 enable_hooks=True)
870 enable_hooks=True)
865 merge_succeeded = True
871 merge_succeeded = True
866 except RepositoryError:
872 except RepositoryError:
867 log.exception(
873 log.exception(
868 'Failure when doing local push from the shadow '
874 'Failure when doing local push from the shadow '
869 'repository to the target repository at %s.', self.path)
875 'repository to the target repository at %s.', self.path)
870 merge_succeeded = False
876 merge_succeeded = False
871 merge_failure_reason = MergeFailureReason.PUSH_FAILED
877 merge_failure_reason = MergeFailureReason.PUSH_FAILED
872 metadata['target'] = 'hg shadow repo'
878 metadata['target'] = 'hg shadow repo'
873 metadata['merge_commit'] = merge_commit_id
879 metadata['merge_commit'] = merge_commit_id
874 else:
880 else:
875 merge_succeeded = True
881 merge_succeeded = True
876 else:
882 else:
877 merge_succeeded = False
883 merge_succeeded = False
878
884
879 return MergeResponse(
885 return MergeResponse(
880 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
886 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
881 metadata=metadata)
887 metadata=metadata)
882
888
def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
    """
    Build a ``MercurialRepository`` pointing at the shadow repository.

    The repository config is copied; unless *enable_hooks* is set, the
    ``hooks`` section is stripped from the copy so operations on the shadow
    repository do not trigger them.
    """
    shadow_config = self.config.copy()
    if not enable_hooks:
        shadow_config.clear_section('hooks')
    return MercurialRepository(
        shadow_repository_path, shadow_config, with_wire={"cache": cache})
888
894
889 def _validate_pull_reference(self, reference):
895 def _validate_pull_reference(self, reference):
890 if not (reference.name in self.bookmarks or
896 if not (reference.name in self.bookmarks or
891 reference.name in self.branches or
897 reference.name in self.branches or
892 self.get_commit(reference.commit_id)):
898 self.get_commit(reference.commit_id)):
893 raise CommitDoesNotExistError(
899 raise CommitDoesNotExistError(
894 'Unknown branch, bookmark or commit id')
900 'Unknown branch, bookmark or commit id')
895
901
896 def _local_pull(self, repository_path, reference):
902 def _local_pull(self, repository_path, reference):
897 """
903 """
898 Fetch a branch, bookmark or commit from a local repository.
904 Fetch a branch, bookmark or commit from a local repository.
899 """
905 """
900 repository_path = os.path.abspath(repository_path)
906 repository_path = os.path.abspath(repository_path)
901 if repository_path == self.path:
907 if repository_path == self.path:
902 raise ValueError('Cannot pull from the same repository')
908 raise ValueError('Cannot pull from the same repository')
903
909
904 reference_type_to_option_name = {
910 reference_type_to_option_name = {
905 'book': 'bookmark',
911 'book': 'bookmark',
906 'branch': 'branch',
912 'branch': 'branch',
907 }
913 }
908 option_name = reference_type_to_option_name.get(
914 option_name = reference_type_to_option_name.get(
909 reference.type, 'revision')
915 reference.type, 'revision')
910
916
911 if option_name == 'revision':
917 if option_name == 'revision':
912 ref = reference.commit_id
918 ref = reference.commit_id
913 else:
919 else:
914 ref = reference.name
920 ref = reference.name
915
921
916 options = {option_name: [ref]}
922 options = {option_name: [ref]}
917 self._remote.pull_cmd(repository_path, hooks=False, **options)
923 self._remote.pull_cmd(repository_path, hooks=False, **options)
918 self._remote.invalidate_vcs_cache()
924 self._remote.invalidate_vcs_cache()
919
925
def bookmark(self, bookmark, revision=None):
    """
    Create or move the given bookmark, optionally pinning it to *revision*.

    Unicode names are first converted with ``safe_str`` before being handed
    to the vcsserver remote; the vcs cache is invalidated afterwards.
    """
    name = safe_str(bookmark) if isinstance(bookmark, unicode) else bookmark
    self._remote.bookmark(name, revision=revision)
    self._remote.invalidate_vcs_cache()
925
931
def get_path_permissions(self, username):
    """
    Build a path permission checker for *username* from ``.hg/hgacl``.

    Returns ``None`` when the repository has no hgacl file.  When the file
    exists but cannot be read or parsed, a ``RepositoryRequirementError``
    is raised instead of silently granting access.
    """
    hgacl_file = os.path.join(self.path, '.hg/hgacl')
    if not os.path.exists(hgacl_file):
        return None

    def _patterns_for(parser, suffix):
        # First matching option wins: user-specific entries take precedence
        # over 'default' ones, and section 'narrowacl' over 'narrowhgacl'.
        raw_value = None
        lookups = [
            ('narrowacl', username + suffix),
            ('narrowacl', 'default' + suffix),
            ('narrowhgacl', username + suffix),
            ('narrowhgacl', 'default' + suffix),
        ]
        for section, option in lookups:
            try:
                raw_value = parser.get(section, option)
            except configparser.NoOptionError:
                continue
            break
        if not raw_value:
            return None
        patterns = ['/']
        for pattern in raw_value.split():
            patterns.append(pattern)
            # A literal path (no glob characters) also covers everything
            # nested below it.
            if '*' not in pattern and '?' not in pattern:
                patterns.append(pattern + '/*')
        return patterns

    try:
        hgacl = configparser.RawConfigParser()
        hgacl.read(hgacl_file)
        includes = _patterns_for(hgacl, '.includes')
        excludes = _patterns_for(hgacl, '.excludes')
        return BasePathPermissionChecker.create_from_patterns(
            includes, excludes)
    except BaseException as e:
        msg = 'Cannot read ACL settings from {} on {}: {}'.format(
            hgacl_file, self.name, e)
        raise exceptions.RepositoryRequirementError(msg)
966
972
967
973
class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
    """Commit collection generator that resolves entries by commit index."""

    def _commit_factory(self, commit_id):
        """Return the commit for the given numeric index, with ``pre_load`` applied."""
        return self.repo.get_commit(
            commit_idx=commit_id, pre_load=self.pre_load)
General Comments 0
You need to be logged in to leave comments. Login now